# Directory Structure
```
├── .env.example
├── .gitignore
├── config.py
├── gitlab_api.py
├── logging_config.py
├── main.py
├── README.md
├── requirements.txt
├── run-mcp.sh
├── test_tools.py
├── tools
│   ├── __init__.py
│   ├── get_branch_merge_requests.py
│   ├── get_commit_discussions.py
│   ├── get_merge_request_details.py
│   ├── get_merge_request_reviews.py
│   ├── list_merge_requests.py
│   └── reply_to_review_comment.py
└── utils.py
```
# Files
--------------------------------------------------------------------------------
/.env.example:
--------------------------------------------------------------------------------
```
1 | # GitLab MCP Server Configuration
2 | # Copy this file to .env and fill in your actual values
3 |
4 | # Required Environment Variables
5 | GITLAB_PROJECT_ID=your-project-id
6 | GITLAB_ACCESS_TOKEN=your-access-token
7 |
8 | # Optional Environment Variables (defaults shown)
9 | GITLAB_URL=https://gitlab.com
10 | SERVER_NAME=gitlab-mcp-server
11 | SERVER_VERSION=1.0.0
12 |
13 | # Instructions:
14 | # 1. Copy this file: cp .env.example .env
15 | # 2. Edit .env with your actual GitLab project ID and access token
16 | # 3. For GitLab.com, you can leave GITLAB_URL as is
17 | # 4. For self-hosted GitLab, update GITLAB_URL to your instance URL
18 |
```
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
```
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 |
27 | # PyInstaller
28 | # Usually these files are written by a python script from a template
29 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
30 | *.manifest
31 | *.spec
32 |
33 | # Installer logs
34 | pip-log.txt
35 | pip-delete-this-directory.txt
36 |
37 | # Unit test / coverage reports
38 | htmlcov/
39 | .tox/
40 | .nox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 |
58 | # Flask stuff:
59 | instance/
60 | .webassets-cache
61 |
62 | # Scrapy stuff:
63 | .scrapy
64 |
65 | # Sphinx documentation
66 | docs/_build/
67 |
68 | # PyBuilder
69 | .target/
70 |
71 | # Jupyter Notebook
72 | .ipynb_checkpoints
73 |
74 | # IPython
75 | profile_default/
76 | ipython_config.py
77 |
78 | # pyenv
79 | .python-version
80 |
81 | # pipenv
82 | Pipfile.lock
83 |
84 | # poetry
85 | poetry.lock
86 |
87 | # mypy
88 | .mypy_cache/
89 | .dmypy.json
90 |
91 | # Pyre type checker
92 | .pyre/
93 |
94 | # pytype static type analyzer
95 | .pytype/
96 |
97 | # Cython debug symbols
98 | cython_debug/
99 |
100 | # VS Code
101 | .vscode/
102 |
103 | # JetBrains IDEs
104 | .idea/
105 |
106 | # Mac OS
107 | .DS_Store
108 |
109 | # Environment variables
110 | .env
111 | .env.*
112 | !.env.example
113 |
114 | # MCP/AI logs
115 | *.log
116 |
117 | # System
118 | Thumbs.db
119 |
```
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
```markdown
1 | # GitLab MCP Server
2 |
3 | Connect your AI assistant to GitLab. Ask questions like _"List open merge requests"_, _"Show me reviews for MR #123"_, _"Get commit discussions for MR #456"_, or _"Find merge requests for the feature branch"_ directly in your chat.
4 |
5 | ## Table of Contents
6 |
7 | - [Quick Setup](#quick-setup)
8 | - [What You Can Do](#what-you-can-do)
9 | - [Configuration Options](#configuration-options)
10 | - [Troubleshooting](#troubleshooting)
11 | - [Tool Reference](#tool-reference)
12 | - [Development](#development)
13 | - [Security Notes](#security-notes)
14 | - [Support](#support)
15 |
16 | ## Quick Setup
17 |
18 | 1. **Install the server:**
19 |
20 | ```bash
21 | git clone https://github.com/amirsina-mandegari/gitlab-mcp-server.git
22 | cd gitlab-mcp-server
23 | python -m venv .venv && source .venv/bin/activate
24 | pip install -r requirements.txt
25 | chmod +x run-mcp.sh
26 | ```
27 |
28 | 2. **Get your GitLab token:**
29 |
30 | - Go to GitLab → Settings → Access Tokens
31 | - Create token with **`read_api`** scope
32 | - Copy the token
33 |
34 | 3. **Configure your project:**
35 | In your project directory, create `gitlab-mcp.env`:
36 |
37 | ```env
38 | GITLAB_PROJECT_ID=12345
39 | GITLAB_ACCESS_TOKEN=glpat-xxxxxxxxxxxxxxxxxxxx
40 | GITLAB_URL=https://gitlab.com
41 | ```
42 |
43 | 4. **Connect to Cursor:**
44 | Create `.cursor/mcp.json` in your project:
45 |
46 | ```json
47 | {
48 | "mcpServers": {
49 | "gitlab-mcp": {
50 | "command": "/path/to/gitlab-mcp-server/run-mcp.sh",
51 | "cwd": "/path/to/your-project"
52 | }
53 | }
54 | }
55 | ```
56 |
57 | 5. **Restart Cursor** and start asking GitLab questions!
58 |
59 | ## What You Can Do
60 |
61 | Once connected, try these commands in your chat:
62 |
63 | - _"List open merge requests"_
64 | - _"Show me details for merge request 456"_
65 | - _"Get reviews and discussions for MR #123"_
66 | - _"Show me commit discussions for MR #456"_
67 | - _"Get all comments on commits in merge request #789"_
68 | - _"Find merge requests for the feature/auth-improvements branch"_
69 | - _"Show me closed merge requests targeting main"_
70 | - _"Reply to discussion abc123 in MR #456 with 'Thanks for the feedback!'"_
71 | - _"Create a new review comment in MR #789 asking about the error handling"_
72 | - _"Resolve discussion def456 in MR #123"_
73 |
74 | ## Working with Review Comments
75 |
76 | The review tools let you interact with merge request discussions:
77 |
78 | 1. **First, get the reviews** to see discussion IDs:
79 |
80 | ```
81 | "Show me reviews for MR #123"
82 | ```
83 |
84 | 2. **Reply to specific discussions** using the discussion ID:
85 |
86 | ```
87 | "Reply to discussion abc123 in MR #456 with 'I'll fix this in the next commit'"
88 | ```
89 |
90 | 3. **Create new discussion threads** to start conversations:
91 |
92 | ```
93 | "Create a review comment in MR #789 asking 'Could you add error handling here?'"
94 | ```
95 |
96 | 4. **Resolve discussions** when issues are addressed:
97 | ```
98 | "Resolve discussion def456 in MR #123"
99 | ```
100 |
101 | **Note**: The `get_merge_request_reviews` tool displays discussion IDs and note IDs in its output, making it easy to reference a specific discussion when replying or resolving.
102 |
103 | ## Working with Commit Discussions
104 |
105 | The `get_commit_discussions` tool shows the discussions and comments attached to individual commits within a merge request:
106 |
107 | 1. **View all commit discussions** for a merge request:
108 |
109 | ```
110 | "Show me commit discussions for MR #123"
111 | ```
112 |
113 | 2. **Get detailed commit conversation history**:
114 |
115 | ```
116 | "Get all comments on commits in merge request #456"
117 | ```
118 |
119 | This tool is particularly useful for:
120 |
121 | - **Code Review Tracking**: See all feedback on specific commits
122 | - **Discussion History**: Understand the evolution of code discussions
123 | - **Commit-Level Context**: View comments tied to specific code changes
124 | - **Review Progress**: Monitor which commits have been discussed
125 |
126 | **Technical Implementation:**
127 |
128 | - Uses `/projects/:project_id/merge_requests/:merge_request_iid/commits` to get all commits with proper pagination
129 | - Fetches ALL merge request discussions using `/projects/:project_id/merge_requests/:merge_request_iid/discussions` with pagination support
130 | - Filters discussions by commit SHA using position data to show commit-specific conversations
131 | - Handles both individual comments and discussion threads correctly
132 |
133 | The output includes:
134 |
135 | - Summary of total commits and discussion counts
136 | - Individual commit details (SHA, title, author, date)
137 | - All discussions and comments for each commit, with file positions for diff-related comments
138 | - Complete conversation threads, including replies
141 |
142 | ## Configuration Options
143 |
144 | ### Project-Level (Recommended)
145 |
146 | Each project gets its own `gitlab-mcp.env` file with its own GitLab configuration. Keep tokens out of version control.
147 |
148 | ### Global Configuration
149 |
150 | Set environment variables system-wide instead of per-project:
151 |
152 | ```bash
153 | export GITLAB_PROJECT_ID=12345
154 | export GITLAB_ACCESS_TOKEN=glpat-xxxxxxxxxxxxxxxxxxxx
155 | export GITLAB_URL=https://gitlab.com
156 | ```
157 |
158 | ### Find Your Project ID
159 |
160 | - Go to your GitLab project → Settings → General → Project ID
161 | - Or use the URL-encoded project path in place of the numeric ID (for `https://gitlab.com/username/project`, that is `username%2Fproject`)
162 |
163 | ## Troubleshooting
164 |
165 | **Authentication Error**: Verify your token has `read_api` permissions and is not expired.
166 |
167 | **Project Not Found**: Double-check your project ID is correct (it's a number, not the project name).
168 |
169 | **Connection Issues**: Make sure your GitLab URL is accessible and correct.
170 |
171 | **Script Not Found**: Ensure the path in your MCP config points to the actual server location and the script is executable.
172 |
173 | ## Tool Reference
174 |
175 | | Tool | Description | Parameters |
176 | | --------------------------- | ---------------------------- | ------------------------------------------------ |
177 | | `list_merge_requests` | List merge requests | `state`, `target_branch`, `limit` |
178 | | `get_merge_request_details` | Get MR details | `merge_request_iid` |
179 | | `get_merge_request_reviews` | Get reviews/discussions | `merge_request_iid` |
180 | | `get_commit_discussions` | Get discussions on commits | `merge_request_iid` |
181 | | `get_branch_merge_requests` | Find MRs for branch | `branch_name` |
182 | | `reply_to_review_comment` | Reply to existing discussion | `merge_request_iid`, `discussion_id`, `body` |
183 | | `create_review_comment` | Create new discussion thread | `merge_request_iid`, `body` |
184 | | `resolve_review_discussion` | Resolve/unresolve discussion | `merge_request_iid`, `discussion_id`, `resolved` |
185 |
186 | ## Development
187 |
188 | ### Project Structure
189 |
190 | ```
191 | gitlab-mcp-server/
192 | ├── main.py # MCP server entry point
193 | ├── config.py # Configuration management
194 | ├── gitlab_api.py # GitLab API client
195 | ├── utils.py # Utility functions
196 | ├── logging_config.py # Logging configuration
197 | ├── run-mcp.sh # Launch script
198 | └── tools/ # Tool implementations package
199 | ├── __init__.py # Package initialization
200 | ├── list_merge_requests.py
201 | ├── get_merge_request_details.py
202 | ├── get_merge_request_reviews.py
203 | ├── get_commit_discussions.py
204 | ├── get_branch_merge_requests.py
205 | └── reply_to_review_comment.py
206 | ```
207 |
208 | ### Adding Tools
209 |
210 | 1. Create new file in `tools/` directory
211 | 2. Add import and export to `tools/__init__.py`
212 | 3. Add to `list_tools()` in `main.py`
213 | 4. Add handler to `call_tool()` in `main.py`
214 |
215 | ### Testing
216 |
217 | ```bash
218 | python test_tools.py
219 | ```
220 |
221 | ## Security Notes
222 |
223 | - Add `gitlab-mcp.env` to your `.gitignore`
224 | - Never commit access tokens
225 | - Use project-specific tokens with minimal permissions
226 | - Rotate tokens regularly
227 |
228 | ## Support
229 |
230 | - Check [GitLab API documentation](https://docs.gitlab.com/ee/api/)
231 | - Open issues at [github.com/amirsina-mandegari/gitlab-mcp-server](https://github.com/amirsina-mandegari/gitlab-mcp-server)
232 |
233 | ## License
234 |
235 | MIT License - see LICENSE file for details.
236 |
```
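
To make the README's "Adding Tools" steps concrete, here is a minimal sketch of a new tool module. The tool name `get_merge_request_title` is hypothetical; it reuses the existing `gitlab_api.get_merge_request_details` helper and follows the same signature and `[TextContent]` return convention as the modules in `tools/`. Wiring it into `list_tools()` and `call_tool()` depends on `main.py`, which is not included in this listing.

```python
# tools/get_merge_request_title.py (hypothetical example)
import logging

from mcp.types import TextContent
from gitlab_api import get_merge_request_details


async def get_merge_request_title(gitlab_url, project_id, access_token, args):
    """Return just the title of a merge request (illustrative only)."""
    mr_iid = args["merge_request_iid"]

    status, data, error = await get_merge_request_details(
        gitlab_url, project_id, access_token, mr_iid
    )
    if status != 200:
        logging.error(f"Error fetching merge request details: {status} - {error}")
        raise Exception(f"Error fetching merge request details: {status} - {error}")

    return [TextContent(type="text", text=f"!{mr_iid}: {data['title']}")]
```

It would then be exported from `tools/__init__.py` (an import plus an `__all__` entry) before being registered in `main.py`.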
--------------------------------------------------------------------------------
/logging_config.py:
--------------------------------------------------------------------------------
```python
1 | import logging
2 |
3 |
4 | def configure_logging():
5 | logging.basicConfig(
6 | level=logging.INFO,
7 | format="%(asctime)s [%(levelname)s] %(message)s"
8 | )
```
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
```
1 | aiohappyeyeballs==2.6.1
2 | aiohttp==3.12.14
3 | aiosignal==1.4.0
4 | annotated-types==0.7.0
5 | anyio==4.9.0
6 | attrs==25.3.0
7 | certifi==2025.7.14
8 | click==8.2.1
9 | frozenlist==1.7.0
10 | h11==0.16.0
11 | httpcore==1.0.9
12 | httpx==0.28.1
13 | httpx-sse==0.4.1
14 | idna==3.10
15 | jsonschema==4.24.0
16 | jsonschema-specifications==2025.4.1
17 | mcp==1.11.0
18 | multidict==6.6.3
19 | propcache==0.3.2
20 | pydantic==2.11.7
21 | pydantic-settings==2.10.1
22 | pydantic_core==2.33.2
23 | python-decouple
24 | python-dotenv==1.1.1
25 | python-multipart==0.0.20
26 | referencing==0.36.2
27 | rpds-py==0.26.0
28 | setuptools==62.1.0
29 | sniffio==1.3.1
30 | sse-starlette==2.4.1
31 | starlette==0.47.1
32 | typing-inspection==0.4.1
33 | typing_extensions==4.14.1
34 | uvicorn==0.35.0
35 | wheel==0.37.1
36 | yarl==1.20.1
37 |
```
--------------------------------------------------------------------------------
/run-mcp.sh:
--------------------------------------------------------------------------------
```bash
1 | #!/bin/bash
2 | # Get the script directory (where the MCP server is located)
3 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
4 |
5 | # Get the current working directory (where the script is being run from)
6 | CURRENT_DIR="$(pwd)"
7 |
8 | # Check if gitlab-mcp.env exists in the current working directory and load it
9 | if [ -f "$CURRENT_DIR/gitlab-mcp.env" ]; then
10 |     echo "Loading environment variables from $CURRENT_DIR/gitlab-mcp.env" >&2
11 | set -a
12 | source "$CURRENT_DIR/gitlab-mcp.env"
13 | set +a
14 | else
15 |     echo "No gitlab-mcp.env found in $CURRENT_DIR, using existing environment variables" >&2
16 | fi
17 |
18 | # Activate the virtual environment and run the MCP server.
19 | # stdout is the MCP stdio channel, so only stderr is mirrored to the log file.
20 | source "$SCRIPT_DIR/.venv/bin/activate"
21 | python "$SCRIPT_DIR/main.py" 2> >(tee /tmp/gitlab-mcp-server.log >&2)
```
--------------------------------------------------------------------------------
/tools/__init__.py:
--------------------------------------------------------------------------------
```python
1 | """
2 | GitLab MCP Server Tools Package
3 |
4 | This package contains all the tool implementations for the GitLab MCP server.
5 | Each tool provides specific functionality for interacting with GitLab's API.
6 | """
7 |
8 | from .list_merge_requests import list_merge_requests
9 | from .get_merge_request_reviews import get_merge_request_reviews
10 | from .get_merge_request_details import get_merge_request_details
11 | from .get_branch_merge_requests import get_branch_merge_requests
12 | from .reply_to_review_comment import (
13 | reply_to_review_comment,
14 | create_review_comment,
15 | resolve_review_discussion
16 | )
17 | from .get_commit_discussions import get_commit_discussions
18 |
19 | __all__ = [
20 | "list_merge_requests",
21 | "get_merge_request_reviews",
22 | "get_merge_request_details",
23 | "get_branch_merge_requests",
24 | "reply_to_review_comment",
25 | "create_review_comment",
26 | "resolve_review_discussion",
27 | "get_commit_discussions"
28 | ]
```
--------------------------------------------------------------------------------
/config.py:
--------------------------------------------------------------------------------
```python
1 | #!/usr/bin/env python3
2 | """Configuration management for GitLab MCP Server."""
3 |
4 | from decouple import config
5 |
6 |
7 | def get_gitlab_config():
8 | """Get GitLab configuration from environment variables."""
9 | gitlab_url = config("GITLAB_URL", default="https://gitlab.com")
10 | project_id = config("GITLAB_PROJECT_ID")
11 | access_token = config("GITLAB_ACCESS_TOKEN")
12 |
13 | if not project_id:
14 | raise ValueError(
15 | "GITLAB_PROJECT_ID environment variable is required"
16 | )
17 | if not access_token:
18 | raise ValueError(
19 | "GITLAB_ACCESS_TOKEN environment variable is required"
20 | )
21 |
22 | return {
23 | 'gitlab_url': gitlab_url,
24 | 'project_id': project_id,
25 | 'access_token': access_token,
26 | 'server_name': config("SERVER_NAME", default="gitlab-mcp-server"),
27 | 'server_version': config("SERVER_VERSION", default="1.0.0")
28 | }
29 |
30 |
31 | def get_headers(access_token):
32 | """Get HTTP headers for GitLab API requests."""
33 | return {
34 | "Private-Token": access_token,
35 | "Content-Type": "application/json"
36 | }
```
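
`gitlab_api.py` itself is not part of this listing, but the call pattern is visible in the tools: each helper takes `(gitlab_url, project_id, access_token, ...)` and returns a `(status, data, error)` tuple. A minimal, hypothetical sketch of how `get_gitlab_config()` feeds that pattern:

```python
import asyncio

from config import get_gitlab_config
from gitlab_api import get_merge_requests


async def main():
    # Reads GITLAB_URL, GITLAB_PROJECT_ID and GITLAB_ACCESS_TOKEN via python-decouple;
    # raises if the required values are missing or empty.
    cfg = get_gitlab_config()

    # Same (status, data, error) convention used throughout the tools/ package.
    status, data, error = await get_merge_requests(
        cfg["gitlab_url"], cfg["project_id"], cfg["access_token"],
        {"state": "opened", "per_page": 5}
    )
    if status != 200:
        raise RuntimeError(f"GitLab API error: {status} - {error}")

    for mr in data:
        print(f"!{mr['iid']}: {mr['title']} ({mr['state']})")


if __name__ == "__main__":
    asyncio.run(main())
```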
--------------------------------------------------------------------------------
/test_tools.py:
--------------------------------------------------------------------------------
```python
1 | #!/usr/bin/env python3
2 | """Test script to verify GitLab MCP tools return proper CallToolResult objects."""
3 |
4 | import asyncio
5 | import sys
6 | from tools.list_merge_requests import list_merge_requests
7 | from tools.get_merge_request_details import get_merge_request_details
8 | from tools.get_merge_request_reviews import get_merge_request_reviews
9 | from mcp.types import CallToolResult
10 |
11 | from decouple import config
12 |
13 | GITLAB_URL = config('GITLAB_URL', default='https://git.partnerz.io')
14 | PROJECT_ID = config('GITLAB_PROJECT_ID', default='237')
15 | ACCESS_TOKEN = config('GITLAB_ACCESS_TOKEN', default='')
16 |
17 |
18 | async def test_list_merge_requests():
19 | """Test list_merge_requests tool."""
20 | print("Testing list_merge_requests...")
21 | args = {"state": "opened", "limit": 2}
22 | result = await list_merge_requests(GITLAB_URL, PROJECT_ID, ACCESS_TOKEN, args)
23 | print(f"Result type: {type(result)}")
24 | print(f"Is CallToolResult: {isinstance(result, CallToolResult)}")
25 | if isinstance(result, CallToolResult):
26 | print(f"isError: {result.isError}")
27 | print(f"Content type: {type(result.content)}")
28 | if result.content:
29 | print(f"First content item: {result.content[0].text[:100]}...")
30 | print("=" * 50)
31 |
32 |
33 | async def test_get_merge_request_details():
34 | """Test get_merge_request_details tool."""
35 | print("Testing get_merge_request_details...")
36 | args = {"merge_request_iid": 1047}
37 | result = await get_merge_request_details(
38 | GITLAB_URL, PROJECT_ID, ACCESS_TOKEN, args
39 | )
40 | print(f"Result type: {type(result)}")
41 | print(f"Is CallToolResult: {isinstance(result, CallToolResult)}")
42 | if isinstance(result, CallToolResult):
43 | print(f"isError: {result.isError}")
44 | print(f"Content type: {type(result.content)}")
45 | if result.content:
46 | print(f"First content item: {result.content[0].text[:100]}...")
47 | print("=" * 50)
48 |
49 |
50 | async def test_get_merge_request_reviews():
51 | """Test get_merge_request_reviews tool."""
52 | print("Testing get_merge_request_reviews...")
53 | args = {"merge_request_iid": 1047}
54 | result = await get_merge_request_reviews(
55 | GITLAB_URL, PROJECT_ID, ACCESS_TOKEN, args
56 | )
57 | print(f"Result type: {type(result)}")
58 | print(f"Is CallToolResult: {isinstance(result, CallToolResult)}")
59 | if isinstance(result, CallToolResult):
60 | print(f"isError: {result.isError}")
61 | print(f"Content type: {type(result.content)}")
62 | if result.content:
63 | print(f"Full content: {result.content[0].text}")
64 | print("=" * 50)
65 |
66 |
67 | async def main():
68 | """Run all tests."""
69 | if not ACCESS_TOKEN:
70 | print("Error: GITLAB_ACCESS_TOKEN not set")
71 | sys.exit(1)
72 |
73 | await test_list_merge_requests()
74 | await test_get_merge_request_details()
75 | await test_get_merge_request_reviews()
76 |
77 |
78 | if __name__ == "__main__":
79 | asyncio.run(main())
```
--------------------------------------------------------------------------------
/utils.py:
--------------------------------------------------------------------------------
```python
1 | from datetime import datetime
2 |
3 |
4 | def format_date(iso_date_string):
5 | """Convert ISO date to human-readable format"""
6 | try:
7 | dt = datetime.fromisoformat(iso_date_string.replace('Z', '+00:00'))
8 | return dt.strftime('%Y-%m-%d %H:%M UTC')
9 | except ValueError:
10 | return iso_date_string
11 |
12 |
13 | def get_state_explanation(state):
14 | """Get human-readable explanation of MR state"""
15 | explanations = {
16 | 'opened': 'Ready for review',
17 | 'merged': 'Successfully merged',
18 | 'closed': 'Closed without merging',
19 | 'locked': 'Locked (no new discussions)',
20 | 'draft': 'Work in progress'
21 | }
22 | return explanations.get(state, state)
23 |
24 |
25 | def get_pipeline_status_icon(status):
26 | """Get emoji for pipeline status"""
27 | if not status:
28 | return '⚪'
29 |
30 | icons = {
31 | 'success': '✅',
32 | 'failed': '❌',
33 | 'running': '🔄',
34 | 'pending': '⏳',
35 | 'canceled': '⏹️',
36 | 'skipped': '⏭️',
37 | 'manual': '👤'
38 | }
39 | return icons.get(status, '❓')
40 |
41 |
42 | def calculate_change_stats(changes):
43 | """Calculate lines added/removed from changes"""
44 | if not changes or 'changes' not in changes:
45 | return "No changes"
46 |
47 | additions = 0
48 | deletions = 0
49 |
50 | for change in changes['changes']:
51 | if 'diff' in change:
52 | diff_lines = change['diff'].split('\n')
53 | for line in diff_lines:
54 | if line.startswith('+') and not line.startswith('+++'):
55 | additions += 1
56 | elif line.startswith('-') and not line.startswith('---'):
57 | deletions += 1
58 |
59 | return f"+{additions}/-{deletions}"
60 |
61 |
62 | def analyze_mr_readiness(mr_data, pipeline_data=None, approvals=None):
63 | """Analyze if MR is ready to merge and what's blocking it"""
64 | blockers = []
65 |
66 | if mr_data.get('draft') or mr_data.get('work_in_progress'):
67 | blockers.append("🚧 Draft/WIP status")
68 |
69 | if mr_data.get('has_conflicts'):
70 | blockers.append("⚠️ Merge conflicts")
71 |
72 | if pipeline_data and pipeline_data.get('status') == 'failed':
73 | blockers.append("❌ Pipeline failed")
74 | elif pipeline_data and pipeline_data.get('status') == 'running':
75 | blockers.append("🔄 Pipeline running")
76 |
77 | if approvals and 'approvals_required' in approvals:
78 | approved_count = len(approvals.get('approved_by', []))
79 | required_count = approvals.get('approvals_required', 0)
80 | if approved_count < required_count:
81 | msg = f"👥 Needs approval ({approved_count}/{required_count})"
82 | blockers.append(msg)
83 |
84 | if mr_data.get('merge_status') == 'cannot_be_merged':
85 | blockers.append("🚫 Cannot be merged")
86 |
87 | if not blockers:
88 | return "✅ Ready to merge"
89 | else:
90 | return f"🚫 Blocked by: {', '.join(blockers)}"
91 |
92 |
93 | def get_mr_priority(mr_data):
94 | """Determine MR priority based on labels and other factors"""
95 | labels = mr_data.get('labels', [])
96 |
97 | for label in labels:
98 | if 'critical' in label.lower() or 'urgent' in label.lower():
99 | return '🔴 Critical'
100 | elif 'high' in label.lower():
101 | return '🟡 High'
102 | elif 'low' in label.lower():
103 | return '🟢 Low'
104 |
105 | return '⚪ Normal'
```
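
As a quick illustration of the shapes these helpers expect, here is a small usage sketch; the sample payloads are invented and reduced to the keys the functions actually read, not full GitLab API responses.

```python
from utils import calculate_change_stats, analyze_mr_readiness

# Shaped like the MR /changes payload: a dict with a 'changes' list of diff strings.
changes = {
    "changes": [
        {"diff": "@@ -1,2 +1,3 @@\n-old line\n+new line\n+another new line\n"}
    ]
}
print(calculate_change_stats(changes))  # -> "+2/-1"

# Only the keys analyze_mr_readiness inspects (values are made up).
mr_data = {"draft": False, "has_conflicts": True, "merge_status": "can_be_merged"}
approvals = {"approvals_required": 2, "approved_by": [{"user": {"name": "Reviewer"}}]}
print(analyze_mr_readiness(mr_data, {"status": "failed"}, approvals))
# -> "🚫 Blocked by: ⚠️ Merge conflicts, ❌ Pipeline failed, 👥 Needs approval (1/2)"
```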
--------------------------------------------------------------------------------
/tools/get_commit_discussions.py:
--------------------------------------------------------------------------------
```python
1 | from mcp.types import TextContent
2 | from gitlab_api import (
3 | get_merge_request_commits,
4 | get_merge_request_discussions_paginated
5 | )
6 | from utils import format_date
7 | import logging
8 |
9 |
10 | async def get_commit_discussions(
11 | gitlab_url, project_id, access_token, args
12 | ):
13 | """Get discussions/comments on commits within a merge request"""
14 | logging.info(f"get_commit_discussions called with args: {args}")
15 | mr_iid = args["merge_request_iid"]
16 |
17 | try:
18 | commits_result = await get_merge_request_commits(
19 | gitlab_url, project_id, access_token, mr_iid
20 | )
21 | commits_status, commits_data, commits_error = commits_result
22 |
23 | if commits_status != 200:
24 | logging.error(
25 | f"Error fetching merge request commits: "
26 | f"{commits_status} - {commits_error}"
27 | )
28 | raise Exception(
29 | f"Error fetching merge request commits: {commits_error}"
30 | )
31 |
32 | if not commits_data:
33 | return [TextContent(
34 | type="text",
35 | text="No commits found in this merge request."
36 | )]
37 |
38 | logging.info(f"Getting ALL MR discussions for MR #{mr_iid}...")
39 | discussions_result = await get_merge_request_discussions_paginated(
40 | gitlab_url, project_id, access_token, mr_iid
41 | )
42 | discussions_status, discussions_data, discussions_error = discussions_result
43 |
44 | if discussions_status != 200:
45 | logging.error(
46 | f"Error fetching MR discussions: "
47 | f"{discussions_status} - {discussions_error}"
48 | )
49 | discussions_data = []
50 |
51 | commit_map = {commit['id']: commit for commit in commits_data}
52 |
53 | commits_with_discussions = {}
54 | total_discussions = 0
55 |
56 | for discussion in discussions_data:
57 | notes = discussion.get('notes', [])
58 | for note in notes:
59 | position = note.get('position')
60 | if position and position.get('head_sha'):
61 | commit_sha = position['head_sha']
62 | if commit_sha in commit_map:
63 | if commit_sha not in commits_with_discussions:
64 | commits_with_discussions[commit_sha] = {
65 | 'commit': commit_map[commit_sha],
66 | 'discussions': []
67 | }
68 | commits_with_discussions[commit_sha]['discussions'].append({
69 | 'discussion_id': discussion.get('id'),
70 | 'note': note,
71 | 'position': position
72 | })
73 | total_discussions += 1
74 |
75 | if not commits_with_discussions:
76 | summary_text = (
77 | f"## Commit Discussions for MR #{mr_iid}\n\n"
78 | f"**Summary:**\n"
79 | f"- Total commits: {len(commits_data)}\n"
80 | f"- Commits with discussions: 0\n"
81 | f"- Total discussions: 0\n\n"
82 | f"No line-level discussions found on any commits in this "
83 | f"merge request. Found {len(discussions_data)} total MR discussions."
84 | )
85 | return [TextContent(type="text", text=summary_text)]
86 |
87 | response_text = (
88 | f"## Commit Discussions for MR #{mr_iid}\n\n"
89 | f"**Summary:**\n"
90 | f"- Total commits: {len(commits_data)}\n"
91 | f"- Commits with discussions: {len(commits_with_discussions)}\n"
92 | f"- Total line-level discussions: {total_discussions}\n"
93 | f"- Total MR discussions: {len(discussions_data)}\n\n"
94 | )
95 |
96 | for commit_sha, item in commits_with_discussions.items():
97 | commit = item['commit']
98 | discussions = item['discussions']
99 |
100 | response_text += f"### 📝 Commit: {commit['short_id']}\n"
101 | response_text += f"**Title:** {commit['title']}\n"
102 | response_text += f"**Author:** {commit['author_name']}\n"
103 | response_text += (
104 | f"**Date:** {format_date(commit['committed_date'])}\n"
105 | )
106 | response_text += f"**SHA:** `{commit['id']}`\n\n"
107 |
108 | for discussion_item in discussions:
109 | discussion_id = discussion_item['discussion_id']
110 | note = discussion_item['note']
111 | position = discussion_item['position']
112 |
113 | author_name = note['author']['name']
114 | response_text += f"**💬 Comment by {author_name}:**\n"
115 | response_text += f"{note['body']}\n"
116 |
117 | if position.get('new_path'):
118 | line_info = position.get('new_line', 'N/A')
119 | response_text += (
120 | f"*On file: {position['new_path']} "
121 | f"(line {line_info})*\n"
122 | )
123 |
124 | created_at = format_date(note['created_at'])
125 | response_text += f"*Posted: {created_at}*\n"
126 | response_text += f"*Discussion ID: {discussion_id}*\n\n"
127 |
128 | response_text += "---\n\n"
129 |
130 | return [TextContent(type="text", text=response_text)]
131 |
132 | except Exception as e:
133 | logging.error(f"Error in get_commit_discussions: {str(e)}")
134 | return [TextContent(
135 | type="text",
136 | text=f"Error retrieving commit discussions: {str(e)}"
137 | )]
```
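
The commit attribution above hinges on the `position` object that GitLab attaches to diff notes. As a hypothetical illustration (field values invented, keys mirroring what the code reads), the core of the filtering is:

```python
# Reduced, invented payloads with only the keys the tool inspects.
commits_data = [
    {"id": "a1b2c3d4e5f6", "short_id": "a1b2c3d4", "title": "Add retry logic",
     "author_name": "Dev One", "committed_date": "2024-01-01T12:00:00Z"}
]
discussions_data = [
    {"id": "abc123", "notes": [
        {"author": {"name": "Reviewer"}, "body": "Consider a timeout here.",
         "created_at": "2024-01-02T09:30:00Z",
         "position": {"head_sha": "a1b2c3d4e5f6", "new_path": "client.py", "new_line": 42}}
    ]}
]

# Notes are kept only when their position's head_sha matches a commit in the MR.
commit_map = {commit["id"]: commit for commit in commits_data}
for discussion in discussions_data:
    for note in discussion.get("notes", []):
        position = note.get("position")
        if position and position.get("head_sha") in commit_map:
            commit = commit_map[position["head_sha"]]
            print(f"{commit['short_id']} <- {note['body']} "
                  f"({position['new_path']}:{position['new_line']})")
```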
--------------------------------------------------------------------------------
/tools/reply_to_review_comment.py:
--------------------------------------------------------------------------------
```python
1 | import logging
2 | from mcp.types import TextContent
3 | from gitlab_api import (
4 | reply_to_merge_request_discussion,
5 | create_merge_request_discussion,
6 | resolve_merge_request_discussion
7 | )
8 |
9 |
10 | async def reply_to_review_comment(
11 | gitlab_url, project_id, access_token, args
12 | ):
13 | """Reply to a specific discussion thread in a merge request review"""
14 | logging.info(f"reply_to_review_comment called with args: {args}")
15 |
16 | mr_iid = args["merge_request_iid"]
17 | discussion_id = args["discussion_id"]
18 | reply_body = args["body"]
19 |
20 | try:
21 | status, response_data, error_text = await reply_to_merge_request_discussion(
22 | gitlab_url, project_id, access_token, mr_iid, discussion_id, reply_body
23 | )
24 |
25 | if status == 201:
26 | author_name = response_data.get('author', {}).get('name', 'Unknown')
27 | note_id = response_data.get('id', 'unknown')
28 |
29 | result = "✅ **Reply posted successfully!**\n\n"
30 | result += f"**Merge Request**: !{mr_iid}\n"
31 | result += f"**Discussion ID**: `{discussion_id}`\n"
32 | result += f"**Note ID**: `{note_id}`\n"
33 | result += f"**Author**: {author_name}\n"
34 | reply_preview = reply_body[:100] + ('...' if len(reply_body) > 100 else '')
35 | result += f"**Reply**: {reply_preview}\n"
36 |
37 | return [TextContent(type="text", text=result)]
38 | else:
39 | error_msg = "❌ **Error posting reply**\n\n"
40 | error_msg += f"**Status**: {status}\n"
41 | error_msg += f"**Error**: {error_text}\n"
42 | error_msg += f"**MR**: !{mr_iid}\n"
43 | error_msg += f"**Discussion**: {discussion_id}\n"
44 |
45 | return [TextContent(type="text", text=error_msg)]
46 |
47 | except Exception as e:
48 | logging.error(f"Unexpected error in reply_to_review_comment: {e}")
49 | error_result = f"❌ **Unexpected error**\n\n"
50 | error_result += f"**Error**: {str(e)}\n"
51 | error_result += f"**MR**: !{mr_iid}\n"
52 | error_result += f"**Discussion**: {discussion_id}\n"
53 |
54 | return [TextContent(type="text", text=error_result)]
55 |
56 |
57 | async def create_review_comment(
58 | gitlab_url, project_id, access_token, args
59 | ):
60 | """Create a new discussion thread in a merge request review"""
61 | logging.info(f"create_review_comment called with args: {args}")
62 |
63 | mr_iid = args["merge_request_iid"]
64 | comment_body = args["body"]
65 |
66 | try:
67 | status, response_data, error_text = await create_merge_request_discussion(
68 | gitlab_url, project_id, access_token, mr_iid, comment_body
69 | )
70 |
71 | if status == 201:
72 | author_name = response_data.get('author', {}).get('name', 'Unknown')
73 | discussion_id = response_data.get('id', 'unknown')
74 |
75 | result = f"✅ **New discussion created!**\n\n"
76 | result += f"**Merge Request**: !{mr_iid}\n"
77 | result += f"**Discussion ID**: `{discussion_id}`\n"
78 | result += f"**Author**: {author_name}\n"
79 | result += f"**Comment**: {comment_body[:100]}{'...' if len(comment_body) > 100 else ''}\n"
80 |
81 | return [TextContent(type="text", text=result)]
82 | else:
83 | error_msg = f"❌ **Error creating discussion**\n\n"
84 | error_msg += f"**Status**: {status}\n"
85 | error_msg += f"**Error**: {error_text}\n"
86 | error_msg += f"**MR**: !{mr_iid}\n"
87 |
88 | return [TextContent(type="text", text=error_msg)]
89 |
90 | except Exception as e:
91 | logging.error(f"Unexpected error in create_review_comment: {e}")
92 | error_result = f"❌ **Unexpected error**\n\n"
93 | error_result += f"**Error**: {str(e)}\n"
94 | error_result += f"**MR**: !{mr_iid}\n"
95 |
96 | return [TextContent(type="text", text=error_result)]
97 |
98 |
99 | async def resolve_review_discussion(
100 | gitlab_url, project_id, access_token, args
101 | ):
102 | """Resolve or unresolve a discussion thread in a merge request review"""
103 | logging.info(f"resolve_review_discussion called with args: {args}")
104 |
105 | mr_iid = args["merge_request_iid"]
106 | discussion_id = args["discussion_id"]
107 | resolved = args.get("resolved", True)
108 |
109 | try:
110 | status, response_data, error_text = await resolve_merge_request_discussion(
111 | gitlab_url, project_id, access_token, mr_iid, discussion_id, resolved
112 | )
113 |
114 | if status == 200:
115 | action = "resolved" if resolved else "reopened"
116 |
117 | result = f"✅ **Discussion {action}!**\n\n"
118 | result += f"**Merge Request**: !{mr_iid}\n"
119 | result += f"**Discussion ID**: `{discussion_id}`\n"
120 | result += f"**Status**: {'✅ Resolved' if resolved else '🔄 Reopened'}\n"
121 |
122 | return [TextContent(type="text", text=result)]
123 | else:
124 |             error_msg = f"❌ **Error {'resolving' if resolved else 'reopening'} discussion**\n\n"
125 | error_msg += f"**Status**: {status}\n"
126 | error_msg += f"**Error**: {error_text}\n"
127 | error_msg += f"**MR**: !{mr_iid}\n"
128 | error_msg += f"**Discussion**: {discussion_id}\n"
129 |
130 | return [TextContent(type="text", text=error_msg)]
131 |
132 | except Exception as e:
133 | logging.error(f"Unexpected error in resolve_review_discussion: {e}")
134 | error_result = f"❌ **Unexpected error**\n\n"
135 | error_result += f"**Error**: {str(e)}\n"
136 | error_result += f"**MR**: !{mr_iid}\n"
137 | error_result += f"**Discussion**: {discussion_id}\n"
138 |
139 | return [TextContent(type="text", text=error_result)]
```
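
For reference, the argument dictionaries these three handlers expect line up with the parameters in the README's Tool Reference table; a hypothetical example (the IDs are invented):

```python
# Invented IDs, shapes matching the handlers above.
reply_args = {
    "merge_request_iid": 456,
    "discussion_id": "abc123",
    "body": "Thanks for the feedback!"
}
create_args = {"merge_request_iid": 789, "body": "Could you add error handling here?"}
resolve_args = {"merge_request_iid": 123, "discussion_id": "def456", "resolved": True}

# Each is passed the same way, e.g.:
#   await reply_to_review_comment(gitlab_url, project_id, access_token, reply_args)
#   await create_review_comment(gitlab_url, project_id, access_token, create_args)
#   await resolve_review_discussion(gitlab_url, project_id, access_token, resolve_args)
```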
--------------------------------------------------------------------------------
/tools/get_branch_merge_requests.py:
--------------------------------------------------------------------------------
```python
1 | import asyncio
2 | from mcp.types import TextContent
3 | from gitlab_api import (
4 | get_branch_merge_requests as api_get_branch_merge_requests,
5 | get_merge_request_pipeline,
6 | get_merge_request_changes
7 | )
8 | from utils import (
9 | format_date,
10 | get_state_explanation,
11 | get_pipeline_status_icon,
12 | analyze_mr_readiness,
13 | get_mr_priority,
14 | calculate_change_stats
15 | )
16 | import logging
17 |
18 |
19 | async def get_enhanced_mr_data(gitlab_url, project_id, access_token, mr_iid):
20 | """Get enhanced data for a single MR using parallel API calls"""
21 | try:
22 | pipeline_task = get_merge_request_pipeline(
23 | gitlab_url, project_id, access_token, mr_iid
24 | )
25 | changes_task = get_merge_request_changes(
26 | gitlab_url, project_id, access_token, mr_iid
27 | )
28 |
29 | pipeline_result, changes_result = await asyncio.gather(
30 | pipeline_task, changes_task, return_exceptions=True
31 | )
32 |
33 | if isinstance(pipeline_result, Exception):
34 | pipeline_data = None
35 | logging.warning(
36 | f"Pipeline fetch failed for MR {mr_iid}: {pipeline_result}"
37 | )
38 | else:
39 | pipeline_status, pipeline_data, _ = pipeline_result
40 | if pipeline_status != 200:
41 | pipeline_data = None
42 |
43 | if isinstance(changes_result, Exception):
44 | changes_data = None
45 | logging.warning(
46 | f"Changes fetch failed for MR {mr_iid}: {changes_result}"
47 | )
48 | else:
49 | changes_status, changes_data, _ = changes_result
50 | if changes_status != 200:
51 | changes_data = None
52 |
53 | return pipeline_data, changes_data
54 |
55 | except Exception as e:
56 | logging.warning(
57 | f"Error fetching enhanced data for MR {mr_iid}: {e}"
58 | )
59 | return None, None
60 |
61 |
62 | async def get_branch_merge_requests(
63 | gitlab_url, project_id, access_token, args
64 | ):
65 | logging.info(f"get_branch_merge_requests called with args: {args}")
66 | branch_name = args["branch_name"]
67 |
68 | status, data, error = await api_get_branch_merge_requests(
69 | gitlab_url, project_id, access_token, branch_name
70 | )
71 |
72 | if status != 200:
73 | logging.error(
74 | f"Error fetching branch merge requests: {status} - {error}"
75 | )
76 | raise Exception(
77 | f"Error fetching branch merge requests: {status} - {error}"
78 | )
79 |
80 | result = f"# 🌿 Merge Requests for branch: **{branch_name}**\n"
81 | result += (
82 | f"*Found {len(data)} merge request"
83 | f"{'s' if len(data) != 1 else ''}*\n\n"
84 | )
85 |
86 | if not data:
87 | result += "📭 No merge requests found for this branch.\n"
88 | result += (
89 | "💡 **Tip**: Create a merge request to start the "
90 | "review process.\n"
91 | )
92 | return [TextContent(type="text", text=result)]
93 |
94 | enhanced_data_tasks = []
95 | for mr in data:
96 | task = get_enhanced_mr_data(
97 | gitlab_url, project_id, access_token, mr['iid']
98 | )
99 | enhanced_data_tasks.append(task)
100 |
101 | try:
102 | enhanced_results = await asyncio.gather(*enhanced_data_tasks)
103 | except Exception as e:
104 | logging.warning(f"Error in parallel enhanced data fetch: {e}")
105 | enhanced_results = [(None, None)] * len(data)
106 |
107 | for i, mr in enumerate(data):
108 | pipeline_data, changes_data = enhanced_results[i]
109 |
110 | if mr['state'] == 'merged':
111 | state_icon = "✅"
112 | elif mr['state'] == 'opened':
113 | state_icon = "🔄"
114 | else:
115 | state_icon = "❌"
116 | result += f"## {state_icon} !{mr['iid']}: {mr['title']}\n"
117 |
118 | author_name = mr['author']['name']
119 | author_username = mr['author']['username']
120 | result += f"**👤 Author**: {author_name} (@{author_username})\n"
121 |
122 | result += f"**📊 Status**: {mr['state']} "
123 | result += f"({get_state_explanation(mr['state'])})\n"
124 |
125 | priority = get_mr_priority(mr)
126 | readiness = analyze_mr_readiness(mr, pipeline_data)
127 | result += f"**🏷️ Priority**: {priority}\n"
128 | result += f"**🚦 Merge Status**: {readiness}\n"
129 |
130 | result += f"**📅 Created**: {format_date(mr['created_at'])}\n"
131 | result += f"**🔄 Updated**: {format_date(mr['updated_at'])}\n"
132 |
133 | source_branch = mr['source_branch']
134 | target_branch = mr['target_branch']
135 | result += f"**🌿 Branches**: `{source_branch}` → `{target_branch}`\n"
136 |
137 | if pipeline_data:
138 | pipeline_status = pipeline_data.get('status')
139 | pipeline_icon = get_pipeline_status_icon(pipeline_status)
140 | result += f"**🔧 Pipeline**: {pipeline_icon} {pipeline_status}\n"
141 |
142 | if pipeline_data.get('web_url'):
143 | result += f" *[View Pipeline]({pipeline_data['web_url']})*\n"
144 | elif mr.get('pipeline'):
145 | pipeline_status = mr['pipeline'].get('status')
146 | pipeline_icon = get_pipeline_status_icon(pipeline_status)
147 | result += f"**🔧 Pipeline**: {pipeline_icon} {pipeline_status or 'unknown'}\n"
148 |
149 | if changes_data:
150 | change_stats = calculate_change_stats(changes_data)
151 | result += f"**📈 Changes**: {change_stats}\n"
152 |
153 | if mr.get('labels'):
154 | labels_str = ', '.join(f"`{label}`" for label in mr['labels'])
155 | result += f"**🏷️ Labels**: {labels_str}\n"
156 |
157 | if mr.get('draft') or mr.get('work_in_progress'):
158 | result += "**⚠️ Status**: 🚧 Draft/Work in Progress\n"
159 |
160 | if mr.get('has_conflicts'):
161 | result += "**⚠️ Warning**: 🔥 Has merge conflicts\n"
162 |
163 | result += f"**🔗 Actions**: [View MR]({mr['web_url']})"
164 | if mr['state'] == 'opened':
165 | result += f" | [Review & Approve]({mr['web_url']})"
166 | result += "\n"
167 |
168 | result += "\n---\n\n"
169 |
170 | result += "## 📊 Summary\n"
171 |
172 | state_counts = {}
173 | for mr in data:
174 | state = mr['state']
175 | state_counts[state] = state_counts.get(state, 0) + 1
176 |
177 | result += "**State Breakdown**:\n"
178 | for state, count in state_counts.items():
179 | icon = "✅" if state == 'merged' else "🔄" if state == 'opened' else "❌"
180 | result += f" • {icon} {state.title()}: {count}\n"
181 |
182 | result += "\n**🎯 Action Items**:\n"
183 | opened_mrs = [mr for mr in data if mr['state'] == 'opened']
184 |
185 | if opened_mrs:
186 | has_conflicts = sum(1 for mr in opened_mrs if mr.get('has_conflicts'))
187 | drafts = sum(1 for mr in opened_mrs if mr.get('draft') or mr.get('work_in_progress'))
188 |
189 | if has_conflicts:
190 | result += f" • 🔥 {has_conflicts} MR{'s' if has_conflicts > 1 else ''} with merge conflicts\n"
191 | if drafts:
192 | result += f" • 🚧 {drafts} draft MR{'s' if drafts > 1 else ''} in progress\n"
193 |
194 | ready_count = len(opened_mrs) - has_conflicts - drafts
195 | if ready_count > 0:
196 | result += f" • ✅ {ready_count} MR{'s' if ready_count > 1 else ''} ready for review\n"
197 | else:
198 | result += " • 🎉 No open merge requests - branch is clean!\n"
199 |
200 | return [TextContent(type="text", text=result)]
```
--------------------------------------------------------------------------------
/tools/get_merge_request_details.py:
--------------------------------------------------------------------------------
```python
1 | import asyncio
2 | from mcp.types import TextContent
3 | from gitlab_api import (
4 | get_merge_request_details as api_get_merge_request_details,
5 | get_merge_request_pipeline,
6 | get_merge_request_changes,
7 | get_merge_request_reviews
8 | )
9 | from utils import (
10 | format_date,
11 | get_state_explanation,
12 | get_pipeline_status_icon,
13 | calculate_change_stats,
14 | analyze_mr_readiness,
15 | get_mr_priority
16 | )
17 | import logging
18 |
19 |
20 | async def get_merge_request_details(
21 | gitlab_url, project_id, access_token, args
22 | ):
23 | logging.info(f"get_merge_request_details called with args: {args}")
24 | mr_iid = args["merge_request_iid"]
25 |
26 | tasks = [
27 | api_get_merge_request_details(gitlab_url, project_id, access_token, mr_iid),
28 | get_merge_request_pipeline(gitlab_url, project_id, access_token, mr_iid),
29 | get_merge_request_changes(gitlab_url, project_id, access_token, mr_iid),
30 | get_merge_request_reviews(gitlab_url, project_id, access_token, mr_iid)
31 | ]
32 |
33 | try:
34 | details_result, pipeline_result, changes_result, reviews_result = await asyncio.gather(*tasks)
35 | except Exception as e:
36 | logging.error(f"Error in parallel API calls: {e}")
37 | raise Exception(f"Error fetching merge request data: {e}")
38 |
39 | mr_status, mr_data, mr_error = details_result
40 | pipeline_status, pipeline_data, pipeline_error = pipeline_result
41 | changes_status, changes_data, changes_error = changes_result
42 |
43 | if mr_status != 200:
44 | logging.error(f"Error fetching merge request details: {mr_status} - {mr_error}")
45 | raise Exception(f"Error fetching merge request details: {mr_status} - {mr_error}")
46 |
47 | state_icon = "✅" if mr_data['state'] == 'merged' else "🔄" if mr_data['state'] == 'opened' else "❌"
48 | result = f"# {state_icon} Merge Request !{mr_data['iid']}: {mr_data['title']}\n\n"
49 |
50 | result += "## 📋 Overview\n"
51 | result += f"**👤 Author**: {mr_data['author']['name']} (@{mr_data['author']['username']})\n"
52 | result += f"**📊 Status**: {mr_data['state']} ({get_state_explanation(mr_data['state'])})\n"
53 | result += f"**🏷️ Priority**: {get_mr_priority(mr_data)}\n"
54 | result += f"**📅 Created**: {format_date(mr_data['created_at'])}\n"
55 | result += f"**🔄 Updated**: {format_date(mr_data['updated_at'])}\n"
56 | result += f"**🌿 Branches**: `{mr_data['source_branch']}` → `{mr_data['target_branch']}`\n"
57 |
58 | if pipeline_status == 200 and pipeline_data:
59 | pipeline_icon = get_pipeline_status_icon(pipeline_data.get('status'))
60 | result += f"**🔧 Pipeline**: {pipeline_icon} {pipeline_data.get('status', 'unknown')}\n"
61 | if pipeline_data.get('web_url'):
62 | result += f" *[View Pipeline]({pipeline_data['web_url']})*\n"
63 |     elif mr_data.get('pipeline'):
64 |         mr_pipeline_state = mr_data['pipeline'].get('status')
65 |         pipeline_icon = get_pipeline_status_icon(mr_pipeline_state)
66 |         result += f"**🔧 Pipeline**: {pipeline_icon} {mr_pipeline_state or 'unknown'}\n"
67 |
68 | if changes_status == 200:
69 | change_stats = calculate_change_stats(changes_data)
70 | result += f"**📈 Changes**: {change_stats}\n"
71 |
72 | readiness = analyze_mr_readiness(mr_data, pipeline_data)
73 | result += f"**🚦 Merge Status**: {readiness}\n"
74 |
75 | if mr_data.get('labels'):
76 | labels_str = ', '.join(f"`{label}`" for label in mr_data['labels'])
77 | result += f"**🏷️ Labels**: {labels_str}\n"
78 |
79 | if mr_data.get('draft') or mr_data.get('work_in_progress'):
80 | result += "**⚠️ Status**: 🚧 Draft/Work in Progress\n"
81 |
82 | if mr_data.get('has_conflicts'):
83 | result += "**⚠️ Warning**: 🔥 Has merge conflicts\n"
84 |
85 | result += f"**🔗 URL**: {mr_data['web_url']}\n\n"
86 |
87 | if mr_data.get('description'):
88 | result += "## 📝 Description\n"
89 | result += f"{mr_data['description']}\n\n"
90 |
91 | result += "## 🔧 Technical Details\n"
92 |
93 | if mr_data.get('merge_commit_sha'):
94 | result += f"**📦 Merge Commit**: `{mr_data['merge_commit_sha'][:8]}`\n"
95 |
96 | if mr_data.get('squash_commit_sha'):
97 | result += f"**🔄 Squash Commit**: `{mr_data['squash_commit_sha'][:8]}`\n"
98 |
99 | merge_options = []
100 | if mr_data.get('squash'):
101 | merge_options.append("🔄 Squash commits")
102 | if mr_data.get('remove_source_branch'):
103 | merge_options.append("🗑️ Remove source branch")
104 | if mr_data.get('force_remove_source_branch'):
105 | merge_options.append("🗑️ Force remove source branch")
106 |
107 | if merge_options:
108 | result += f"**⚙️ Merge Options**: {', '.join(merge_options)}\n"
109 |
110 | if mr_data.get('assignees'):
111 | assignees = ', '.join(f"@{user['username']}" for user in mr_data['assignees'])
112 | result += f"**👥 Assignees**: {assignees}\n"
113 |
114 | if mr_data.get('reviewers'):
115 | reviewers = ', '.join(f"@{user['username']}" for user in mr_data['reviewers'])
116 | result += f"**👀 Reviewers**: {reviewers}\n"
117 |
118 | if mr_data.get('milestone'):
119 | result += f"**🎯 Milestone**: {mr_data['milestone']['title']}\n"
120 |
121 | result += "\n"
122 |
123 | if reviews_result and 'discussions' in reviews_result:
124 | discussions_status, discussions, _ = reviews_result['discussions']
125 | approvals_status, approvals, _ = reviews_result['approvals']
126 |
127 | result += "## 💬 Reviews Summary\n"
128 |
129 | if discussions_status == 200 and discussions:
130 | total_discussions = len(discussions)
131 | resolved_count = sum(1 for d in discussions if d.get('resolved'))
132 | unresolved_count = total_discussions - resolved_count
133 |
134 | result += f"**Discussions**: {total_discussions} total, {resolved_count} resolved, {unresolved_count} unresolved\n"
135 |
136 | if unresolved_count > 0:
137 | result += f"⚠️ **{unresolved_count} unresolved discussion{'s' if unresolved_count > 1 else ''}**\n"
138 |
139 | if approvals_status == 200 and approvals:
140 | approved_by = approvals.get('approved_by', [])
141 | approvals_left = approvals.get('approvals_left', 0)
142 |
143 | if approved_by:
144 | result += f"**Approvals**: ✅ {len(approved_by)} approval{'s' if len(approved_by) > 1 else ''}\n"
145 |
146 | if approvals_left > 0:
147 | result += f"**Needed**: ⏳ {approvals_left} more approval{'s' if approvals_left > 1 else ''}\n"
148 |
149 | result += "\n"
150 |
151 | result += "## 📊 Action Items\n"
152 | action_items = []
153 |
154 | if mr_data.get('draft') or mr_data.get('work_in_progress'):
155 | action_items.append("🚧 Remove draft/WIP status")
156 |
157 | if mr_data.get('has_conflicts'):
158 | action_items.append("⚠️ Resolve merge conflicts")
159 |
160 | if pipeline_status == 200 and pipeline_data and pipeline_data.get('status') == 'failed':
161 | action_items.append("❌ Fix failing pipeline")
162 | elif pipeline_status == 200 and pipeline_data and pipeline_data.get('status') == 'running':
163 | action_items.append("🔄 Wait for pipeline completion")
164 |
165 | if reviews_result and 'discussions' in reviews_result:
166 | discussions_status, discussions, _ = reviews_result['discussions']
167 | approvals_status, approvals, _ = reviews_result['approvals']
168 |
169 | if discussions_status == 200 and discussions:
170 | unresolved_count = sum(1 for d in discussions if not d.get('resolved'))
171 | if unresolved_count > 0:
172 | action_items.append(f"💬 Resolve {unresolved_count} pending discussion{'s' if unresolved_count > 1 else ''}")
173 |
174 | if approvals_status == 200 and approvals and approvals.get('approvals_left', 0) > 0:
175 | action_items.append(f"👥 Obtain {approvals['approvals_left']} more approval{'s' if approvals['approvals_left'] > 1 else ''}")
176 |
177 | if mr_data['state'] == 'opened' and not action_items:
178 | action_items.append("✅ Ready to merge!")
179 |
180 | if action_items:
181 | for item in action_items:
182 | result += f"• {item}\n"
183 | else:
184 | result += "✅ No action items identified\n"
185 |
186 | result += "\n## 🚀 Quick Actions\n"
187 | if mr_data['state'] == 'opened':
188 | result += f"• [📝 Edit MR]({mr_data['web_url']}/edit)\n"
189 | result += f"• [💬 Add Comment]({mr_data['web_url']}#note_form)\n"
190 | result += f"• [🔄 View Changes]({mr_data['web_url']}/diffs)\n"
191 | if pipeline_data and pipeline_data.get('web_url'):
192 | result += f"• [🔧 View Pipeline]({pipeline_data['web_url']})\n"
193 |
194 | return [TextContent(type="text", text=result)]
```
--------------------------------------------------------------------------------
/tools/get_merge_request_reviews.py:
--------------------------------------------------------------------------------
```python
1 | import asyncio
2 | from gitlab_api import (
3 | get_merge_request_reviews as api_get_merge_request_reviews,
4 | get_merge_request_details,
5 | get_merge_request_pipeline,
6 | get_merge_request_changes
7 | )
8 | from utils import (
9 | format_date,
10 | get_state_explanation,
11 | get_pipeline_status_icon,
12 | analyze_mr_readiness,
13 | get_mr_priority,
14 | calculate_change_stats
15 | )
16 | from mcp.types import TextContent
17 | import logging
18 |
19 |
20 | def get_review_type_icon(note):
21 | """Get appropriate icon for review type"""
22 | if note.get('resolvable'):
23 | return '💬'
24 | elif note.get('position'):
25 | return '📝'
26 | elif 'approved' in note.get('body', '').lower():
27 | return '✅'
28 | elif any(word in note.get('body', '').lower()
29 | for word in ['reject', 'needs work', 'changes requested']):
30 | return '❌'
31 | else:
32 | return '💭'
33 |
34 |
35 | def get_approval_summary(approvals):
36 | """Generate enhanced approval summary"""
37 | if not approvals:
38 | return "## 👥 Approvals\n❓ No approval information available\n\n"
39 |
40 | result = "## 👥 Approvals\n"
41 |
42 | approved_by = approvals.get('approved_by', [])
43 | approvals_required = approvals.get('approvals_required', 0)
44 | approvals_left = approvals.get('approvals_left', 0)
45 |
46 | if approved_by:
47 | result += f"**✅ Approved by ({len(approved_by)} reviewer"
48 | result += f"{'s' if len(approved_by) > 1 else ''}):**\n"
49 | for approval in approved_by:
50 | user = approval['user']
51 | result += f" • **{user['name']}** (@{user['username']})\n"
52 | result += "\n"
53 |
54 | if approvals_required > 0:
55 | if approvals_left == 0:
56 | status = "✅ Approval requirements met"
57 | else:
58 | plural = 's' if approvals_left > 1 else ''
59 | status = f"⏳ {approvals_left} approval{plural} needed"
60 | result += f"**Status**: {status}\n"
61 | received_count = len(approved_by)
62 | result += f"**Required**: {approvals_required} | **Received**: {received_count}\n\n"
63 | elif not approved_by:
64 | result += "📝 No approvals yet\n\n"
65 |
66 | return result
67 |
68 |
69 | def get_discussion_summary(discussions):
70 | """Generate enhanced discussion summary with counts and status"""
71 | if not discussions:
72 | return "## 💬 Discussions\n❓ No discussion information available\n\n"
73 |
74 | total_discussions = len(discussions)
75 | resolved_count = sum(1 for d in discussions if d.get('resolved'))
76 | unresolved_count = total_discussions - resolved_count
77 |
78 | result = "## 💬 Discussions & Reviews\n"
79 | result += f"**Total**: {total_discussions} | **Resolved**: {resolved_count} | **Unresolved**: {unresolved_count}\n\n"
80 |
81 | if unresolved_count > 0:
82 | result += f"⚠️ **{unresolved_count} unresolved discussion{'s' if unresolved_count > 1 else ''}** - action needed\n\n"
83 | elif total_discussions > 0:
84 | result += "✅ All discussions resolved\n\n"
85 |
86 | return result
87 |
88 |
89 | def format_discussion_thread(discussion):
90 | """Format a single discussion thread with enhanced formatting"""
91 | if not discussion.get('notes'):
92 | return ""
93 |
94 | result = ""
95 | thread_resolved = discussion.get('resolved', False)
96 | thread_icon = "✅" if thread_resolved else "🟡"
97 | discussion_id = discussion.get('id', 'unknown')
98 |
99 | result += f"### {thread_icon} Discussion Thread\n"
100 | result += f"**Discussion ID**: `{discussion_id}`\n"
101 | if thread_resolved:
102 | result += "*Resolved*\n"
103 | else:
104 | result += "*Unresolved*\n"
105 |
106 | for note in discussion['notes']:
107 | if note.get('system'):
108 | continue
109 |
110 | author_name = note['author']['name']
111 | author_username = note['author']['username']
112 | note_icon = get_review_type_icon(note)
113 | note_id = note.get('id', 'unknown')
114 |
115 | result += f"\n{note_icon} **{author_name}** (@{author_username})\n"
116 | timestamp = format_date(note['created_at'])
117 | result += f"*{timestamp}* | Note ID: `{note_id}`\n"
118 |
119 | if note.get('position'):
120 | pos = note['position']
121 | if pos.get('new_path'):
122 | result += f"📁 **File**: `{pos['new_path']}`\n"
123 | if pos.get('new_line'):
124 | result += f"📍 **Line**: {pos['new_line']}\n"
125 |
126 | body = note.get('body', '').strip()
127 | if body:
128 | result += f"\n{body}\n"
129 |
130 | result += "\n---\n"
131 |
132 | return result + "\n"
133 |
134 |
135 | async def get_merge_request_reviews(
136 | gitlab_url, project_id, access_token, args
137 | ):
138 | logging.info(f"get_merge_request_reviews called with args: {args}")
139 | mr_iid = args["merge_request_iid"]
140 |
141 | tasks = [
142 | api_get_merge_request_reviews(gitlab_url, project_id, access_token, mr_iid),
143 | get_merge_request_details(gitlab_url, project_id, access_token, mr_iid),
144 | get_merge_request_pipeline(gitlab_url, project_id, access_token, mr_iid),
145 | get_merge_request_changes(gitlab_url, project_id, access_token, mr_iid)
146 | ]
147 |
148 | try:
149 | reviews_result, details_result, pipeline_result, changes_result = await asyncio.gather(*tasks)
150 | except Exception as e:
151 | logging.error(f"Error in parallel API calls: {e}")
152 | raise Exception(f"Error fetching merge request data: {e}")
153 |
154 | discussions_status, discussions, discussions_text = reviews_result["discussions"]
155 | approvals_status, approvals, approvals_text = reviews_result["approvals"]
156 |
157 | details_status, mr_details, details_text = details_result
158 | pipeline_status, pipeline_data, pipeline_text = pipeline_result
159 | changes_status, changes_data, changes_text = changes_result
160 |
161 | if discussions_status != 200:
162 | logging.error(f"Error fetching discussions {discussions_status}: {discussions_text}")
163 | raise Exception(f"Error fetching discussions: {discussions_status} - {discussions_text}")
164 |
165 | result = f"# 🔍 Reviews & Discussions for MR !{mr_iid}\n\n"
166 |
167 | if details_status == 200:
168 | result += f"## 📋 Merge Request Overview\n"
169 | result += f"**Title**: {mr_details.get('title', 'N/A')}\n"
170 | result += f"**Status**: {mr_details.get('state', 'N/A')} ({get_state_explanation(mr_details.get('state', 'N/A'))})\n"
171 | result += f"**Author**: {mr_details.get('author', {}).get('name', 'N/A')} (@{mr_details.get('author', {}).get('username', 'N/A')})\n"
172 | result += f"**Priority**: {get_mr_priority(mr_details)}\n"
173 |
174 | if pipeline_status == 200 and pipeline_data:
175 | pipeline_icon = get_pipeline_status_icon(pipeline_data.get('status'))
176 | result += f"**Pipeline**: {pipeline_icon} {pipeline_data.get('status', 'unknown')}\n"
177 |
178 | if changes_status == 200:
179 | change_stats = calculate_change_stats(changes_data)
180 | result += f"**Changes**: {change_stats}\n"
181 |
182 | readiness = analyze_mr_readiness(mr_details, pipeline_data, approvals)
183 | result += f"**Merge Status**: {readiness}\n"
184 |
185 | result += f"**Updated**: {format_date(mr_details.get('updated_at', 'N/A'))}\n\n"
186 |
187 | result += get_approval_summary(approvals)
188 |
189 | result += get_discussion_summary(discussions)
190 |
191 | if discussions:
192 | result += "## 📝 Detailed Discussions\n\n"
193 | for discussion in discussions:
194 | thread_content = format_discussion_thread(discussion)
195 | if thread_content:
196 | result += thread_content
197 | else:
198 | result += "💬 No discussions found\n\n"
199 |
200 | result += "## 📊 Action Items\n"
201 | action_items = []
202 |
203 | if discussions:
204 | unresolved_count = sum(1 for d in discussions if not d.get('resolved'))
205 | if unresolved_count > 0:
206 | action_items.append(f"🟡 Resolve {unresolved_count} pending discussion{'s' if unresolved_count > 1 else ''}")
207 |
208 | if approvals and approvals.get('approvals_left', 0) > 0:
209 | action_items.append(f"👥 Obtain {approvals['approvals_left']} more approval{'s' if approvals['approvals_left'] > 1 else ''}")
210 |
211 | if pipeline_status == 200 and pipeline_data and pipeline_data.get('status') == 'failed':
212 | action_items.append("❌ Fix failing pipeline")
213 |
214 | if details_status == 200 and mr_details.get('has_conflicts'):
215 | action_items.append("⚠️ Resolve merge conflicts")
216 |
217 | if action_items:
218 | for item in action_items:
219 | result += f"• {item}\n"
220 | else:
221 | result += "✅ No action items - ready for next steps\n"
222 |
223 | return [TextContent(type="text", text=result)]
```
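
For a quick sanity check outside of an MCP client, the tool coroutine above can be awaited directly. A minimal sketch (not part of the repository), assuming the environment variables from `.env.example` are set and using `1` as a placeholder merge request IID:

```python
# Ad-hoc check of the reviews tool without going through the MCP server.
import asyncio
import os

from tools import get_merge_request_reviews


async def _demo():
    # Environment variables as documented in .env.example
    gitlab_url = os.environ.get("GITLAB_URL", "https://gitlab.com")
    project_id = os.environ["GITLAB_PROJECT_ID"]
    access_token = os.environ["GITLAB_ACCESS_TOKEN"]

    # merge_request_iid 1 is a placeholder; use a real MR IID from your project
    contents = await get_merge_request_reviews(
        gitlab_url, project_id, access_token, {"merge_request_iid": 1}
    )
    # The tool returns a list with one TextContent holding the Markdown report
    print(contents[0].text)


asyncio.run(_demo())
```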
--------------------------------------------------------------------------------
/tools/list_merge_requests.py:
--------------------------------------------------------------------------------
```python
1 | import asyncio
2 | from mcp.types import TextContent
3 | from gitlab_api import (
4 | get_merge_requests,
5 | get_merge_request_pipeline,
6 | get_merge_request_changes
7 | )
8 | from utils import (
9 | format_date,
10 | get_state_explanation,
11 | get_pipeline_status_icon,
12 | analyze_mr_readiness,
13 | get_mr_priority,
14 | calculate_change_stats
15 | )
16 | import logging
17 |
18 |
19 | async def get_enhanced_mr_data(gitlab_url, project_id, access_token, mr_iid):
20 | """Get enhanced data for a single MR using parallel API calls"""
21 | try:
22 | pipeline_task = get_merge_request_pipeline(
23 | gitlab_url, project_id, access_token, mr_iid
24 | )
25 | changes_task = get_merge_request_changes(
26 | gitlab_url, project_id, access_token, mr_iid
27 | )
28 |
29 | pipeline_result, changes_result = await asyncio.gather(
30 | pipeline_task, changes_task, return_exceptions=True
31 | )
32 |
33 | if isinstance(pipeline_result, Exception):
34 | pipeline_data = None
35 | logging.warning(f"Pipeline fetch failed for MR {mr_iid}: {pipeline_result}")
36 | else:
37 | pipeline_status, pipeline_data, _ = pipeline_result
38 | if pipeline_status != 200:
39 | pipeline_data = None
40 |
41 | if isinstance(changes_result, Exception):
42 | changes_data = None
43 | logging.warning(f"Changes fetch failed for MR {mr_iid}: {changes_result}")
44 | else:
45 | changes_status, changes_data, _ = changes_result
46 | if changes_status != 200:
47 | changes_data = None
48 |
49 | return pipeline_data, changes_data
50 |
51 | except Exception as e:
52 | logging.warning(f"Error fetching enhanced data for MR {mr_iid}: {e}")
53 | return None, None
54 |
55 |
56 | async def list_merge_requests(gitlab_url, project_id, access_token, args):
57 | logging.info(f"list_merge_requests called with args: {args}")
58 |
59 | state = args.get("state", "opened")
60 | target_branch = args.get("target_branch")
61 | limit = args.get("limit", 10)
62 |
63 | params = {
64 | "state": state,
65 | "per_page": limit,
66 | "order_by": "updated_at",
67 | "sort": "desc"
68 | }
69 |
70 | if target_branch:
71 | params["target_branch"] = target_branch
72 |
73 | status, data, error = await get_merge_requests(
74 | gitlab_url, project_id, access_token, params
75 | )
76 |
77 | if status != 200:
78 | logging.error(f"Error listing merge requests: {status} - {error}")
79 | raise Exception(f"Error listing merge requests: {status} - {error}")
80 |
81 | state_filter = f" ({state})" if state != "all" else ""
82 | result = f"# 📋 Merge Requests{state_filter}\n"
83 | result += f"*Found {len(data)} merge request{'s' if len(data) != 1 else ''}*\n\n"
84 |
85 | if not data:
86 | result += "📭 No merge requests found.\n"
87 | if state == "opened":
88 | result += "💡 **Tip**: Create a merge request to start the development workflow.\n"
89 | return [TextContent(type="text", text=result)]
90 |
91 | enhanced_data_tasks = []
92 | for mr in data[:5]:
93 | task = get_enhanced_mr_data(
94 | gitlab_url, project_id, access_token, mr['iid']
95 | )
96 | enhanced_data_tasks.append(task)
97 |
98 | try:
99 | enhanced_results = await asyncio.gather(*enhanced_data_tasks)
100 | except Exception as e:
101 | logging.warning(f"Error in parallel enhanced data fetch: {e}")
102 | enhanced_results = [(None, None)] * len(data[:5])
103 |
104 | for i, mr in enumerate(data):
105 | if i < len(enhanced_results):
106 | pipeline_data, changes_data = enhanced_results[i]
107 | else:
108 | pipeline_data, changes_data = None, None
109 |
110 | if mr['state'] == 'merged':
111 | state_icon = "✅"
112 | elif mr['state'] == 'opened':
113 | state_icon = "🔄"
114 | else:
115 | state_icon = "❌"
116 |
117 | result += f"## {state_icon} !{mr['iid']}: {mr['title']}\n"
118 |
119 | author_name = mr['author']['name']
120 | author_username = mr['author']['username']
121 | result += f"**👤 Author**: {author_name} (@{author_username})\n"
122 | result += f"**📊 Status**: {mr['state']} ({get_state_explanation(mr['state'])})\n"
123 |
124 | priority = get_mr_priority(mr)
125 | readiness = analyze_mr_readiness(mr, pipeline_data)
126 | result += f"**🏷️ Priority**: {priority}\n"
127 | result += f"**🚦 Merge Status**: {readiness}\n"
128 |
129 | result += f"**📅 Created**: {format_date(mr['created_at'])}\n"
130 | result += f"**🔄 Updated**: {format_date(mr['updated_at'])}\n"
131 |
132 | source_branch = mr['source_branch']
133 | target_branch = mr['target_branch']
134 | result += f"**🌿 Branches**: `{source_branch}` → `{target_branch}`\n"
135 |
136 | if pipeline_data:
137 | pipeline_status = pipeline_data.get('status')
138 | pipeline_icon = get_pipeline_status_icon(pipeline_status)
139 | result += f"**🔧 Pipeline**: {pipeline_icon} {pipeline_status}\n"
140 |
141 | if pipeline_data.get('web_url'):
142 | result += f" *[View Pipeline]({pipeline_data['web_url']})*\n"
143 | elif mr.get('pipeline'):
144 | pipeline_status = mr['pipeline'].get('status')
145 | pipeline_icon = get_pipeline_status_icon(pipeline_status)
146 | result += f"**🔧 Pipeline**: {pipeline_icon} {pipeline_status or 'unknown'}\n"
147 |
148 | if changes_data:
149 | change_stats = calculate_change_stats(changes_data)
150 | result += f"**📈 Changes**: {change_stats}\n"
151 |
152 | if mr.get('labels'):
153 | labels_str = ', '.join(f"`{label}`" for label in mr['labels'])
154 | result += f"**🏷️ Labels**: {labels_str}\n"
155 |
156 | if mr.get('draft') or mr.get('work_in_progress'):
157 | result += "**⚠️ Status**: 🚧 Draft/Work in Progress\n"
158 |
159 | if mr.get('has_conflicts'):
160 | result += "**⚠️ Warning**: 🔥 Has merge conflicts\n"
161 |
162 | result += f"**🔗 Actions**: [View MR]({mr['web_url']})"
163 | if mr['state'] == 'opened':
164 | result += f" | [Review]({mr['web_url']})"
165 | result += "\n\n"
166 |
167 | result += "## 📊 Summary\n"
168 |
169 | state_counts = {}
170 | for mr in data:
171 | state = mr['state']
172 | state_counts[state] = state_counts.get(state, 0) + 1
173 |
174 | result += "**State Breakdown**:\n"
175 | for state, count in state_counts.items():
176 | if state == 'merged':
177 | icon = "✅"
178 | elif state == 'opened':
179 | icon = "🔄"
180 | else:
181 | icon = "❌"
182 | result += f" • {icon} {state.title()}: {count}\n"
183 |
184 | priority_counts = {}
185 | for mr in data:
186 | priority = get_mr_priority(mr)
187 | priority_counts[priority] = priority_counts.get(priority, 0) + 1
188 |
189 | if len(priority_counts) > 1:
190 | result += "\n**Priority Breakdown**:\n"
191 | for priority, count in priority_counts.items():
192 | result += f" • {priority}: {count}\n"
193 |
194 | opened_mrs = [mr for mr in data if mr['state'] == 'opened']
195 |
196 | if opened_mrs:
197 | result += "\n**🎯 Action Items**:\n"
198 |
199 | has_conflicts = sum(1 for mr in opened_mrs if mr.get('has_conflicts'))
200 | drafts = sum(1 for mr in opened_mrs if mr.get('draft') or mr.get('work_in_progress'))
201 |
202 | failed_pipelines = 0
204 |         for i, mr in enumerate(data[:len(enhanced_results)]):
205 |             if mr['state'] == 'opened':
206 |                 pipeline_data, _ = enhanced_results[i]
207 |                 if pipeline_data and pipeline_data.get('status') == 'failed':
208 |                     failed_pipelines += 1
208 |
209 | if has_conflicts:
210 | result += f" • 🔥 {has_conflicts} MR{'s' if has_conflicts > 1 else ''} with merge conflicts\n"
211 | if drafts:
212 | result += f" • 🚧 {drafts} draft MR{'s' if drafts > 1 else ''} in progress\n"
213 | if failed_pipelines:
214 | result += f" • ❌ {failed_pipelines} MR{'s' if failed_pipelines > 1 else ''} with failed pipelines\n"
215 |
216 | ready_count = len(opened_mrs) - has_conflicts - drafts - failed_pipelines
217 | if ready_count > 0:
218 | result += f" • ✅ {ready_count} MR{'s' if ready_count > 1 else ''} ready for review\n"
219 |
220 | result += "\n**📋 Next Steps**:\n"
221 | if has_conflicts:
222 | result += " • 🔧 Resolve merge conflicts to unblock development\n"
223 | if failed_pipelines:
224 | result += " • 🔧 Fix failing pipelines to ensure quality\n"
225 | if ready_count > 0:
226 | result += " • 👀 Review and approve ready merge requests\n"
227 | else:
228 | result += "\n**🎯 Action Items**:\n"
229 | if state == "opened":
230 | result += " • 🎉 No open merge requests - ready for new features!\n"
231 | else:
232 | result += " • 📊 Consider filtering by 'opened' state to see active work\n"
233 |
234 | return [TextContent(type="text", text=result)]
```
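
`get_enhanced_mr_data` leans on `asyncio.gather(..., return_exceptions=True)` so that a single failed lookup (say, a missing pipeline) degrades to `None` instead of aborting the whole listing. A standalone sketch of that pattern with stand-in coroutines (`ok` and `boom` are illustrative names, not part of this project):

```python
import asyncio


async def ok():
    # Mimics a successful (status, data, text) triple from gitlab_api
    return (200, {"status": "success"}, "OK")


async def boom():
    raise RuntimeError("simulated API failure")


async def main():
    # return_exceptions=True turns failures into returned values instead of
    # raising, so each result can be inspected and downgraded independently.
    results = await asyncio.gather(ok(), boom(), return_exceptions=True)
    cleaned = [None if isinstance(r, Exception) else r for r in results]
    print(cleaned)  # [(200, {'status': 'success'}, 'OK'), None]


asyncio.run(main())
```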
--------------------------------------------------------------------------------
/gitlab_api.py:
--------------------------------------------------------------------------------
```python
1 | import aiohttp
2 |
3 |
4 | def _headers(access_token):
5 | return {"Private-Token": access_token, "Content-Type": "application/json"}
6 |
7 |
8 | async def get_merge_requests(gitlab_url, project_id, access_token, params):
9 | url = f"{gitlab_url}/api/v4/projects/{project_id}/merge_requests"
10 | headers = _headers(access_token)
11 | async with aiohttp.ClientSession() as session:
12 | async with session.get(
13 | url, headers=headers, params=params
14 | ) as response:
15 | return (
16 | response.status,
17 | await response.json(),
18 | await response.text()
19 | )
20 |
21 |
22 | async def get_merge_request_pipeline(
23 | gitlab_url, project_id, access_token, mr_iid
24 | ):
25 | """Get the latest pipeline for a merge request"""
26 | url = (
27 | f"{gitlab_url}/api/v4/projects/{project_id}/"
28 | f"merge_requests/{mr_iid}/pipelines"
29 | )
30 | headers = _headers(access_token)
31 | async with aiohttp.ClientSession() as session:
32 | params = {"per_page": 1}
33 | async with session.get(url, headers=headers, params=params) as response:
34 | data = await response.json()
35 | return (
36 | response.status,
37 |                 data[0] if isinstance(data, list) and data else None,
38 | await response.text()
39 | )
40 |
41 |
42 | async def get_merge_request_changes(
43 | gitlab_url, project_id, access_token, mr_iid
44 | ):
45 | """Get changes/diff stats for a merge request"""
46 | url = (
47 | f"{gitlab_url}/api/v4/projects/{project_id}/"
48 | f"merge_requests/{mr_iid}/changes"
49 | )
50 | headers = _headers(access_token)
51 | async with aiohttp.ClientSession() as session:
52 | async with session.get(url, headers=headers) as response:
53 | return (
54 | response.status,
55 | await response.json(),
56 | await response.text()
57 | )
58 |
59 |
60 | async def get_project_info(gitlab_url, project_id, access_token):
61 | """Get project information to check for merge conflicts"""
62 | url = f"{gitlab_url}/api/v4/projects/{project_id}"
63 | headers = _headers(access_token)
64 | async with aiohttp.ClientSession() as session:
65 | async with session.get(url, headers=headers) as response:
66 | return (
67 | response.status,
68 | await response.json(),
69 | await response.text()
70 | )
71 |
72 |
73 | async def get_merge_request_reviews(
74 | gitlab_url, project_id, access_token, mr_iid
75 | ):
76 | discussions_result = await get_merge_request_discussions_paginated(
77 | gitlab_url, project_id, access_token, mr_iid
78 | )
79 | discussions_status, discussions, discussions_text = discussions_result
80 |
81 | approvals_url = (
82 | f"{gitlab_url}/api/v4/projects/{project_id}/merge_requests/{mr_iid}/approvals"
83 | )
84 | headers = _headers(access_token)
85 | async with aiohttp.ClientSession() as session:
86 | async with session.get(approvals_url, headers=headers) as approvals_response:
87 | if approvals_response.status == 200:
88 | approvals = await approvals_response.json()
89 | else:
90 | approvals = None
91 | approvals_status = approvals_response.status
92 | approvals_text = await approvals_response.text()
93 |
94 | return {
95 | "discussions": (
96 | discussions_status, discussions, discussions_text
97 | ),
98 | "approvals": (
99 | approvals_status, approvals, approvals_text
100 | ),
101 | }
102 |
103 |
104 | async def get_merge_request_details(
105 | gitlab_url, project_id, access_token, mr_iid
106 | ):
107 | url = f"{gitlab_url}/api/v4/projects/{project_id}/merge_requests/{mr_iid}"
108 | headers = _headers(access_token)
109 | async with aiohttp.ClientSession() as session:
110 | async with session.get(url, headers=headers) as response:
111 | return (
112 | response.status,
113 | await response.json(),
114 | await response.text()
115 | )
116 |
117 |
118 | async def create_merge_request_discussion(
119 | gitlab_url, project_id, access_token, mr_iid, body
120 | ):
121 | """Create a new discussion/comment on a merge request"""
122 | url = (
123 | f"{gitlab_url}/api/v4/projects/{project_id}/merge_requests/"
124 | f"{mr_iid}/discussions"
125 | )
126 | headers = _headers(access_token)
127 | data = {"body": body}
128 |
129 | async with aiohttp.ClientSession() as session:
130 | async with session.post(
131 | url, headers=headers, json=data
132 | ) as response:
133 | json_data = (
134 | await response.json()
135 | if response.content_type == 'application/json' else {}
136 | )
137 | return (
138 | response.status,
139 | json_data,
140 | await response.text()
141 | )
142 |
143 |
144 | async def reply_to_merge_request_discussion(
145 | gitlab_url, project_id, access_token, mr_iid, discussion_id, body
146 | ):
147 | """Reply to an existing discussion on a merge request"""
148 | url = (
149 | f"{gitlab_url}/api/v4/projects/{project_id}/merge_requests/"
150 | f"{mr_iid}/discussions/{discussion_id}/notes"
151 | )
152 | headers = _headers(access_token)
153 | data = {"body": body}
154 |
155 | async with aiohttp.ClientSession() as session:
156 | async with session.post(
157 | url, headers=headers, json=data
158 | ) as response:
159 | json_data = (
160 | await response.json()
161 | if response.content_type == 'application/json' else {}
162 | )
163 | return (
164 | response.status,
165 | json_data,
166 | await response.text()
167 | )
168 |
169 |
170 | async def resolve_merge_request_discussion(
171 | gitlab_url, project_id, access_token, mr_iid, discussion_id, resolved
172 | ):
173 | """Resolve or unresolve a discussion on a merge request"""
174 | url = (
175 | f"{gitlab_url}/api/v4/projects/{project_id}/merge_requests/"
176 | f"{mr_iid}/discussions/{discussion_id}"
177 | )
178 | headers = _headers(access_token)
179 | data = {"resolved": resolved}
180 |
181 | async with aiohttp.ClientSession() as session:
182 | async with session.put(
183 | url, headers=headers, json=data
184 | ) as response:
185 | json_data = (
186 | await response.json()
187 | if response.content_type == 'application/json' else {}
188 | )
189 | return (
190 | response.status,
191 | json_data,
192 | await response.text()
193 | )
194 |
195 |
196 | async def get_branch_merge_requests(
197 | gitlab_url, project_id, access_token, branch_name
198 | ):
199 | """Get merge requests for a specific branch"""
200 | params = {
201 | "source_branch": branch_name,
202 | "state": "all",
203 | "per_page": 100
204 | }
205 |
206 | url = f"{gitlab_url}/api/v4/projects/{project_id}/merge_requests"
207 | headers = _headers(access_token)
208 |
209 | async with aiohttp.ClientSession() as session:
210 | async with session.get(
211 | url, headers=headers, params=params
212 | ) as response:
213 | return (
214 | response.status,
215 | await response.json(),
216 | await response.text()
217 | )
218 |
219 |
220 | async def get_merge_request_commits(
221 | gitlab_url, project_id, access_token, mr_iid
222 | ):
223 | """Get all commits in a merge request (handles pagination)"""
224 | base_url = (
225 | f"{gitlab_url}/api/v4/projects/{project_id}/"
226 | f"merge_requests/{mr_iid}/commits"
227 | )
228 | headers = _headers(access_token)
229 | all_commits = []
230 | page = 1
231 | per_page = 100 # Maximum allowed per page
232 |
233 | async with aiohttp.ClientSession() as session:
234 | while True:
235 | params = {"page": page, "per_page": per_page}
236 | async with session.get(
237 | base_url, headers=headers, params=params
238 | ) as response:
239 | if response.status != 200:
240 | return (
241 | response.status,
242 | await response.json(),
243 | await response.text()
244 | )
245 |
246 | page_data = await response.json()
247 | if not page_data: # No more results
248 | break
249 |
250 | all_commits.extend(page_data)
251 |
252 | # If we got fewer results than per_page, we're done
253 | if len(page_data) < per_page:
254 | break
255 |
256 | page += 1
257 |
258 | return (200, all_commits, "Success")
259 |
260 |
261 | async def get_commit_comments(
262 | gitlab_url, project_id, access_token, commit_sha
263 | ):
264 | """Get simple comments for a specific commit"""
265 | url = (
266 | f"{gitlab_url}/api/v4/projects/{project_id}/"
267 | f"repository/commits/{commit_sha}/comments"
268 | )
269 | headers = _headers(access_token)
270 | async with aiohttp.ClientSession() as session:
271 | async with session.get(url, headers=headers) as response:
272 | return (
273 | response.status,
274 | await response.json(),
275 | await response.text()
276 | )
277 |
278 |
279 | async def get_commit_discussions(
280 | gitlab_url, project_id, access_token, commit_sha
281 | ):
282 | """Get discussions/comments for a specific commit"""
283 | url = (
284 | f"{gitlab_url}/api/v4/projects/{project_id}/"
285 | f"repository/commits/{commit_sha}/discussions"
286 | )
287 | headers = _headers(access_token)
288 | async with aiohttp.ClientSession() as session:
289 | async with session.get(url, headers=headers) as response:
290 | return (
291 | response.status,
292 | await response.json(),
293 | await response.text()
294 | )
295 |
296 |
297 | async def get_commit_all_comments_and_discussions(
298 | gitlab_url, project_id, access_token, commit_sha
299 | ):
300 | """Get both comments and discussions for a commit, combining them"""
301 | discussions_result = await get_commit_discussions(
302 | gitlab_url, project_id, access_token, commit_sha
303 | )
304 | discussions_status, discussions_data, discussions_error = discussions_result
305 |
306 | comments_result = await get_commit_comments(
307 | gitlab_url, project_id, access_token, commit_sha
308 | )
309 | comments_status, comments_data, comments_error = (
310 | comments_result
311 | )
312 |
313 | combined_data = []
314 |
315 | if discussions_status == 200 and discussions_data:
316 | combined_data.extend(discussions_data)
317 |
318 | if comments_status == 200 and comments_data:
319 | for comment in comments_data:
320 | discussion_format = {
321 | "id": f"comment_{comment.get('id', 'unknown')}",
322 | "individual_note": True,
323 | "notes": [{
324 | "id": comment.get("id"),
325 | "body": comment.get("note", ""),
326 | "author": comment.get("author", {}),
327 | "created_at": comment.get("created_at"),
328 | "updated_at": comment.get("created_at"),
329 | "system": False,
330 | "noteable_type": "Commit",
331 | "noteable_id": commit_sha,
332 | "resolvable": False,
333 | "position": {
334 | "new_path": comment.get("path"),
335 | "new_line": comment.get("line"),
336 | "line_type": comment.get("line_type")
337 | } if comment.get("path") else None
338 | }]
339 | }
340 | combined_data.append(discussion_format)
341 |
342 | if combined_data:
343 | return (200, combined_data, "Success")
344 | elif discussions_status == 200 or comments_status == 200:
345 | return (200, [], "No comments or discussions found")
346 | else:
347 | return discussions_result
348 |
349 |
350 | async def get_merge_request_discussions_paginated(
351 | gitlab_url, project_id, access_token, mr_iid
352 | ):
353 | """Get all discussions from a merge request with pagination"""
354 | all_discussions = []
355 | page = 1
356 | per_page = 100 # Maximum allowed per page
357 |
358 | async with aiohttp.ClientSession() as session:
359 | headers = _headers(access_token)
360 |
361 | while True:
362 | url = (
363 | f"{gitlab_url}/api/v4/projects/{project_id}/"
364 | f"merge_requests/{mr_iid}/discussions"
365 | )
366 | params = {"page": page, "per_page": per_page}
367 |
368 | async with session.get(url, headers=headers, params=params) as response:
369 | if response.status != 200:
370 | return (
371 | response.status,
372 | await response.json(),
373 | await response.text()
374 | )
375 |
376 | discussions = await response.json()
377 | if not discussions: # No more results
378 | break
379 |
380 | all_discussions.extend(discussions)
381 |
382 | link_header = response.headers.get('Link', '')
383 | if 'rel="next"' not in link_header:
384 | break
385 |
386 | page += 1
387 |
388 | return (200, all_discussions, "Success")
```
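
Every helper in `gitlab_api.py` returns the same `(status, parsed_json, raw_text)` triple, which is why the tool modules can branch on the status code and fall back to the raw body for error messages. A minimal consumer sketch, assuming credentials in the environment and a placeholder MR IID of `1`:

```python
import asyncio
import os

from gitlab_api import get_merge_request_details


async def _demo():
    status, data, text = await get_merge_request_details(
        os.environ.get("GITLAB_URL", "https://gitlab.com"),
        os.environ["GITLAB_PROJECT_ID"],
        os.environ["GITLAB_ACCESS_TOKEN"],
        1,  # placeholder MR IID
    )
    if status == 200:
        # 'title' and 'state' are standard fields on the GitLab MR payload
        print(data["title"], data["state"])
    else:
        # On errors the raw response text is the most useful diagnostic
        print(f"GitLab returned {status}: {text}")


asyncio.run(_demo())
```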
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
```python
1 | #!/usr/bin/env python3
2 | import asyncio
3 | import logging
4 | from typing import Any, Dict, List
5 | from mcp.server import Server
6 | from mcp.server.models import InitializationOptions
7 | from mcp.server.stdio import stdio_server
8 | from mcp.types import (
9 | Tool,
10 | TextContent,
11 | INTERNAL_ERROR,
12 | INVALID_PARAMS,
13 | METHOD_NOT_FOUND,
14 | JSONRPCError
15 | )
16 | from logging_config import configure_logging
17 | from config import get_gitlab_config
18 | from tools import (
19 | list_merge_requests,
20 | get_merge_request_reviews,
21 | get_merge_request_details,
22 | get_branch_merge_requests,
23 | reply_to_review_comment,
24 | create_review_comment,
25 | resolve_review_discussion,
26 | get_commit_discussions
27 | )
28 |
29 |
30 | class GitLabMCPServer:
31 | def __init__(self):
32 | configure_logging()
33 | logging.info("Initializing GitLabMCPServer")
34 |
35 | self.config = get_gitlab_config()
36 |
37 | self.server = Server(self.config['server_name'])
38 | self.setup_handlers()
39 |
40 | def setup_handlers(self):
41 | @self.server.list_tools()
42 | async def list_tools() -> List[Tool]:
43 | logging.info("list_tools called")
44 | tools = [
45 | Tool(
46 | name="list_merge_requests",
47 | description="List merge requests for the GitLab project",
48 | inputSchema={
49 | "type": "object",
50 | "properties": {
51 | "state": {
52 | "type": "string",
53 | "enum": ["opened", "closed", "merged", "all"],
54 | "default": "opened",
55 | "description": "Filter by merge request state"
56 | },
57 | "target_branch": {
58 | "type": "string",
59 | "description": (
60 | "Filter by target branch (optional)"
61 | )
62 | },
63 | "limit": {
64 | "type": "integer",
65 | "default": 10,
66 | "minimum": 1,
67 | "maximum": 100,
68 | "description": "Maximum number of results"
69 | }
70 | },
71 | "additionalProperties": False
72 | }
73 | ),
74 | Tool(
75 | name="get_merge_request_reviews",
76 | description=(
77 | "Get reviews and discussions for a specific "
78 | "merge request"
79 | ),
80 | inputSchema={
81 | "type": "object",
82 | "properties": {
83 | "merge_request_iid": {
84 | "type": "integer",
85 | "minimum": 1,
86 | "description": (
87 | "Internal ID of the merge request"
88 | )
89 | }
90 | },
91 | "required": ["merge_request_iid"],
92 | "additionalProperties": False
93 | }
94 | ),
95 | Tool(
96 | name="get_merge_request_details",
97 | description=(
98 | "Get detailed information about a specific "
99 | "merge request"
100 | ),
101 | inputSchema={
102 | "type": "object",
103 | "properties": {
104 | "merge_request_iid": {
105 | "type": "integer",
106 | "minimum": 1,
107 | "description": (
108 | "Internal ID of the merge request"
109 | )
110 | }
111 | },
112 | "required": ["merge_request_iid"],
113 | "additionalProperties": False
114 | }
115 | ),
116 | Tool(
117 | name="get_branch_merge_requests",
118 | description=(
119 | "Get all merge requests for a specific branch"
120 | ),
121 | inputSchema={
122 | "type": "object",
123 | "properties": {
124 | "branch_name": {
125 | "type": "string",
126 | "description": "Name of the branch"
127 | }
128 | },
129 | "required": ["branch_name"],
130 | "additionalProperties": False
131 | }
132 | ),
133 | Tool(
134 | name="reply_to_review_comment",
135 | description=(
136 | "Reply to a specific discussion thread in a "
137 | "merge request review"
138 | ),
139 | inputSchema={
140 | "type": "object",
141 | "properties": {
142 | "merge_request_iid": {
143 | "type": "integer",
144 | "minimum": 1,
145 | "description": (
146 | "Internal ID of the merge request"
147 | )
148 | },
149 | "discussion_id": {
150 | "type": "string",
151 | "description": (
152 | "ID of the discussion thread to reply to"
153 | )
154 | },
155 | "body": {
156 | "type": "string",
157 | "description": "Content of the reply comment"
158 | }
159 | },
160 | "required": [
161 | "merge_request_iid", "discussion_id", "body"
162 | ],
163 | "additionalProperties": False
164 | }
165 | ),
166 | Tool(
167 | name="create_review_comment",
168 | description=(
169 | "Create a new discussion thread in a "
170 | "merge request review"
171 | ),
172 | inputSchema={
173 | "type": "object",
174 | "properties": {
175 | "merge_request_iid": {
176 | "type": "integer",
177 | "minimum": 1,
178 | "description": (
179 | "Internal ID of the merge request"
180 | )
181 | },
182 | "body": {
183 | "type": "string",
184 | "description": (
185 | "Content of the new discussion comment"
186 | )
187 | }
188 | },
189 | "required": ["merge_request_iid", "body"],
190 | "additionalProperties": False
191 | }
192 | ),
193 | Tool(
194 | name="resolve_review_discussion",
195 | description=(
196 | "Resolve or unresolve a discussion thread in a "
197 | "merge request review"
198 | ),
199 | inputSchema={
200 | "type": "object",
201 | "properties": {
202 | "merge_request_iid": {
203 | "type": "integer",
204 | "minimum": 1,
205 | "description": (
206 | "Internal ID of the merge request"
207 | )
208 | },
209 | "discussion_id": {
210 | "type": "string",
211 | "description": (
212 | "ID of the discussion thread to "
213 | "resolve/unresolve"
214 | )
215 | },
216 | "resolved": {
217 | "type": "boolean",
218 | "default": True,
219 | "description": (
220 | "Whether to resolve (true) or unresolve "
221 | "(false) the discussion"
222 | )
223 | }
224 | },
225 | "required": ["merge_request_iid", "discussion_id"],
226 | "additionalProperties": False
227 | }
228 | ),
229 | Tool(
230 | name="get_commit_discussions",
231 | description=(
232 | "Get discussions and comments on commits within a "
233 | "specific merge request"
234 | ),
235 | inputSchema={
236 | "type": "object",
237 | "properties": {
238 | "merge_request_iid": {
239 | "type": "integer",
240 | "minimum": 1,
241 | "description": (
242 | "Internal ID of the merge request"
243 | )
244 | }
245 | },
246 | "required": ["merge_request_iid"],
247 | "additionalProperties": False
248 | }
249 | )
250 | ]
251 | tool_names = [t.name for t in tools]
252 | logging.info(f"Returning {len(tools)} tools: {tool_names}")
253 | return tools
254 |
255 | @self.server.call_tool()
256 | async def call_tool(
257 | name: str, arguments: Dict[str, Any]
258 | ) -> List[TextContent]:
259 | logging.info(
260 | f"call_tool called: {name} with arguments: {arguments}"
261 | )
262 |
263 | try:
264 | if name not in [
265 | "list_merge_requests",
266 | "get_merge_request_reviews",
267 | "get_merge_request_details",
268 | "get_branch_merge_requests",
269 | "reply_to_review_comment",
270 | "create_review_comment",
271 | "resolve_review_discussion",
272 | "get_commit_discussions"
273 | ]:
274 | logging.warning(f"Unknown tool called: {name}")
275 | raise JSONRPCError(
276 | METHOD_NOT_FOUND,
277 | f"Unknown tool: {name}"
278 | )
279 |
280 | if name == "list_merge_requests":
281 | return await list_merge_requests(
282 | self.config['gitlab_url'],
283 | self.config['project_id'],
284 | self.config['access_token'],
285 | arguments
286 | )
287 | elif name == "get_merge_request_reviews":
288 | return await get_merge_request_reviews(
289 | self.config['gitlab_url'],
290 | self.config['project_id'],
291 | self.config['access_token'],
292 | arguments
293 | )
294 | elif name == "get_merge_request_details":
295 | return await get_merge_request_details(
296 | self.config['gitlab_url'],
297 | self.config['project_id'],
298 | self.config['access_token'],
299 | arguments
300 | )
301 | elif name == "get_branch_merge_requests":
302 | return await get_branch_merge_requests(
303 | self.config['gitlab_url'],
304 | self.config['project_id'],
305 | self.config['access_token'],
306 | arguments
307 | )
308 | elif name == "reply_to_review_comment":
309 | return await reply_to_review_comment(
310 | self.config['gitlab_url'],
311 | self.config['project_id'],
312 | self.config['access_token'],
313 | arguments
314 | )
315 | elif name == "create_review_comment":
316 | return await create_review_comment(
317 | self.config['gitlab_url'],
318 | self.config['project_id'],
319 | self.config['access_token'],
320 | arguments
321 | )
322 | elif name == "resolve_review_discussion":
323 | return await resolve_review_discussion(
324 | self.config['gitlab_url'],
325 | self.config['project_id'],
326 | self.config['access_token'],
327 | arguments
328 | )
329 | elif name == "get_commit_discussions":
330 | return await get_commit_discussions(
331 | self.config['gitlab_url'],
332 | self.config['project_id'],
333 | self.config['access_token'],
334 | arguments
335 | )
336 |
337 | except JSONRPCError:
338 | raise
339 | except ValueError as e:
340 | logging.error(f"Validation error in {name}: {e}")
341 | raise JSONRPCError(
342 | INVALID_PARAMS,
343 | f"Invalid parameters: {str(e)}"
344 | )
345 | except Exception as e:
346 | logging.error(
347 | f"Unexpected error in call_tool for {name}: {e}",
348 | exc_info=True
349 | )
350 | raise JSONRPCError(
351 | INTERNAL_ERROR,
352 | f"Internal server error: {str(e)}"
353 | )
354 |
355 | async def run(self):
356 | logging.info("Starting MCP stdio server")
357 | try:
358 | async with stdio_server() as (read_stream, write_stream):
359 | logging.info("stdio_server context entered successfully")
360 | await self.server.run(
361 | read_stream,
362 | write_stream,
363 | InitializationOptions(
364 | server_name=self.config['server_name'],
365 | server_version=self.config['server_version'],
366 | capabilities={
367 | "tools": {},
368 | "logging": {}
369 | }
370 | )
371 | )
372 | except Exception as e:
373 | logging.error(f"Error in stdio_server: {e}", exc_info=True)
374 | raise
375 |
376 |
377 | async def main():
378 | try:
379 | logging.info("Starting main function")
380 | server = GitLabMCPServer()
381 | logging.info("GitLabMCPServer created successfully")
382 | await server.run()
383 | except Exception as e:
384 | logging.error(f"Error starting server: {e}", exc_info=True)
385 | print(f"Error starting server: {e}")
386 | return 1
387 |
388 |
389 | if __name__ == "__main__":
390 | asyncio.run(main())
```
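
To exercise the server end to end, an MCP client can spawn `main.py` over stdio and list the registered tools. A hedged sketch using the client helpers from the `mcp` Python SDK (`StdioServerParameters`, `stdio_client`, and `ClientSession` follow the SDK's documented client API, which may differ between versions); the environment variables mirror `.env.example`:

```python
import asyncio
import os

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client


async def _smoke_test():
    # Spawn the server as a subprocess speaking MCP over stdio
    params = StdioServerParameters(
        command="python",
        args=["main.py"],
        env={
            "GITLAB_PROJECT_ID": os.environ["GITLAB_PROJECT_ID"],
            "GITLAB_ACCESS_TOKEN": os.environ["GITLAB_ACCESS_TOKEN"],
            "GITLAB_URL": os.environ.get("GITLAB_URL", "https://gitlab.com"),
        },
    )
    async with stdio_client(params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            tools = await session.list_tools()
            print([tool.name for tool in tools.tools])


asyncio.run(_smoke_test())
```

If the server starts correctly, the printed list should contain the eight tool names registered in `setup_handlers`.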