# Directory Structure
```
├── .gitignore
├── index.ts
├── package-lock.json
├── package.json
├── README.md
└── tsconfig.json
```
# Files
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
```
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Uncomment the public line below if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
.cache
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
build/
gcp-oauth.keys.json
.*-server-credentials.json
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
.DS_Store
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
```
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
```markdown
<h2 align="center">
📢 <strong>COMMUNITY SERVER NOTICE</strong><br/>
This is a community-maintained MCP Server.<br/>
👉 For the <strong>official</strong> MongoDB MCP Server, visit
<a href="https://github.com/mongodb-js/mongodb-mcp-server">mongodb-js/mongodb-mcp-server</a>
</h2>
# MongoDB MCP Server
A Model Context Protocol server that provides read-only access to MongoDB databases. It lets LLMs inspect collection schemas, run aggregation pipelines, and sample documents.
## Components
### Tools
- **aggregate**
  - Execute MongoDB aggregation pipelines against the connected database
  - Input:
    - `collection` (string): The collection to query
    - `pipeline` (array): MongoDB aggregation pipeline stages
    - `options` (object): Optional aggregation settings
      - `allowDiskUse` (boolean): Allow operations that require disk usage
      - `maxTimeMS` (number): Maximum execution time in milliseconds
      - `comment` (string): Comment to identify the operation
  - Default limit of 1000 documents if no `$limit` stage is specified
  - Default timeout of 30 seconds
- **explain**
  - Get execution plans for aggregation pipelines
  - Input:
    - `collection` (string): The collection to analyze
    - `pipeline` (array): MongoDB aggregation pipeline stages
    - `verbosity` (string): Detail level of the explanation
      - Options: "queryPlanner", "executionStats", "allPlansExecution"
      - Default: "queryPlanner"
- **sample**
  - Return random documents from a collection (uses a `$sample` stage)
  - Input:
    - `collection` (string): The collection to sample from
    - `count` (number): Number of documents to return (default: 5, maximum: 10)
### Resources
The server provides schema information for each collection in the database:
- **Collection Schemas** (`mcp-mongodb://<collection>/schema`)
  - Inferred schema information for each collection, returned as JSON
  - Includes field names and data types
  - Schema is inferred from a sample document in the collection (see the example below)
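For example, reading the schema resource of a hypothetical `users` collection returns an array of field descriptors shaped like the following (the collection and field names here are illustrative):
```json
[
  { "field_name": "_id", "field_type": "object", "description": "Field _id of type object" },
  { "field_name": "name", "field_type": "string", "description": "Field name of type string" },
  { "field_name": "age", "field_type": "number", "description": "Field age of type number" }
]
```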
## Usage with Claude Desktop
To use this server with the Claude Desktop app, add the following configuration to the "mcpServers" section of your `claude_desktop_config.json`:
```json
"mongodb": {
"command": "npx",
"args": [
"-y" ,
"@pash1986/mcp-server-mongodb"
],
"env" : {
"MONGODB_URI" : "mongodb+srv://<yourcluster>" // 'mongodb://localhost:27017'
}
}
```
Adjust the connection string as needed: replace `<yourcluster>` with your cluster address (or use `mongodb://localhost:27017` for a local deployment) and include the database name you want to query in the URI.
## Example Usage
### Basic Aggregation
```javascript
{
  "collection": "users",
  "pipeline": [
    { "$match": { "age": { "$gt": 21 } } },
    { "$group": {
      "_id": "$city",
      "avgAge": { "$avg": "$age" },
      "count": { "$sum": 1 }
    }},
    { "$sort": { "count": -1 } },
    { "$limit": 10 }
  ],
  "options": {
    "allowDiskUse": true,
    "maxTimeMS": 60000,
    "comment": "City-wise user statistics"
  }
}
```
### Query Explanation
```javascript
{
  "collection": "users",
  "pipeline": [
    { "$match": { "age": { "$gt": 21 } } },
    { "$sort": { "age": 1 } }
  ],
  "verbosity": "executionStats"
}
```
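### Document Sampling
The `sample` tool returns random documents via a `$sample` stage; `count` defaults to 5 and is capped at 10:
```javascript
{
  "collection": "listings",
  "count": 5
}
```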
## Safety Features
- Automatic limit of 1000 documents if no `$limit` stage is specified in the pipeline (see the sketch below)
- Default timeout of 30 seconds for all operations
- Read-only operations only
- Safe schema inference from collection samples
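A minimal sketch of how the first two guards are applied; it mirrors the `aggregate` handler in `index.ts` but is not a verbatim copy, and the helper name is illustrative:
```typescript
import type { AggregateOptions, Db, Document } from "mongodb";

// Illustrative helper showing the limit and timeout guards used by the aggregate tool.
async function runGuardedAggregate(
  db: Db,
  collectionName: string,
  pipeline: Document[],
  options: AggregateOptions = {}
): Promise<Document[]> {
  // If the pipeline has no $limit stage, append one so results are capped at 1000 documents.
  const hasLimit = pipeline.some((stage) => "$limit" in stage);
  const safePipeline = hasLimit ? pipeline : [...pipeline, { $limit: 1000 }];

  return db
    .collection(collectionName)
    .aggregate(safePipeline, {
      ...options,
      // Enforce a 30-second server-side timeout unless the caller supplies one.
      maxTimeMS: options.maxTimeMS ?? 30000,
    })
    .toArray();
}
```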
## License
This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
```
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
```json
{
  "compilerOptions": {
    "target": "ES2022",
    "module": "Node16",
    "moduleResolution": "Node16",
    "outDir": "./build",
    "rootDir": "./",
    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true
  },
  "include": ["**/*"],
  "exclude": ["node_modules"]
}
```
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
```json
{
  "name": "@pash1986/mongodb-mcp-server",
  "version": "0.1.7",
  "description": "MongoDB MCP server for local mongodb queries",
  "type": "module",
  "bin": {
    "mcp-server-mongodb": "build/index.js"
  },
  "engines": {
    "node": ">=18.0.0"
  },
  "author": "Pavel Duchovny, MongoDB Inc.",
  "homepage": "https://modelcontextprotocol.io",
  "bugs": "https://github.com/modelcontextprotocol/servers/issues",
  "files": [
    "build",
    "src",
    "README.md",
    "LICENSE"
  ],
  "scripts": {
    "start": "node ./build/index.js",
    "build": "tsc && node -e \"require('fs').chmodSync('build/index.js', '755')\"",
    "prepare": "npm run build",
    "watch": "tsc --watch",
    "inspector": "npx @modelcontextprotocol/inspector build/index.js"
  },
  "dependencies": {
    "@modelcontextprotocol/sdk": "0.6.0",
    "axios": "^1.7.8",
    "dotenv": "^16.4.5",
    "mongodb": "^6.11.0"
  },
  "devDependencies": {
    "@types/mongodb": "^4.0.7",
    "@types/node": "^20.11.24",
    "typescript": "^5.3.3"
  }
}
```
--------------------------------------------------------------------------------
/index.ts:
--------------------------------------------------------------------------------
```typescript
#!/usr/bin/env node
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import {
CallToolRequestSchema,
ListResourcesRequestSchema,
ListToolsRequestSchema,
ReadResourceRequestSchema,
ErrorCode,
McpError
} from "@modelcontextprotocol/sdk/types.js";
import { MongoClient, Db, Collection, Document, AggregateOptions } from "mongodb";
import * as dotenv from "dotenv";
dotenv.config();
const MONGODB_URI = process.env.MONGODB_URI;
if (!MONGODB_URI) {
throw new Error("MONGODB_URI environment variable is required");
}
interface AggregateToolArguments {
collection: string;
pipeline: Document[];
options?: AggregateOptions & {
allowDiskUse?: boolean;
maxTimeMS?: number;
comment?: string;
};
}
interface ExplainToolArguments {
collection: string;
pipeline: Document[];
verbosity?: "queryPlanner" | "executionStats" | "allPlansExecution";
}
interface SampleDocumentsArguments {
collection: string;
count?: number;
}
class MongoDBServer {
private server: Server;
private client!: MongoClient;
private db!: Db;
constructor() {
this.server = new Server(
{
name: "example-servers/mongodb",
version: "0.1.0",
description: "MongoDB MCP server providing secure access to MongoDB databases",
},
{
capabilities: {
resources: {
description: "MongoDB collections and their schemas",
mimeTypes: ["application/json"],
},
tools: {
description: "MongoDB aggregation and analysis tools",
},
},
}
);
this.setupHandlers();
this.setupErrorHandling();
}
private setupErrorHandling(): void {
this.server.onerror = (error) => {
console.error("[MCP Error]", error);
};
// Handle both SIGINT (Ctrl+C) and SIGTERM (process termination)
const cleanup = async () => {
console.log("Shutting down MongoDB MCP server...");
try {
await this.close();
process.exit(0);
} catch (error) {
console.error("Error during cleanup:", error);
process.exit(1);
}
};
process.on('SIGINT', cleanup);
process.on('SIGTERM', cleanup);
// Handle uncaught exceptions and unhandled rejections
process.on('uncaughtException', async (error) => {
console.error('Uncaught Exception:', error);
await cleanup();
});
process.on('unhandledRejection', async (reason, promise) => {
console.error('Unhandled Rejection at:', promise, 'reason:', reason);
await cleanup();
});
}
private setupHandlers(): void {
this.setupResourceHandlers();
this.setupToolHandlers();
}
private setupResourceHandlers(): void {
this.server.setRequestHandler(ListResourcesRequestSchema, async () => {
const collections = await this.db.listCollections().toArray();
return {
resources: collections.map((collection: Document) => ({
uri: `mcp-mongodb://${collection.name}/schema`,
mimeType: "application/json",
name: `"${collection.name}" collection schema`,
description: `Schema information for the ${collection.name} collection`,
})),
};
});
this.server.setRequestHandler(ReadResourceRequestSchema, async (request) => {
const uri = request.params.uri;
const match = uri.match(/^mcp-mongodb:\/\/([^/]+)\/schema$/);
if (!match) {
throw new McpError(
ErrorCode.InvalidRequest,
"Invalid resource URI"
);
}
const collectionName = match[1];
try {
const sampleDoc = await this.db.collection(collectionName).findOne();
if (!sampleDoc) {
return {
contents: [
{
uri: request.params.uri,
mimeType: "application/json",
text: JSON.stringify({ message: "Collection is empty" }, null, 2),
},
],
};
}
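// Infer a simple schema from one sample document: each field's name plus its JavaScript typeof value.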
const documentSchema = Object.entries(sampleDoc).map(([key, value]) => ({
field_name: key,
field_type: typeof value,
description: `Field ${key} of type ${typeof value}`,
}));
return {
contents: [
{
uri: request.params.uri,
mimeType: "application/json",
text: JSON.stringify(documentSchema, null, 2),
},
],
};
} catch (error) {
throw new McpError(
ErrorCode.InternalError,
`MongoDB error: ${error instanceof Error ? error.message : 'Unknown error'}`
);
}
});
}
private setupToolHandlers(): void {
this.server.setRequestHandler(ListToolsRequestSchema, async () => ({
tools: [
{
name: "aggregate",
description: "Run a MongoDB aggregation pipeline",
inputSchema: {
type: "object",
properties: {
collection: {
type: "string",
description: "Name of the collection to query",
},
pipeline: {
type: "array",
items: { type: "object" },
description: "MongoDB aggregation pipeline stages (e.g., $match, $group, $sort)",
},
options: {
type: "object",
description: "Optional aggregation options",
properties: {
allowDiskUse: {
type: "boolean",
description: "Allow writing to temporary files",
},
maxTimeMS: {
type: "number",
description: "Maximum execution time in milliseconds",
},
comment: {
type: "string",
description: "Optional comment to help trace operations",
}
}
}
},
required: ["collection", "pipeline"],
},
examples: [
{
name: "Count documents by status",
arguments: {
collection: "orders",
pipeline: [
{ $group: { _id: "$status", count: { $sum: 1 } } },
{ $sort: { count: -1 } }
]
}
}
]
},
{
name: "explain",
description: "Get the execution plan for an aggregation pipeline",
inputSchema: {
type: "object",
properties: {
collection: {
type: "string",
description: "Name of the collection to analyze",
},
pipeline: {
type: "array",
items: { type: "object" },
description: "MongoDB aggregation pipeline stages to analyze",
},
verbosity: {
type: "string",
enum: ["queryPlanner", "executionStats", "allPlansExecution"],
default: "queryPlanner",
description: "Level of detail in the execution plan",
}
},
required: ["collection", "pipeline"],
},
examples: [
{
name: "Analyze index usage",
arguments: {
collection: "users",
pipeline: [
{ $match: { status: "active" } },
{ $sort: { lastLogin: -1 } }
],
verbosity: "executionStats"
}
}
]
},
{
name: "sample",
description: "Get random sample documents from a collection",
inputSchema: {
type: "object",
properties: {
collection: {
type: "string",
description: "Name of the collection to sample from",
},
count: {
type: "number",
description: "Number of documents to sample (default: 5, max: 10)",
minimum: 1,
maximum: 10,
default: 5,
}
},
required: ["collection"],
},
examples: [
{
name: "Get 5 random documents",
arguments: {
collection: "listings",
count: 5
}
}
]
}
],
}));
this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
switch (request.params.name) {
case "aggregate": {
if (!this.isAggregateToolArguments(request.params.arguments)) {
return {
content: [{ type: "text", text: "Invalid arguments: expected collection and pipeline parameters" }],
isError: true,
};
}
const { collection, pipeline, options = {} } = request.params.arguments;
try {
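// Cap unbounded pipelines: if no $limit stage is present, append one limiting results to 1000 documents.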
const hasLimit = pipeline.some(stage => "$limit" in stage);
const safePipeline = hasLimit ? pipeline : [...pipeline, { $limit: 1000 }];
const result = await this.db
.collection(collection)
.aggregate(safePipeline, {
...options,
maxTimeMS: options.maxTimeMS || 30000
})
.toArray();
return {
content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
isError: false,
};
} catch (error) {
return {
content: [{
type: "text",
text: `Aggregation error: ${error instanceof Error ? error.message : 'Unknown error'}`
}],
isError: true,
};
}
}
case "explain": {
if (!this.isExplainToolArguments(request.params.arguments)) {
return {
content: [{ type: "text", text: "Invalid arguments: expected collection and pipeline parameters" }],
isError: true,
};
}
const { collection, pipeline, verbosity = "queryPlanner" } = request.params.arguments;
try {
// Build the cursor without executing it, then request the execution plan at the chosen verbosity.
const result = await this.db
.collection(collection)
.aggregate(pipeline)
.explain(verbosity);
return {
content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
isError: false,
};
} catch (error) {
return {
content: [{
type: "text",
text: `Explain error: ${error instanceof Error ? error.message : 'Unknown error'}`
}],
isError: true,
};
}
}
case "sample": {
if (!this.isSampleDocumentsArguments(request.params.arguments)) {
return {
content: [{ type: "text", text: "Invalid arguments: expected collection name" }],
isError: true,
};
}
const { collection, count = 5 } = request.params.arguments;
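// Clamp the requested sample size to the 1-10 range advertised in the tool schema.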
const safeCount = Math.min(Math.max(1, count), 10);
try {
const result = await this.db
.collection(collection)
.aggregate([
{ $sample: { size: safeCount } }
])
.toArray();
return {
content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
isError: false,
};
} catch (error) {
return {
content: [{
type: "text",
text: `Sample error: ${error instanceof Error ? error.message : 'Unknown error'}`
}],
isError: true,
};
}
}
default:
throw new McpError(
ErrorCode.MethodNotFound,
`Unknown tool: ${request.params.name}`
);
}
});
}
private isAggregateToolArguments(value: unknown): value is AggregateToolArguments {
if (!value || typeof value !== 'object') return false;
const obj = value as Record<string, unknown>;
return (
typeof obj.collection === 'string' &&
Array.isArray(obj.pipeline) &&
(!obj.options || typeof obj.options === 'object')
);
}
private isExplainToolArguments(value: unknown): value is ExplainToolArguments {
if (!value || typeof value !== 'object') return false;
const obj = value as Record<string, unknown>;
return (
typeof obj.collection === 'string' &&
Array.isArray(obj.pipeline) &&
(!obj.verbosity || ["queryPlanner", "executionStats", "allPlansExecution"].includes(obj.verbosity as string))
);
}
private isSampleDocumentsArguments(value: unknown): value is SampleDocumentsArguments {
if (!value || typeof value !== 'object') return false;
const obj = value as Record<string, unknown>;
return (
typeof obj.collection === 'string' &&
(!obj.count || (typeof obj.count === 'number' && obj.count > 0 && obj.count <= 10))
);
}
async connect(): Promise<void> {
try {
this.client = new MongoClient(MONGODB_URI!);
await this.client.connect();
this.db = this.client.db();
} catch (error) {
console.error("Failed to connect to MongoDB:", error);
throw error;
}
}
async close(): Promise<void> {
if (this.client) {
await this.client.close();
}
}
async run(): Promise<void> {
await this.connect();
const transport = new StdioServerTransport();
await this.server.connect(transport);
}
}
const server = new MongoDBServer();
server.run().catch((error) => {
console.error(error);
server.close().catch(console.error);
process.exit(1);
});
```