# Directory Structure ``` ├── .gitignore ├── Dockerfile ├── LICENSE ├── package.json ├── README.md ├── src │ └── index.ts └── tsconfig.json ``` # Files -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- ``` 1 | # Dependencies 2 | node_modules/ 3 | package-lock.json 4 | yarn.lock 5 | pnpm-lock.yaml 6 | 7 | # TypeScript build output 8 | dist/ 9 | build/ 10 | *.tsbuildinfo 11 | 12 | # IDE 13 | .idea/ 14 | .vscode/ 15 | *.swp 16 | *.swo 17 | .DS_Store 18 | 19 | # Environment variables 20 | .env 21 | .env.local 22 | .env.*.local 23 | .env.development 24 | .env.test 25 | .env.production 26 | 27 | # Logs 28 | logs/ 29 | *.log 30 | npm-debug.log* 31 | yarn-debug.log* 32 | yarn-error.log* 33 | 34 | # Coverage 35 | coverage/ 36 | 37 | # Optional npm cache directory 38 | .npm 39 | 40 | # Optional eslint cache 41 | .eslintcache 42 | 43 | # Optional REPL history 44 | .node_repl_history 45 | 46 | # Output of 'npm pack' 47 | *.tgz 48 | 49 | # Yarn Integrity file 50 | .yarn-integrity 51 | docs/ ``` -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- ```markdown 1 | # DynamoDB MCP Server 2 | 3 | A [Model Context Protocol server](https://modelcontextprotocol.io/) for managing Amazon DynamoDB resources. This server provides tools for table management, capacity management, and data operations. 4 | 5 | ## Author 6 | 7 | Iman Kamyabi ([email protected]) 8 | 9 | ## Features 10 | 11 | ### Table Management 12 | - Create new DynamoDB tables with customizable configurations 13 | - List existing tables 14 | - Get detailed table information 15 | - Configure table settings 16 | 17 | ### Index Management 18 | - Create and manage Global Secondary Indexes (GSI) 19 | - Update GSI capacity 20 | - Create Local Secondary Indexes (LSI) 21 | 22 | ### Capacity Management 23 | - Update provisioned read/write capacity units 24 | - Manage table throughput settings 25 | 26 | ### Data Operations 27 | - Insert or replace items in tables 28 | - Retrieve items by primary key 29 | - Update specific item attributes 30 | - Query tables with conditions 31 | - Scan tables with filters 32 | 33 | > **Note**: Delete operations are not supported to prevent accidental data loss. 34 | 35 | ## Setup 36 | 37 | 1. Install dependencies: 38 | ```bash 39 | npm install 40 | ``` 41 | 42 | 2. Configure AWS credentials as environment variables: 43 | ```bash 44 | export AWS_ACCESS_KEY_ID="your_access_key" 45 | export AWS_SECRET_ACCESS_KEY="your_secret_key" 46 | export AWS_REGION="your_region" 47 | ``` 48 | 49 | 3. Build the server: 50 | ```bash 51 | npm run build 52 | ``` 53 | 54 | 4. Start the server: 55 | ```bash 56 | npm start 57 | ``` 58 | 59 | ## Tools 60 | 61 | ### create_table 62 | Creates a new DynamoDB table with specified configuration. 
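Under the hood the tool simply builds one `CreateTableCommand` from these arguments (see `createTable` in `src/index.ts`). As a rough orientation, the example request shown below corresponds to a direct AWS SDK call along these lines:

```typescript
import { DynamoDBClient, CreateTableCommand } from "@aws-sdk/client-dynamodb";

// Equivalent of the create_table example below: partition key only, provisioned capacity.
const client = new DynamoDBClient({ region: process.env.AWS_REGION });
await client.send(
  new CreateTableCommand({
    TableName: "Users",
    AttributeDefinitions: [{ AttributeName: "userId", AttributeType: "S" }],
    KeySchema: [{ AttributeName: "userId", KeyType: "HASH" }],
    ProvisionedThroughput: { ReadCapacityUnits: 5, WriteCapacityUnits: 5 },
  })
);
```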
63 | 64 | Parameters: 65 | - `tableName`: Name of the table to create 66 | - `partitionKey`: Name of the partition key 67 | - `partitionKeyType`: Type of partition key (S=String, N=Number, B=Binary) 68 | - `sortKey`: (Optional) Name of the sort key 69 | - `sortKeyType`: (Optional) Type of sort key 70 | - `readCapacity`: Provisioned read capacity units 71 | - `writeCapacity`: Provisioned write capacity units 72 | 73 | Example: 74 | ```json 75 | { 76 | "tableName": "Users", 77 | "partitionKey": "userId", 78 | "partitionKeyType": "S", 79 | "readCapacity": 5, 80 | "writeCapacity": 5 81 | } 82 | ``` 83 | 84 | ### list_tables 85 | Lists all DynamoDB tables in the account. 86 | 87 | Parameters: 88 | - `limit`: (Optional) Maximum number of tables to return 89 | - `exclusiveStartTableName`: (Optional) Name of the table to start from for pagination 90 | 91 | Example: 92 | ```json 93 | { 94 | "limit": 10 95 | } 96 | ``` 97 | 98 | ### describe_table 99 | Gets detailed information about a DynamoDB table. 100 | 101 | Parameters: 102 | - `tableName`: Name of the table to describe 103 | 104 | Example: 105 | ```json 106 | { 107 | "tableName": "Users" 108 | } 109 | ``` 110 | 111 | ### create_gsi 112 | Creates a global secondary index on a table. 113 | 114 | Parameters: 115 | - `tableName`: Name of the table 116 | - `indexName`: Name of the new index 117 | - `partitionKey`: Partition key for the index 118 | - `partitionKeyType`: Type of partition key 119 | - `sortKey`: (Optional) Sort key for the index 120 | - `sortKeyType`: (Optional) Type of sort key 121 | - `projectionType`: Type of projection (ALL, KEYS_ONLY, INCLUDE) 122 | - `nonKeyAttributes`: (Optional) Non-key attributes to project 123 | - `readCapacity`: Provisioned read capacity units 124 | - `writeCapacity`: Provisioned write capacity units 125 | 126 | Example: 127 | ```json 128 | { 129 | "tableName": "Users", 130 | "indexName": "EmailIndex", 131 | "partitionKey": "email", 132 | "partitionKeyType": "S", 133 | "projectionType": "ALL", 134 | "readCapacity": 5, 135 | "writeCapacity": 5 136 | } 137 | ``` 138 | 139 | ### update_gsi 140 | Updates the provisioned capacity of a global secondary index. 141 | 142 | Parameters: 143 | - `tableName`: Name of the table 144 | - `indexName`: Name of the index to update 145 | - `readCapacity`: New read capacity units 146 | - `writeCapacity`: New write capacity units 147 | 148 | Example: 149 | ```json 150 | { 151 | "tableName": "Users", 152 | "indexName": "EmailIndex", 153 | "readCapacity": 10, 154 | "writeCapacity": 10 155 | } 156 | ``` 157 | 158 | ### create_lsi 159 | Creates a local secondary index on a table (must be done during table creation). 
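DynamoDB only allows local secondary indexes to be defined while the table itself is being created, so this tool does not modify an existing table: it issues a `CreateTableCommand` that creates the table and the index together (see `createLSI` in `src/index.ts`). A trimmed sketch of the request it builds, using the example arguments shown below:

```typescript
import { CreateTableCommand } from "@aws-sdk/client-dynamodb";

// The LSI keeps the table's partition key and adds its own sort key.
const command = new CreateTableCommand({
  TableName: "Users",
  AttributeDefinitions: [
    { AttributeName: "userId", AttributeType: "S" },
    { AttributeName: "createdAt", AttributeType: "N" },
  ],
  KeySchema: [{ AttributeName: "userId", KeyType: "HASH" }],
  LocalSecondaryIndexes: [
    {
      IndexName: "CreatedAtIndex",
      KeySchema: [
        { AttributeName: "userId", KeyType: "HASH" },
        { AttributeName: "createdAt", KeyType: "RANGE" },
      ],
      Projection: { ProjectionType: "ALL" },
    },
  ],
  ProvisionedThroughput: { ReadCapacityUnits: 5, WriteCapacityUnits: 5 },
});
```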
160 | 161 | Parameters: 162 | - `tableName`: Name of the table 163 | - `indexName`: Name of the new index 164 | - `partitionKey`: Partition key for the table 165 | - `partitionKeyType`: Type of partition key 166 | - `sortKey`: Sort key for the index 167 | - `sortKeyType`: Type of sort key 168 | - `projectionType`: Type of projection (ALL, KEYS_ONLY, INCLUDE) 169 | - `nonKeyAttributes`: (Optional) Non-key attributes to project 170 | - `readCapacity`: (Optional) Provisioned read capacity units 171 | - `writeCapacity`: (Optional) Provisioned write capacity units 172 | 173 | Example: 174 | ```json 175 | { 176 | "tableName": "Users", 177 | "indexName": "CreatedAtIndex", 178 | "partitionKey": "userId", 179 | "partitionKeyType": "S", 180 | "sortKey": "createdAt", 181 | "sortKeyType": "N", 182 | "projectionType": "ALL" 183 | } 184 | ``` 185 | 186 | ### update_capacity 187 | Updates the provisioned capacity of a table. 188 | 189 | Parameters: 190 | - `tableName`: Name of the table 191 | - `readCapacity`: New read capacity units 192 | - `writeCapacity`: New write capacity units 193 | 194 | Example: 195 | ```json 196 | { 197 | "tableName": "Users", 198 | "readCapacity": 10, 199 | "writeCapacity": 10 200 | } 201 | ``` 202 | 203 | ### put_item 204 | Inserts or replaces an item in a table. 205 | 206 | Parameters: 207 | - `tableName`: Name of the table 208 | - `item`: Item to put into the table (as JSON object) 209 | 210 | Example: 211 | ```json 212 | { 213 | "tableName": "Users", 214 | "item": { 215 | "userId": "123", 216 | "name": "John Doe", 217 | "email": "[email protected]" 218 | } 219 | } 220 | ``` 221 | 222 | ### get_item 223 | Retrieves an item from a table by its primary key. 224 | 225 | Parameters: 226 | - `tableName`: Name of the table 227 | - `key`: Primary key of the item to retrieve 228 | 229 | Example: 230 | ```json 231 | { 232 | "tableName": "Users", 233 | "key": { 234 | "userId": "123" 235 | } 236 | } 237 | ``` 238 | 239 | ### update_item 240 | Updates specific attributes of an item in a table. 241 | 242 | Parameters: 243 | - `tableName`: Name of the table 244 | - `key`: Primary key of the item to update 245 | - `updateExpression`: Update expression 246 | - `expressionAttributeNames`: Attribute name mappings 247 | - `expressionAttributeValues`: Values for the update expression 248 | - `conditionExpression`: (Optional) Condition for update 249 | - `returnValues`: (Optional) What values to return 250 | 251 | Example: 252 | ```json 253 | { 254 | "tableName": "Users", 255 | "key": { 256 | "userId": "123" 257 | }, 258 | "updateExpression": "SET #n = :name", 259 | "expressionAttributeNames": { 260 | "#n": "name" 261 | }, 262 | "expressionAttributeValues": { 263 | ":name": "Jane Doe" 264 | } 265 | } 266 | ``` 267 | 268 | ### query_table 269 | Queries a table using key conditions and optional filters. 270 | 271 | Parameters: 272 | - `tableName`: Name of the table 273 | - `keyConditionExpression`: Key condition expression 274 | - `expressionAttributeValues`: Values for the key condition expression 275 | - `expressionAttributeNames`: (Optional) Attribute name mappings 276 | - `filterExpression`: (Optional) Filter expression for results 277 | - `limit`: (Optional) Maximum number of items to return 278 | 279 | Example: 280 | ```json 281 | { 282 | "tableName": "Users", 283 | "keyConditionExpression": "userId = :id", 284 | "expressionAttributeValues": { 285 | ":id": "123" 286 | } 287 | } 288 | ``` 289 | 290 | ### scan_table 291 | Scans an entire table with optional filters. 
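Keep in mind that `filterExpression` is applied after items are read, so a scan consumes read capacity for every item it examines, and `limit` caps the number of items evaluated per request rather than the number of matches returned. The tool returns a single page of results; a caller that needs to walk a large table could page with the SDK directly, roughly like this (table name illustrative):

```typescript
import { DynamoDBClient, ScanCommand } from "@aws-sdk/client-dynamodb";
import { unmarshall } from "@aws-sdk/util-dynamodb";

const client = new DynamoDBClient({ region: process.env.AWS_REGION });

// Follow LastEvaluatedKey until the scan is exhausted.
const items: Record<string, unknown>[] = [];
let startKey: Record<string, any> | undefined;
do {
  const page = await client.send(
    new ScanCommand({ TableName: "Users", ExclusiveStartKey: startKey })
  );
  items.push(...(page.Items ?? []).map((item) => unmarshall(item)));
  startKey = page.LastEvaluatedKey;
} while (startKey);
```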
292 | 
293 | Parameters:
294 | - `tableName`: Name of the table
295 | - `filterExpression`: (Optional) Filter expression
296 | - `expressionAttributeValues`: (Optional) Values for the filter expression
297 | - `expressionAttributeNames`: (Optional) Attribute name mappings
298 | - `limit`: (Optional) Maximum number of items to return
299 | 
300 | Example:
301 | ```json
302 | {
303 |   "tableName": "Users",
304 |   "filterExpression": "age > :minAge",
305 |   "expressionAttributeValues": {
306 |     ":minAge": 21
307 |   }
308 | }
309 | ```
310 | 
311 | ## Sample Questions
312 | 
313 | Here are some example questions you can ask Claude when using this DynamoDB MCP server:
314 | 
315 | ### Table Management
316 | - "Create a new DynamoDB table called 'Products' with a partition key 'productId' (string) and sort key 'timestamp' (number)"
317 | - "List all DynamoDB tables in my account"
318 | - "What's the current configuration of the Users table?"
319 | - "Add a global secondary index on the email field of the Users table"
320 | 
321 | ### Capacity Management
322 | - "Update the Users table capacity to 20 read units and 15 write units"
323 | - "Scale up the EmailIndex GSI capacity on the Users table"
324 | - "What's the current provisioned capacity for the Orders table?"
325 | 
326 | ### Data Operations
327 | - "Insert a new user with ID '123', name 'John Doe', and email '[email protected]'"
328 | - "Get the user with ID '123'"
329 | - "Update the email address for user '123' to '[email protected]'"
330 | - "Find all orders placed by user '123'"
331 | - "List all users who are over 21 years old"
332 | - "Query the EmailIndex to find the user with email '[email protected]'"
333 | 
334 | ## Configuration
335 | 
336 | ### Setting up AWS Credentials
337 | 
338 | 1. Obtain AWS access key ID, secret access key, and region from the AWS Management Console.
339 | 2. If using temporary credentials (e.g., IAM role), also obtain a session token.
340 | 3. Ensure these credentials have appropriate permissions for DynamoDB operations.
341 | 
342 | ### Usage with Claude Desktop
343 | 
344 | Add this to your `claude_desktop_config.json`:
345 | 
346 | #### Docker (Recommended)
347 | 
348 | ```json
349 | {
350 |   "mcpServers": {
351 |     "dynamodb": {
352 |       "command": "docker",
353 |       "args": [ "run", "-i", "--rm", "-e", "AWS_ACCESS_KEY_ID", "-e", "AWS_SECRET_ACCESS_KEY", "-e", "AWS_REGION", "-e", "AWS_SESSION_TOKEN", "mcp/dynamodb-mcp-server" ],
354 |       "env": {
355 |         "AWS_ACCESS_KEY_ID": "your_access_key",
356 |         "AWS_SECRET_ACCESS_KEY": "your_secret_key",
357 |         "AWS_REGION": "your_region",
358 |         "AWS_SESSION_TOKEN": "your_session_token"
359 |       }
360 |     }
361 |   }
362 | }
363 | ```
364 | 
365 | ## Building
366 | 
367 | Docker:
368 | ```sh
369 | docker build -t mcp/dynamodb-mcp-server -f Dockerfile .
370 | ```
371 | 
372 | ## Development
373 | 
374 | To rebuild the server automatically whenever you change a source file:
375 | ```bash
376 | npm run watch
377 | ```
378 | 
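Before wiring the server into an MCP client, it can be worth sanity-checking your AWS credentials and region with a short standalone script (a sketch, not part of this repository; it relies on the SDK's default credential chain picking up the same environment variables the server uses):

```typescript
import { DynamoDBClient, ListTablesCommand } from "@aws-sdk/client-dynamodb";

// Reads AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY / AWS_REGION from the environment.
const client = new DynamoDBClient({ region: process.env.AWS_REGION });
const response = await client.send(new ListTablesCommand({ Limit: 5 }));
console.log("Connected. Sample of tables:", response.TableNames);
```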
379 | ## License
380 | 
381 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
382 | 
```
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
```json
1 | {
2 |   "compilerOptions": {
3 |     "target": "ES2022",
4 |     "module": "Node16",
5 |     "moduleResolution": "Node16",
6 |     "esModuleInterop": true,
7 |     "strict": true,
8 |     "outDir": "dist",
9 |     "skipLibCheck": true,
10 |     "forceConsistentCasingInFileNames": true,
11 |     "resolveJsonModule": true,
12 |     "declaration": true,
13 |     "sourceMap": true
14 |   },
15 |   "include": ["src/**/*.ts"],
16 |   "exclude": ["node_modules", "dist"]
17 | }
18 | 
```
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
```dockerfile
1 | FROM node:22.12-alpine AS builder
2 | 
3 | COPY . /app
4 | 
5 | WORKDIR /app
6 | 
7 | RUN --mount=type=cache,target=/root/.npm npm install
8 | 
9 | RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit=dev
10 | 
11 | FROM node:22-alpine AS release
12 | 
13 | COPY --from=builder /app/dist /app/dist
14 | COPY --from=builder /app/package.json /app/package.json
15 | COPY --from=builder /app/package-lock.json /app/package-lock.json
16 | 
17 | ENV NODE_ENV=production
18 | 
19 | WORKDIR /app
20 | 
21 | RUN npm ci --ignore-scripts --omit=dev
22 | 
23 | ENTRYPOINT ["node", "dist/index.js"]
24 | 
```
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
```json
1 | {
2 |   "name": "dynamodb-mcp-server",
3 |   "version": "0.1.0",
4 |   "description": "DynamoDB MCP server for managing DynamoDB resources",
5 |   "author": "Iman Kamyabi <[email protected]>",
6 |   "license": "MIT",
7 |   "type": "module",
8 |   "bin": {
9 |     "mcp-server-dynamodb": "dist/index.js"
10 |   },
11 |   "files": [
12 |     "dist"
13 |   ],
14 |   "scripts": {
15 |     "build": "tsc && shx chmod +x dist/*.js",
16 |     "prepare": "npm run build",
17 |     "start": "node dist/index.js",
18 |     "watch": "tsc --watch"
19 |   },
20 |   "dependencies": {
21 |     "@aws-sdk/client-dynamodb": "^3.723.0",
22 |     "@aws-sdk/util-dynamodb": "^3.723.0",
23 |     "@modelcontextprotocol/sdk": "0.5.0"
24 |   },
25 |   "devDependencies": {
26 |     "@types/node": "^22.10.5",
27 |     "shx": "^0.3.4",
28 |     "typescript": "^5.6.2"
29 |   }
30 | }
31 | 
```
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
```typescript
1 | #!/usr/bin/env node
2 | import { Server } from "@modelcontextprotocol/sdk/server/index.js";
3 | import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
4 | import {
5 |   CallToolRequestSchema,
6 |   ListToolsRequestSchema,
7 |   Tool,
8 | } from "@modelcontextprotocol/sdk/types.js";
9 | import {
10 |   DynamoDBClient,
11 |   CreateTableCommand,
12 |   ListTablesCommand,
13 |   DescribeTableCommand,
14 |   UpdateTableCommand,
15 |   PutItemCommand,
16 |   GetItemCommand,
17 |   UpdateItemCommand,
18 |   QueryCommand,
19 |   ScanCommand,
20 | } from "@aws-sdk/client-dynamodb";
21 | import { marshall, unmarshall } from "@aws-sdk/util-dynamodb";
22 | 
23 | // AWS client initialization
24 | const credentials: {
25 |   accessKeyId: string;
26 |   secretAccessKey: string;
27 |   sessionToken?: string;
28 | } = {
29 |   accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
30 |   secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
31 | };
32 | 
33
| if (process.env.AWS_SESSION_TOKEN) { 34 | credentials.sessionToken = process.env.AWS_SESSION_TOKEN; 35 | } 36 | 37 | const dynamoClient = new DynamoDBClient({ 38 | region: process.env.AWS_REGION, 39 | credentials, 40 | }); 41 | 42 | // Define tools 43 | const CREATE_TABLE_TOOL: Tool = { 44 | name: "create_table", 45 | description: "Creates a new DynamoDB table with specified configuration", 46 | inputSchema: { 47 | type: "object", 48 | properties: { 49 | tableName: { type: "string", description: "Name of the table to create" }, 50 | partitionKey: { type: "string", description: "Name of the partition key" }, 51 | partitionKeyType: { type: "string", enum: ["S", "N", "B"], description: "Type of partition key (S=String, N=Number, B=Binary)" }, 52 | sortKey: { type: "string", description: "Name of the sort key (optional)" }, 53 | sortKeyType: { type: "string", enum: ["S", "N", "B"], description: "Type of sort key (optional)" }, 54 | readCapacity: { type: "number", description: "Provisioned read capacity units" }, 55 | writeCapacity: { type: "number", description: "Provisioned write capacity units" }, 56 | }, 57 | required: ["tableName", "partitionKey", "partitionKeyType", "readCapacity", "writeCapacity"], 58 | }, 59 | }; 60 | 61 | const LIST_TABLES_TOOL: Tool = { 62 | name: "list_tables", 63 | description: "Lists all DynamoDB tables in the account", 64 | inputSchema: { 65 | type: "object", 66 | properties: { 67 | limit: { type: "number", description: "Maximum number of tables to return (optional)" }, 68 | exclusiveStartTableName: { type: "string", description: "Name of the table to start from for pagination (optional)" }, 69 | }, 70 | }, 71 | }; 72 | 73 | const CREATE_GSI_TOOL: Tool = { 74 | name: "create_gsi", 75 | description: "Creates a global secondary index on a table", 76 | inputSchema: { 77 | type: "object", 78 | properties: { 79 | tableName: { type: "string", description: "Name of the table" }, 80 | indexName: { type: "string", description: "Name of the new index" }, 81 | partitionKey: { type: "string", description: "Partition key for the index" }, 82 | partitionKeyType: { type: "string", enum: ["S", "N", "B"], description: "Type of partition key" }, 83 | sortKey: { type: "string", description: "Sort key for the index (optional)" }, 84 | sortKeyType: { type: "string", enum: ["S", "N", "B"], description: "Type of sort key (optional)" }, 85 | projectionType: { type: "string", enum: ["ALL", "KEYS_ONLY", "INCLUDE"], description: "Type of projection" }, 86 | nonKeyAttributes: { type: "array", items: { type: "string" }, description: "Non-key attributes to project (optional)" }, 87 | readCapacity: { type: "number", description: "Provisioned read capacity units" }, 88 | writeCapacity: { type: "number", description: "Provisioned write capacity units" }, 89 | }, 90 | required: ["tableName", "indexName", "partitionKey", "partitionKeyType", "projectionType", "readCapacity", "writeCapacity"], 91 | }, 92 | }; 93 | 94 | const UPDATE_GSI_TOOL: Tool = { 95 | name: "update_gsi", 96 | description: "Updates the provisioned capacity of a global secondary index", 97 | inputSchema: { 98 | type: "object", 99 | properties: { 100 | tableName: { type: "string", description: "Name of the table" }, 101 | indexName: { type: "string", description: "Name of the index to update" }, 102 | readCapacity: { type: "number", description: "New read capacity units" }, 103 | writeCapacity: { type: "number", description: "New write capacity units" }, 104 | }, 105 | required: ["tableName", "indexName", "readCapacity", 
"writeCapacity"], 106 | }, 107 | }; 108 | 109 | const CREATE_LSI_TOOL: Tool = { 110 | name: "create_lsi", 111 | description: "Creates a local secondary index on a table (must be done during table creation)", 112 | inputSchema: { 113 | type: "object", 114 | properties: { 115 | tableName: { type: "string", description: "Name of the table" }, 116 | indexName: { type: "string", description: "Name of the new index" }, 117 | partitionKey: { type: "string", description: "Partition key for the table" }, 118 | partitionKeyType: { type: "string", enum: ["S", "N", "B"], description: "Type of partition key" }, 119 | sortKey: { type: "string", description: "Sort key for the index" }, 120 | sortKeyType: { type: "string", enum: ["S", "N", "B"], description: "Type of sort key" }, 121 | projectionType: { type: "string", enum: ["ALL", "KEYS_ONLY", "INCLUDE"], description: "Type of projection" }, 122 | nonKeyAttributes: { type: "array", items: { type: "string" }, description: "Non-key attributes to project (optional)" }, 123 | readCapacity: { type: "number", description: "Provisioned read capacity units (optional, default: 5)" }, 124 | writeCapacity: { type: "number", description: "Provisioned write capacity units (optional, default: 5)" }, 125 | }, 126 | required: ["tableName", "indexName", "partitionKey", "partitionKeyType", "sortKey", "sortKeyType", "projectionType"], 127 | }, 128 | }; 129 | 130 | const UPDATE_ITEM_TOOL: Tool = { 131 | name: "update_item", 132 | description: "Updates specific attributes of an item in a table", 133 | inputSchema: { 134 | type: "object", 135 | properties: { 136 | tableName: { type: "string", description: "Name of the table" }, 137 | key: { type: "object", description: "Primary key of the item to update" }, 138 | updateExpression: { type: "string", description: "Update expression (e.g., 'SET #n = :name')" }, 139 | expressionAttributeNames: { type: "object", description: "Attribute name mappings" }, 140 | expressionAttributeValues: { type: "object", description: "Values for the update expression" }, 141 | conditionExpression: { type: "string", description: "Condition for update (optional)" }, 142 | returnValues: { type: "string", enum: ["NONE", "ALL_OLD", "UPDATED_OLD", "ALL_NEW", "UPDATED_NEW"], description: "What values to return" }, 143 | }, 144 | required: ["tableName", "key", "updateExpression", "expressionAttributeNames", "expressionAttributeValues"], 145 | }, 146 | }; 147 | 148 | const UPDATE_CAPACITY_TOOL: Tool = { 149 | name: "update_capacity", 150 | description: "Updates the provisioned capacity of a table", 151 | inputSchema: { 152 | type: "object", 153 | properties: { 154 | tableName: { type: "string", description: "Name of the table" }, 155 | readCapacity: { type: "number", description: "New read capacity units" }, 156 | writeCapacity: { type: "number", description: "New write capacity units" }, 157 | }, 158 | required: ["tableName", "readCapacity", "writeCapacity"], 159 | }, 160 | }; 161 | 162 | const PUT_ITEM_TOOL: Tool = { 163 | name: "put_item", 164 | description: "Inserts or replaces an item in a table", 165 | inputSchema: { 166 | type: "object", 167 | properties: { 168 | tableName: { type: "string", description: "Name of the table" }, 169 | item: { type: "object", description: "Item to put into the table" }, 170 | }, 171 | required: ["tableName", "item"], 172 | }, 173 | }; 174 | 175 | const GET_ITEM_TOOL: Tool = { 176 | name: "get_item", 177 | description: "Retrieves an item from a table by its primary key", 178 | inputSchema: { 179 | type: "object", 180 | 
properties: { 181 | tableName: { type: "string", description: "Name of the table" }, 182 | key: { type: "object", description: "Primary key of the item to retrieve" }, 183 | }, 184 | required: ["tableName", "key"], 185 | }, 186 | }; 187 | 188 | const QUERY_TABLE_TOOL: Tool = { 189 | name: "query_table", 190 | description: "Queries a table using key conditions and optional filters", 191 | inputSchema: { 192 | type: "object", 193 | properties: { 194 | tableName: { type: "string", description: "Name of the table" }, 195 | keyConditionExpression: { type: "string", description: "Key condition expression" }, 196 | expressionAttributeValues: { type: "object", description: "Values for the key condition expression" }, 197 | expressionAttributeNames: { type: "object", description: "Attribute name mappings", optional: true }, 198 | filterExpression: { type: "string", description: "Filter expression for results", optional: true }, 199 | limit: { type: "number", description: "Maximum number of items to return", optional: true }, 200 | }, 201 | required: ["tableName", "keyConditionExpression", "expressionAttributeValues"], 202 | }, 203 | }; 204 | 205 | const SCAN_TABLE_TOOL: Tool = { 206 | name: "scan_table", 207 | description: "Scans an entire table with optional filters", 208 | inputSchema: { 209 | type: "object", 210 | properties: { 211 | tableName: { type: "string", description: "Name of the table" }, 212 | filterExpression: { type: "string", description: "Filter expression", optional: true }, 213 | expressionAttributeValues: { type: "object", description: "Values for the filter expression", optional: true }, 214 | expressionAttributeNames: { type: "object", description: "Attribute name mappings", optional: true }, 215 | limit: { type: "number", description: "Maximum number of items to return", optional: true }, 216 | }, 217 | required: ["tableName"], 218 | }, 219 | }; 220 | 221 | const DESCRIBE_TABLE_TOOL: Tool = { 222 | name: "describe_table", 223 | description: "Gets detailed information about a DynamoDB table", 224 | inputSchema: { 225 | type: "object", 226 | properties: { 227 | tableName: { type: "string", description: "Name of the table to describe" }, 228 | }, 229 | required: ["tableName"], 230 | }, 231 | }; 232 | 233 | // Implementation functions 234 | async function createTable(params: any) { 235 | try { 236 | const command = new CreateTableCommand({ 237 | TableName: params.tableName, 238 | AttributeDefinitions: [ 239 | { AttributeName: params.partitionKey, AttributeType: params.partitionKeyType }, 240 | ...(params.sortKey ? [{ AttributeName: params.sortKey, AttributeType: params.sortKeyType }] : []), 241 | ], 242 | KeySchema: [ 243 | { AttributeName: params.partitionKey, KeyType: "HASH" as const }, 244 | ...(params.sortKey ? 
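// When a sort key is supplied it becomes the table's RANGE key; otherwise the table is created with a partition key only.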
[{ AttributeName: params.sortKey, KeyType: "RANGE" as const }] : []), 245 | ], 246 | ProvisionedThroughput: { 247 | ReadCapacityUnits: params.readCapacity, 248 | WriteCapacityUnits: params.writeCapacity, 249 | }, 250 | }); 251 | 252 | const response = await dynamoClient.send(command); 253 | return { 254 | success: true, 255 | message: `Table ${params.tableName} created successfully`, 256 | details: response.TableDescription, 257 | }; 258 | } catch (error) { 259 | console.error("Error creating table:", error); 260 | return { 261 | success: false, 262 | message: `Failed to create table: ${error}`, 263 | }; 264 | } 265 | } 266 | 267 | async function listTables(params: any) { 268 | try { 269 | const command = new ListTablesCommand({ 270 | Limit: params.limit, 271 | ExclusiveStartTableName: params.exclusiveStartTableName, 272 | }); 273 | 274 | const response = await dynamoClient.send(command); 275 | return { 276 | success: true, 277 | message: "Tables listed successfully", 278 | tables: response.TableNames, 279 | lastEvaluatedTable: response.LastEvaluatedTableName, 280 | }; 281 | } catch (error) { 282 | console.error("Error listing tables:", error); 283 | return { 284 | success: false, 285 | message: `Failed to list tables: ${error}`, 286 | }; 287 | } 288 | } 289 | 290 | async function createGSI(params: any) { 291 | try { 292 | const command = new UpdateTableCommand({ 293 | TableName: params.tableName, 294 | AttributeDefinitions: [ 295 | { AttributeName: params.partitionKey, AttributeType: params.partitionKeyType }, 296 | ...(params.sortKey ? [{ AttributeName: params.sortKey, AttributeType: params.sortKeyType }] : []), 297 | ], 298 | GlobalSecondaryIndexUpdates: [ 299 | { 300 | Create: { 301 | IndexName: params.indexName, 302 | KeySchema: [ 303 | { AttributeName: params.partitionKey, KeyType: "HASH" as const }, 304 | ...(params.sortKey ? [{ AttributeName: params.sortKey, KeyType: "RANGE" as const }] : []), 305 | ], 306 | Projection: { 307 | ProjectionType: params.projectionType, 308 | ...(params.projectionType === "INCLUDE" ? 
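// NonKeyAttributes is only accepted by DynamoDB when ProjectionType is INCLUDE, so it is omitted for ALL and KEYS_ONLY.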
{ NonKeyAttributes: params.nonKeyAttributes } : {}), 309 | }, 310 | ProvisionedThroughput: { 311 | ReadCapacityUnits: params.readCapacity, 312 | WriteCapacityUnits: params.writeCapacity, 313 | }, 314 | }, 315 | }, 316 | ], 317 | }); 318 | 319 | const response = await dynamoClient.send(command); 320 | return { 321 | success: true, 322 | message: `GSI ${params.indexName} creation initiated on table ${params.tableName}`, 323 | details: response.TableDescription, 324 | }; 325 | } catch (error) { 326 | console.error("Error creating GSI:", error); 327 | return { 328 | success: false, 329 | message: `Failed to create GSI: ${error}`, 330 | }; 331 | } 332 | } 333 | 334 | async function updateGSI(params: any) { 335 | try { 336 | const command = new UpdateTableCommand({ 337 | TableName: params.tableName, 338 | GlobalSecondaryIndexUpdates: [ 339 | { 340 | Update: { 341 | IndexName: params.indexName, 342 | ProvisionedThroughput: { 343 | ReadCapacityUnits: params.readCapacity, 344 | WriteCapacityUnits: params.writeCapacity, 345 | }, 346 | }, 347 | }, 348 | ], 349 | }); 350 | 351 | const response = await dynamoClient.send(command); 352 | return { 353 | success: true, 354 | message: `GSI ${params.indexName} capacity updated on table ${params.tableName}`, 355 | details: response.TableDescription, 356 | }; 357 | } catch (error) { 358 | console.error("Error updating GSI:", error); 359 | return { 360 | success: false, 361 | message: `Failed to update GSI: ${error}`, 362 | }; 363 | } 364 | } 365 | 366 | async function createLSI(params: any) { 367 | try { 368 | // Note: LSIs must be created during table creation, so we need the table's primary key info 369 | const command = new CreateTableCommand({ 370 | TableName: params.tableName, 371 | AttributeDefinitions: [ 372 | { AttributeName: params.partitionKey, AttributeType: params.partitionKeyType }, 373 | { AttributeName: params.sortKey, AttributeType: params.sortKeyType }, 374 | ], 375 | KeySchema: [ 376 | { AttributeName: params.partitionKey, KeyType: "HASH" as const }, 377 | ], 378 | LocalSecondaryIndexes: [ 379 | { 380 | IndexName: params.indexName, 381 | KeySchema: [ 382 | { AttributeName: params.partitionKey, KeyType: "HASH" as const }, 383 | { AttributeName: params.sortKey, KeyType: "RANGE" as const }, 384 | ], 385 | Projection: { 386 | ProjectionType: params.projectionType, 387 | ...(params.projectionType === "INCLUDE" ? 
{ NonKeyAttributes: params.nonKeyAttributes } : {}), 388 | }, 389 | }, 390 | ], 391 | ProvisionedThroughput: { 392 | ReadCapacityUnits: params.readCapacity || 5, 393 | WriteCapacityUnits: params.writeCapacity || 5, 394 | }, 395 | }); 396 | 397 | const response = await dynamoClient.send(command); 398 | return { 399 | success: true, 400 | message: `LSI ${params.indexName} created on table ${params.tableName}`, 401 | details: response.TableDescription, 402 | }; 403 | } catch (error) { 404 | console.error("Error creating LSI:", error); 405 | return { 406 | success: false, 407 | message: `Failed to create LSI: ${error}`, 408 | }; 409 | } 410 | } 411 | 412 | async function updateItem(params: any) { 413 | try { 414 | const command = new UpdateItemCommand({ 415 | TableName: params.tableName, 416 | Key: marshall(params.key), 417 | UpdateExpression: params.updateExpression, 418 | ExpressionAttributeNames: params.expressionAttributeNames, 419 | ExpressionAttributeValues: marshall(params.expressionAttributeValues), 420 | ConditionExpression: params.conditionExpression, 421 | ReturnValues: params.returnValues || "NONE", 422 | }); 423 | 424 | const response = await dynamoClient.send(command); 425 | return { 426 | success: true, 427 | message: `Item updated successfully in table ${params.tableName}`, 428 | attributes: response.Attributes ? unmarshall(response.Attributes) : null, 429 | }; 430 | } catch (error) { 431 | console.error("Error updating item:", error); 432 | return { 433 | success: false, 434 | message: `Failed to update item: ${error}`, 435 | }; 436 | } 437 | } 438 | 439 | async function updateCapacity(params: any) { 440 | try { 441 | const command = new UpdateTableCommand({ 442 | TableName: params.tableName, 443 | ProvisionedThroughput: { 444 | ReadCapacityUnits: params.readCapacity, 445 | WriteCapacityUnits: params.writeCapacity, 446 | }, 447 | }); 448 | 449 | const response = await dynamoClient.send(command); 450 | return { 451 | success: true, 452 | message: `Capacity updated successfully for table ${params.tableName}`, 453 | details: response.TableDescription, 454 | }; 455 | } catch (error) { 456 | console.error("Error updating capacity:", error); 457 | return { 458 | success: false, 459 | message: `Failed to update capacity: ${error}`, 460 | }; 461 | } 462 | } 463 | 464 | async function putItem(params: any) { 465 | try { 466 | const command = new PutItemCommand({ 467 | TableName: params.tableName, 468 | Item: marshall(params.item), 469 | }); 470 | 471 | await dynamoClient.send(command); 472 | return { 473 | success: true, 474 | message: `Item added successfully to table ${params.tableName}`, 475 | item: params.item, 476 | }; 477 | } catch (error) { 478 | console.error("Error putting item:", error); 479 | return { 480 | success: false, 481 | message: `Failed to put item: ${error}`, 482 | }; 483 | } 484 | } 485 | 486 | async function getItem(params: any) { 487 | try { 488 | const command = new GetItemCommand({ 489 | TableName: params.tableName, 490 | Key: marshall(params.key), 491 | }); 492 | 493 | const response = await dynamoClient.send(command); 494 | return { 495 | success: true, 496 | message: `Item retrieved successfully from table ${params.tableName}`, 497 | item: response.Item ? 
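// unmarshall() converts the DynamoDB attribute-value map back into a plain JavaScript object; a missing item is reported as item: null rather than as an error.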
unmarshall(response.Item) : null, 498 | }; 499 | } catch (error) { 500 | console.error("Error getting item:", error); 501 | return { 502 | success: false, 503 | message: `Failed to get item: ${error}`, 504 | }; 505 | } 506 | } 507 | 508 | async function queryTable(params: any) { 509 | try { 510 | const command = new QueryCommand({ 511 | TableName: params.tableName, 512 | KeyConditionExpression: params.keyConditionExpression, 513 | ExpressionAttributeValues: marshall(params.expressionAttributeValues), 514 | ExpressionAttributeNames: params.expressionAttributeNames, 515 | FilterExpression: params.filterExpression, 516 | Limit: params.limit, 517 | }); 518 | 519 | const response = await dynamoClient.send(command); 520 | return { 521 | success: true, 522 | message: `Query executed successfully on table ${params.tableName}`, 523 | items: response.Items ? response.Items.map(item => unmarshall(item)) : [], 524 | count: response.Count, 525 | scannedCount: response.ScannedCount, 526 | }; 527 | } catch (error) { 528 | console.error("Error querying table:", error); 529 | return { 530 | success: false, 531 | message: `Failed to query table: ${error}`, 532 | }; 533 | } 534 | } 535 | 536 | async function scanTable(params: any) { 537 | try { 538 | const command = new ScanCommand({ 539 | TableName: params.tableName, 540 | FilterExpression: params.filterExpression, 541 | ExpressionAttributeValues: params.expressionAttributeValues ? marshall(params.expressionAttributeValues) : undefined, 542 | ExpressionAttributeNames: params.expressionAttributeNames, 543 | Limit: params.limit, 544 | }); 545 | 546 | const response = await dynamoClient.send(command); 547 | return { 548 | success: true, 549 | message: `Scan executed successfully on table ${params.tableName}`, 550 | items: response.Items ? 
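// Only a single page is returned; continuing a large scan would require re-issuing the command with ExclusiveStartKey set from response.LastEvaluatedKey, which this tool does not expose.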
response.Items.map(item => unmarshall(item)) : [], 551 | count: response.Count, 552 | scannedCount: response.ScannedCount, 553 | }; 554 | } catch (error) { 555 | console.error("Error scanning table:", error); 556 | return { 557 | success: false, 558 | message: `Failed to scan table: ${error}`, 559 | }; 560 | } 561 | } 562 | 563 | async function describeTable(params: any) { 564 | try { 565 | const command = new DescribeTableCommand({ 566 | TableName: params.tableName, 567 | }); 568 | 569 | const response = await dynamoClient.send(command); 570 | return { 571 | success: true, 572 | message: `Table ${params.tableName} described successfully`, 573 | table: response.Table, 574 | }; 575 | } catch (error) { 576 | console.error("Error describing table:", error); 577 | return { 578 | success: false, 579 | message: `Failed to describe table: ${error}`, 580 | }; 581 | } 582 | } 583 | 584 | // Server setup 585 | const server = new Server( 586 | { 587 | name: "dynamodb-mcp-server", 588 | version: "0.1.0", 589 | }, 590 | { 591 | capabilities: { 592 | tools: {}, 593 | }, 594 | }, 595 | ); 596 | 597 | // Request handlers 598 | server.setRequestHandler(ListToolsRequestSchema, async () => ({ 599 | tools: [CREATE_TABLE_TOOL, UPDATE_CAPACITY_TOOL, PUT_ITEM_TOOL, GET_ITEM_TOOL, QUERY_TABLE_TOOL, SCAN_TABLE_TOOL, DESCRIBE_TABLE_TOOL, LIST_TABLES_TOOL, CREATE_GSI_TOOL, UPDATE_GSI_TOOL, CREATE_LSI_TOOL, UPDATE_ITEM_TOOL], 600 | })); 601 | 602 | server.setRequestHandler(CallToolRequestSchema, async (request) => { 603 | const { name, arguments: args } = request.params; 604 | 605 | try { 606 | let result; 607 | switch (name) { 608 | case "create_table": 609 | result = await createTable(args); 610 | break; 611 | case "list_tables": 612 | result = await listTables(args); 613 | break; 614 | case "create_gsi": 615 | result = await createGSI(args); 616 | break; 617 | case "update_gsi": 618 | result = await updateGSI(args); 619 | break; 620 | case "create_lsi": 621 | result = await createLSI(args); 622 | break; 623 | case "update_item": 624 | result = await updateItem(args); 625 | break; 626 | case "update_capacity": 627 | result = await updateCapacity(args); 628 | break; 629 | case "put_item": 630 | result = await putItem(args); 631 | break; 632 | case "get_item": 633 | result = await getItem(args); 634 | break; 635 | case "query_table": 636 | result = await queryTable(args); 637 | break; 638 | case "scan_table": 639 | result = await scanTable(args); 640 | break; 641 | case "describe_table": 642 | result = await describeTable(args); 643 | break; 644 | default: 645 | return { 646 | content: [{ type: "text", text: `Unknown tool: ${name}` }], 647 | isError: true, 648 | }; 649 | } 650 | 651 | return { 652 | content: [{ type: "text", text: JSON.stringify(result, null, 2) }], 653 | }; 654 | } catch (error) { 655 | return { 656 | content: [{ type: "text", text: `Error occurred: ${error}` }], 657 | isError: true, 658 | }; 659 | } 660 | }); 661 | 662 | // Server startup 663 | async function runServer() { 664 | const transport = new StdioServerTransport(); 665 | await server.connect(transport); 666 | console.error("DynamoDB Server running on stdio"); 667 | } 668 | 669 | runServer().catch((error) => { 670 | console.error("Fatal error running server:", error); 671 | process.exit(1); 672 | }); 673 | ```
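For completeness, one way to exercise the built server end-to-end is with a small MCP client script. The server code above is authoritative; the client-side details below (import paths, the `Client`/`StdioClientTransport` constructors, and the low-level `request` signature) are assumptions about the `@modelcontextprotocol/sdk` client API and may differ between SDK versions, so treat this as a hedged sketch rather than a verified recipe:

```typescript
import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js";
import {
  CallToolResultSchema,
  ListToolsResultSchema,
} from "@modelcontextprotocol/sdk/types.js";

// Spawn the built server over stdio, the way an MCP host would, forwarding AWS credentials.
const transport = new StdioClientTransport({
  command: "node",
  args: ["dist/index.js"],
  env: process.env as Record<string, string>,
});

const client = new Client({ name: "smoke-test", version: "0.0.1" }, { capabilities: {} });
await client.connect(transport);

// Ask the server which tools it advertises, then call one of them.
const tools = await client.request({ method: "tools/list", params: {} }, ListToolsResultSchema);
console.log(tools.tools.map((t) => t.name));

const result = await client.request(
  { method: "tools/call", params: { name: "list_tables", arguments: {} } },
  CallToolResultSchema
);
console.log(result.content);
```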