This is page 2 of 3. Use http://codebase.md/neondatabase-labs/mcp-server-neon?lines=true&page={x} to view the full context.
# Directory Structure
```
├── .bun-version
├── .dockerignore
├── .env.example
├── .github
│ └── workflows
│ ├── claude-code-review.yml
│ ├── claude.yml
│ ├── koyeb-preview.yml
│ ├── koyeb-prod.yml
│ └── pr.yml
├── .gitignore
├── .npmrc
├── .nvmrc
├── .prettierignore
├── .prettierrc
├── AGENTS.md
├── bun.lock
├── CHANGELOG.md
├── CLAUDE.md
├── Dockerfile
├── eslint.config.js
├── landing
│ ├── .gitignore
│ ├── app
│ │ ├── globals.css
│ │ ├── layout.tsx
│ │ └── page.tsx
│ ├── components
│ │ ├── CodeSnippet.tsx
│ │ ├── CopyableUrl.tsx
│ │ ├── DescriptionItem.tsx
│ │ ├── ExternalIcon.tsx
│ │ ├── ExternalLink.tsx
│ │ ├── Header.tsx
│ │ ├── Introduction.tsx
│ │ ├── ThemeProvider.tsx
│ │ └── ui
│ │ ├── accordion.tsx
│ │ ├── alert.tsx
│ │ └── button.tsx
│ ├── components.json
│ ├── eslint.config.mjs
│ ├── icons
│ │ ├── github.svg
│ │ └── neon.svg
│ ├── lib
│ │ ├── description.ts
│ │ └── utils.ts
│ ├── next.config.ts
│ ├── package-lock.json
│ ├── package.json
│ ├── postcss.config.mjs
│ ├── public
│ │ └── favicon.ico
│ ├── README.md
│ └── tsconfig.json
├── LICENSE
├── mcp-client
│ ├── .env.example
│ ├── package-lock.json
│ ├── package.json
│ ├── README.md
│ ├── src
│ │ ├── bin.ts
│ │ ├── cli-client.ts
│ │ ├── index.ts
│ │ ├── logger.ts
│ │ └── neon-cli-client.ts
│ └── tsconfig.json
├── package-lock.json
├── package.json
├── public
│ └── logo.png
├── PUBLISH.md
├── README.md
├── remote.Dockerfile
├── scripts
│ └── before-publish.ts
├── src
│ ├── analytics
│ │ └── analytics.ts
│ ├── constants.ts
│ ├── describeUtils.ts
│ ├── index.ts
│ ├── initConfig.ts
│ ├── oauth
│ │ ├── client.ts
│ │ ├── cookies.ts
│ │ ├── kv-store.ts
│ │ ├── model.ts
│ │ ├── server.ts
│ │ └── utils.ts
│ ├── prompts.ts
│ ├── resources.ts
│ ├── sentry
│ │ ├── instrument.ts
│ │ └── utils.ts
│ ├── server
│ │ ├── api.ts
│ │ ├── errors.ts
│ │ └── index.ts
│ ├── tools
│ │ ├── definitions.ts
│ │ ├── handlers
│ │ │ ├── connection-string.ts
│ │ │ ├── decribe-project.ts
│ │ │ ├── describe-branch.ts
│ │ │ ├── fetch.ts
│ │ │ ├── list-orgs.ts
│ │ │ ├── list-projects.ts
│ │ │ ├── neon-auth.ts
│ │ │ ├── search.ts
│ │ │ ├── urls.ts
│ │ │ └── utils.ts
│ │ ├── index.ts
│ │ ├── state.ts
│ │ ├── tools.ts
│ │ ├── toolsSchema.ts
│ │ ├── types.ts
│ │ └── utils.ts
│ ├── tools-evaluations
│ │ ├── evalUtils.ts
│ │ └── prepare-database-migration.eval.ts
│ ├── transports
│ │ ├── sse-express.ts
│ │ ├── stdio.ts
│ │ └── stream.ts
│ ├── types
│ │ ├── auth.ts
│ │ ├── context.ts
│ │ ├── express.d.ts
│ │ └── helpers.ts
│ ├── utils
│ │ ├── client-application.ts
│ │ ├── logger.ts
│ │ └── polyfills.ts
│ └── views
│ ├── approval-dialog.pug
│ └── styles.css
├── tsconfig.json
└── tsconfig.test.json
```
# Files
--------------------------------------------------------------------------------
/src/transports/sse-express.ts:
--------------------------------------------------------------------------------
```typescript
1 | import '../sentry/instrument.js';
2 | import { setupExpressErrorHandler } from '@sentry/node';
3 | import express, { Request, Response, RequestHandler } from 'express';
4 | import { SSEServerTransport } from '@modelcontextprotocol/sdk/server/sse.js';
5 | import { createMcpServer } from '../server/index.js';
6 | import { logger, morganConfig, errorHandler } from '../utils/logger.js';
7 | import { authRouter } from '../oauth/server.js';
8 | import { SERVER_PORT, SERVER_HOST } from '../constants.js';
9 | import { ensureCorsHeaders, requiresAuth } from '../oauth/utils.js';
10 | import bodyParser from 'body-parser';
11 | import cookieParser from 'cookie-parser';
12 | import { track } from '../analytics/analytics.js';
13 | import { AppContext } from '../types/context.js';
14 | import { createStreamTransport } from './stream.js';
15 |
/**
 * Builds and starts the Express app that serves the remote MCP server over
 * SSE (legacy `/sse` + `/messages` pair) and streamable HTTP (`/mcp`),
 * alongside the OAuth routes mounted at `/`.
 *
 * Side effects: binds an HTTP listener on SERVER_PORT and exits the process
 * if startup throws synchronously.
 *
 * @param appContext - shared application context passed to transports,
 *   the MCP server factory, and analytics.
 */
export const createSseTransport = (appContext: AppContext) => {
  const app = express();

  app.use(morganConfig);
  // NOTE(review): `errorHandler` is registered before any route. Express
  // error-handling middleware normally goes after the routes; confirm this
  // export from utils/logger is a plain logging middleware and not a
  // 4-argument error handler, which would never fire from this position.
  app.use(errorHandler);
  app.use(cookieParser());
  app.use(ensureCorsHeaders());
  app.use(express.static('public'));
  // Pug views are used by the OAuth approval dialog (src/views).
  app.set('view engine', 'pug');
  app.set('views', 'src/views');
  const streamHandler = createStreamTransport({
    ...appContext,
    transport: 'stream',
  });
  app.use('/mcp', streamHandler);
  app.use('/', authRouter);

  // to support multiple simultaneous connections we have a lookup object from
  // sessionId to transport
  const transports = new Map<string, SSEServerTransport>();

  app.get(
    '/sse',
    bodyParser.raw(),
    requiresAuth(),
    async (req: Request, res: Response) => {
      const auth = req.auth;
      if (!auth) {
        // requiresAuth() should have populated req.auth; this is a defensive check.
        res.status(401).send('Unauthorized');
        return;
      }
      // The transport generates its own sessionId; clients POST follow-up
      // messages to /messages?sessionId=... (handled below).
      const transport = new SSEServerTransport('/messages', res);
      transports.set(transport.sessionId, transport);
      logger.info('new sse connection', {
        sessionId: transport.sessionId,
      });

      // Drop the transport from the registry when the client disconnects.
      res.on('close', () => {
        logger.info('SSE connection closed', {
          sessionId: transport.sessionId,
        });
        transports.delete(transport.sessionId);
      });

      try {
        // One MCP server instance per SSE connection, bound to the caller's
        // credentials and read-only flag.
        const server = createMcpServer({
          apiKey: auth.token,
          client: auth.extra.client,
          account: auth.extra.account,
          app: appContext,
          readOnly: auth.extra.readOnly,
          userAgent: req.headers['user-agent'],
        });
        await server.connect(transport);
      } catch (error: unknown) {
        logger.error('Failed to connect to MCP server:', {
          message: error instanceof Error ? error.message : 'Unknown error',
          error,
        });
        track({
          userId: auth.extra.account.id,
          event: 'sse_connection_errored',
          properties: { error },
          context: {
            app: appContext,
            client: auth.extra.client,
          },
        });
      }
    },
  );

  // Companion endpoint for /sse: routes each posted message to the transport
  // identified by the sessionId query parameter.
  app.post('/messages', bodyParser.raw(), requiresAuth(), (async (
    request: Request,
    response: Response,
  ) => {
    const auth = request.auth;
    if (!auth) {
      response.status(401).send('Unauthorized');
      return;
    }
    const sessionId = request.query.sessionId as string;
    const transport = transports.get(sessionId);
    logger.info('transport message received', {
      sessionId,
      hasTransport: Boolean(transport),
    });

    try {
      if (transport) {
        await transport.handlePostMessage(request, response);
      } else {
        // Unknown or expired session (e.g. server restarted, SSE closed).
        logger.warn('No transport found for sessionId', { sessionId });
        response.status(400).send('No transport found for sessionId');
      }
    } catch (error: unknown) {
      logger.error('Failed to handle post message:', {
        message: error instanceof Error ? error.message : 'Unknown error',
        error,
      });
      track({
        userId: auth.extra.account.id,
        event: 'transport_message_errored',
        properties: { error },
        context: { app: appContext, client: auth.extra.client },
      });
    }
  }) as RequestHandler);

  // Sentry error handler must be mounted after all routes.
  setupExpressErrorHandler(app);

  try {
    app.listen({ port: SERVER_PORT });
    logger.info(`Server started on ${SERVER_HOST}`);
  } catch (err: unknown) {
    // NOTE(review): app.listen reports bind failures (e.g. EADDRINUSE)
    // asynchronously via the server's 'error' event rather than by throwing,
    // so this catch likely only covers synchronous setup errors — confirm
    // whether an 'error' listener on the returned server is also needed.
    logger.error('Failed to start server:', {
      error: err instanceof Error ? err.message : 'Unknown error',
    });
    process.exit(1);
  }
};
137 |
```
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
```markdown
1 | # Changelog
2 |
  3 | ## [Unreleased]
4 |
5 | - Feat: `compare_database_schema` tool to generate schema diff for a specific branch. With prompt to generate migration script
6 | - Feat: `neon-get-started` resource to add AI rules with steps and instructions to integrate projects with Neon
7 | - Feat: Add generic `search` and `fetch` tools for organizations, projects, and branches
8 |
  9 | ## [0.6.5] 2025-09-16
10 |
11 | - Feat: `list_shared_projects` tool to fetch projects that user has permissions to collaborate on
12 | - Feat: `reset_from_parent` tool to reset a branch from its parent's current state
13 | - Feat: `compare_database_schema` tool to compare the schema from the child branch and its parent
 14 | - docs: add copyable server link on landing page
15 |
 16 | ## [0.6.4] 2025-08-22
17 |
18 | - Fix: Do not log user sensitive information on errors
19 | - Fix: Return non-500 errors as valid response with `isError=true` without logging
 20 | - Improvement: Custom error handling for user-generated errors
21 | - Improvement: Extend org-only users search to support orgs not managed by console.
22 |
 23 | ## [0.6.3] 2025-08-04
24 |
 25 | - Feat: A new tool to list authenticated user's organizations - `list_organizations`
26 | - Docs: Switch configs to use streamable HTTP by default
27 | - Impr: While searching for project in `list_projects` tool, extend the search to all organizations.
28 |
29 | ## [0.6.2] 2025-07-17
30 |
31 | - Add warnings on security risks involved in MCP tools in production environments
32 | - Migrate the deployment to Koyeb
33 | - Mark `param` as required argument for all tools
34 |
35 | ## [0.6.1] 2025-06-19
36 |
37 | - Documentation: Updated README with new tools and features
38 | - Support API key authentication for remote server
39 |
40 | ## [0.6.0] 2025-06-16
41 |
42 | - Fix: Issue with ORG API keys in local mode
43 | - Refc: Tools into smaller manageable modules
44 | - Feat: New landing page with details of supported tools
45 | - Feat: Streamable HTTP support
46 |
47 | ## [0.5.0] 2025-05-28
48 |
49 | - Tracking tool calls and errors with Segment
 50 | - Capture exceptions with Sentry
51 | - Add tracing with sentry
52 | - Support new org-only accounts
53 |
54 | ## [0.4.1] - 2025-05-08
55 |
56 | - fix the `npx start` command to start server in stdio transport mode
57 | - fix issue with unexpected tokens in stdio transport mode
58 |
59 | ## [0.4.0] - 2025-05-08
60 |
61 | - Feature: Support for remote MCP with OAuth flow.
62 | - Remove `__node_version` tool
63 | - Feature: Add `list_slow_queries` tool for monitoring database performance
64 | - Add `list_branch_computes` tool to list compute endpoints for a project or specific branch
65 |
66 | ## [0.3.7] - 2025-04-23
67 |
68 | - Fixes Neon Auth instructions to install latest version of the SDK
69 |
70 | ## [0.3.6] - 2025-04-20
71 |
72 | - Bumps the Neon serverless driver to 1.0.0
73 |
74 | ## [0.3.5] - 2025-04-19
75 |
76 | - Fix default database name or role name assumptions.
77 | - Adds better error message for project creations.
78 |
79 | ## [0.3.4] - 2025-03-26
80 |
81 | - Add `neon-auth`, `neon-serverless`, and `neon-drizzle` resources
82 | - Fix initialization on Windows by implementing correct platform-specific paths for Claude configuration
83 |
84 | ## [0.3.3] - 2025-03-19
85 |
86 | - Fix the API Host
87 |
88 | ## [0.3.2] - 2025-03-19
89 |
90 | - Add User-Agent to api calls from mcp server
91 |
92 | ## [0.3.1] - 2025-03-19
93 |
94 | - Add User-Agent to api calls from mcp server
95 |
96 | ## [0.3.0] - 2025-03-14
97 |
98 | - Add `provision_neon_auth` tool
99 |
100 | ## [0.2.3] - 2025-03-06
101 |
102 | - Adds `get_connection_string` tool
103 | - Hints the LLM to call the `create_project` tool to create new databases
104 |
105 | ## [0.2.2] - 2025-02-26
106 |
107 | - Fixed a bug in the `list_projects` tool when passing no params
108 | - Added a `params` property to all the tools input schemas
109 |
110 | ## [0.2.1] - 2025-02-25
111 |
112 | - Fixes a bug in the `list_projects` tool
113 | - Update the `@modelcontextprotocol/sdk` to the latest version
114 | - Use `zod` to validate tool input schemas
115 |
116 | ## [0.2.0] - 2025-02-24
117 |
118 | - Add [Smithery](https://smithery.ai/server/neon) deployment config
119 |
120 | ## [0.1.9] - 2025-01-06
121 |
122 | - Sets up tests for the `prepare_database_migration` tool
123 | - Updates the `prepare_database_migration` tool to be more deterministic
124 | - Removes logging from the MCP server, following the [docs](https://modelcontextprotocol.io/docs/tools/debugging#implementing-logging)
125 |
126 | ## [0.1.8] - 2024-12-25
127 |
128 | - Added `beforePublish` script to make sure the changelog is updated before publishing
129 | - Makes the descriptions/prompts for the prepare_database_migration and complete_database_migration tools much better
130 |
131 | ## [0.1.7-beta.1] - 2024-12-19
132 |
133 | - Added support for `prepare_database_migration` and `complete_database_migration` tools
134 |
```
--------------------------------------------------------------------------------
/src/describeUtils.ts:
--------------------------------------------------------------------------------
```typescript
1 | /**
2 | * This module is derived from @neondatabase/psql-describe
3 | * Original source: https://github.com/neondatabase/psql-describe
4 | */
5 |
6 | import { neon } from '@neondatabase/serverless';
7 |
/**
 * Aggregate description of a single Postgres table, as produced by
 * `describeTable` and rendered by `formatTableDescription`.
 */
export type TableDescription = {
  columns: ColumnDescription[];
  indexes: IndexDescription[];
  constraints: ConstraintDescription[];
  // Human-readable sizes (pg_size_pretty output, e.g. "16 kB").
  tableSize: string;
  indexSize: string;
  totalSize: string;
};

// One table column, from information_schema.columns joined with pg_description.
export type ColumnDescription = {
  name: string;
  type: string; // information_schema data_type, e.g. "integer"
  nullable: boolean;
  default: string | null; // column default expression, if any
  description: string | null; // COMMENT ON COLUMN text, if any
};

// One index on the table, including its full definition.
export type IndexDescription = {
  name: string;
  definition: string; // pg_get_indexdef output
  size: string; // pg_size_pretty output
};

// One table constraint (PRIMARY KEY, FOREIGN KEY, UNIQUE, CHECK, ...).
export type ConstraintDescription = {
  name: string;
  type: string; // information_schema constraint_type
  definition: string; // pg_get_constraintdef output
};
36 |
/**
 * The four parameterized queries behind `describeTable`. Each receives the
 * table name as $1. Queries 1 and 3 are restricted to the 'public' schema;
 * query 4 passes $1 straight to pg_*_relation_size (presumably resolved as a
 * regclass via the connection's search_path — confirm for non-public tables).
 */
export const DESCRIBE_TABLE_STATEMENTS = [
  // 1. Column information: name, type, nullability, default, and COMMENT text
  `
    SELECT
      c.column_name as name,
      c.data_type as type,
      c.is_nullable = 'YES' as nullable,
      c.column_default as default,
      pd.description
    FROM information_schema.columns c
    LEFT JOIN pg_catalog.pg_statio_all_tables st ON c.table_schema = st.schemaname AND c.table_name = st.relname
    LEFT JOIN pg_catalog.pg_description pd ON pd.objoid = st.relid AND pd.objsubid = c.ordinal_position
    WHERE c.table_schema = 'public' AND c.table_name = $1
    ORDER BY c.ordinal_position;
  `,

  // 2. Index information: name, full CREATE INDEX definition, pretty size
  `
    SELECT
      i.relname as name,
      pg_get_indexdef(i.oid) as definition,
      pg_size_pretty(pg_relation_size(i.oid)) as size
    FROM pg_class t
    JOIN pg_index ix ON t.oid = ix.indrelid
    JOIN pg_class i ON i.oid = ix.indexrelid
    WHERE t.relname = $1 AND t.relkind = 'r';
  `,

  // 3. Constraint information: name, kind, and full definition
  `
    SELECT
      tc.constraint_name as name,
      tc.constraint_type as type,
      pg_get_constraintdef(cc.oid) as definition
    FROM information_schema.table_constraints tc
    JOIN pg_catalog.pg_constraint cc ON tc.constraint_name = cc.conname
    WHERE tc.table_schema = 'public' AND tc.table_name = $1;
  `,

  // 4. Size information: total (heap+indexes), heap-only, and index-only
  `
    SELECT
      pg_size_pretty(pg_total_relation_size($1)) as total_size,
      pg_size_pretty(pg_relation_size($1)) as table_size,
      pg_size_pretty(pg_total_relation_size($1) - pg_relation_size($1)) as index_size;
  `,
];
84 |
85 | export async function describeTable(
86 | connectionString: string,
87 | tableName: string,
88 | ): Promise<TableDescription> {
89 | const sql = neon(connectionString);
90 |
91 | // Execute all queries in parallel
92 | const [columns, indexes, constraints, sizes] = await Promise.all([
93 | sql.query(DESCRIBE_TABLE_STATEMENTS[0], [tableName]),
94 | sql.query(DESCRIBE_TABLE_STATEMENTS[1], [tableName]),
95 | sql.query(DESCRIBE_TABLE_STATEMENTS[2], [tableName]),
96 | sql.query(DESCRIBE_TABLE_STATEMENTS[3], [tableName]),
97 | ]);
98 |
99 | return {
100 | columns: columns.map((col) => ({
101 | name: col.name,
102 | type: col.type,
103 | nullable: col.nullable,
104 | default: col.default,
105 | description: col.description,
106 | })),
107 | indexes: indexes.map((idx) => ({
108 | name: idx.name,
109 | definition: idx.definition,
110 | size: idx.size,
111 | })),
112 | constraints: constraints.map((con) => ({
113 | name: con.name,
114 | type: con.type,
115 | definition: con.definition,
116 | })),
117 | tableSize: sizes[0].table_size,
118 | indexSize: sizes[0].index_size,
119 | totalSize: sizes[0].total_size,
120 | };
121 | }
122 |
123 | export function formatTableDescription(desc: TableDescription): string {
124 | const lines: string[] = [];
125 |
126 | // Add table size information
127 | lines.push(`Table size: ${desc.tableSize}`);
128 | lines.push(`Index size: ${desc.indexSize}`);
129 | lines.push(`Total size: ${desc.totalSize}`);
130 | lines.push('');
131 |
132 | // Add columns
133 | lines.push('Columns:');
134 | desc.columns.forEach((col) => {
135 | const nullable = col.nullable ? 'NULL' : 'NOT NULL';
136 | const defaultStr = col.default ? ` DEFAULT ${col.default}` : '';
137 | const descStr = col.description ? `\n ${col.description}` : '';
138 | lines.push(` ${col.name} ${col.type} ${nullable}${defaultStr}${descStr}`);
139 | });
140 | lines.push('');
141 |
142 | // Add indexes
143 | if (desc.indexes.length > 0) {
144 | lines.push('Indexes:');
145 | desc.indexes.forEach((idx) => {
146 | lines.push(` ${idx.name} (${idx.size})`);
147 | lines.push(` ${idx.definition}`);
148 | });
149 | lines.push('');
150 | }
151 |
152 | // Add constraints
153 | if (desc.constraints.length > 0) {
154 | lines.push('Constraints:');
155 | desc.constraints.forEach((con) => {
156 | lines.push(` ${con.name} (${con.type})`);
157 | lines.push(` ${con.definition}`);
158 | });
159 | }
160 |
161 | return lines.join('\n');
162 | }
163 |
```
--------------------------------------------------------------------------------
/src/oauth/cookies.ts:
--------------------------------------------------------------------------------
```typescript
1 | import {
2 | Request as ExpressRequest,
3 | Response as ExpressResponse,
4 | } from 'express';
5 |
// Cookie holding the signed list of OAuth client IDs the user has approved.
const COOKIE_NAME = 'approved-mcp-clients';
// NOTE(review): despite the name, this value is in MILLISECONDS (365 days),
// which is what Express `res.cookie({ maxAge })` expects — the name is misleading.
const ONE_YEAR_IN_SECONDS = 365 * 24 * 60 * 60 * 1000; // 365 days, in ms
8 |
9 | /**
10 | * Imports a secret key string for HMAC-SHA256 signing.
11 | * @param secret - The raw secret key string.
12 | * @returns A promise resolving to the CryptoKey object.
13 | */
14 | const importKey = async (secret: string): Promise<CryptoKey> => {
15 | const enc = new TextEncoder();
16 | return crypto.subtle.importKey(
17 | 'raw',
18 | enc.encode(secret),
19 | { name: 'HMAC', hash: 'SHA-256' },
20 | false,
21 | ['sign', 'verify'],
22 | );
23 | };
24 |
25 | /**
26 | * Signs data using HMAC-SHA256.
27 | * @param key - The CryptoKey for signing.
28 | * @param data - The string data to sign.
29 | * @returns A promise resolving to the signature as a hex string.
30 | */
31 | const signData = async (key: CryptoKey, data: string): Promise<string> => {
32 | const enc = new TextEncoder();
33 | const signatureBuffer = await crypto.subtle.sign(
34 | 'HMAC',
35 | key,
36 | enc.encode(data),
37 | );
38 | // Convert ArrayBuffer to hex string
39 | return Array.from(new Uint8Array(signatureBuffer))
40 | .map((b) => b.toString(16).padStart(2, '0'))
41 | .join('');
42 | };
43 |
44 | /**
45 | * Verifies an HMAC-SHA256 signature.
46 | * @param key - The CryptoKey for verification.
47 | * @param signatureHex - The signature to verify (hex string).
48 | * @param data - The original data that was signed.
49 | * @returns A promise resolving to true if the signature is valid, false otherwise.
50 | */
51 | const verifySignature = async (
52 | key: CryptoKey,
53 | signatureHex: string,
54 | data: string,
55 | ): Promise<boolean> => {
56 | try {
57 | // Convert hex signature back to ArrayBuffer
58 | const enc = new TextEncoder();
59 | const signatureBytes = new Uint8Array(
60 | signatureHex.match(/.{1,2}/g)?.map((byte) => parseInt(byte, 16)) ?? [],
61 | );
62 |
63 | return await crypto.subtle.verify(
64 | 'HMAC',
65 | key,
66 | signatureBytes.buffer,
67 | enc.encode(data),
68 | );
69 | } catch (e) {
70 | // Handle errors during hex parsing or verification
71 | console.error('Error verifying signature:', e);
72 | return false;
73 | }
74 | };
75 |
76 | /**
77 | * Parses the signed cookie and verifies its integrity.
78 | * @param cookieHeader - The value of the Cookie header from the request.
79 | * @param secret - The secret key used for signing.
80 | * @returns A promise resolving to the list of approved client IDs if the cookie is valid, otherwise null.
81 | */
82 | const getApprovedClientsFromCookie = async (
83 | cookie: string,
84 | secret: string,
85 | ): Promise<string[]> => {
86 | if (!cookie) return [];
87 |
88 | try {
89 | const [signatureHex, base64Payload] = cookie.split('.');
90 | if (!signatureHex || !base64Payload) return [];
91 |
92 | const payload = atob(base64Payload);
93 | const key = await importKey(secret);
94 | const isValid = await verifySignature(key, signatureHex, payload);
95 | if (!isValid) return [];
96 |
97 | const clients = JSON.parse(payload);
98 | return Array.isArray(clients) ? clients : [];
99 | } catch {
100 | return [];
101 | }
102 | };
103 |
104 | /**
105 | * Checks if a given client has already been approved by the user,
106 | * based on a signed cookie.
107 | *
108 | * @param request - The incoming Request object to read cookies from.
109 | * @param clientId - The OAuth client ID to check approval for.
110 | * @param cookieSecret - The secret key used to sign/verify the approval cookie.
111 | * @returns A promise resolving to true if the client ID is in the list of approved clients in a valid cookie, false otherwise.
112 | */
113 | export const isClientAlreadyApproved = async (
114 | req: ExpressRequest,
115 | clientId: string,
116 | cookieSecret: string,
117 | ) => {
118 | const approvedClients = await getApprovedClientsFromCookie(
119 | req.cookies[COOKIE_NAME] ?? '',
120 | cookieSecret,
121 | );
122 | return approvedClients.includes(clientId);
123 | };
124 |
125 | /**
126 | * Updates the approved clients cookie with a new client ID.
127 | * The cookie is signed using HMAC-SHA256 for integrity.
128 | *
129 | * @param request - Express request containing existing cookie
130 | * @param clientId - Client ID to add to approved list
131 | * @param cookieSecret - Secret key for signing cookie
132 | * @returns Cookie string with updated approved clients list
133 | */
134 | export const updateApprovedClientsCookie = async (
135 | req: ExpressRequest,
136 | res: ExpressResponse,
137 | clientId: string,
138 | cookieSecret: string,
139 | ) => {
140 | const approvedClients = await getApprovedClientsFromCookie(
141 | req.cookies[COOKIE_NAME] ?? '',
142 | cookieSecret,
143 | );
144 | const newApprovedClients = JSON.stringify(
145 | Array.from(new Set([...approvedClients, clientId])),
146 | );
147 | const key = await importKey(cookieSecret);
148 | const signature = await signData(key, newApprovedClients);
149 | res.cookie(COOKIE_NAME, `${signature}.${btoa(newApprovedClients)}`, {
150 | httpOnly: true,
151 | secure: true,
152 | sameSite: 'lax',
153 | maxAge: ONE_YEAR_IN_SECONDS,
154 | path: '/',
155 | });
156 | };
157 |
```
--------------------------------------------------------------------------------
/src/tools/handlers/search.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { CallToolResult } from '@modelcontextprotocol/sdk/types.js';
2 | import { Api, Organization, ProjectListItem } from '@neondatabase/api-client';
3 | import { Branch } from '@neondatabase/api-client';
4 | import { searchInputSchema } from '../toolsSchema.js';
5 | import { z } from 'zod';
6 | import { ToolHandlerExtraParams } from '../types.js';
7 | import { handleListProjects } from './list-projects.js';
8 | import { CONSOLE_URLS, generateConsoleUrl } from './urls.js';
9 |
10 | type SearchProps = z.infer<typeof searchInputSchema>;
11 |
12 | type MCPOrgId = `org:${string}`;
13 | type MCPProjectId = `project:${string}`;
14 | type MCPBranchId = `branch:${string}/${string}`; // projectId/branchId
15 | type SearchResultId = MCPOrgId | MCPProjectId | MCPBranchId;
16 |
17 | type SearchResult = {
18 | id: SearchResultId;
19 | title: string;
20 | url: string;
21 | type: 'organization' | 'project' | 'branch';
22 | };
23 |
24 | const matches = (
25 | entity: Organization | ProjectListItem | Branch,
26 | query: string,
27 | ) => {
28 | return (
29 | entity.name.toLowerCase().includes(query) ||
30 | entity.id.toLowerCase().includes(query)
31 | );
32 | };
33 |
/**
 * MCP `search` tool handler.
 *
 * Performs a case-insensitive substring search (against both name and id)
 * across everything the caller can see: organizations, each organization's
 * projects and branches, the personal account's projects (for non-org keys),
 * and projects shared with the caller.
 *
 * @param query - raw search string; lower-cased once and matched by `matches`.
 * @param neonClient - authenticated Neon API client.
 * @param extra - per-call context; `extra.account` decides the search scope.
 * @returns CallToolResult whose text content is a JSON array of SearchResult.
 *   Failures are reported via `isError: true`, never thrown to the caller.
 */
export async function handleSearch(
  { query }: SearchProps,
  neonClient: Api<unknown>,
  extra: ToolHandlerExtraParams,
): Promise<CallToolResult> {
  try {
    const results: SearchResult[] = [];
    const searchQuery = query.toLowerCase();
    // Search through all user's organizations
    let organizations;
    if (extra.account.isOrg) {
      // Org-scoped API key: the scope is exactly the one owning organization.
      const orgId = extra.account.id;
      const { data } = await neonClient.getOrganization(orgId);
      organizations = [data];
    } else {
      const { data: response } = await neonClient.getCurrentUserOrganizations();
      organizations = response.organizations || [];
    }

    // If in personal account, search projects
    if (!extra.account.isOrg) {
      const projects = await handleListProjects(
        {
          // NOTE(review): 400 acts as an effective "fetch all" page-size cap;
          // results beyond it would be silently missed — confirm.
          limit: 400,
        },
        neonClient,
        extra,
      );
      const searchResults = await searchProjectsAndBranches(
        projects,
        neonClient,
        searchQuery,
      );

      results.push(...searchResults);
    }

    // Search in all organizations
    for (const org of organizations) {
      // Check if organization matches the search query
      if (matches(org, searchQuery)) {
        results.push({
          id: `org:${org.id}`,
          title: org.name,
          url: generateConsoleUrl(CONSOLE_URLS.ORGANIZATION, {
            orgId: org.id,
          }),
          type: 'organization',
        });
      }

      try {
        const projects = await handleListProjects(
          {
            org_id: org.id,
            limit: 400,
          },
          neonClient,
          extra,
        );

        const searchResults = await searchProjectsAndBranches(
          projects,
          neonClient,
          searchQuery,
        );

        results.push(...searchResults);
      } catch {
        // Skip projects if we can't access them
        continue;
      }
    }

    // Also search shared projects
    // NOTE(review): results from the personal, per-org, and shared listings
    // are concatenated without de-duplication; verify the listings are
    // disjoint or that duplicate ids are acceptable to consumers.
    try {
      const { data } = await neonClient.listSharedProjects({
        limit: 400,
      });

      const searchResults = await searchProjectsAndBranches(
        data.projects,
        neonClient,
        searchQuery,
      );
      results.push(...searchResults);
    } catch {
      // Skip shared projects if we can't access them
    }

    return {
      content: [
        {
          type: 'text',
          text: JSON.stringify(results, null, 2),
        },
      ],
    };
  } catch (error) {
    // Defensive catch: surface any unexpected failure as a tool error result.
    return {
      isError: true,
      content: [
        {
          type: 'text',
          text: `Failed to search: ${error instanceof Error ? error.message : 'Unknown error'}`,
        },
      ],
    };
  }
}
144 |
145 | const searchProjectsAndBranches = async (
146 | projects: ProjectListItem[],
147 | neonClient: Api<unknown>,
148 | query: string,
149 | ): Promise<SearchResult[]> => {
150 | const results: SearchResult[] = [];
151 | projects.forEach((project) => {
152 | if (matches(project, query)) {
153 | results.push({
154 | id: `project:${project.id}`,
155 | title: project.name,
156 | url: generateConsoleUrl(CONSOLE_URLS.PROJECT, {
157 | projectId: project.id,
158 | }),
159 | type: 'project',
160 | });
161 | }
162 | });
163 |
164 | const branches = await Promise.all(
165 | projects.map(async (project) => {
166 | return searchBranches(project.id, neonClient, query);
167 | }),
168 | );
169 | results.push(...branches.flat());
170 | return results;
171 | };
172 |
173 | const searchBranches = async (
174 | projectId: string,
175 | neonClient: Api<unknown>,
176 | query: string,
177 | ): Promise<SearchResult[]> => {
178 | try {
179 | const { data } = await neonClient.listProjectBranches({
180 | projectId,
181 | });
182 | const branches = data.branches;
183 |
184 | return branches
185 | .filter((branch) => matches(branch, query))
186 | .map((branch) => ({
187 | id: `branch:${projectId}/${branch.id}`,
188 | title: branch.name,
189 | url: generateConsoleUrl(CONSOLE_URLS.PROJECT_BRANCH, {
190 | projectId,
191 | branchId: branch.id,
192 | }),
193 | type: 'branch',
194 | }));
195 | } catch {
196 | // Ignore if we fail to fetch branches
197 | return [];
198 | }
199 | };
200 |
```
--------------------------------------------------------------------------------
/src/tools/handlers/fetch.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { CallToolResult } from '@modelcontextprotocol/sdk/types.js';
2 | import { Api, MemberWithUser, ProjectListItem } from '@neondatabase/api-client';
3 | import { fetchInputSchema } from '../toolsSchema.js';
4 | import { z } from 'zod';
5 | import { handleDescribeProject } from './decribe-project.js';
6 | import { handleDescribeBranch } from './describe-branch.js';
7 | import { ToolHandlerExtraParams } from '../types.js';
8 | import { generateConsoleUrl, CONSOLE_URLS } from './urls.js';
9 |
10 | type FetchProps = z.infer<typeof fetchInputSchema>;
11 |
12 | export async function handleFetch(
13 | { id }: FetchProps,
14 | neonClient: Api<unknown>,
15 | extra: ToolHandlerExtraParams,
16 | ): Promise<CallToolResult> {
17 | try {
18 | // Parse the ID format
19 | if (id.startsWith('org:') || id.startsWith('org-')) {
20 | return await fetchOrganizationDetails(id.slice(4), neonClient);
21 | } else if (id.startsWith('branch:')) {
22 | const [projectId, branchId] = id.slice(7).split('/');
23 | return await fetchBranchDetails(projectId, branchId, neonClient, extra);
24 | } else if (id.startsWith('project:')) {
25 | return await fetchProjectDetails(id.slice(8), neonClient);
26 | } else {
27 | return {
28 | isError: true,
29 | content: [
30 | {
31 | type: 'text',
32 | text: `Invalid ID format: "${id}". Expected format: org:*, project:*, or branch:*`,
33 | },
34 | ],
35 | };
36 | }
37 | } catch (error) {
38 | return {
39 | isError: true,
40 | content: [
41 | {
42 | type: 'text',
43 | text: `Failed to fetch details: ${error instanceof Error ? error.message : 'Unknown error'}`,
44 | },
45 | ],
46 | };
47 | }
48 | }
49 |
50 | async function fetchOrganizationDetails(
51 | orgId: string,
52 | neonClient: Api<unknown>,
53 | ): Promise<CallToolResult> {
54 | try {
55 | const { data: orgData } = await neonClient.getOrganization(orgId);
56 |
57 | let members: MemberWithUser[] = [];
58 | try {
59 | const { data: membersData } =
60 | await neonClient.getOrganizationMembers(orgId);
61 | members = membersData.members || [];
62 | } catch {
63 | // Skip if we can't access members
64 | }
65 |
66 | // Get projects count in this organization
67 | let projects: ProjectListItem[] = [];
68 | try {
69 | const { data: projectsData } = await neonClient.listProjects({
70 | org_id: orgId,
71 | });
72 | projects = projectsData.projects || [];
73 | } catch {
74 | // Skip if we can't access projects
75 | }
76 |
77 | const details = {
78 | organization: {
79 | id: orgData.id,
80 | name: orgData.name,
81 | created_at: orgData.created_at,
82 | updated_at: orgData.updated_at,
83 | },
84 | console_url: generateConsoleUrl(CONSOLE_URLS.ORGANIZATION, {
85 | orgId: orgData.id,
86 | }),
87 | };
88 |
89 | return {
90 | content: [
91 | {
92 | type: 'text',
93 | text: `**Organization Details**
94 | **Basic Information:**
95 | - Name: ${details.organization.name}
96 | - ID: ${details.organization.id}
97 | - Created: ${new Date(details.organization.created_at).toLocaleDateString()}
98 |
99 | **Statistics:**
100 | ${members.length > 0 ? `- Members: ${members.length}` : undefined}
101 | - Projects: ${projects.length}
102 | `,
103 | metadata: {
104 | org: orgData,
105 | members: members,
106 | projects: projects,
107 | },
108 | },
109 | ],
110 | };
111 | } catch (error) {
112 | return {
113 | isError: true,
114 | content: [
115 | {
116 | type: 'text',
117 | text: `Failed to fetch organization details: ${error instanceof Error ? error.message : 'Unknown error'}`,
118 | },
119 | ],
120 | };
121 | }
122 | }
123 |
124 | async function fetchProjectDetails(
125 | projectId: string,
126 | neonClient: Api<unknown>,
127 | ): Promise<CallToolResult> {
128 | try {
129 | const { project, branches } = await handleDescribeProject(
130 | projectId,
131 | neonClient,
132 | );
133 |
134 | const defaultBranch = branches.find((branch) => branch.default);
135 | return {
136 | content: [
137 | {
138 | type: 'text',
139 | text: `**Project Details**
140 |
141 | **Basic Information:**
142 | - Name: ${project.name}
143 | - ID: ${project.id}
144 | - Region: ${project.region_id}
145 | - PostgreSQL Version: ${project.pg_version}
146 | - Created: ${new Date(project.created_at).toLocaleDateString()}
147 | - Last Updated: ${new Date(project.updated_at).toLocaleDateString()}
148 |
149 | **Statistics:**
150 | - Branches: ${branches.length}
151 | - Default Branch: ${defaultBranch?.name} (${defaultBranch?.id})
152 | `,
153 | metadata: {
154 | project: project,
155 | branches: branches,
156 | },
157 | },
158 | ],
159 | };
160 | } catch (error) {
161 | return {
162 | isError: true,
163 | content: [
164 | {
165 | type: 'text',
166 | text: `Failed to fetch project details: ${error instanceof Error ? error.message : 'Unknown error'}`,
167 | },
168 | ],
169 | };
170 | }
171 | }
172 |
173 | async function fetchBranchDetails(
174 | projectId: string,
175 | branchId: string,
176 | neonClient: Api<unknown>,
177 | extra: ToolHandlerExtraParams,
178 | ): Promise<CallToolResult> {
179 | try {
180 | const result = await handleDescribeBranch(
181 | {
182 | projectId,
183 | branchId,
184 | },
185 | neonClient,
186 | extra,
187 | );
188 |
189 | return {
190 | content: [
191 | {
192 | type: 'text',
193 | text: JSON.stringify(result, null, 2),
194 | },
195 | ],
196 | };
197 | } catch (error) {
198 | return {
199 | isError: true,
200 | content: [
201 | {
202 | type: 'text',
203 | text: `Failed to fetch branch details: ${error instanceof Error ? error.message : 'Unknown error'}`,
204 | },
205 | ],
206 | };
207 | }
208 | }
209 |
```
--------------------------------------------------------------------------------
/landing/app/globals.css:
--------------------------------------------------------------------------------
```css
@import 'tailwindcss';
@import 'tw-animate-css';

/* @custom-variant dark (&:is(.dark *)); */

/* Expose the app's CSS custom properties as Tailwind v4 theme tokens. */
@theme inline {
  --color-background: var(--background);
  --color-foreground: var(--foreground);
  --color-sidebar-ring: var(--sidebar-ring);
  --color-sidebar-border: var(--sidebar-border);
  --color-sidebar-accent-foreground: var(--sidebar-accent-foreground);
  --color-sidebar-accent: var(--sidebar-accent);
  --color-sidebar-primary-foreground: var(--sidebar-primary-foreground);
  --color-sidebar-primary: var(--sidebar-primary);
  --color-sidebar-foreground: var(--sidebar-foreground);
  --color-sidebar: var(--sidebar);
  --color-chart-5: var(--chart-5);
  --color-chart-4: var(--chart-4);
  --color-chart-3: var(--chart-3);
  --color-chart-2: var(--chart-2);
  --color-chart-1: var(--chart-1);
  --color-ring: var(--ring);
  --color-input: var(--input);
  --color-border: var(--border);
  --color-destructive: var(--destructive);
  --color-accent-foreground: var(--accent-foreground);
  --color-accent: var(--accent);
  --color-muted-foreground: var(--muted-foreground);
  --color-muted: var(--muted);
  --color-secondary-foreground: var(--secondary-foreground);
  --color-secondary: var(--secondary);
  --color-primary-foreground: var(--primary-foreground);
  --color-primary: var(--primary);
  --color-popover-foreground: var(--popover-foreground);
  --color-popover: var(--popover);
  --color-card-foreground: var(--card-foreground);
  --color-card: var(--card);
  /* Radius scale derived from the single --radius token below. */
  --radius-sm: calc(var(--radius) - 4px);
  --radius-md: calc(var(--radius) - 2px);
  --radius-lg: var(--radius);
  --radius-xl: calc(var(--radius) + 4px);
  --font-sans: var(--font-geist-sans);
  --font-mono: var(--font-geist-mono);

  /* user defined */
  --color-important-notes: var(--important-notes);
  --color-link: var(--link);
}

/* Light-theme palette (oklch) and shared radius token. */
:root {
  --radius: 0.625rem;
  --background: oklch(1 0 0);
  --foreground: oklch(0.129 0.042 264.695);
  --card: oklch(1 0 0);
  --card-foreground: oklch(0.129 0.042 264.695);
  --popover: oklch(1 0 0);
  --popover-foreground: oklch(0.129 0.042 264.695);
  --primary: oklch(0.208 0.042 265.755);
  --primary-foreground: oklch(0.984 0.003 247.858);
  --secondary: oklch(0.968 0.007 247.896);
  --secondary-foreground: oklch(0.208 0.042 265.755);
  --muted: oklch(0.968 0.007 247.896);
  --muted-foreground: oklch(0.554 0.046 257.417);
  --accent: oklch(0.968 0.007 247.896);
  --accent-foreground: oklch(0.208 0.042 265.755);
  --destructive: oklch(0.577 0.245 27.325);
  --border: oklch(0.929 0.013 255.508);
  --input: oklch(0.929 0.013 255.508);
  --ring: oklch(0.704 0.04 256.788);
  --chart-1: oklch(0.646 0.222 41.116);
  --chart-2: oklch(0.6 0.118 184.704);
  --chart-3: oklch(0.398 0.07 227.392);
  --chart-4: oklch(0.828 0.189 84.429);
  --chart-5: oklch(0.769 0.188 70.08);
  --sidebar: oklch(0.984 0.003 247.858);
  --sidebar-foreground: oklch(0.129 0.042 264.695);
  --sidebar-primary: oklch(0.208 0.042 265.755);
  --sidebar-primary-foreground: oklch(0.984 0.003 247.858);
  --sidebar-accent: oklch(0.968 0.007 247.896);
  --sidebar-accent-foreground: oklch(0.208 0.042 265.755);
  --sidebar-border: oklch(0.929 0.013 255.508);
  --sidebar-ring: oklch(0.704 0.04 256.788);

  /* user defined */
  --important-notes: var(--color-orange-800);
  --link: oklch(0.64 0.14 160.38);
}

/* Dark-theme overrides for the same custom properties. */
@variant dark {
  :root {
    --background: oklch(0.129 0.042 264.695);
    --foreground: oklch(0.984 0.003 247.858);
    --card: oklch(0.208 0.042 265.755);
    --card-foreground: oklch(0.984 0.003 247.858);
    --popover: oklch(0.208 0.042 265.755);
    --popover-foreground: oklch(0.984 0.003 247.858);
    --primary: oklch(0.929 0.013 255.508);
    --primary-foreground: oklch(0.208 0.042 265.755);
    --secondary: oklch(0.279 0.041 260.031);
    --secondary-foreground: oklch(0.984 0.003 247.858);
    --muted: oklch(0.279 0.041 260.031);
    --muted-foreground: oklch(0.704 0.04 256.788);
    --accent: oklch(0.279 0.041 260.031);
    --accent-foreground: oklch(0.984 0.003 247.858);
    --destructive: oklch(0.704 0.191 22.216);
    --border: oklch(1 0 0 / 10%);
    --input: oklch(1 0 0 / 15%);
    --ring: oklch(0.551 0.027 264.364);
    --chart-1: oklch(0.488 0.243 264.376);
    --chart-2: oklch(0.696 0.17 162.48);
    --chart-3: oklch(0.769 0.188 70.08);
    --chart-4: oklch(0.627 0.265 303.9);
    --chart-5: oklch(0.645 0.246 16.439);
    --sidebar: oklch(0.208 0.042 265.755);
    --sidebar-foreground: oklch(0.984 0.003 247.858);
    --sidebar-primary: oklch(0.488 0.243 264.376);
    --sidebar-primary-foreground: oklch(0.984 0.003 247.858);
    --sidebar-accent: oklch(0.279 0.041 260.031);
    --sidebar-accent-foreground: oklch(0.984 0.003 247.858);
    --sidebar-border: oklch(1 0 0 / 10%);
    --sidebar-ring: oklch(0.551 0.027 264.364);

    /* user defined */
    --important-notes: var(--color-orange-100);
    --link: oklch(0.81 0.18 160.37);
  }
}

/* Renders text in the Geist Mono font. */
@utility monospaced {
  @apply font-[family-name:var(--font-geist-mono)];
}

/* Global element defaults. */
@layer base {
  * {
    @apply border-border outline-ring/50;
  }

  body {
    @apply bg-background text-foreground;
    /*font-family: Arial, Helvetica, sans-serif;*/
  }

  /* Restore pointer cursor on interactive controls (Tailwind v4 resets it). */
  button:not(:disabled),
  [role='button']:not(:disabled) {
    cursor: pointer;
  }
}

/* Page-specific helpers. */
@layer page {
  .external-link {
    @apply text-link;
    @apply font-semibold;
    @apply border-b;
    @apply border-transparent;
    @apply hover:border-current;
  }
}
```
--------------------------------------------------------------------------------
/mcp-client/src/index.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { Anthropic } from '@anthropic-ai/sdk';
2 |
3 | import {
4 | StdioClientTransport,
5 | StdioServerParameters,
6 | } from '@modelcontextprotocol/sdk/client/stdio.js';
7 | import {
8 | ListToolsResultSchema,
9 | CallToolResultSchema,
10 | } from '@modelcontextprotocol/sdk/types.js';
11 | import { Client } from '@modelcontextprotocol/sdk/client/index.js';
12 | import chalk from 'chalk';
13 | import { Tool } from '@anthropic-ai/sdk/resources/index.mjs';
14 | import { Stream } from '@anthropic-ai/sdk/streaming.mjs';
15 | import { consoleStyles, Logger, LoggerOptions } from './logger.js';
16 |
/** One turn of the conversation history sent to the Anthropic API. */
interface Message {
  role: 'user' | 'assistant';
  content: string;
}

/** Stdio server launch parameters plus optional logger configuration. */
type MCPClientOptions = StdioServerParameters & {
  loggerOptions?: LoggerOptions;
};
25 |
26 | export class MCPClient {
27 | private anthropicClient: Anthropic;
28 | private messages: Message[] = [];
29 | private mcpClient: Client;
30 | private transport: StdioClientTransport;
31 | private tools: Tool[] = [];
32 | private logger: Logger;
33 |
34 | constructor({ loggerOptions, ...serverConfig }: MCPClientOptions) {
35 | this.anthropicClient = new Anthropic({
36 | apiKey: process.env.ANTHROPIC_API_KEY,
37 | });
38 |
39 | this.mcpClient = new Client(
40 | { name: 'cli-client', version: '1.0.0' },
41 | { capabilities: {} },
42 | );
43 |
44 | this.transport = new StdioClientTransport(serverConfig);
45 | this.logger = new Logger(loggerOptions ?? { mode: 'verbose' });
46 | }
47 |
48 | async start() {
49 | try {
50 | await this.mcpClient.connect(this.transport);
51 | await this.initMCPTools();
52 | } catch (error) {
53 | this.logger.log('Failed to initialize MCP Client: ' + error + '\n', {
54 | type: 'error',
55 | });
56 | process.exit(1);
57 | }
58 | }
59 |
60 | async stop() {
61 | await this.mcpClient.close();
62 | }
63 |
64 | private async initMCPTools() {
65 | const toolsResults = await this.mcpClient.request(
66 | { method: 'tools/list' },
67 | ListToolsResultSchema,
68 | );
69 | this.tools = toolsResults.tools.map(({ inputSchema, ...tool }) => ({
70 | ...tool,
71 | input_schema: inputSchema,
72 | }));
73 | }
74 |
75 | private formatToolCall(toolName: string, args: any): string {
76 | return (
77 | '\n' +
78 | consoleStyles.tool.bracket('[') +
79 | consoleStyles.tool.name(toolName) +
80 | consoleStyles.tool.bracket('] ') +
81 | consoleStyles.tool.args(JSON.stringify(args, null, 2)) +
82 | '\n'
83 | );
84 | }
85 |
86 | private formatJSON(json: string): string {
87 | return json
88 | .replace(/"([^"]+)":/g, chalk.blue('"$1":'))
89 | .replace(/: "([^"]+)"/g, ': ' + chalk.green('"$1"'));
90 | }
91 |
92 | private async processStream(
93 | stream: Stream<Anthropic.Messages.RawMessageStreamEvent>,
94 | ): Promise<void> {
95 | let currentMessage = '';
96 | let currentToolName = '';
97 | let currentToolInputString = '';
98 |
99 | this.logger.log(consoleStyles.assistant);
100 | for await (const chunk of stream) {
101 | switch (chunk.type) {
102 | case 'message_start':
103 | case 'content_block_stop':
104 | continue;
105 |
106 | case 'content_block_start':
107 | if (chunk.content_block?.type === 'tool_use') {
108 | currentToolName = chunk.content_block.name;
109 | }
110 | break;
111 |
112 | case 'content_block_delta':
113 | if (chunk.delta.type === 'text_delta') {
114 | this.logger.log(chunk.delta.text);
115 | currentMessage += chunk.delta.text;
116 | } else if (chunk.delta.type === 'input_json_delta') {
117 | if (currentToolName && chunk.delta.partial_json) {
118 | currentToolInputString += chunk.delta.partial_json;
119 | }
120 | }
121 | break;
122 |
123 | case 'message_delta':
124 | if (currentMessage) {
125 | this.messages.push({
126 | role: 'assistant',
127 | content: currentMessage,
128 | });
129 | }
130 |
131 | if (chunk.delta.stop_reason === 'tool_use') {
132 | const toolArgs = currentToolInputString
133 | ? JSON.parse(currentToolInputString)
134 | : {};
135 |
136 | this.logger.log(
137 | this.formatToolCall(currentToolName, toolArgs) + '\n',
138 | );
139 | const toolResult = await this.mcpClient.request(
140 | {
141 | method: 'tools/call',
142 | params: {
143 | name: currentToolName,
144 | arguments: toolArgs,
145 | },
146 | },
147 | CallToolResultSchema,
148 | );
149 |
150 | const formattedResult = this.formatJSON(
151 | JSON.stringify(toolResult.content.flatMap((c) => c.text)),
152 | );
153 |
154 | this.messages.push({
155 | role: 'user',
156 | content: formattedResult,
157 | });
158 |
159 | const nextStream = await this.anthropicClient.messages.create({
160 | messages: this.messages,
161 | model: 'claude-3-5-sonnet-20241022',
162 | max_tokens: 8192,
163 | tools: this.tools,
164 | stream: true,
165 | });
166 | await this.processStream(nextStream);
167 | }
168 | break;
169 |
170 | case 'message_stop':
171 | break;
172 |
173 | default:
174 | this.logger.log(`Unknown event type: ${JSON.stringify(chunk)}\n`, {
175 | type: 'warning',
176 | });
177 | }
178 | }
179 | }
180 |
181 | async processQuery(query: string) {
182 | try {
183 | this.messages.push({ role: 'user', content: query });
184 |
185 | const stream = await this.anthropicClient.messages.create({
186 | messages: this.messages,
187 | model: 'claude-3-5-sonnet-20241022',
188 | max_tokens: 8192,
189 | tools: this.tools,
190 | stream: true,
191 | });
192 | await this.processStream(stream);
193 |
194 | return this.messages;
195 | } catch (error) {
196 | this.logger.log('\nError during query processing: ' + error + '\n', {
197 | type: 'error',
198 | });
199 | if (error instanceof Error) {
200 | this.logger.log(
201 | consoleStyles.assistant +
202 | 'I apologize, but I encountered an error: ' +
203 | error.message +
204 | '\n',
205 | );
206 | }
207 | }
208 | }
209 | }
210 |
```
--------------------------------------------------------------------------------
/src/oauth/utils.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { NextFunction, Request, Response } from 'express';
2 | import cors from 'cors';
3 | import crypto from 'crypto';
4 | import { model } from './model.js';
5 | import { ApiKeyRecord, apiKeys } from './kv-store.js';
6 | import { createNeonClient } from '../server/api.js';
7 | import { identify } from '../analytics/analytics.js';
8 |
9 | const READ_ONLY_HEADER = 'X-read-only';
10 |
11 | export const ensureCorsHeaders = () =>
12 | cors({
13 | origin: true,
14 | methods: '*',
15 | allowedHeaders: `Authorization, Origin, Content-Type, Accept, ${READ_ONLY_HEADER}, *`,
16 | });
17 |
18 | const fetchAccountDetails = async (
19 | accessToken: string,
20 | ): Promise<ApiKeyRecord | null> => {
21 | const apiKeyRecord = await apiKeys.get(accessToken);
22 | if (apiKeyRecord) {
23 | return apiKeyRecord;
24 | }
25 |
26 | try {
27 | const neonClient = createNeonClient(accessToken);
28 | const { data: auth } = await neonClient.getAuthDetails();
29 | if (auth.auth_method === 'api_key_org') {
30 | const { data: org } = await neonClient.getOrganization(auth.account_id);
31 | const record = {
32 | apiKey: accessToken,
33 | authMethod: auth.auth_method,
34 | account: {
35 | id: auth.account_id,
36 | name: org.name,
37 | isOrg: true,
38 | },
39 | };
40 | identify(record.account, { context: { authMethod: record.authMethod } });
41 | await apiKeys.set(accessToken, record);
42 | return record;
43 | }
44 | const { data: user } = await neonClient.getCurrentUserInfo();
45 | const record = {
46 | apiKey: accessToken,
47 | authMethod: auth.auth_method,
48 | account: {
49 | id: user.id,
50 | name: user.name,
51 | email: user.email,
52 | isOrg: false,
53 | },
54 | };
55 | identify(record.account, { context: { authMethod: record.authMethod } });
56 | await apiKeys.set(accessToken, record);
57 | return record;
58 | } catch {
59 | return null;
60 | }
61 | };
62 |
/**
 * Express middleware factory enforcing bearer-token authentication.
 *
 * Credentials are resolved in order:
 *  1. OAuth access tokens issued by this server (looked up via `model`);
 *  2. raw Neon API keys, validated against the Neon API through
 *     `fetchAccountDetails` (results cached in the KV store).
 *
 * On success `request.auth` is populated (token, clientId, scopes, account
 * info, readOnly flag) and the chain continues; otherwise responds 401.
 */
export const requiresAuth =
  () => async (request: Request, response: Response, next: NextFunction) => {
    const authorization = request.headers.authorization;
    if (!authorization) {
      response.status(401).json({ error: 'Unauthorized' });
      return;
    }

    const accessToken = extractBearerToken(authorization);
    // Check for X-Read-Only header
    // (Express lower-cases incoming header names, hence toLowerCase().)
    const readOnlyHeader = request.headers[READ_ONLY_HEADER.toLowerCase()];
    const readOnly = readOnlyHeader === 'true' || readOnlyHeader === '1';

    const token = await model.getAccessToken(accessToken);
    if (token) {
      // A token without an expiry is rejected as expired. NOTE(review):
      // expires_at is compared with Date.now(), so it is presumably stored
      // in milliseconds — confirm against the model layer.
      if (!token.expires_at || token.expires_at < Date.now()) {
        response.status(401).json({ error: 'Access token expired' });
        return;
      }

      request.auth = {
        token: token.accessToken,
        clientId: token.client.id,
        // Scope may be stored as an array or a space-delimited string.
        scopes: Array.isArray(token.scope)
          ? token.scope
          : (token.scope?.split(' ') ?? []),
        extra: {
          account: {
            id: token.user.id,
            name: token.user.name,
            email: token.user.email,
            isOrg: false,
          },
          client: {
            id: token.client.id,
            name: token.client.client_name,
          },
          readOnly,
        },
      };

      next();
      return;
    }

    // If the token is not found, try to resolve the auth headers with Neon for other means of authentication.
    const apiKeyRecord = await fetchAccountDetails(accessToken);
    if (!apiKeyRecord) {
      response.status(401).json({ error: 'Invalid access token' });
      return;
    }
    // API-key sessions get a wildcard scope and a synthetic client id.
    request.auth = {
      token: accessToken,
      clientId: 'api-key',
      scopes: ['*'],
      extra: {
        account: apiKeyRecord.account,
        readOnly,
      },
    };
    next();
    return;
  };
126 |
127 | export type DownstreamAuthRequest = {
128 | responseType: string;
129 | clientId: string;
130 | redirectUri: string;
131 | scope: string[];
132 | state: string;
133 | codeChallenge?: string;
134 | codeChallengeMethod?: string;
135 | };
136 |
137 | export const parseAuthRequest = (request: Request): DownstreamAuthRequest => {
138 | const responseType = (request.query.response_type || '') as string;
139 | const clientId = (request.query.client_id || '') as string;
140 | const redirectUri = (request.query.redirect_uri || '') as string;
141 | const scope = (request.query.scope || '') as string;
142 | const state = (request.query.state || '') as string;
143 | const codeChallenge = (request.query.code_challenge as string) || undefined;
144 | const codeChallengeMethod = (request.query.code_challenge_method ||
145 | 'plain') as string;
146 |
147 | return {
148 | responseType,
149 | clientId,
150 | redirectUri,
151 | scope: scope.split(' ').filter(Boolean),
152 | state,
153 | codeChallenge,
154 | codeChallengeMethod,
155 | };
156 | };
157 |
158 | export const decodeAuthParams = (state: string): DownstreamAuthRequest => {
159 | const decoded = atob(state);
160 | return JSON.parse(decoded);
161 | };
162 |
163 | export const generateRandomString = (length: number): string => {
164 | const charset =
165 | 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
166 | const array = new Uint8Array(length);
167 | crypto.getRandomValues(array);
168 | return Array.from(array, (byte) => charset[byte % charset.length]).join('');
169 | };
170 |
171 | export const extractBearerToken = (authorizationHeader: string): string => {
172 | if (!authorizationHeader) return '';
173 | return authorizationHeader.replace(/^Bearer\s+/i, '');
174 | };
175 |
176 | export const extractClientCredentials = (request: Request) => {
177 | const authorization = request.headers.authorization;
178 | if (authorization?.startsWith('Basic ')) {
179 | const credentials = atob(authorization.replace(/^Basic\s+/i, ''));
180 | const [clientId, clientSecret] = credentials.split(':');
181 | return { clientId, clientSecret };
182 | }
183 |
184 | return {
185 | clientId: request.body.client_id,
186 | clientSecret: request.body.client_secret,
187 | };
188 | };
189 |
/** Converts a duration in milliseconds to whole seconds (rounded down). */
export const toSeconds = (ms: number): number => {
  return Math.floor(ms / 1000);
};

/** Converts a duration in seconds to milliseconds. */
export const toMilliseconds = (seconds: number): number => {
  return seconds * 1000;
};
197 |
198 | export const verifyPKCE = (
199 | codeChallenge: string,
200 | codeChallengeMethod: string,
201 | codeVerifier: string,
202 | ): boolean => {
203 | if (!codeChallenge || !codeChallengeMethod || !codeVerifier) {
204 | return false;
205 | }
206 |
207 | if (codeChallengeMethod === 'S256') {
208 | const hash = crypto
209 | .createHash('sha256')
210 | .update(codeVerifier)
211 | .digest('base64url');
212 | return codeChallenge === hash;
213 | }
214 |
215 | if (codeChallengeMethod === 'plain') {
216 | return codeChallenge === codeVerifier;
217 | }
218 |
219 | return false;
220 | };
221 |
```
--------------------------------------------------------------------------------
/src/server/index.ts:
--------------------------------------------------------------------------------
```typescript
1 | #!/usr/bin/env node
2 |
3 | import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
4 | import { NEON_RESOURCES } from '../resources.js';
5 | import { NEON_PROMPTS, getPromptTemplate } from '../prompts.js';
6 | import {
7 | NEON_HANDLERS,
8 | NEON_TOOLS,
9 | ToolHandlerExtended,
10 | } from '../tools/index.js';
11 | import { logger } from '../utils/logger.js';
12 | import { createNeonClient, getPackageJson } from './api.js';
13 | import { track } from '../analytics/analytics.js';
14 | import { captureException, startSpan } from '@sentry/node';
15 | import { ServerContext } from '../types/context.js';
16 | import { setSentryTags } from '../sentry/utils.js';
17 | import { ToolHandlerExtraParams } from '../tools/types.js';
18 | import { handleToolError } from './errors.js';
19 | import { detectClientApplication } from '../utils/client-application.js';
20 |
/**
 * Creates and configures the Neon MCP server for one authenticated session.
 *
 * Registers tools (filtered when the session is read-only), resources, and
 * prompts, wrapping each invocation with analytics tracking and Sentry
 * reporting. Returns the configured McpServer; the caller attaches the
 * transport.
 */
export const createMcpServer = (context: ServerContext) => {
  const server = new McpServer(
    {
      name: 'mcp-server-neon',
      version: getPackageJson().version,
    },
    {
      capabilities: {
        tools: {},
        resources: {},
        prompts: {
          listChanged: true,
        },
      },
    },
  );

  const neonClient = createNeonClient(context.apiKey);

  // Compute client info once at server instantiation.
  // `let` because the MCP handshake below may refine both values; the
  // tool/prompt handlers close over them and see the refined values.
  let clientName = context.userAgent ?? 'unknown';
  let clientApplication = detectClientApplication(clientName);

  // Track server initialization
  const trackServerInit = () => {
    track({
      userId: context.account.id,
      event: 'server_init',
      properties: {
        clientName,
        clientApplication,
        readOnly: String(context.readOnly ?? false),
      },
      context: {
        client: context.client,
        app: context.app,
      },
    });
    logger.info('Server initialized:', {
      clientName,
      clientApplication,
      readOnly: context.readOnly,
    });
  };

  // Always use MCP handshake clientInfo (more reliable than HTTP User-Agent)
  // This ensures we get the real client name even when using mcp-remote,
  // which forwards the original client name (e.g., "Cursor (via mcp-remote 0.1.31)")
  server.server.oninitialized = () => {
    const clientInfo = server.server.getClientVersion();
    // Prefer MCP clientInfo over HTTP User-Agent
    if (clientInfo?.name) {
      clientName = clientInfo.name;
      clientApplication = detectClientApplication(clientName);
    }
    trackServerInit();
  };

  // Filter tools based on read-only mode
  const availableTools = context.readOnly
    ? NEON_TOOLS.filter((tool) => tool.readOnlySafe)
    : NEON_TOOLS;

  // Register tools
  availableTools.forEach((tool) => {
    const handler = NEON_HANDLERS[tool.name];
    if (!handler) {
      throw new Error(`Handler for tool ${tool.name} not found`);
    }

    const toolHandler = handler as ToolHandlerExtended<typeof tool.name>;

    server.tool(
      tool.name,
      tool.description,
      // Tool arguments are nested under a single `params` key.
      { params: tool.inputSchema },
      async (args, extra) => {
        return await startSpan(
          {
            name: 'tool_call',
            attributes: {
              tool_name: tool.name,
            },
          },
          async (span) => {
            const properties = {
              tool_name: tool.name,
              readOnly: String(context.readOnly ?? false),
              clientName,
            };
            logger.info('tool call:', properties);
            setSentryTags(context);
            track({
              userId: context.account.id,
              event: 'tool_call',
              properties,
              context: {
                client: context.client,
                app: context.app,
                clientName,
              },
            });

            const extraArgs: ToolHandlerExtraParams = {
              ...extra,
              account: context.account,
              readOnly: context.readOnly,
              clientApplication,
            };
            try {
              return await toolHandler(args, neonClient, extraArgs);
            } catch (error) {
              // Mark the span as failed before converting the error into a
              // tool result. NOTE(review): 2 appears to be the span ERROR
              // status code — confirm against the Sentry/OTel SDK mapping.
              span.setStatus({
                code: 2,
              });
              return handleToolError(error, properties);
            }
          },
        );
      },
    );
  });

  // Register resources
  NEON_RESOURCES.forEach((resource) => {
    server.resource(
      resource.name,
      resource.uri,
      {
        description: resource.description,
        mimeType: resource.mimeType,
      },
      async (url) => {
        const properties = { resource_name: resource.name };
        logger.info('resource call:', properties);
        setSentryTags(context);
        track({
          userId: context.account.id,
          event: 'resource_call',
          properties,
          context: { client: context.client, app: context.app },
        });
        try {
          return await resource.handler(url);
        } catch (error) {
          // Resource errors are reported to Sentry and rethrown to the
          // MCP layer (no conversion to a tool-style error result).
          captureException(error, {
            extra: properties,
          });
          throw error;
        }
      },
    );
  });

  // Register prompts
  NEON_PROMPTS.forEach((prompt) => {
    server.prompt(
      prompt.name,
      prompt.description,
      prompt.argsSchema,
      async (args, extra) => {
        const properties = { prompt_name: prompt.name, clientName };
        logger.info('prompt call:', properties);
        setSentryTags(context);
        track({
          userId: context.account.id,
          event: 'prompt_call',
          properties,
          context: { client: context.client, app: context.app },
        });
        try {
          const extraArgs: ToolHandlerExtraParams = {
            ...extra,
            account: context.account,
            readOnly: context.readOnly,
            clientApplication,
          };
          const template = await getPromptTemplate(
            prompt.name,
            extraArgs,
            args,
          );
          // Prompts resolve to a single user message containing the
          // rendered template text.
          return {
            messages: [
              {
                role: 'user',
                content: {
                  type: 'text',
                  text: template,
                },
              },
            ],
          };
        } catch (error) {
          captureException(error, {
            extra: properties,
          });
          throw error;
        }
      },
    );
  });

  // Report unexpected server-level errors to both Sentry and analytics.
  server.server.onerror = (error: unknown) => {
    const message = error instanceof Error ? error.message : 'Unknown error';
    logger.error('Server error:', {
      message,
      error,
    });
    const contexts = { app: context.app, client: context.client };
    const eventId = captureException(error, {
      user: { id: context.account.id },
      contexts: contexts,
    });
    track({
      userId: context.account.id,
      event: 'server_error',
      properties: { message, error, eventId },
      context: contexts,
    });
  };

  return server;
};
245 |
```
--------------------------------------------------------------------------------
/.github/workflows/claude-code-review.yml:
--------------------------------------------------------------------------------
```yaml
1 | name: Claude Code Review
2 |
3 | on:
4 | pull_request:
5 | types: [opened] # Only run when PR is first created, not on every commit
6 | # Optional: Only run on specific file changes
7 | # paths:
8 | # - "src/**/*.ts"
9 | # - "src/**/*.tsx"
10 | # - "src/**/*.js"
11 | # - "src/**/*.jsx"
12 | workflow_dispatch:
13 | inputs:
14 | pr_number:
15 | description: 'PR number to review'
16 | required: true
17 | type: number
18 |
19 | concurrency:
20 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.event.inputs.pr_number }}
21 | cancel-in-progress: true
22 |
23 | jobs:
24 | claude-review:
25 | # Disabled: set to false to prevent workflow execution
26 | if: ${{ false }}
27 |
28 | runs-on:
29 | group: neondatabase-protected-runner-group
30 | labels: linux-ubuntu-latest
31 | permissions:
32 | contents: read
33 | pull-requests: write
34 | issues: read
35 | id-token: write
36 |
37 | steps:
38 | - name: Checkout repository
39 | uses: actions/checkout@v4
40 | with:
41 | fetch-depth: 1
42 | ref: ${{ github.event_name == 'workflow_dispatch' && format('refs/pull/{0}/head', github.event.inputs.pr_number) || '' }}
43 |
44 | - name: Run Claude Code Review
45 | id: claude-review
46 | uses: anthropics/claude-code-action@v1
47 | with:
48 | anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
49 | prompt: |
50 | # Code Review Task
51 |
52 | **REPO:** ${{ github.repository }}
53 | **PR:** ${{ github.event.pull_request.number || github.event.inputs.pr_number }}
54 | **COMMIT:** ${{ github.event.pull_request.head.sha }}
55 |
56 | ## Context
57 |
58 | This is the **Neon MCP Server** - a Model Context Protocol server bridging LLMs to Neon Postgres API.
59 | Review this PR with understanding of:
60 | - MCP tool/handler architecture (see CLAUDE.md lines 83-122)
61 | - TypeScript ES2022 + Node16 ESM requirements
62 | - Tool registration pattern: definitions.ts → toolsSchema.ts → handlers/ → tools.ts
63 | - Multi-call state management for migrations/tuning tools
64 |
65 | ## What's Already Automated (Don't Review)
66 |
67 | - ❌ Lint errors → `bun run lint` (automated by pr.yml)
68 | - ❌ Build failures → `bun run build` (automated by pr.yml)
69 | - ❌ Formatting issues → Automated
70 |
71 | ## Focus Your Review On (Significant Issues Only)
72 |
73 | 1. **Architecture & Design**
74 | - Does new tool follow the tool registration pattern?
75 | - Is handler properly typed in NEON_HANDLERS?
76 | - Are Zod schemas correctly defined in toolsSchema.ts?
77 |
78 | 2. **Security Vulnerabilities**
79 | - SQL injection risks (tool handlers using raw SQL)
80 | - Secrets exposure (API keys, tokens logged or returned)
81 | - Input validation gaps (Zod schema completeness)
82 | - Command injection in bash tool uses
83 |
84 | 3. **Logic Bugs**
85 | - Error handling gaps (unhandled promise rejections)
86 | - State management issues (branch ID tracking for multi-call tools)
87 | - Edge cases not covered (null/undefined handling)
88 |
89 | 4. **Performance Issues**
90 | - N+1 API call patterns
91 | - Inefficient Neon API usage
92 | - Missing pagination handling
93 | - Unnecessary data fetching
94 |
95 | 5. **Testing Gaps**
96 | - Missing Braintrust evaluations for new tools
97 | - Uncovered edge cases in existing tests
98 | - Integration test scenarios missing
99 |
100 | 6. **MCP-Specific Issues**
101 | - Tool descriptions not clear for LLMs
102 | - Missing analytics tracking (trackEvent calls)
103 | - Error handling doesn't use ToolError pattern
104 | - Missing Sentry error capture
105 |
106 | ## Review Instructions
107 |
108 | ### Step 1: Analyze the PR
109 | Use `gh pr view` and `gh pr diff` to understand the changes.
110 |
111 | ### Step 2: Identify Significant Issues
112 | - Read the full diff and changed files
113 | - For each significant issue, note: file path, line number, severity, description
114 | - Only flag issues a human reviewer would care about (not lint/format)
115 |
116 | ### Step 3: Post Inline Comments
117 | For each significant issue (max 5 per file), post an inline comment using:
118 |
119 | ```bash
120 | gh api repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number || github.event.inputs.pr_number }}/comments -f body="COMMENT_BODY" -f path="relative/path/to/file.ts" -F line=42 -f side="RIGHT" -f commit_id="${{ github.event.pull_request.head.sha || github.sha }}"
121 | ```
122 |
123 | **IMPORTANT:**
124 | - Use a SINGLE LINE command (no backslashes or line continuations)
125 | - For this PR, use: `gh api repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number || github.event.inputs.pr_number }}/comments`
126 | - Commit SHA: `${{ github.event.pull_request.head.sha || github.sha }}`
127 | - Post comments for EVERY significant issue you find (not just a summary)
128 | - Keep the body text concise and use \n for line breaks within the body parameter
129 |
130 | **Inline Comment Format:**
131 | - Use emoji severity: 🔴 Critical | 🟡 Important | 🔵 Consider
132 | - Start with **[Category]** (Security/Logic/Performance/Architecture/Testing)
133 | - Explain the issue clearly
134 | - Provide actionable fix or suggestion
135 | - Reference CLAUDE.md patterns when applicable
136 |
137 | **Example:**
138 | ```
139 | 🔴 **[Security]**: Potential SQL injection vulnerability. User input is concatenated directly into SQL query.\n\n**Fix:** Use parameterized queries:\n\`\`\`typescript\nconst result = await query('SELECT * FROM users WHERE name = $1', [userName]);\n\`\`\`
140 | ```
141 |
142 | Note: In the actual gh command, newlines are represented as \n within the body parameter.
143 |
144 | ### Step 4: Post Summary Comment
145 | After posting inline comments, create a summary with:
146 | - Review statistics (files, lines, issues)
147 | - Severity breakdown (🔴, 🟡, 🔵 counts)
148 | - Key findings (2-3 most critical issues)
149 | - What looks good (2-3 positive aspects)
150 | - Note that lint/build are automated
151 |
152 | Use `gh pr comment` to post the summary.
153 |
154 | ## Guidelines
155 |
156 | - **Be selective**: Only comment on significant issues worth a human's attention
157 | - **Be specific**: Reference exact lines, provide clear fixes
158 | - **Be constructive**: Explain the "why" behind suggestions
159 | - **Be project-aware**: Use CLAUDE.md patterns and terminology
160 | - **Don't duplicate**: Skip issues automated tools will catch
161 |
162 | # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
163 | # or https://docs.claude.com/en/docs/claude-code/cli-reference for available options
164 | claude_args: '--allowed-tools "Bash(gh:*)"'
165 |
```
--------------------------------------------------------------------------------
/src/tools/utils.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { NEON_DEFAULT_DATABASE_NAME } from '../constants.js';
2 | import { Api, Organization, Branch } from '@neondatabase/api-client';
3 | import { ToolHandlerExtraParams } from './types.js';
4 | import { NotFoundError } from '../server/errors.js';
5 |
6 | export const splitSqlStatements = (sql: string) => {
7 | return sql.split(';').filter(Boolean);
8 | };
9 |
/**
 * SQL statements that install and then invoke `show_db_tree()`, a plpgsql
 * helper which renders databases, schemas, and schema objects (tables,
 * views, materialized views, indexes, sequences, foreign tables, and
 * functions) as an emoji-annotated tree.
 *
 * Statement 0 creates/replaces the function; statement 1 calls it. Run them
 * as-is and in order. NOTE: the function body contains `;` inside a
 * dollar-quoted block, so this array must NOT be re-split with a naive
 * semicolon splitter such as splitSqlStatements.
 */
export const DESCRIBE_DATABASE_STATEMENTS = [
  `
CREATE OR REPLACE FUNCTION public.show_db_tree()
RETURNS TABLE (tree_structure text) AS
$$
BEGIN
    -- First show all databases
    RETURN QUERY
    SELECT ':file_folder: ' || datname || ' (DATABASE)'
    FROM pg_database
    WHERE datistemplate = false;

    -- Then show current database structure
    RETURN QUERY
    WITH RECURSIVE
    -- Get schemas
    schemas AS (
        SELECT
            n.nspname AS object_name,
            1 AS level,
            n.nspname AS path,
            'SCHEMA' AS object_type
        FROM pg_namespace n
        WHERE n.nspname NOT LIKE 'pg_%'
        AND n.nspname != 'information_schema'
    ),

    -- Get all objects (tables, views, functions, etc.)
    objects AS (
        SELECT
            c.relname AS object_name,
            2 AS level,
            s.path || ' → ' || c.relname AS path,
            CASE c.relkind
                WHEN 'r' THEN 'TABLE'
                WHEN 'v' THEN 'VIEW'
                WHEN 'm' THEN 'MATERIALIZED VIEW'
                WHEN 'i' THEN 'INDEX'
                WHEN 'S' THEN 'SEQUENCE'
                WHEN 'f' THEN 'FOREIGN TABLE'
            END AS object_type
        FROM pg_class c
        JOIN pg_namespace n ON n.oid = c.relnamespace
        JOIN schemas s ON n.nspname = s.object_name
        WHERE c.relkind IN ('r','v','m','i','S','f')

        UNION ALL

        SELECT
            p.proname AS object_name,
            2 AS level,
            s.path || ' → ' || p.proname AS path,
            'FUNCTION' AS object_type
        FROM pg_proc p
        JOIN pg_namespace n ON n.oid = p.pronamespace
        JOIN schemas s ON n.nspname = s.object_name
    ),

    -- Combine schemas and objects
    combined AS (
        SELECT * FROM schemas
        UNION ALL
        SELECT * FROM objects
    )

    -- Final output with tree-like formatting
    SELECT
        REPEAT('    ', level) ||
        CASE
            WHEN level = 1 THEN '└── :open_file_folder: '
            ELSE '    └── ' ||
                CASE object_type
                    WHEN 'TABLE' THEN ':bar_chart: '
                    WHEN 'VIEW' THEN ':eye: '
                    WHEN 'MATERIALIZED VIEW' THEN ':newspaper: '
                    WHEN 'FUNCTION' THEN ':zap: '
                    WHEN 'INDEX' THEN ':mag: '
                    WHEN 'SEQUENCE' THEN ':1234: '
                    WHEN 'FOREIGN TABLE' THEN ':globe_with_meridians: '
                    ELSE ''
                END
        END || object_name || ' (' || object_type || ')'
    FROM combined
    ORDER BY path;
END;
$$ LANGUAGE plpgsql;
`,
  `
-- To use the function:
SELECT * FROM show_db_tree();
`,
];
102 |
103 | /**
104 | * Returns the default database for a project branch
105 | * If a database name is provided, it fetches and returns that database
106 | * Otherwise, it looks for a database named 'neondb' and returns that
107 | * If 'neondb' doesn't exist, it returns the first available database
108 | * Throws an error if no databases are found
109 | */
110 | export async function getDefaultDatabase(
111 | {
112 | projectId,
113 | branchId,
114 | databaseName,
115 | }: {
116 | projectId: string;
117 | branchId: string;
118 | databaseName?: string;
119 | },
120 | neonClient: Api<unknown>,
121 | ) {
122 | const { data } = await neonClient.listProjectBranchDatabases(
123 | projectId,
124 | branchId,
125 | );
126 | const databases = data.databases;
127 | if (databases.length === 0) {
128 | throw new NotFoundError('No databases found in your project branch');
129 | }
130 |
131 | if (databaseName) {
132 | const requestedDatabase = databases.find((db) => db.name === databaseName);
133 | if (requestedDatabase) {
134 | return requestedDatabase;
135 | }
136 | }
137 |
138 | const defaultDatabase = databases.find(
139 | (db) => db.name === NEON_DEFAULT_DATABASE_NAME,
140 | );
141 | return defaultDatabase || databases[0];
142 | }
143 |
144 | /**
145 | * Resolves the organization ID for API calls that require org_id parameter.
146 | *
147 | * For new users (those without billing_account), this function fetches user's organizations and auto-selects only organization managed by console. If there are multiple organizations managed by console, it throws an error asking user to specify org_id.
148 | *
149 | * For existing users (with billing_account), returns undefined to use default behavior.
150 | *
151 | * @param params - The parameters object that may contain org_id
152 | * @param neonClient - The Neon API client
153 | * @returns The organization to use, or undefined for default behavior
154 | */
155 | export async function getOrgByOrgIdOrDefault(
156 | params: { org_id?: string },
157 | neonClient: Api<unknown>,
158 | extra: ToolHandlerExtraParams,
159 | ): Promise<Organization | undefined> {
160 | // 1. If org_id is provided use it
161 | // 2. If using Org API key, use the account id
162 | if (params.org_id || extra.account.isOrg) {
163 | const orgId = params.org_id || extra.account.id;
164 | const { data } = await neonClient.getOrganization(orgId);
165 | return data;
166 | }
167 |
168 | const { data: user } = await neonClient.getCurrentUserInfo();
169 | if (user.billing_account) {
170 | return undefined;
171 | }
172 |
173 | const { data: response } = await neonClient.getCurrentUserOrganizations();
174 | const organizations = response.organizations || [];
175 |
176 | // 1. Filter organizations by managed_by==console, if there is only one organization, return that
177 | const consoleOrganizations = organizations.filter(
178 | (org) => org.managed_by === 'console',
179 | );
180 |
181 | if (consoleOrganizations.length === 1) {
182 | return consoleOrganizations[0];
183 | }
184 |
185 | // 2. If there are no organizations managed by console, and if there is only one organization (unfiltered), then return that organization
186 | if (consoleOrganizations.length === 0 && organizations.length === 1) {
187 | return organizations[0];
188 | }
189 |
190 | // 3. If there are no organizations at all, then throw error saying there are no organizations
191 | if (organizations.length === 0) {
192 | throw new NotFoundError('No organizations found for this user');
193 | }
194 |
195 | // 4. If there are multiple organizations, then throw error mentioning list of all these orgs (unfiltered)
196 | const orgList = organizations
197 | .map(
198 | (org) => `- ${org.name} (ID: ${org.id}) [managed by: ${org.managed_by}]`,
199 | )
200 | .join('\n');
201 | throw new NotFoundError(
202 | `Multiple organizations found. Please specify the org_id parameter with one of the following organization IDs:\n${orgList}`,
203 | );
204 | }
205 |
206 | export function filterOrganizations(
207 | organizations: Organization[],
208 | search?: string,
209 | ) {
210 | if (!search) {
211 | return organizations;
212 | }
213 | const searchLower = search.toLowerCase();
214 | return organizations.filter(
215 | (org) =>
216 | org.name.toLowerCase().includes(searchLower) ||
217 | org.id.toLowerCase().includes(searchLower),
218 | );
219 | }
220 |
221 | /**
222 | * Checks if a string looks like a branch ID based on the neonctl format
223 | * Branch IDs have format like "br-small-term-683261" (br- prefix + haiku pattern)
224 | */
225 | export function looksLikeBranchId(branch: string): boolean {
226 | const HAIKU_REGEX = /^[a-z0-9]+-[a-z0-9]+-[a-z0-9]+$/;
227 | return branch.startsWith('br-') && HAIKU_REGEX.test(branch.substring(3));
228 | }
229 |
/**
 * Checks if a string looks like a project ID based on format from console.
 * Matches two alphabetic words plus an exactly-8-digit suffix, all joined by
 * hyphens, e.g. "black-recipe-75251165".
 *
 * NOTE(review): the previous doc example ("small-term-683261") had only a
 * 6-digit suffix and would NOT match the `{8}` quantifier below — confirm
 * which suffix length the console actually generates.
 */
export function looksLikeProjectId(projectId: string): boolean {
  const HAIKU_REGEX = /^[a-zA-Z]+-[a-zA-Z]+-[0-9]{8}$/;
  return HAIKU_REGEX.test(projectId);
}
238 |
239 | /**
240 | * Resolves a branch name or ID to the actual branch ID
241 | * If the input looks like a branch ID, returns it as-is
242 | * Otherwise, searches for a branch with matching name and returns its ID
243 | */
244 | export async function resolveBranchId(
245 | branchNameOrId: string,
246 | projectId: string,
247 | neonClient: Api<unknown>,
248 | ): Promise<{ branchId: string; branches: Branch[] }> {
249 | // Get all branches (we'll need this data anyway)
250 | const branchResponse = await neonClient.listProjectBranches({
251 | projectId,
252 | });
253 | const branches = branchResponse.data.branches;
254 |
255 | if (looksLikeBranchId(branchNameOrId)) {
256 | // Verify the branch ID actually exists
257 | const branch = branches.find((b) => b.id === branchNameOrId);
258 | if (!branch) {
259 | throw new NotFoundError(
260 | `Branch ID "${branchNameOrId}" not found in project ${projectId}`,
261 | );
262 | }
263 | return { branchId: branchNameOrId, branches };
264 | }
265 |
266 | // Search by name
267 | const branch = branches.find((b) => b.name === branchNameOrId);
268 | if (!branch) {
269 | const availableBranches = branches.map((b) => b.name).join(', ');
270 | throw new NotFoundError(
271 | `Branch name "${branchNameOrId}" not found in project ${projectId}.\nAvailable branches: ${availableBranches}`,
272 | );
273 | }
274 |
275 | return { branchId: branch.id, branches };
276 | }
277 |
```
--------------------------------------------------------------------------------
/src/tools-evaluations/prepare-database-migration.eval.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { Eval, EvalCase, Reporter, reportFailures } from 'braintrust';
2 | import { LLMClassifierFromTemplate } from 'autoevals';
3 |
4 | import { createApiClient } from '@neondatabase/api-client';
5 | import { deleteNonDefaultBranches, evaluateTask } from './evalUtils';
6 |
// Fixed Neon project used as the fixture for this eval suite. The ids below
// are referenced by the task and scorers; mainBranchId is the branch whose
// schema must remain untouched by the migrations under test.
const EVAL_INFO = {
  projectId: 'black-recipe-75251165',
  roleName: 'neondb_owner',
  databaseName: 'neondb',
  mainBranchId: 'br-cold-bird-a5icgh5h',
};
13 |
14 | const getMainBranchDatabaseSchema = async () => {
15 | const neonClient = createApiClient({
16 | apiKey: process.env.NEON_API_KEY!,
17 | });
18 |
19 | const dbSchema = await neonClient.getProjectBranchSchema({
20 | projectId: EVAL_INFO.projectId,
21 | branchId: EVAL_INFO.mainBranchId,
22 | db_name: EVAL_INFO.databaseName,
23 | });
24 |
25 | return dbSchema.data.sql;
26 | };
27 |
// LLM-as-judge scorer: grades the submitted answer against the expert answer
// on factual content only (A–E rubric below), deliberately ignoring
// migration IDs, formatting and ordering differences. Runs with
// temperature 0 and chain-of-thought enabled on Claude 3.5 Sonnet.
const factualityAnthropic = LLMClassifierFromTemplate({
  name: 'Factuality Anthropic',
  promptTemplate: `
    You are comparing a submitted answer to an expert answer on a given question. Here is the data:
    [BEGIN DATA]
    ************
    [Question]: {{{input}}}
    ************
    [Expert]: {{{expected}}}
    ************
    [Submission]: {{{output}}}
    ************
    [END DATA]

    Compare the factual content of the submitted answer with the expert answer.
    Implementation details like specific IDs, or exact formatting should be considered non-factual differences.

    Ignore the following differences:
    - Specific migration IDs or references
    - Formatting or structural variations
    - Order of presenting the information
    - Restatements of the same request/question
    - Additional confirmatory language that doesn't add new information

    The submitted answer may either be:
    (A) A subset missing key factual information from the expert answer
    (B) A superset that FIRST agrees with the expert answer's core facts AND THEN adds additional factual information
    (C) Factually equivalent to the expert answer
    (D) In factual disagreement with or takes a completely different action than the expert answer
    (E) Different only in non-factual implementation details

    Select the most appropriate option, prioritizing the core factual content over implementation specifics.
  `,
  // Partial credit for subsets/supersets; full credit for equivalence or
  // purely non-factual differences; zero for factual disagreement.
  choiceScores: {
    A: 0.4,
    B: 0.8,
    C: 1,
    D: 0,
    E: 1,
  },
  temperature: 0,
  useCoT: true,
  model: 'claude-3-5-sonnet-20241022',
});
72 |
73 | const mainBranchIntegrityCheck = async (args: {
74 | input: string;
75 | output: string;
76 | expected: string;
77 | metadata?: {
78 | databaseSchemaBeforeRun: string;
79 | databaseSchemaAfterRun: string;
80 | };
81 | }) => {
82 | const databaseSchemaBeforeRun = args.metadata?.databaseSchemaBeforeRun;
83 | const databaseSchemaAfterRun = args.metadata?.databaseSchemaAfterRun;
84 | const databaseSchemaAfterRunResponseIsComplete =
85 | databaseSchemaAfterRun?.includes('PostgreSQL database dump complete') ??
86 | false;
87 |
88 | // sometimes the pg_dump fails to deliver the full responses, which leads to false negatives
89 | // so we must eject
90 | if (!databaseSchemaAfterRunResponseIsComplete) {
91 | return null;
92 | }
93 |
94 | const isSame = databaseSchemaBeforeRun === databaseSchemaAfterRun;
95 |
96 | return {
97 | name: 'Main Branch Integrity Check',
98 | score: isSame ? 1 : 0,
99 | };
100 | };
101 |
102 | Eval('prepare_database_migration', {
103 | data: (): EvalCase<
104 | string,
105 | string,
106 | | {
107 | databaseSchemaBeforeRun: string;
108 | databaseSchemaAfterRun: string;
109 | }
110 | | undefined
111 | >[] => {
112 | return [
113 | // Add column
114 | {
115 | input: `in my ${EVAL_INFO.projectId} project, add a new column Description to the posts table`,
116 | expected: `
117 | I've verified that the Description column has been successfully added to the posts table in a temporary branch. Would you like to commit the migration to the main branch?
118 |
119 | Migration Details:
120 | - Migration ID: <migration_id>
121 | - Temporary Branch Name: <temporary_branch_name>
122 | - Temporary Branch ID: <temporary_branch_id>
123 | - Migration Result: <migration_result>
124 | `,
125 | },
126 |
127 | // Add column with different type
128 | {
129 | input: `in my ${EVAL_INFO.projectId} project, add view_count column to posts table`,
130 | expected: `
131 | I've verified that the view_count column has been successfully added to the posts table in a temporary branch. Would you like to commit the migration to the main branch?
132 |
133 | Migration Details:
134 | - Migration ID: <migration_id>
135 | - Temporary Branch Name: <temporary_branch_name>
136 | - Temporary Branch ID: <temporary_branch_id>
137 | - Migration Result: <migration_result>
138 | `,
139 | },
140 |
141 | // Rename column
142 | {
143 | input: `in my ${EVAL_INFO.projectId} project, rename the content column to body in posts table`,
144 | expected: `
145 | I've verified that the content column has been successfully renamed to body in the posts table in a temporary branch. Would you like to commit the migration to the main branch?
146 |
147 | Migration Details:
148 | - Migration ID: <migration_id>
149 | - Temporary Branch Name: <temporary_branch_name>
150 | - Temporary Branch ID: <temporary_branch_id>
151 | - Migration Result: <migration_result>
152 | `,
153 | },
154 |
155 | // Add index
156 | {
157 | input: `in my ${EVAL_INFO.projectId} project, create an index on title column in posts table`,
158 | expected: `
159 | I've verified that the index has been successfully created on the title column in the posts table in a temporary branch. Would you like to commit the migration to the main branch?
160 |
161 | Migration Details:
162 | - Migration ID: <migration_id>
163 | - Temporary Branch Name: <temporary_branch_name>
164 | - Temporary Branch ID: <temporary_branch_id>
165 | - Migration Result: <migration_result>
166 | `,
167 | },
168 |
169 | // Drop column
170 | {
171 | input: `in my ${EVAL_INFO.projectId} project, drop the content column from posts table`,
172 | expected: `
173 | I've verified that the content column has been successfully dropped from the posts table in a temporary branch. Would you like to commit the migration to the main branch?
174 |
175 | Migration Details:
176 | - Migration ID: <migration_id>
177 | - Temporary Branch Name: <temporary_branch_name>
178 | - Temporary Branch ID: <temporary_branch_id>
179 | - Migration Result: <migration_result>
180 | `,
181 | },
182 |
183 | // Alter column type
184 | {
185 | input: `in my ${EVAL_INFO.projectId} project, change the title column type to text in posts table`,
186 | expected: `
187 | I've verified that the data type of the title column has been successfully changed in the posts table in a temporary branch. Would you like to commit the migration to the main branch?
188 |
189 | Migration Details:
190 | - Migration ID: <migration_id>
191 | - Temporary Branch Name: <temporary_branch_name>
192 | - Temporary Branch ID: <temporary_branch_id>
193 | - Migration Result: <migration_result>
194 | `,
195 | },
196 |
197 | // Add boolean column
198 | {
199 | input: `in my ${EVAL_INFO.projectId} project, add is_published column to posts table`,
200 | expected: `
201 | I've verified that the is_published column has been successfully added to the posts table in a temporary branch. Would you like to commit the migration to the main branch?
202 |
203 | Migration Details:
204 | - Migration ID: <migration_id>
205 | - Temporary Branch Name: <temporary_branch_name>
206 | - Temporary Branch ID: <temporary_branch_id>
207 | - Migration Result: <migration_result>
208 | `,
209 | },
210 |
211 | // Add numeric column
212 | {
213 | input: `in my ${EVAL_INFO.projectId} project, add likes_count column to posts table`,
214 | expected: `
215 | I've verified that the likes_count column has been successfully added to the posts table in a temporary branch. Would you like to commit the migration to the main branch?
216 |
217 | Migration Details:
218 | - Migration ID: <migration_id>
219 | - Temporary Branch Name: <temporary_branch_name>
220 | - Temporary Branch ID: <temporary_branch_id>
221 | - Migration Result: <migration_result>
222 | `,
223 | },
224 |
225 | // Create index
226 | {
227 | input: `in my ${EVAL_INFO.projectId} project, create index on title column in posts table`,
228 | expected: `
229 | I've verified that the index has been successfully created on the title column in the posts table in a temporary branch. Would you like to commit the migration to the main branch?
230 |
231 | Migration Details:
232 | - Migration ID: <migration_id>
233 | - Temporary Branch Name: <temporary_branch_name>
234 | - Temporary Branch ID: <temporary_branch_id>
235 | - Migration Result: <migration_result>
236 | `,
237 | },
238 | ];
239 | },
240 | task: async (input, hooks) => {
241 | const databaseSchemaBeforeRun = await getMainBranchDatabaseSchema();
242 | hooks.metadata.databaseSchemaBeforeRun = databaseSchemaBeforeRun;
243 |
244 | const llmCallMessages = await evaluateTask(input);
245 |
246 | const databaseSchemaAfterRun = await getMainBranchDatabaseSchema();
247 | hooks.metadata.databaseSchemaAfterRun = databaseSchemaAfterRun;
248 | hooks.metadata.llmCallMessages = llmCallMessages;
249 |
250 | deleteNonDefaultBranches(EVAL_INFO.projectId);
251 |
252 | const finalMessage = llmCallMessages[llmCallMessages.length - 1];
253 | return finalMessage.content;
254 | },
255 | trialCount: 20,
256 | maxConcurrency: 2,
257 | scores: [factualityAnthropic, mainBranchIntegrityCheck],
258 | });
259 |
// Custom Braintrust reporter: prints each eval's summary (JSON when jsonl is
// requested), surfaces failing cases, and reports overall success.
Reporter('Prepare Database Migration Reporter', {
  // Per-eval report: log failures (if any) and the summary; the boolean
  // return feeds reportRun below.
  reportEval: async (evaluator, result, { verbose, jsonl }) => {
    const { results, summary } = result;
    const failingResults = results.filter(
      (r: { error: unknown }) => r.error !== undefined,
    );

    if (failingResults.length > 0) {
      reportFailures(evaluator, failingResults, { verbose, jsonl });
    }

    console.log(jsonl ? JSON.stringify(summary) : summary);
    return failingResults.length === 0;
  },

  // Run-level cleanup: delete all non-default branches left over after the
  // run, then report overall success only if every eval passed.
  reportRun: async (evalReports) => {
    await deleteNonDefaultBranches(EVAL_INFO.projectId);

    return evalReports.every((r) => r);
  },
});
282 |
```
--------------------------------------------------------------------------------
/src/tools/toolsSchema.ts:
--------------------------------------------------------------------------------
```typescript
1 | import {
2 | ListProjectsParams,
3 | ListSharedProjectsParams,
4 | } from '@neondatabase/api-client';
5 | import { z } from 'zod';
6 | import { NEON_DEFAULT_DATABASE_NAME } from '../constants.js';
7 |
// Compile-time guard: maps an api-client parameter type onto the shape of a
// Zod object schema, so `satisfies ZodObjectParams<...>` fails the build if
// a schema drifts from the API's parameter type.
type ZodObjectParams<T> = z.ZodObject<{ [key in keyof T]: z.ZodType<T[key]> }>;

// Shared description for the optional databaseName field reused by many tools.
const DATABASE_NAME_DESCRIPTION = `The name of the database. If not provided, the default ${NEON_DEFAULT_DATABASE_NAME} or first available database is used.`;
11 |
12 | export const listProjectsInputSchema = z.object({
13 | cursor: z
14 | .string()
15 | .optional()
16 | .describe(
17 | 'Specify the cursor value from the previous response to retrieve the next batch of projects.',
18 | ),
19 | limit: z
20 | .number()
21 | .default(10)
22 | .describe(
23 | 'Specify a value from 1 to 400 to limit number of projects in the response.',
24 | ),
25 | search: z
26 | .string()
27 | .optional()
28 | .describe(
29 | 'Search by project name or id. You can specify partial name or id values to filter results.',
30 | ),
31 | org_id: z.string().optional().describe('Search for projects by org_id.'),
32 | }) satisfies ZodObjectParams<ListProjectsParams>;
33 |
// Input schema for the create_project tool.
export const createProjectInputSchema = z.object({
  name: z
    .string()
    .optional()
    .describe('An optional name of the project to create.'),
  org_id: z
    .string()
    .optional()
    .describe('Create project in a specific organization.'),
});

// Input schema for the delete_project tool.
export const deleteProjectInputSchema = z.object({
  projectId: z.string().describe('The ID of the project to delete'),
});

// Input schema for the describe_project tool.
export const describeProjectInputSchema = z.object({
  projectId: z.string().describe('The ID of the project to describe'),
});

// Input schema for the run_sql tool (single statement).
export const runSqlInputSchema = z.object({
  sql: z.string().describe('The SQL query to execute'),
  projectId: z
    .string()
    .describe('The ID of the project to execute the query against'),
  branchId: z
    .string()
    .optional()
    .describe(
      'An optional ID of the branch to execute the query against. If not provided the default branch is used.',
    ),
  databaseName: z.string().optional().describe(DATABASE_NAME_DESCRIPTION),
});

// Input schema for the run_sql_transaction tool (multiple statements run
// atomically).
export const runSqlTransactionInputSchema = z.object({
  sqlStatements: z.array(z.string()).describe('The SQL statements to execute'),
  projectId: z
    .string()
    .describe('The ID of the project to execute the query against'),
  branchId: z
    .string()
    .optional()
    .describe(
      'An optional ID of the branch to execute the query against. If not provided the default branch is used.',
    ),
  databaseName: z.string().optional().describe(DATABASE_NAME_DESCRIPTION),
});
80 |
// Input schema for the explain_sql_statement tool.
export const explainSqlStatementInputSchema = z.object({
  sql: z.string().describe('The SQL statement to analyze'),
  projectId: z
    .string()
    .describe('The ID of the project to execute the query against'),
  branchId: z
    .string()
    .optional()
    .describe(
      'An optional ID of the branch to execute the query against. If not provided the default branch is used.',
    ),
  databaseName: z.string().optional().describe(DATABASE_NAME_DESCRIPTION),
  analyze: z
    .boolean()
    .default(true)
    .describe('Whether to include ANALYZE in the EXPLAIN command'),
});
// Input schema for the describe_table_schema tool.
export const describeTableSchemaInputSchema = z.object({
  tableName: z.string().describe('The name of the table'),
  projectId: z
    .string()
    .describe('The ID of the project to execute the query against'),
  branchId: z
    .string()
    .optional()
    .describe(
      'An optional ID of the branch to execute the query against. If not provided the default branch is used.',
    ),
  databaseName: z.string().optional().describe(DATABASE_NAME_DESCRIPTION),
});

// Input schema for the get_database_tables tool.
export const getDatabaseTablesInputSchema = z.object({
  projectId: z.string().describe('The ID of the project'),
  branchId: z
    .string()
    .optional()
    .describe(
      'An optional ID of the branch. If not provided the default branch is used.',
    ),
  databaseName: z.string().optional().describe(DATABASE_NAME_DESCRIPTION),
});

// Input schema for the create_branch tool.
export const createBranchInputSchema = z.object({
  projectId: z
    .string()
    .describe('The ID of the project to create the branch in'),
  branchName: z.string().optional().describe('An optional name for the branch'),
});

// Input schema for the prepare_database_migration tool (first step of the
// two-step migration flow).
export const prepareDatabaseMigrationInputSchema = z.object({
  migrationSql: z
    .string()
    .describe('The SQL to execute to create the migration'),
  projectId: z
    .string()
    .describe('The ID of the project to execute the query against'),
  databaseName: z.string().optional().describe(DATABASE_NAME_DESCRIPTION),
});

// Input schema for the complete_database_migration tool (second step).
// NOTE(review): migrationId has no .describe() — unlike every other field in
// this file, the LLM gets no guidance for it.
export const completeDatabaseMigrationInputSchema = z.object({
  migrationId: z.string(),
});
143 |
// Input schema for the describe_branch tool.
export const describeBranchInputSchema = z.object({
  projectId: z.string().describe('The ID of the project'),
  branchId: z.string().describe('An ID of the branch to describe'),
  databaseName: z.string().optional().describe(DATABASE_NAME_DESCRIPTION),
});

// Input schema for the delete_branch tool.
export const deleteBranchInputSchema = z.object({
  projectId: z.string().describe('The ID of the project containing the branch'),
  branchId: z.string().describe('The ID of the branch to delete'),
});

// Input schema for the get_connection_string tool.
// NOTE(review): projectId is required by the schema, yet its description says
// "If not provided, the only available project will be used" — one of the two
// is wrong; confirm whether the field should be .optional().
export const getConnectionStringInputSchema = z.object({
  projectId: z
    .string()
    .describe(
      'The ID of the project. If not provided, the only available project will be used.',
    ),
  branchId: z
    .string()
    .optional()
    .describe(
      'The ID or name of the branch. If not provided, the default branch will be used.',
    ),
  computeId: z
    .string()
    .optional()
    .describe(
      'The ID of the compute/endpoint. If not provided, the read-write compute associated with the branch will be used.',
    ),
  databaseName: z.string().optional().describe(DATABASE_NAME_DESCRIPTION),
  roleName: z
    .string()
    .optional()
    .describe(
      'The name of the role to connect with. If not provided, the database owner name will be used.',
    ),
});
181 |
// Input schema for the provision_neon_auth tool.
export const provisionNeonAuthInputSchema = z.object({
  projectId: z
    .string()
    .describe('The ID of the project to provision Neon Auth for'),
  branchId: z
    .string()
    .optional()
    .describe(
      'An optional ID of the branch to provision Neon Auth for. If not provided, the default branch is used.',
    ),
  databaseName: z
    .string()
    .optional()
    .describe(
      'The database name to provision Neon Auth for. If not provided, the default database is used.',
    ),
});

// Input schema for the prepare_query_tuning tool (first step of the
// two-step tuning flow).
export const prepareQueryTuningInputSchema = z.object({
  sql: z.string().describe('The SQL statement to analyze and tune'),
  databaseName: z
    .string()
    .describe('The name of the database to execute the query against'),
  projectId: z
    .string()
    .describe('The ID of the project to execute the query against'),
  roleName: z
    .string()
    .optional()
    .describe(
      'The name of the role to connect with. If not provided, the default role (usually "neondb_owner") will be used.',
    ),
});

// Input schema for the complete_query_tuning tool (second step). Carries the
// state (tuningId, temporaryBranchId, roleName) produced by
// prepare_query_tuning across the multi-call flow.
export const completeQueryTuningInputSchema = z.object({
  suggestedSqlStatements: z
    .array(z.string())
    .describe(
      'The SQL DDL statements to execute to improve performance. These statements are the result of the prior steps, for example creating additional indexes.',
    ),
  applyChanges: z
    .boolean()
    .default(false)
    .describe('Whether to apply the suggested changes to the main branch'),
  tuningId: z
    .string()
    .describe(
      'The ID of the tuning to complete. This is NOT the branch ID. Remember this ID from the prior step using tool prepare_query_tuning.',
    ),
  databaseName: z
    .string()
    .describe('The name of the database to execute the query against'),
  projectId: z
    .string()
    .describe('The ID of the project to execute the query against'),
  roleName: z
    .string()
    .optional()
    .describe(
      'The name of the role to connect with. If you have used a specific role in prepare_query_tuning you MUST pass the same role again to this tool. If not provided, the default role (usually "neondb_owner") will be used.',
    ),
  shouldDeleteTemporaryBranch: z
    .boolean()
    .default(true)
    .describe('Whether to delete the temporary branch after tuning'),
  temporaryBranchId: z
    .string()
    .describe(
      'The ID of the temporary branch that needs to be deleted after tuning.',
    ),
  branchId: z
    .string()
    .optional()
    .describe(
      'The ID or name of the branch that receives the changes. If not provided, the default (main) branch will be used.',
    ),
});
259 |
260 | export const listSlowQueriesInputSchema = z.object({
261 | projectId: z
262 | .string()
263 | .describe('The ID of the project to list slow queries from'),
264 | branchId: z
265 | .string()
266 | .optional()
267 | .describe(
268 | 'An optional ID of the branch. If not provided the default branch is used.',
269 | ),
270 | databaseName: z.string().optional().describe(DATABASE_NAME_DESCRIPTION),
271 | computeId: z
272 | .string()
273 | .optional()
274 | .describe(
275 | 'The ID of the compute/endpoint. If not provided, the read-write compute associated with the branch will be used.',
276 | ),
277 | limit: z
278 | .number()
279 | .optional()
280 | .default(10)
281 | .describe('Maximum number of slow queries to return'),
282 | minExecutionTime: z
283 | .number()
284 | .optional()
285 | .default(1000)
286 | .describe(
287 | 'Minimum execution time in milliseconds to consider a query as slow',
288 | ),
289 | });
290 |
291 | export const listBranchComputesInputSchema = z.object({
292 | projectId: z
293 | .string()
294 | .optional()
295 | .describe(
296 | 'The ID of the project. If not provided, the only available project will be used.',
297 | ),
298 | branchId: z
299 | .string()
300 | .optional()
301 | .describe(
302 | 'The ID of the branch. If provided, endpoints for this specific branch will be listed.',
303 | ),
304 | });
305 |
306 | export const listOrganizationsInputSchema = z.object({
307 | search: z
308 | .string()
309 | .optional()
310 | .describe(
311 | 'Search organizations by name or ID. You can specify partial name or ID values to filter results.',
312 | ),
313 | });
314 |
315 | export const listSharedProjectsInputSchema = z.object({
316 | cursor: z
317 | .string()
318 | .optional()
319 | .describe(
320 | 'Specify the cursor value from the previous response to retrieve the next batch of shared projects.',
321 | ),
322 | limit: z
323 | .number()
324 | .default(10)
325 | .describe(
326 | 'Specify a value from 1 to 400 to limit number of shared projects in the response.',
327 | ),
328 | search: z
329 | .string()
330 | .optional()
331 | .describe(
332 | 'Search by project name or id. You can specify partial name or id values to filter results.',
333 | ),
334 | }) satisfies ZodObjectParams<ListSharedProjectsParams>;
335 |
336 | export const resetFromParentInputSchema = z.object({
337 | projectId: z.string().describe('The ID of the project containing the branch'),
338 | branchIdOrName: z
339 | .string()
340 | .describe('The name or ID of the branch to reset from its parent'),
341 | preserveUnderName: z
342 | .string()
343 | .optional()
344 | .describe(
345 | 'Optional name to preserve the current state under a new branch before resetting',
346 | ),
347 | });
348 |
349 | export const compareDatabaseSchemaInputSchema = z.object({
350 | projectId: z.string().describe('The ID of the project'),
351 | branchId: z.string().describe('The ID of the branch'),
352 | databaseName: z.string().describe(DATABASE_NAME_DESCRIPTION),
353 | });
354 |
355 | export const searchInputSchema = z.object({
356 | query: z
357 | .string()
358 | .min(3)
359 | .describe(
360 | 'The search query to find matching organizations, projects, or branches',
361 | ),
362 | });
363 |
364 | export const fetchInputSchema = z.object({
365 | id: z
366 | .string()
367 | .min(1)
368 | .describe(
369 | 'The ID returned by the search tool to fetch detailed information about the entity',
370 | ),
371 | });
372 |
373 | export const loadResourceInputSchema = z.object({
374 | subject: z
375 | .enum(['neon-get-started'])
376 | .describe(
377 | 'The subject of the resource to load. Options: neon-get-started (Neon getting started guide).',
378 | ),
379 | });
380 |
```
--------------------------------------------------------------------------------
/src/oauth/server.ts:
--------------------------------------------------------------------------------
```typescript
1 | import {
2 | Request as ExpressRequest,
3 | Response as ExpressResponse,
4 | } from 'express';
5 | import { AuthorizationCode, Client } from 'oauth2-server';
6 | import { model } from './model.js';
7 | import { logger } from '../utils/logger.js';
8 | import express from 'express';
9 | import {
10 | decodeAuthParams,
11 | extractClientCredentials,
12 | generateRandomString,
13 | parseAuthRequest,
14 | toMilliseconds,
15 | toSeconds,
16 | verifyPKCE,
17 | } from './utils.js';
18 | import { exchangeCode, exchangeRefreshToken, upstreamAuth } from './client.js';
19 | import { createNeonClient } from '../server/api.js';
20 | import bodyParser from 'body-parser';
21 | import { SERVER_HOST, COOKIE_SECRET } from '../constants.js';
22 | import {
23 | isClientAlreadyApproved,
24 | updateApprovedClientsCookie,
25 | } from './cookies.js';
26 | import { identify } from '../analytics/analytics.js';
27 |
28 | const SUPPORTED_GRANT_TYPES = ['authorization_code', 'refresh_token'];
29 | const SUPPORTED_RESPONSE_TYPES = ['code'];
30 | const SUPPORTED_AUTH_METHODS = [
31 | 'client_secret_post',
32 | 'client_secret_basic',
33 | 'none',
34 | ];
35 | const SUPPORTED_CODE_CHALLENGE_METHODS = ['S256'];
36 | export const metadata = (req: ExpressRequest, res: ExpressResponse) => {
37 | res.json({
38 | issuer: SERVER_HOST,
39 | authorization_endpoint: `${SERVER_HOST}/authorize`,
40 | token_endpoint: `${SERVER_HOST}/token`,
41 | registration_endpoint: `${SERVER_HOST}/register`,
42 | response_types_supported: SUPPORTED_RESPONSE_TYPES,
43 | response_modes_supported: ['query'],
44 | grant_types_supported: SUPPORTED_GRANT_TYPES,
45 | token_endpoint_auth_methods_supported: SUPPORTED_AUTH_METHODS,
46 | registration_endpoint_auth_methods_supported: SUPPORTED_AUTH_METHODS,
47 | code_challenge_methods_supported: SUPPORTED_CODE_CHALLENGE_METHODS,
48 | });
49 | };
50 |
51 | export const registerClient = async (
52 | req: ExpressRequest,
53 | res: ExpressResponse,
54 | ) => {
55 | const payload = req.body;
56 | logger.info('request to register client: ', {
57 | name: payload.client_name,
58 | client_uri: payload.client_uri,
59 | });
60 |
61 | if (payload.client_name === undefined) {
62 | res
63 | .status(400)
64 | .json({ code: 'invalid_request', error: 'client_name is required' });
65 | return;
66 | }
67 |
68 | if (payload.redirect_uris === undefined) {
69 | res
70 | .status(400)
71 | .json({ code: 'invalid_request', error: 'redirect_uris is required' });
72 | return;
73 | }
74 |
75 | if (
76 | payload.grant_types === undefined ||
77 | !payload.grant_types.every((grant: string) =>
78 | SUPPORTED_GRANT_TYPES.includes(grant),
79 | )
80 | ) {
81 | res.status(400).json({
82 | code: 'invalid_request',
83 | error:
84 | 'grant_types is required and must only include supported grant types',
85 | });
86 | return;
87 | }
88 |
89 | if (
90 | payload.response_types === undefined ||
91 | !payload.response_types.every((responseType: string) =>
92 | SUPPORTED_RESPONSE_TYPES.includes(responseType),
93 | )
94 | ) {
95 | res.status(400).json({
96 | code: 'invalid_request',
97 | error:
98 | 'response_types is required and must only include supported response types',
99 | });
100 | return;
101 | }
102 |
103 | try {
104 | const clientId = generateRandomString(8);
105 | const clientSecret = generateRandomString(32);
106 | const client: Client = {
107 | ...payload,
108 | id: clientId,
109 | secret: clientSecret,
110 | tokenEndpointAuthMethod:
111 | (req.body.token_endpoint_auth_method as string) ?? 'client_secret_post',
112 | registrationDate: Math.floor(Date.now() / 1000),
113 | };
114 |
115 | await model.saveClient(client);
116 | logger.info('new client registered', {
117 | clientId,
118 | client_name: payload.client_name,
119 | redirect_uris: payload.redirect_uris,
120 | client_uri: payload.client_uri,
121 | });
122 |
123 | res.json({
124 | client_id: clientId,
125 | client_secret: clientSecret,
126 | client_name: payload.client_name,
127 | redirect_uris: payload.redirect_uris,
128 | token_endpoint_auth_method: client.tokenEndpointAuthMethod,
129 | });
130 | } catch (error: unknown) {
131 | const message = error instanceof Error ? error.message : 'Unknown error';
132 | logger.error('failed to register client:', {
133 | message,
134 | error,
135 | client: payload.client_name,
136 | client_uri: payload.client_uri,
137 | });
138 | res.status(500).json({ code: 'server_error', error, message });
139 | }
140 | };
141 |
const authRouter = express.Router();
// Discovery metadata (RFC 8414) and dynamic client registration (RFC 7591).
authRouter.get('/.well-known/oauth-authorization-server', metadata);
authRouter.post('/register', bodyParser.json(), registerClient);
145 |
146 | /*
147 | Initiate the authorization code grant flow by validating the request parameters and then redirecting to the upstream authorization server.
148 |
149 | Step 1:
150 | MCP client should invoke this endpoint with the following parameters:
151 | <code>
152 | /authorize?client_id=clientId&redirect_uri=mcp://callback&response_type=code&scope=scope&code_challenge=codeChallenge&code_challenge_method=S256
153 | </code>
154 |
155 | This endpoint will validate the `client_id` and other request parameters and then capture the parameters on `state` param and redirect to the upstream authorization server.
156 | */
157 | authRouter.get(
158 | '/authorize',
159 | bodyParser.urlencoded({ extended: true }),
160 | async (req: ExpressRequest, res: ExpressResponse) => {
161 | const requestParams = parseAuthRequest(req);
162 |
163 | const clientId = requestParams.clientId;
164 | const client = await model.getClient(clientId, '');
165 | if (!client) {
166 | res
167 | .status(400)
168 | .json({ code: 'invalid_request', error: 'invalid client id' });
169 | return;
170 | }
171 |
172 | if (
173 | requestParams.responseType == undefined ||
174 | !client.response_types.includes(requestParams.responseType)
175 | ) {
176 | res
177 | .status(400)
178 | .json({ code: 'invalid_request', error: 'invalid response type' });
179 | return;
180 | }
181 |
182 | if (
183 | requestParams.redirectUri == undefined ||
184 | !client.redirect_uris.includes(requestParams.redirectUri)
185 | ) {
186 | res
187 | .status(400)
188 | .json({ code: 'invalid_request', error: 'invalid redirect uri' });
189 | return;
190 | }
191 |
192 | if (await isClientAlreadyApproved(req, client.id, COOKIE_SECRET)) {
193 | const authUrl = await upstreamAuth(btoa(JSON.stringify(requestParams)));
194 | res.redirect(authUrl.href);
195 | return;
196 | }
197 |
198 | res.render('approval-dialog', {
199 | client,
200 | state: btoa(JSON.stringify(requestParams)),
201 | });
202 | },
203 | );
204 |
205 | authRouter.post(
206 | '/authorize',
207 | bodyParser.urlencoded({ extended: true }),
208 | async (req: ExpressRequest, res: ExpressResponse) => {
209 | const state = req.body.state as string;
210 | if (!state) {
211 | res.status(400).json({ code: 'invalid_request', error: 'invalid state' });
212 | return;
213 | }
214 |
215 | const requestParams = JSON.parse(atob(state));
216 | await updateApprovedClientsCookie(
217 | req,
218 | res,
219 | requestParams.clientId,
220 | COOKIE_SECRET,
221 | );
222 | const authUrl = await upstreamAuth(state);
223 | res.redirect(authUrl.href);
224 | },
225 | );
226 |
227 | /*
228 | Handles the callback from the upstream authorization server and completes the authorization code grant flow with downstream MCP client.
229 |
230 | Step 2:
231 | Upstream authorization server will redirect to `/callback` with the authorization code.
232 | <code>
233 | /callback?code=authorizationCode&state=state
234 | </code>
235 |
236 | - Exchange the upstream authorization code for an access token.
237 | - Generate new authorization code and grant id.
238 | - Save the authorization code and access token in the database.
239 | - Redirect to the MCP client with the new authorization code.
240 | */
// Completes the upstream leg of the flow: exchanges the upstream code for
// tokens, mints our own authorization code bound to those tokens, and sends
// the MCP client back to its redirect_uri with that code.
authRouter.get(
  '/callback',
  bodyParser.urlencoded({ extended: true }),
  async (req: ExpressRequest, res: ExpressResponse) => {
    // NOTE(review): exchangeCode failures are not handled here — an upstream
    // error would propagate out of this handler; confirm an Express error
    // middleware catches it.
    const tokens = await exchangeCode(req);
    const state = req.query.state as string;
    // `state` is the base64 payload produced by /authorize.
    const requestParams = decodeAuthParams(state);

    const clientId = requestParams.clientId;
    const client = await model.getClient(clientId, '');
    if (!client) {
      res
        .status(400)
        .json({ code: 'invalid_request', error: 'invalid client id' });
      return;
    }

    // Standard authorization code grant: the code is "grantId:nonce" so the
    // grant can be identified while the nonce keeps it unguessable.
    const grantId = generateRandomString(16);
    const nonce = generateRandomString(32);
    const authCode = `${grantId}:${nonce}`;

    // Get the user's info from Neon
    const neonClient = createNeonClient(tokens.access_token);
    const { data: user } = await neonClient.getCurrentUserInfo();
    // Upstream access-token expiry; expiresIn() is presumably seconds —
    // converted to an absolute epoch-ms timestamp.
    const expiresAt = Date.now() + toMilliseconds(tokens.expiresIn() ?? 0);
    // Save the authorization code with associated data
    const code: AuthorizationCode = {
      authorizationCode: authCode,
      expiresAt: new Date(Date.now() + 10 * 60 * 1000), // 10 minutes
      createdAt: Date.now(),
      redirectUri: requestParams.redirectUri,
      scope: requestParams.scope.join(' '),
      client: client,
      user: {
        id: user.id,
        email: user.email,
        name: `${user.name} ${user.last_name}`.trim(),
      },
      // The upstream tokens travel with the code so /token can hand them out.
      token: {
        access_token: tokens.access_token,
        access_token_expires_at: expiresAt,
        refres_token: tokens.refresh_token,
        id_token: tokens.id_token,
      },
      code_challenge: requestParams.codeChallenge,
      code_challenge_method: requestParams.codeChallengeMethod,
    };

    await model.saveAuthorizationCode(code);

    // Redirect back to client with auth code
    const redirectUrl = new URL(requestParams.redirectUri);
    redirectUrl.searchParams.set('code', authCode);
    if (requestParams.state) {
      redirectUrl.searchParams.set('state', requestParams.state);
    }

    res.redirect(redirectUrl.href);
  },
);
302 |
303 | /*
304 | Handles the token exchange for `code` and `refresh_token` grant types with downstream MCP client.
305 |
306 | Step 3:
307 | MCP client should invoke this endpoint after receiving the authorization code to exchange for an access token.
308 | <code>
309 | /token?client_id=clientId&grant_type=code&code=authorizationCode
310 | </code>
311 |
312 | - Verify the authorization code, grant type and client
313 | - Save the access token and refresh token in the database for further API requests verification
314 | - Return with access token and refresh token
315 | */
316 | authRouter.post(
317 | '/token',
318 | bodyParser.urlencoded({ extended: true }),
319 | async (req: ExpressRequest, res: ExpressResponse) => {
320 | const contentType = req.headers['content-type'] as string;
321 | if (contentType !== 'application/x-www-form-urlencoded') {
322 | res
323 | .status(415)
324 | .json({ code: 'invalid_request', error: 'invalid content type' });
325 | return;
326 | }
327 | const { clientId, clientSecret } = extractClientCredentials(req);
328 | if (!clientId) {
329 | res
330 | .status(400)
331 | .json({ code: 'invalid_request', error: 'client_id is required' });
332 | return;
333 | }
334 |
335 | const error = {
336 | error: 'invalid_client',
337 | error_description: 'client not found or invalid client credentials',
338 | };
339 | const client = await model.getClient(clientId, '');
340 | if (!client) {
341 | res.status(400).json({ code: 'invalid_request', ...error });
342 | return;
343 | }
344 |
345 | const isPublicClient = client.tokenEndpointAuthMethod === 'none';
346 | if (!isPublicClient) {
347 | if (clientSecret !== client.secret) {
348 | res.status(400).json({ code: 'invalid_request', ...error });
349 | return;
350 | }
351 | }
352 |
353 | const formData = req.body;
354 | if (formData.grant_type === 'authorization_code') {
355 | const authorizationCode = await model.getAuthorizationCode(formData.code);
356 | if (!authorizationCode) {
357 | res.status(400).json({
358 | code: 'invalid_request',
359 | error: 'invalid authorization code',
360 | });
361 | return;
362 | }
363 |
364 | if (authorizationCode.client.id !== client.id) {
365 | res.status(400).json({
366 | code: 'invalid_request',
367 | error: 'invalid authorization code',
368 | });
369 | return;
370 | }
371 |
372 | if (authorizationCode.expiresAt < new Date()) {
373 | res.status(400).json({
374 | code: 'invalid_request',
375 | error: 'authorization code expired',
376 | });
377 | return;
378 | }
379 |
380 | const isPkceEnabled = authorizationCode.code_challenge !== undefined;
381 | if (
382 | isPkceEnabled &&
383 | !verifyPKCE(
384 | authorizationCode.code_challenge,
385 | authorizationCode.code_challenge_method,
386 | formData.code_verifier,
387 | )
388 | ) {
389 | res.status(400).json({
390 | code: 'invalid_grant',
391 | error: 'invalid PKCE code verifier',
392 | });
393 | return;
394 | }
395 | if (!isPkceEnabled && !formData.redirect_uri) {
396 | res.status(400).json({
397 | code: 'invalid_request',
398 | error: 'redirect_uri is required when not using PKCE',
399 | });
400 | return;
401 | }
402 | if (
403 | formData.redirect_uri &&
404 | !client.redirect_uris.includes(formData.redirect_uri)
405 | ) {
406 | res.status(400).json({
407 | code: 'invalid_request',
408 | error: 'invalid redirect uri',
409 | });
410 | return;
411 | }
412 |
413 | // TODO: Generate fresh tokens and add mapping to database.
414 | const token = await model.saveToken({
415 | accessToken: authorizationCode.token.access_token,
416 | refreshToken: authorizationCode.token.refresh_token,
417 | expires_at: authorizationCode.token.access_token_expires_at,
418 | client: client,
419 | user: authorizationCode.user,
420 | });
421 |
422 | await model.saveRefreshToken({
423 | refreshToken: token.refreshToken ?? '',
424 | accessToken: token.accessToken,
425 | });
426 |
427 | identify(
428 | {
429 | id: authorizationCode.user.id,
430 | name: authorizationCode.user.name,
431 | email: authorizationCode.user.email,
432 | },
433 | {
434 | context: {
435 | client: {
436 | id: client.id,
437 | name: client.client_name,
438 | },
439 | },
440 | },
441 | );
442 |
443 | // Revoke the authorization code, it can only be used once
444 | await model.revokeAuthorizationCode(authorizationCode);
445 | res.json({
446 | access_token: token.accessToken,
447 | expires_in: toSeconds(token.expires_at - Date.now()),
448 | token_type: 'bearer', // TODO: Verify why non-bearer tokens are not working
449 | refresh_token: token.refreshToken,
450 | scope: authorizationCode.scope,
451 | });
452 | return;
453 | } else if (formData.grant_type === 'refresh_token') {
454 | const providedRefreshToken = await model.getRefreshToken(
455 | formData.refresh_token,
456 | );
457 | if (!providedRefreshToken) {
458 | res
459 | .status(400)
460 | .json({ code: 'invalid_request', error: 'invalid refresh token' });
461 | return;
462 | }
463 |
464 | const oldToken = await model.getAccessToken(
465 | providedRefreshToken.accessToken,
466 | );
467 | if (!oldToken) {
468 | // Refresh token is missing its counter access token, delete it
469 | await model.deleteRefreshToken(providedRefreshToken);
470 | res
471 | .status(400)
472 | .json({ code: 'invalid_request', error: 'invalid refresh token' });
473 | return;
474 | }
475 |
476 | if (oldToken.client.id !== client.id) {
477 | res
478 | .status(400)
479 | .json({ code: 'invalid_request', error: 'invalid refresh token' });
480 | return;
481 | }
482 |
483 | const upstreamToken = await exchangeRefreshToken(
484 | providedRefreshToken.refreshToken,
485 | );
486 | const now = Date.now();
487 | const expiresAt = now + toMilliseconds(upstreamToken.expiresIn() ?? 0);
488 | const token = await model.saveToken({
489 | accessToken: upstreamToken.access_token,
490 | refreshToken: upstreamToken.refresh_token ?? '',
491 | expires_at: expiresAt,
492 | client: client,
493 | user: oldToken.user,
494 | });
495 | await model.saveRefreshToken({
496 | refreshToken: token.refreshToken ?? '',
497 | accessToken: token.accessToken,
498 | });
499 |
500 | // Delete the old tokens
501 | await model.deleteToken(oldToken);
502 | await model.deleteRefreshToken(providedRefreshToken);
503 |
504 | res.json({
505 | access_token: token.accessToken,
506 | expires_in: toSeconds(expiresAt - now),
507 | token_type: 'bearer',
508 | refresh_token: token.refreshToken,
509 | scope: oldToken.scope,
510 | });
511 | return;
512 | }
513 | res
514 | .status(400)
515 | .json({ code: 'invalid_request', error: 'invalid grant type' });
516 | },
517 | );
518 |
519 | export { authRouter };
520 |
```
--------------------------------------------------------------------------------
/src/tools/definitions.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { NEON_DEFAULT_DATABASE_NAME } from '../constants.js';
2 | import {
3 | completeDatabaseMigrationInputSchema,
4 | completeQueryTuningInputSchema,
5 | createBranchInputSchema,
6 | createProjectInputSchema,
7 | deleteBranchInputSchema,
8 | deleteProjectInputSchema,
9 | describeBranchInputSchema,
10 | describeProjectInputSchema,
11 | describeTableSchemaInputSchema,
12 | explainSqlStatementInputSchema,
13 | getConnectionStringInputSchema,
14 | getDatabaseTablesInputSchema,
15 | listBranchComputesInputSchema,
16 | listProjectsInputSchema,
17 | prepareDatabaseMigrationInputSchema,
18 | prepareQueryTuningInputSchema,
19 | provisionNeonAuthInputSchema,
20 | runSqlInputSchema,
21 | runSqlTransactionInputSchema,
22 | listSlowQueriesInputSchema,
23 | listOrganizationsInputSchema,
24 | listSharedProjectsInputSchema,
25 | resetFromParentInputSchema,
26 | compareDatabaseSchemaInputSchema,
27 | searchInputSchema,
28 | fetchInputSchema,
29 | loadResourceInputSchema,
30 | } from './toolsSchema.js';
31 |
32 | export const NEON_TOOLS = [
33 | {
34 | name: 'list_projects' as const,
35 | description: `Lists the first 10 Neon projects in your account. If you can't find the project, increase the limit by passing a higher value to the \`limit\` parameter. Optionally filter by project name or ID using the \`search\` parameter.`,
36 | inputSchema: listProjectsInputSchema,
37 | readOnlySafe: true,
38 | },
39 | {
40 | name: 'list_organizations' as const,
41 | description: `Lists all organizations that the current user has access to. Optionally filter by organization name or ID using the \`search\` parameter.`,
42 | inputSchema: listOrganizationsInputSchema,
43 | readOnlySafe: true,
44 | },
45 | {
46 | name: 'list_shared_projects' as const,
47 | description: `Lists projects that have been shared with the current user. These are projects that the user has been granted access to collaborate on. Optionally filter by project name or ID using the \`search\` parameter.`,
48 | inputSchema: listSharedProjectsInputSchema,
49 | readOnlySafe: true,
50 | },
51 | {
52 | name: 'create_project' as const,
53 | description:
54 | 'Create a new Neon project. If someone is trying to create a database, use this tool.',
55 | inputSchema: createProjectInputSchema,
56 | readOnlySafe: false,
57 | },
58 | {
59 | name: 'delete_project' as const,
60 | description: 'Delete a Neon project',
61 | inputSchema: deleteProjectInputSchema,
62 | readOnlySafe: false,
63 | },
64 | {
65 | name: 'describe_project' as const,
66 | description: 'Describes a Neon project',
67 | inputSchema: describeProjectInputSchema,
68 | readOnlySafe: true,
69 | },
70 | {
71 | name: 'run_sql' as const,
72 | description: `
73 | <use_case>
74 | Use this tool to execute a single SQL statement against a Neon database.
75 | </use_case>
76 |
77 | <important_notes>
78 | If you have a temporary branch from a prior step, you MUST:
79 | 1. Pass the branch ID to this tool unless explicitly told otherwise
80 | 2. Tell the user that you are using the temporary branch with ID [branch_id]
81 | </important_notes>`,
82 | inputSchema: runSqlInputSchema,
83 | readOnlySafe: true,
84 | },
85 | {
86 | name: 'run_sql_transaction' as const,
87 | description: `
88 | <use_case>
89 | Use this tool to execute a SQL transaction against a Neon database, should be used for multiple SQL statements.
90 | </use_case>
91 |
92 | <important_notes>
93 | If you have a temporary branch from a prior step, you MUST:
94 | 1. Pass the branch ID to this tool unless explicitly told otherwise
95 | 2. Tell the user that you are using the temporary branch with ID [branch_id]
96 | </important_notes>`,
97 | inputSchema: runSqlTransactionInputSchema,
98 | readOnlySafe: true,
99 | },
100 | {
101 | name: 'describe_table_schema' as const,
102 | description: 'Describe the schema of a table in a Neon database',
103 | inputSchema: describeTableSchemaInputSchema,
104 | readOnlySafe: true,
105 | },
106 | {
107 | name: 'get_database_tables' as const,
108 | description: 'Get all tables in a Neon database',
109 | inputSchema: getDatabaseTablesInputSchema,
110 | readOnlySafe: true,
111 | },
112 | {
113 | name: 'create_branch' as const,
114 | description: 'Create a branch in a Neon project',
115 | inputSchema: createBranchInputSchema,
116 | readOnlySafe: false,
117 | },
118 | {
119 | name: 'prepare_database_migration' as const,
120 | readOnlySafe: false,
121 | description: `
122 | <use_case>
123 | This tool performs database schema migrations by automatically generating and executing DDL statements.
124 |
125 | Supported operations:
126 | CREATE operations:
127 | - Add new columns (e.g., "Add email column to users table")
128 | - Create new tables (e.g., "Create posts table with title and content columns")
129 | - Add constraints (e.g., "Add unique constraint on \`users.email\`")
130 |
131 | ALTER operations:
132 | - Modify column types (e.g., "Change posts.views to bigint")
133 | - Rename columns (e.g., "Rename user_name to username in users table")
134 | - Add/modify indexes (e.g., "Add index on \`posts.title\`")
135 | - Add/modify foreign keys (e.g., "Add foreign key from \`posts.user_id\` to \`users.id\`")
136 |
137 | DROP operations:
138 | - Remove columns (e.g., "Drop temporary_field from users table")
139 | - Drop tables (e.g., "Drop the old_logs table")
140 | - Remove constraints (e.g., "Remove unique constraint from posts.slug")
141 |
142 | The tool will:
143 | 1. Parse your natural language request
144 | 2. Generate appropriate SQL
145 | 3. Execute in a temporary branch for safety
146 | 4. Verify the changes before applying to main branch
147 |
148 | Project ID and database name will be automatically extracted from your request.
149 | If the database name is not provided, the default ${NEON_DEFAULT_DATABASE_NAME} or first available database is used.
150 | </use_case>
151 |
152 | <workflow>
153 | 1. Creates a temporary branch
154 | 2. Applies the migration SQL in that branch
155 | 3. Returns migration details for verification
156 | </workflow>
157 |
158 | <important_notes>
159 | After executing this tool, you MUST:
160 | 1. Test the migration in the temporary branch using the \`run_sql\` tool
161 | 2. Ask for confirmation before proceeding
162 | 3. Use \`complete_database_migration\` tool to apply changes to main branch
163 | </important_notes>
164 |
165 | <example>
166 | For a migration like:
167 | \`\`\`sql
168 | ALTER TABLE users ADD COLUMN last_login TIMESTAMP;
169 | \`\`\`
170 |
171 | You should test it with:
172 | \`\`\`sql
173 | SELECT column_name, data_type
174 | FROM information_schema.columns
175 | WHERE table_name = 'users' AND column_name = 'last_login';
176 | \`\`\`
177 |
178 | You can use \`run_sql\` to test the migration in the temporary branch that this tool creates.
179 | </example>
180 |
181 |
182 | <next_steps>
183 | After executing this tool, you MUST follow these steps:
184 | 1. Use \`run_sql\` to verify changes on temporary branch
185 | 2. Follow these instructions to respond to the client:
186 |
187 | <response_instructions>
188 | <instructions>
189 | Provide a brief confirmation of the requested change and ask for migration commit approval.
190 |
191 | You MUST include ALL of the following fields in your response:
192 | - Migration ID (this is required for commit and must be shown first)
193 | - Temporary Branch Name (always include exact branch name)
194 | - Temporary Branch ID (always include exact ID)
195 | - Migration Result (include brief success/failure status)
196 |
197 | Even if some fields are missing from the tool's response, use placeholders like "not provided" rather than omitting fields.
198 | </instructions>
199 |
200 | <do_not_include>
201 | IMPORTANT: Your response MUST NOT contain ANY technical implementation details such as:
202 | - Data types (e.g., DO NOT mention if a column is boolean, varchar, timestamp, etc.)
203 | - Column specifications or properties
204 | - SQL syntax or statements
205 | - Constraint definitions or rules
206 | - Default values
207 | - Index types
208 | - Foreign key specifications
209 |
210 | Keep the response focused ONLY on confirming the high-level change and requesting approval.
211 |
212 | <example>
213 | INCORRECT: "I've added a boolean \`is_published\` column to the \`posts\` table..."
214 | CORRECT: "I've added the \`is_published\` column to the \`posts\` table..."
215 | </example>
216 | </do_not_include>
217 |
218 | <example>
219 | I've verified that [requested change] has been successfully applied to a temporary branch. Would you like to commit the migration \`[migration_id]\` to the main branch?
220 |
221 | Migration Details:
222 | - Migration ID (required for commit)
223 | - Temporary Branch Name
224 | - Temporary Branch ID
225 | - Migration Result
226 | </example>
227 | </response_instructions>
228 |
229 | 3. If approved, use \`complete_database_migration\` tool with the \`migration_id\`
230 | </next_steps>
231 |
232 | <error_handling>
233 | On error, the tool will:
234 | 1. Automatically attempt ONE retry of the exact same operation
235 | 2. If the retry fails:
236 | - Terminate execution
237 | - Return error details
238 | - DO NOT attempt any other tools or alternatives
239 |
240 | Error response will include:
241 | - Original error details
242 | - Confirmation that retry was attempted
243 | - Final error state
244 |
245 | Important: After a failed retry, you must terminate the current flow completely. Do not attempt to use alternative tools or workarounds.
246 | </error_handling>`,
247 | inputSchema: prepareDatabaseMigrationInputSchema,
248 | },
249 | {
250 | name: 'complete_database_migration' as const,
251 | description:
252 | 'Complete a database migration when the user confirms the migration is ready to be applied to the main branch. This tool also lets the client know that the temporary branch created by the `prepare_database_migration` tool has been deleted.',
253 | inputSchema: completeDatabaseMigrationInputSchema,
254 | readOnlySafe: false,
255 | },
256 | {
257 | name: 'describe_branch' as const,
258 | description:
259 | 'Get a tree view of all objects in a branch, including databases, schemas, tables, views, and functions',
260 | inputSchema: describeBranchInputSchema,
261 | readOnlySafe: true,
262 | },
263 | {
264 | name: 'delete_branch' as const,
265 | description: 'Delete a branch from a Neon project',
266 | inputSchema: deleteBranchInputSchema,
267 | readOnlySafe: false,
268 | },
269 | {
270 | name: 'reset_from_parent' as const,
271 | description: `Resets a branch to match its parent's current state, effectively discarding all changes made on the branch. To avoid data loss, provide a name to preserve the changes in a new branch using \`preserveUnderName\` parameter. This tool is commonly used to create fresh development branches from updated parent branch, undo experimental changes, or restore a branch to a known good state. Warning: This operation will discard all changes if \`preserveUnderName\` is not provided.`,
272 | inputSchema: resetFromParentInputSchema,
273 | readOnlySafe: false,
274 | },
275 | {
276 | name: 'get_connection_string' as const,
277 | description:
278 | 'Get a PostgreSQL connection string for a Neon database with all parameters being optional',
279 | inputSchema: getConnectionStringInputSchema,
280 | readOnlySafe: false,
281 | },
282 | {
283 | name: 'provision_neon_auth' as const,
284 | inputSchema: provisionNeonAuthInputSchema,
285 | readOnlySafe: false,
286 | description: `
287 | Provisions Neon Auth for a Neon branch. Neon Auth is a managed authentication service built on Better Auth, fully integrated into the Neon platform.
288 |
289 | Parameters:
290 | - \`<projectId>\`: The Project ID of the Neon project.
291 | - \`[branchId]\`: An optional Branch ID to provision Neon Auth for. If not provided, the default branch is used.
292 | - \`[databaseName]\`: The database name to provision Neon Auth for. If not provided, the default database is used.
293 |
294 | <workflow>
295 | The tool will:
296 | 1. Create the \`neon_auth\` schema in your database to store users, sessions, project configs and organizations
297 | 2. Set up secure Auth related APIs for your branch
298 | 3. Deploy an auth service in the same region as your Neon compute for low-latency requests
299 | 4. Return the Auth URL specific to your branch, along with credentials for your application
300 | </workflow>
301 |
302 | <key_features>
303 | - Branch-compatible: Auth data (users, sessions, config) branches with your database
304 | - Google and GitHub OAuth included out of the box
305 | - Works with RLS: JWTs are validated by the Data API for authenticated queries
306 | - Better Auth compatible: Exposes the same APIs and schema as Better Auth
307 | </key_features>
308 | `,
309 | },
310 | {
311 | name: 'explain_sql_statement' as const,
312 | description:
313 |       'Describe the PostgreSQL query execution plan for a SQL statement by running EXPLAIN (ANALYZE ...) in the database',
314 | inputSchema: explainSqlStatementInputSchema,
315 | readOnlySafe: true,
316 | },
317 | {
318 | name: 'prepare_query_tuning' as const,
319 | readOnlySafe: false,
320 | description: `
321 | <use_case>
322 | This tool helps developers improve PostgreSQL query performance for slow queries or DML statements by analyzing execution plans and suggesting optimizations.
323 |
324 | The tool will:
325 | 1. Create a temporary branch for testing optimizations and remember the branch ID
326 | 2. Extract and analyze the current query execution plan
327 | 3. Extract all fully qualified table names (\`schema.table\`) referenced in the plan
328 | 4. Gather detailed schema information for each referenced table using \`describe_table_schema\`
329 | 5. Suggest and implement improvements like:
330 | - Adding or modifying indexes based on table schemas and query patterns
331 | - Query structure modifications
332 | - Identifying potential performance bottlenecks
333 | 6. Apply the changes to the temporary branch using \`run_sql\`
334 | 7. Compare performance before and after changes (but ONLY on the temporary branch passing branch ID to all tools)
335 | 8. Continue with next steps using \`complete_query_tuning\` tool (on \`main\` branch)
336 |
337 | Project ID and database name will be automatically extracted from your request.
338 | The temporary branch ID will be added when invoking other tools.
339 | Default database is \`${NEON_DEFAULT_DATABASE_NAME}\` if not specified.
340 |
341 | <important_notes>
342 | This tool is part of the query tuning workflow. Any suggested changes (like creating indexes) must first be applied to the temporary branch using the \`run_sql\` tool.
343 | And then to the main branch using the \`complete_query_tuning\` tool, NOT the \`prepare_database_migration\` tool.
344 | To apply using the \`complete_query_tuning\` tool, you must pass the \`tuning_id\`, NOT the temporary branch ID to it.
345 | </important_notes>
346 | </use_case>
347 |
348 | <workflow>
349 | 1. Creates a temporary branch
350 | 2. Analyzes current query performance and extracts table information
351 | 3. Implements and tests improvements (using tool \`run_sql\` for schema modifications and \`explain_sql_statement\` for performance analysis, but ONLY on the temporary branch created in step 1 passing the same branch ID to all tools)
352 | 4. Returns tuning details for verification
353 | </workflow>
354 |
355 | <important_notes>
356 | After executing this tool, you MUST:
357 | 1. Review the suggested changes
358 | 2. Verify the performance improvements on the temporary branch by applying the changes with \`run_sql\` and running \`explain_sql_statement\` again
359 | 3. Decide whether to keep or discard the changes
360 | 4. Use \`complete_query_tuning\` tool to apply or discard changes to the main branch
361 |
362 | DO NOT use \`prepare_database_migration\` tool for applying query tuning changes.
363 | Always use \`complete_query_tuning\` to ensure changes are properly tracked and applied.
364 |
365 | Note:
366 | - Some operations like creating indexes can take significant time on large tables
367 | - Table statistics updates (ANALYZE) are NOT automatically performed as they can be long-running
368 | - Table statistics maintenance should be handled by PostgreSQL auto-analyze or scheduled maintenance jobs
369 | - If statistics are suspected to be stale, suggest running ANALYZE as a separate maintenance task
370 | </important_notes>
371 |
372 | <example>
373 | For a query like:
374 | \`\`\`sql
375 | SELECT o.*, c.name
376 | FROM orders o
377 | JOIN customers c ON c.id = o.customer_id
378 | WHERE o.status = 'pending'
379 | AND o.created_at > '2024-01-01';
380 | \`\`\`
381 |
382 | The tool will:
383 | 1. Extract referenced tables: \`public.orders\`, \`public.customers\`
384 | 2. Gather schema information for both tables
385 | 3. Analyze the execution plan
386 | 4. Suggest improvements like:
387 | - Creating a composite index on orders(status, created_at)
388 | - Optimizing the join conditions
389 | 5. If confirmed, apply the suggested changes to the temporary branch using \`run_sql\`
390 | 6. Compare execution plans and performance before and after changes (but ONLY on the temporary branch passing branch ID to all tools)
391 | </example>
392 |
393 | <next_steps>
394 | After executing this tool, you MUST follow these steps:
395 | 1. Review the execution plans and suggested changes
396 | 2. Follow these instructions to respond to the client:
397 |
398 | <response_instructions>
399 | <instructions>
400 | Provide a brief summary of the performance analysis and ask for approval to apply changes on the temporary branch.
401 |
402 | You MUST include ALL of the following fields in your response:
403 | - Tuning ID (this is required for completion)
404 | - Temporary Branch Name
405 | - Temporary Branch ID
406 | - Original Query Cost
407 | - Improved Query Cost
408 | - Referenced Tables (list all tables found in the plan)
409 | - Suggested Changes
410 |
411 | Even if some fields are missing from the tool's response, use placeholders like "not provided" rather than omitting fields.
412 | </instructions>
413 |
414 | <do_not_include>
415 | IMPORTANT: Your response MUST NOT contain ANY technical implementation details such as:
416 | - Exact index definitions
417 | - Internal PostgreSQL settings
418 | - Complex query rewrites
419 | - Table partitioning details
420 |
421 | Keep the response focused on high-level changes and performance metrics.
422 | </do_not_include>
423 |
424 | <example>
425 | I've analyzed your query and found potential improvements that could reduce execution time by [X]%.
426 | Would you like to apply these changes to improve performance?
427 |
428 | Analysis Details:
429 | - Tuning ID: [id]
430 | - Temporary Branch: [name]
431 | - Branch ID: [id]
432 | - Original Cost: [cost]
433 | - Improved Cost: [cost]
434 | - Referenced Tables:
435 | * public.orders
436 | * public.customers
437 | - Suggested Changes:
438 | * Add index for frequently filtered columns
439 | * Optimize join conditions
440 |
441 | To apply these changes, I will use the \`complete_query_tuning\` tool after your approval and pass the \`tuning_id\`, NOT the temporary branch ID to it.
442 | </example>
443 | </response_instructions>
444 |
445 | 3. If approved, use ONLY the \`complete_query_tuning\` tool with the \`tuning_id\`
446 | </next_steps>
447 |
448 | <error_handling>
449 | On error, the tool will:
450 | 1. Automatically attempt ONE retry of the exact same operation
451 | 2. If the retry fails:
452 | - Terminate execution
453 | - Return error details
454 | - Clean up temporary branch
455 | - DO NOT attempt any other tools or alternatives
456 |
457 | Error response will include:
458 | - Original error details
459 | - Confirmation that retry was attempted
460 | - Final error state
461 |
462 | Important: After a failed retry, you must terminate the current flow completely.
463 | </error_handling>
464 | `,
465 | inputSchema: prepareQueryTuningInputSchema,
466 | },
467 | {
468 | name: 'complete_query_tuning' as const,
469 | readOnlySafe: false,
470 | description: `Complete a query tuning session by either applying the changes to the main branch or discarding them.
471 | <important_notes>
472 | BEFORE RUNNING THIS TOOL: test out the changes in the temporary branch first by running
473 | - \`run_sql\` with the suggested DDL statements.
474 | - \`explain_sql_statement\` with the original query and the temporary branch.
475 | This tool is the ONLY way to finally apply changes after the \`prepare_query_tuning\` tool to the main branch.
476 | You MUST NOT use \`prepare_database_migration\` or other tools to apply query tuning changes.
477 | You MUST pass the \`tuning_id\` obtained from the \`prepare_query_tuning\` tool, NOT the temporary branch ID as \`tuning_id\` to this tool.
478 | You MUST pass the temporary branch ID used in the \`prepare_query_tuning\` tool as TEMPORARY branchId to this tool.
479 | The tool OPTIONALLY receives a second branch ID or name which can be used instead of the main branch to apply the changes.
480 | This tool MUST be called after tool \`prepare_query_tuning\` even when the user rejects the changes, to ensure proper cleanup of temporary branches.
481 | </important_notes>
482 |
483 | This tool:
484 | 1. Applies suggested changes (like creating indexes) to the main branch (or specified branch) if approved
485 | 2. Handles cleanup of temporary branch
486 | 3. Must be called even when changes are rejected to ensure proper cleanup
487 |
488 | Workflow:
489 | 1. After \`prepare_query_tuning\` suggests changes
490 | 2. User reviews and approves/rejects changes
491 | 3. This tool is called to either:
492 | - Apply approved changes to main branch and cleanup
493 | - OR just cleanup if changes are rejected
494 | `,
495 | inputSchema: completeQueryTuningInputSchema,
496 | },
497 | {
498 | name: 'list_slow_queries' as const,
499 | description: `
500 | <use_case>
501 | Use this tool to list slow queries from your Neon database.
502 | </use_case>
503 |
504 | <important_notes>
505 | This tool queries the pg_stat_statements extension to find queries that are taking longer than expected.
506 | The tool will return queries sorted by execution time, with the slowest queries first.
507 | </important_notes>`,
508 | inputSchema: listSlowQueriesInputSchema,
509 | readOnlySafe: true,
510 | },
511 | {
512 | name: 'list_branch_computes' as const,
513 | description: 'Lists compute endpoints for a project or specific branch',
514 | inputSchema: listBranchComputesInputSchema,
515 | readOnlySafe: true,
516 | },
517 | {
518 | name: 'compare_database_schema' as const,
519 | readOnlySafe: true,
520 | description: `
521 | <use_case>
522 | Use this tool to compare the schema of a database between two branches.
523 | The output of the tool is a JSON object with one field: \`diff\`.
524 |
525 | <example>
526 | \`\`\`json
527 | {
528 | "diff": "--- a/neondb\n+++ b/neondb\n@@ -27,7 +27,10 @@\n \n CREATE TABLE public.users (\n id integer NOT NULL,\n- username character varying(50) NOT NULL\n+ username character varying(50) NOT NULL,\n+ is_deleted boolean DEFAULT false NOT NULL,\n+ created_at timestamp with time zone DEFAULT now() NOT NULL,\n+ updated_at timestamp with time zone\n );\n \n \n@@ -79,6 +82,13 @@\n \n \n --\n+-- Name: users_created_at_idx; Type: INDEX; Schema: public; Owner: neondb_owner\n+--\n+\n+CREATE INDEX users_created_at_idx ON public.users USING btree (created_at DESC) WHERE (is_deleted = false);\n+\n+\n+--\n -- Name: DEFAULT PRIVILEGES FOR SEQUENCES; Type: DEFAULT ACL; Schema: public; Owner: cloud_admin\n --\n \n"
529 | }
530 | \`\`\`
531 | </example>
532 |
533 | At this field you will find a difference between two schemas.
534 | The diff represents the changes required to make the parent branch schema match the child branch schema.
535 | The diff field contains a unified diff (git-style patch) as a string.
536 |
537 | You MUST be able to generate a zero-downtime migration from the diff and apply it to the parent branch.
538 | (This branch is a child and has a parent. You can get parent id just querying the branch details.)
539 | </use_case>
540 |
541 | <important_notes>
542 | To generate a schema diff, you MUST SPECIFY the \`database_name\`.
543 | If \`database_name\` is not specified, you MUST fall back to the default database name: \`${NEON_DEFAULT_DATABASE_NAME}\`.
544 |
545 | You MUST TAKE INTO ACCOUNT the PostgreSQL version. The PostgreSQL version is the same for both branches.
546 | You MUST ASK user consent before running each generated SQL query.
547 | You SHOULD USE \`run_sql\` tool to run each generated SQL query.
548 | You SHOULD suggest creating a backup or point-in-time restore before running the migration.
549 | Generated queries change the schema of the parent branch and MIGHT BE dangerous to execute.
550 | Generated SQL migrations SHOULD be idempotent where possible (i.e., safe to run multiple times without failure) and include \`IF NOT EXISTS\` / \`IF EXISTS\` where applicable.
551 | You SHOULD recommend including comments in generated SQL linking back to diff hunks (e.g., \`-- from diff @@ -27,7 +27,10 @@\`) to make audits easier.
552 | Generated SQL should be reviewed for dependencies (e.g., foreign key order) before execution.
553 | </important_notes>
554 |
555 | <next_steps>
556 | After executing this tool, you MUST follow these steps:
557 | 1. Review the schema diff and suggest generating a zero-downtime migration.
558 | 2. Follow these instructions to respond to the client:
559 |
560 | <response_instructions>
561 | <instructions>
562 | Provide brief information about the changes:
563 | * Tables
564 | * Views
565 | * Indexes
566 | * Ownership
567 | * Constraints
568 | * Triggers
569 | * Policies
570 | * Extensions
571 | * Schemas
572 | * Sequences
573 | * Tablespaces
574 | * Users
575 | * Roles
576 | * Privileges
577 | </instructions>
578 | </response_instructions>
579 |
580 | 3. If a migration fails, you SHOULD guide the user on how to revert the schema changes, for example by using backups, point-in-time restore, or generating reverse SQL statements (if safe).
581 | </next_steps>
582 |
583 | This tool:
584 | 1. Generates a diff between the child branch and its parent.
585 | 2. Generates a SQL migration from the diff.
586 | 3. Suggests generating a zero-downtime migration.
587 |
588 | <workflow>
589 | 1. User asks you to generate a diff between two branches.
590 | 2. You suggest generating a SQL migration from the diff.
591 | 3. Ensure the generated migration is zero-downtime; otherwise, warn the user.
592 | 4. You ensure that your suggested migration is also matching the PostgreSQL version.
593 | 5. You use \`run_sql\` tool to run each generated SQL query and ask the user consent before running it.
594 | Before requesting user consent, present a summary of all generated SQL statements along with their potential impact (e.g., table rewrites, lock risks, validation steps) so the user can make an informed decision.
595 | 6. Propose to rerun the schema diff tool one more time to ensure that the migration is applied correctly.
596 | 7. If the diff is empty, confirm that the parent schema now matches the child schema.
597 | 8. If the diff is not empty after migration, warn the user and assist in resolving the remaining differences.
598 | </workflow>
599 |
600 | <hints>
601 | <hint>
602 | Adding a column with a static \`DEFAULT\` value will not require any long-lived locks.
603 | But if the default calls a function that is not deterministic, it will cause a table rewrite and locks.
604 |
605 | <example>
606 | \`\`\`sql
607 | -- No table rewrite, minimal lock time
608 | ALTER TABLE users ADD COLUMN status text DEFAULT 'active';
609 | \`\`\`
610 | </example>
611 |
612 | Here is an example of a case where the default function is not deterministic and will cause locks:
613 |
614 | <example>
615 | \`\`\`sql
616 | -- Table rewrite, potentially longer lock time
617 | ALTER TABLE users ADD COLUMN created_at timestamptz DEFAULT now();
618 | \`\`\`
619 |
620 | The fix for this is as follows:
621 |
622 | \`\`\`sql
623 | -- Adding a nullable column first
624 | ALTER TABLE users ADD COLUMN created_at timestamptz;
625 |
626 | -- Backfilling existing rows with the value
627 | UPDATE users SET created_at = now();
628 | \`\`\`
629 | </example>
630 | </hint>
631 |
632 | <hint>
633 | Adding constraints in two phases (including foreign keys)
634 |
635 | <example>
636 | \`\`\`sql
637 | -- Step 1: Add constraint without validating existing data
638 | -- Fast - only blocks briefly to update catalog
639 | ALTER TABLE users ADD CONSTRAINT users_age_positive
640 | CHECK (age > 0) NOT VALID;
641 |
642 | -- Step 2: Validate existing data (can take time but doesn't block writes)
643 | -- Uses SHARE UPDATE EXCLUSIVE lock - allows reads/writes
644 | ALTER TABLE users VALIDATE CONSTRAINT users_age_positive;
645 | \`\`\`
646 | </example>
647 |
648 | <example>
649 | \`\`\`sql
650 | -- Step 1: Add foreign key without validation
651 | -- Fast - only updates catalog, doesn't validate existing data
652 | ALTER TABLE orders ADD CONSTRAINT orders_user_id_fk
653 | FOREIGN KEY (user_id) REFERENCES users(id) NOT VALID;
654 |
655 | -- Step 2: Validate existing relationships
656 | -- Can take time but allows concurrent operations
657 | ALTER TABLE orders VALIDATE CONSTRAINT orders_user_id_fk;
658 | \`\`\`
659 | </example>
660 | </hint>
661 |
662 | <hint>
663 | Setting columns to NOT NULL
664 |
665 | <example>
666 | \`\`\`sql
667 | -- Step 1: Add a check constraint (fast with NOT VALID)
668 | ALTER TABLE users ADD CONSTRAINT users_email_not_null
669 | CHECK (email IS NOT NULL) NOT VALID;
670 |
671 | -- Step 2: Validate the constraint (allows concurrent operations)
672 | ALTER TABLE users VALIDATE CONSTRAINT users_email_not_null;
673 |
674 | -- Step 3: Set NOT NULL (fast since constraint guarantees no nulls)
675 | ALTER TABLE users ALTER COLUMN email SET NOT NULL;
676 |
677 | -- Step 4: Drop the redundant check constraint
678 | ALTER TABLE users DROP CONSTRAINT users_email_not_null;
679 | \`\`\`
680 | </example>
681 |
682 | <example>
683 | For PostgreSQL v18+
684 | (to get PostgreSQL version, you can use \`describe_project\` tool or \`run_sql\` tool and execute \`SELECT version();\` query)
685 |
686 | \`\`\`sql
687 | -- PostgreSQL 18+ - Simplified approach
688 | ALTER TABLE users ADD CONSTRAINT users_email_not_null NOT NULL email NOT VALID;
689 | ALTER TABLE users VALIDATE CONSTRAINT users_email_not_null;
690 | \`\`\`
691 | </example>
692 | </hint>
693 |
694 | <hint>
695 | In some cases, you need to combine two approaches to achieve a zero-downtime migration.
696 |
697 | <example>
698 | \`\`\`sql
699 | -- Step 1: Adding a nullable column first
700 | ALTER TABLE users ADD COLUMN created_at timestamptz;
701 |
702 | -- Step 2: Updating all rows with the default value
703 | UPDATE users SET created_at = now() WHERE created_at IS NULL;
704 |
705 | -- Step 3: Creating a not null constraint
706 | ALTER TABLE users ADD CONSTRAINT users_created_at_not_null
707 | CHECK (created_at IS NOT NULL) NOT VALID;
708 |
709 | -- Step 4: Validating the constraint
710 | ALTER TABLE users VALIDATE CONSTRAINT users_created_at_not_null;
711 |
712 | -- Step 5: Setting the column to NOT NULL
713 | ALTER TABLE users ALTER COLUMN created_at SET NOT NULL;
714 |
715 | -- Step 6: Dropping the redundant NOT NULL constraint
716 | ALTER TABLE users DROP CONSTRAINT users_created_at_not_null;
717 |
718 | -- Step 7: Adding the default value
719 | ALTER TABLE users ALTER COLUMN created_at SET DEFAULT now();
720 | \`\`\`
721 | </example>
722 |
723 | For PostgreSQL v18+
724 | <example>
725 | \`\`\`sql
726 | -- Step 1: Adding a nullable column first
727 | ALTER TABLE users ADD COLUMN created_at timestamptz;
728 |
729 | -- Step 2: Updating all rows with the default value
730 | UPDATE users SET created_at = now() WHERE created_at IS NULL;
731 |
732 | -- Step 3: Creating a not null constraint
733 | ALTER TABLE users ADD CONSTRAINT users_created_at_not_null NOT NULL created_at NOT VALID;
734 |
735 | -- Step 4: Validating the constraint
736 | ALTER TABLE users VALIDATE CONSTRAINT users_created_at_not_null;
737 |
738 | -- Step 5: Adding the default value
739 | ALTER TABLE users ALTER COLUMN created_at SET DEFAULT now();
740 | \`\`\`
741 | </example>
742 | </hint>
743 |
744 | <hint>
745 | Create index CONCURRENTLY
746 |
747 | <example>
748 | \`\`\`sql
749 | CREATE INDEX CONCURRENTLY idx_users_email ON users (email);
750 | \`\`\`
751 | </example>
752 | </hint>
753 |
754 | <hint>
755 | Drop index CONCURRENTLY
756 |
757 | <example>
758 | \`\`\`sql
759 | DROP INDEX CONCURRENTLY idx_users_email;
760 | \`\`\`
761 | </example>
762 | </hint>
763 |
764 | <hint>
765 | Create materialized view WITH NO DATA
766 |
767 | <example>
768 | \`\`\`sql
769 | CREATE MATERIALIZED VIEW mv_users AS SELECT name FROM users WITH NO DATA;
770 | \`\`\`
771 | </example>
772 | </hint>
773 |
774 | <hint>
775 | Refresh materialized view CONCURRENTLY
776 |
777 | <example>
778 | \`\`\`sql
779 | REFRESH MATERIALIZED VIEW CONCURRENTLY mv_users;
780 | \`\`\`
781 | </example>
782 | </hint>
783 | </hints>
784 | `,
785 | inputSchema: compareDatabaseSchemaInputSchema,
786 | },
787 | {
788 | name: 'search' as const,
789 | description: `Searches across all user organizations, projects, and branches that match the query. Returns a list of objects with id, title, and url. This tool searches through all accessible resources and provides direct links to the Neon Console.`,
790 | inputSchema: searchInputSchema,
791 | readOnlySafe: true,
792 | },
793 | {
794 | name: 'fetch' as const,
795 | description: `Fetches detailed information about a specific organization, project, or branch using the ID returned by the search tool. This tool provides comprehensive information about Neon resources for detailed analysis and management.`,
796 | inputSchema: fetchInputSchema,
797 | readOnlySafe: true,
798 | },
799 | {
800 | name: 'load_resource' as const,
801 | description: `
802 | <use_case>
803 | Loads comprehensive Neon documentation and usage guidelines from GitHub. This tool provides instructions for various Neon features and workflows.
804 |
805 | Use this tool when:
806 | - User says "Get started with Neon" or similar onboarding phrases (with neon-get-started subject)
807 | - User needs detailed guidance for initial Neon setup and configuration (with neon-get-started subject)
808 | - You need comprehensive context about Neon workflows and best practices (with neon-get-started subject)
809 |
810 | Available subjects:
811 | - neon-get-started: Comprehensive interactive guide covering organization/project setup, database configuration, connection strings, dependency installation, schema creation/migration, etc.
812 | </use_case>
813 |
814 | <important_notes>
815 | - This tool provides general guidance on different subjects relevant to Neon.
816 | - This tool returns the FULL documentation content
817 | - Load this resource early when users need onboarding guidance
818 | </important_notes>`,
819 | inputSchema: loadResourceInputSchema,
820 | readOnlySafe: true,
821 | },
822 | ];
823 |
```