This is page 2 of 2. Use http://codebase.md/neondatabase-labs/mcp-server-neon?page={x} to view the full context.
# Directory Structure
```
├── .bun-version
├── .dockerignore
├── .env.example
├── .github
│ └── workflows
│ ├── koyeb-preview.yml
│ ├── koyeb-prod.yml
│ └── pr.yml
├── .gitignore
├── .npmrc
├── .nvmrc
├── .prettierignore
├── .prettierrc
├── bun.lock
├── CHANGELOG.md
├── Dockerfile
├── eslint.config.js
├── landing
│ ├── .gitignore
│ ├── app
│ │ ├── globals.css
│ │ ├── layout.tsx
│ │ └── page.tsx
│ ├── components
│ │ ├── CodeSnippet.tsx
│ │ ├── CopyableUrl.tsx
│ │ ├── DescriptionItem.tsx
│ │ ├── ExternalIcon.tsx
│ │ ├── ExternalLink.tsx
│ │ ├── Header.tsx
│ │ ├── Introduction.tsx
│ │ ├── ThemeProvider.tsx
│ │ └── ui
│ │ ├── accordion.tsx
│ │ ├── alert.tsx
│ │ └── button.tsx
│ ├── components.json
│ ├── eslint.config.mjs
│ ├── icons
│ │ ├── github.svg
│ │ └── neon.svg
│ ├── lib
│ │ ├── description.ts
│ │ └── utils.ts
│ ├── next.config.ts
│ ├── package-lock.json
│ ├── package.json
│ ├── postcss.config.mjs
│ ├── public
│ │ └── favicon.ico
│ ├── README.md
│ └── tsconfig.json
├── LICENSE
├── mcp-client
│ ├── .env.example
│ ├── package-lock.json
│ ├── package.json
│ ├── README.md
│ ├── src
│ │ ├── bin.ts
│ │ ├── cli-client.ts
│ │ ├── index.ts
│ │ ├── logger.ts
│ │ └── neon-cli-client.ts
│ └── tsconfig.json
├── package-lock.json
├── package.json
├── public
│ └── logo.png
├── PUBLISH.md
├── README.md
├── remote.Dockerfile
├── scripts
│ └── before-publish.ts
├── src
│ ├── analytics
│ │ └── analytics.ts
│ ├── constants.ts
│ ├── describeUtils.ts
│ ├── index.ts
│ ├── initConfig.ts
│ ├── oauth
│ │ ├── client.ts
│ │ ├── cookies.ts
│ │ ├── kv-store.ts
│ │ ├── model.ts
│ │ ├── server.ts
│ │ └── utils.ts
│ ├── resources.ts
│ ├── sentry
│ │ ├── instrument.ts
│ │ └── utils.ts
│ ├── server
│ │ ├── api.ts
│ │ ├── errors.ts
│ │ └── index.ts
│ ├── tools
│ │ ├── definitions.ts
│ │ ├── handlers
│ │ │ └── neon-auth.ts
│ │ ├── index.ts
│ │ ├── state.ts
│ │ ├── tools.ts
│ │ ├── toolsSchema.ts
│ │ ├── types.ts
│ │ └── utils.ts
│ ├── tools-evaluations
│ │ ├── evalUtils.ts
│ │ └── prepare-database-migration.eval.ts
│ ├── transports
│ │ ├── sse-express.ts
│ │ ├── stdio.ts
│ │ └── stream.ts
│ ├── types
│ │ ├── auth.ts
│ │ ├── context.ts
│ │ └── express.d.ts
│ ├── utils
│ │ ├── logger.ts
│ │ └── polyfills.ts
│ └── views
│ ├── approval-dialog.pug
│ └── styles.css
├── tsconfig.json
└── tsconfig.test.json
```
# Files
--------------------------------------------------------------------------------
/src/oauth/server.ts:
--------------------------------------------------------------------------------
```typescript
import {
Request as ExpressRequest,
Response as ExpressResponse,
} from 'express';
import { AuthorizationCode, Client } from 'oauth2-server';
import { model } from './model.js';
import { logger } from '../utils/logger.js';
import express from 'express';
import {
decodeAuthParams,
extractClientCredentials,
generateRandomString,
parseAuthRequest,
toMilliseconds,
toSeconds,
verifyPKCE,
} from './utils.js';
import { exchangeCode, exchangeRefreshToken, upstreamAuth } from './client.js';
import { createNeonClient } from '../server/api.js';
import bodyParser from 'body-parser';
import { SERVER_HOST, COOKIE_SECRET } from '../constants.js';
import {
isClientAlreadyApproved,
updateApprovedClientsCookie,
} from './cookies.js';
import { identify } from '../analytics/analytics.js';
const SUPPORTED_GRANT_TYPES = ['authorization_code', 'refresh_token'];
const SUPPORTED_RESPONSE_TYPES = ['code'];
const SUPPORTED_AUTH_METHODS = [
'client_secret_post',
'client_secret_basic',
'none',
];
const SUPPORTED_CODE_CHALLENGE_METHODS = ['S256'];
export const metadata = (req: ExpressRequest, res: ExpressResponse) => {
res.json({
issuer: SERVER_HOST,
authorization_endpoint: `${SERVER_HOST}/authorize`,
token_endpoint: `${SERVER_HOST}/token`,
registration_endpoint: `${SERVER_HOST}/register`,
response_types_supported: SUPPORTED_RESPONSE_TYPES,
response_modes_supported: ['query'],
grant_types_supported: SUPPORTED_GRANT_TYPES,
token_endpoint_auth_methods_supported: SUPPORTED_AUTH_METHODS,
registration_endpoint_auth_methods_supported: SUPPORTED_AUTH_METHODS,
code_challenge_methods_supported: SUPPORTED_CODE_CHALLENGE_METHODS,
});
};
export const registerClient = async (
req: ExpressRequest,
res: ExpressResponse,
) => {
const payload = req.body;
logger.info('request to register client: ', {
name: payload.client_name,
client_uri: payload.client_uri,
});
if (payload.client_name === undefined) {
res
.status(400)
.json({ code: 'invalid_request', error: 'client_name is required' });
return;
}
if (payload.redirect_uris === undefined) {
res
.status(400)
.json({ code: 'invalid_request', error: 'redirect_uris is required' });
return;
}
if (
payload.grant_types === undefined ||
!payload.grant_types.every((grant: string) =>
SUPPORTED_GRANT_TYPES.includes(grant),
)
) {
res.status(400).json({
code: 'invalid_request',
error:
'grant_types is required and must only include supported grant types',
});
return;
}
if (
payload.response_types === undefined ||
!payload.response_types.every((responseType: string) =>
SUPPORTED_RESPONSE_TYPES.includes(responseType),
)
) {
res.status(400).json({
code: 'invalid_request',
error:
'response_types is required and must only include supported response types',
});
return;
}
try {
const clientId = generateRandomString(8);
const clientSecret = generateRandomString(32);
const client: Client = {
...payload,
id: clientId,
secret: clientSecret,
tokenEndpointAuthMethod:
(req.body.token_endpoint_auth_method as string) ?? 'client_secret_post',
registrationDate: Math.floor(Date.now() / 1000),
};
await model.saveClient(client);
logger.info('new client registered', {
clientId,
client_name: payload.client_name,
redirect_uris: payload.redirect_uris,
client_uri: payload.client_uri,
});
res.json({
client_id: clientId,
client_secret: clientSecret,
client_name: payload.client_name,
redirect_uris: payload.redirect_uris,
token_endpoint_auth_method: client.tokenEndpointAuthMethod,
});
} catch (error: unknown) {
const message = error instanceof Error ? error.message : 'Unknown error';
logger.error('failed to register client:', {
message,
error,
client: payload.client_name,
client_uri: payload.client_uri,
});
res.status(500).json({ code: 'server_error', error, message });
}
};
const authRouter = express.Router();
authRouter.get('/.well-known/oauth-authorization-server', metadata);
authRouter.post('/register', bodyParser.json(), registerClient);
/*
Initiate the authorization code grant flow by validating the request parameters and then redirecting to the upstream authorization server.
Step 1:
MCP client should invoke this endpoint with the following parameters:
<code>
/authorize?client_id=clientId&redirect_uri=mcp://callback&response_type=code&scope=scope&code_challenge=codeChallenge&code_challenge_method=S256
</code>
This endpoint will validate the `client_id` and other request parameters and then capture the parameters on `state` param and redirect to the upstream authorization server.
*/
authRouter.get(
'/authorize',
bodyParser.urlencoded({ extended: true }),
async (req: ExpressRequest, res: ExpressResponse) => {
const requestParams = parseAuthRequest(req);
const clientId = requestParams.clientId;
const client = await model.getClient(clientId, '');
if (!client) {
res
.status(400)
.json({ code: 'invalid_request', error: 'invalid client id' });
return;
}
if (
requestParams.responseType == undefined ||
!client.response_types.includes(requestParams.responseType)
) {
res
.status(400)
.json({ code: 'invalid_request', error: 'invalid response type' });
return;
}
if (
requestParams.redirectUri == undefined ||
!client.redirect_uris.includes(requestParams.redirectUri)
) {
res
.status(400)
.json({ code: 'invalid_request', error: 'invalid redirect uri' });
return;
}
if (await isClientAlreadyApproved(req, client.id, COOKIE_SECRET)) {
const authUrl = await upstreamAuth(btoa(JSON.stringify(requestParams)));
res.redirect(authUrl.href);
return;
}
res.render('approval-dialog', {
client,
state: btoa(JSON.stringify(requestParams)),
});
},
);
authRouter.post(
'/authorize',
bodyParser.urlencoded({ extended: true }),
async (req: ExpressRequest, res: ExpressResponse) => {
const state = req.body.state as string;
if (!state) {
res.status(400).json({ code: 'invalid_request', error: 'invalid state' });
return;
}
const requestParams = JSON.parse(atob(state));
await updateApprovedClientsCookie(
req,
res,
requestParams.clientId,
COOKIE_SECRET,
);
const authUrl = await upstreamAuth(state);
res.redirect(authUrl.href);
},
);
/*
Handles the callback from the upstream authorization server and completes the authorization code grant flow with downstream MCP client.
Step 2:
Upstream authorization server will redirect to `/callback` with the authorization code.
<code>
/callback?code=authorizationCode&state=state
</code>
- Exchange the upstream authorization code for an access token.
- Generate new authorization code and grant id.
- Save the authorization code and access token in the database.
- Redirect to the MCP client with the new authorization code.
*/
authRouter.get(
'/callback',
bodyParser.urlencoded({ extended: true }),
async (req: ExpressRequest, res: ExpressResponse) => {
const tokens = await exchangeCode(req);
const state = req.query.state as string;
const requestParams = decodeAuthParams(state);
const clientId = requestParams.clientId;
const client = await model.getClient(clientId, '');
if (!client) {
res
.status(400)
.json({ code: 'invalid_request', error: 'invalid client id' });
return;
}
// Standard authorization code grant
const grantId = generateRandomString(16);
const nonce = generateRandomString(32);
const authCode = `${grantId}:${nonce}`;
// Get the user's info from Neon
const neonClient = createNeonClient(tokens.access_token);
const { data: user } = await neonClient.getCurrentUserInfo();
const expiresAt = Date.now() + toMilliseconds(tokens.expiresIn() ?? 0);
// Save the authorization code with associated data
const code: AuthorizationCode = {
authorizationCode: authCode,
expiresAt: new Date(Date.now() + 10 * 60 * 1000), // 10 minutes
createdAt: Date.now(),
redirectUri: requestParams.redirectUri,
scope: requestParams.scope.join(' '),
client: client,
user: {
id: user.id,
email: user.email,
name: `${user.name} ${user.last_name}`.trim(),
},
token: {
access_token: tokens.access_token,
access_token_expires_at: expiresAt,
refresh_token: tokens.refresh_token,
id_token: tokens.id_token,
},
code_challenge: requestParams.codeChallenge,
code_challenge_method: requestParams.codeChallengeMethod,
};
await model.saveAuthorizationCode(code);
// Redirect back to client with auth code
const redirectUrl = new URL(requestParams.redirectUri);
redirectUrl.searchParams.set('code', authCode);
if (requestParams.state) {
redirectUrl.searchParams.set('state', requestParams.state);
}
res.redirect(redirectUrl.href);
},
);
/*
Handles the token exchange for `code` and `refresh_token` grant types with downstream MCP client.
Step 3:
MCP client should invoke this endpoint after receiving the authorization code to exchange for an access token.
<code>
/token?client_id=clientId&grant_type=code&code=authorizationCode
</code>
- Verify the authorization code, grant type and client
- Save the access token and refresh token in the database for further API requests verification
- Return with access token and refresh token
*/
authRouter.post(
'/token',
bodyParser.urlencoded({ extended: true }),
async (req: ExpressRequest, res: ExpressResponse) => {
const contentType = req.headers['content-type'] as string;
if (contentType !== 'application/x-www-form-urlencoded') {
res
.status(415)
.json({ code: 'invalid_request', error: 'invalid content type' });
return;
}
const { clientId, clientSecret } = extractClientCredentials(req);
if (!clientId) {
res
.status(400)
.json({ code: 'invalid_request', error: 'client_id is required' });
return;
}
const error = {
error: 'invalid_client',
error_description: 'client not found or invalid client credentials',
};
const client = await model.getClient(clientId, '');
if (!client) {
res.status(400).json({ code: 'invalid_request', ...error });
return;
}
const isPublicClient = client.tokenEndpointAuthMethod === 'none';
if (!isPublicClient) {
if (clientSecret !== client.secret) {
res.status(400).json({ code: 'invalid_request', ...error });
return;
}
}
const formData = req.body;
if (formData.grant_type === 'authorization_code') {
const authorizationCode = await model.getAuthorizationCode(formData.code);
if (!authorizationCode) {
res.status(400).json({
code: 'invalid_request',
error: 'invalid authorization code',
});
return;
}
if (authorizationCode.client.id !== client.id) {
res.status(400).json({
code: 'invalid_request',
error: 'invalid authorization code',
});
return;
}
if (authorizationCode.expiresAt < new Date()) {
res.status(400).json({
code: 'invalid_request',
error: 'authorization code expired',
});
return;
}
const isPkceEnabled = authorizationCode.code_challenge !== undefined;
if (
isPkceEnabled &&
!verifyPKCE(
authorizationCode.code_challenge,
authorizationCode.code_challenge_method,
formData.code_verifier,
)
) {
res.status(400).json({
code: 'invalid_grant',
error: 'invalid PKCE code verifier',
});
return;
}
if (!isPkceEnabled && !formData.redirect_uri) {
res.status(400).json({
code: 'invalid_request',
error: 'redirect_uri is required when not using PKCE',
});
return;
}
if (
formData.redirect_uri &&
!client.redirect_uris.includes(formData.redirect_uri)
) {
res.status(400).json({
code: 'invalid_request',
error: 'invalid redirect uri',
});
return;
}
// TODO: Generate fresh tokens and add mapping to database.
const token = await model.saveToken({
accessToken: authorizationCode.token.access_token,
refreshToken: authorizationCode.token.refresh_token,
expires_at: authorizationCode.token.access_token_expires_at,
client: client,
user: authorizationCode.user,
});
await model.saveRefreshToken({
refreshToken: token.refreshToken ?? '',
accessToken: token.accessToken,
});
identify(
{
id: authorizationCode.user.id,
name: authorizationCode.user.name,
email: authorizationCode.user.email,
},
{
context: {
client: {
id: client.id,
name: client.client_name,
},
},
},
);
// Revoke the authorization code, it can only be used once
await model.revokeAuthorizationCode(authorizationCode);
res.json({
access_token: token.accessToken,
expires_in: toSeconds(token.expires_at - Date.now()),
token_type: 'bearer', // TODO: Verify why non-bearer tokens are not working
refresh_token: token.refreshToken,
scope: authorizationCode.scope,
});
return;
} else if (formData.grant_type === 'refresh_token') {
const providedRefreshToken = await model.getRefreshToken(
formData.refresh_token,
);
if (!providedRefreshToken) {
res
.status(400)
.json({ code: 'invalid_request', error: 'invalid refresh token' });
return;
}
const oldToken = await model.getAccessToken(
providedRefreshToken.accessToken,
);
if (!oldToken) {
// Refresh token is missing its counter access token, delete it
await model.deleteRefreshToken(providedRefreshToken);
res
.status(400)
.json({ code: 'invalid_request', error: 'invalid refresh token' });
return;
}
if (oldToken.client.id !== client.id) {
res
.status(400)
.json({ code: 'invalid_request', error: 'invalid refresh token' });
return;
}
const upstreamToken = await exchangeRefreshToken(
providedRefreshToken.refreshToken,
);
const now = Date.now();
const expiresAt = now + toMilliseconds(upstreamToken.expiresIn() ?? 0);
const token = await model.saveToken({
accessToken: upstreamToken.access_token,
refreshToken: upstreamToken.refresh_token ?? '',
expires_at: expiresAt,
client: client,
user: oldToken.user,
});
await model.saveRefreshToken({
refreshToken: token.refreshToken ?? '',
accessToken: token.accessToken,
});
// Delete the old tokens
await model.deleteToken(oldToken);
await model.deleteRefreshToken(providedRefreshToken);
res.json({
access_token: token.accessToken,
expires_in: toSeconds(expiresAt - now),
token_type: 'bearer',
refresh_token: token.refreshToken,
scope: oldToken.scope,
});
return;
}
res
.status(400)
.json({ code: 'invalid_request', error: 'invalid grant type' });
},
);
export { authRouter };
```
--------------------------------------------------------------------------------
/src/tools/definitions.ts:
--------------------------------------------------------------------------------
```typescript
import { NEON_DEFAULT_DATABASE_NAME } from '../constants.js';
import {
completeDatabaseMigrationInputSchema,
completeQueryTuningInputSchema,
createBranchInputSchema,
createProjectInputSchema,
deleteBranchInputSchema,
deleteProjectInputSchema,
describeBranchInputSchema,
describeProjectInputSchema,
describeTableSchemaInputSchema,
explainSqlStatementInputSchema,
getConnectionStringInputSchema,
getDatabaseTablesInputSchema,
listBranchComputesInputSchema,
listProjectsInputSchema,
prepareDatabaseMigrationInputSchema,
prepareQueryTuningInputSchema,
provisionNeonAuthInputSchema,
runSqlInputSchema,
runSqlTransactionInputSchema,
listSlowQueriesInputSchema,
listOrganizationsInputSchema,
listSharedProjectsInputSchema,
resetFromParentInputSchema,
compareDatabaseSchemaInputSchema,
} from './toolsSchema.js';
export const NEON_TOOLS = [
{
name: 'list_projects' as const,
description: `Lists the first 10 Neon projects in your account. If you can't find the project, increase the limit by passing a higher value to the \`limit\` parameter. Optionally filter by project name or ID using the \`search\` parameter.`,
inputSchema: listProjectsInputSchema,
},
{
name: 'list_organizations' as const,
description: `Lists all organizations that the current user has access to. Optionally filter by organization name or ID using the \`search\` parameter.`,
inputSchema: listOrganizationsInputSchema,
},
{
name: 'list_shared_projects' as const,
description: `Lists projects that have been shared with the current user. These are projects that the user has been granted access to collaborate on. Optionally filter by project name or ID using the \`search\` parameter.`,
inputSchema: listSharedProjectsInputSchema,
},
{
name: 'create_project' as const,
description:
'Create a new Neon project. If someone is trying to create a database, use this tool.',
inputSchema: createProjectInputSchema,
},
{
name: 'delete_project' as const,
description: 'Delete a Neon project',
inputSchema: deleteProjectInputSchema,
},
{
name: 'describe_project' as const,
description: 'Describes a Neon project',
inputSchema: describeProjectInputSchema,
},
{
name: 'run_sql' as const,
description: `
<use_case>
Use this tool to execute a single SQL statement against a Neon database.
</use_case>
<important_notes>
If you have a temporary branch from a prior step, you MUST:
1. Pass the branch ID to this tool unless explicitly told otherwise
2. Tell the user that you are using the temporary branch with ID [branch_id]
</important_notes>`,
inputSchema: runSqlInputSchema,
},
{
name: 'run_sql_transaction' as const,
description: `
<use_case>
Use this tool to execute a SQL transaction against a Neon database, should be used for multiple SQL statements.
</use_case>
<important_notes>
If you have a temporary branch from a prior step, you MUST:
1. Pass the branch ID to this tool unless explicitly told otherwise
2. Tell the user that you are using the temporary branch with ID [branch_id]
</important_notes>`,
inputSchema: runSqlTransactionInputSchema,
},
{
name: 'describe_table_schema' as const,
description: 'Describe the schema of a table in a Neon database',
inputSchema: describeTableSchemaInputSchema,
},
{
name: 'get_database_tables' as const,
description: 'Get all tables in a Neon database',
inputSchema: getDatabaseTablesInputSchema,
},
{
name: 'create_branch' as const,
description: 'Create a branch in a Neon project',
inputSchema: createBranchInputSchema,
},
{
name: 'prepare_database_migration' as const,
description: `
<use_case>
This tool performs database schema migrations by automatically generating and executing DDL statements.
Supported operations:
CREATE operations:
- Add new columns (e.g., "Add email column to users table")
- Create new tables (e.g., "Create posts table with title and content columns")
- Add constraints (e.g., "Add unique constraint on \`users.email\`")
ALTER operations:
- Modify column types (e.g., "Change posts.views to bigint")
- Rename columns (e.g., "Rename user_name to username in users table")
- Add/modify indexes (e.g., "Add index on \`posts.title\`")
- Add/modify foreign keys (e.g., "Add foreign key from \`posts.user_id\` to \`users.id\`")
DROP operations:
- Remove columns (e.g., "Drop temporary_field from users table")
- Drop tables (e.g., "Drop the old_logs table")
- Remove constraints (e.g., "Remove unique constraint from posts.slug")
The tool will:
1. Parse your natural language request
2. Generate appropriate SQL
3. Execute in a temporary branch for safety
4. Verify the changes before applying to main branch
Project ID and database name will be automatically extracted from your request.
If the database name is not provided, the default ${NEON_DEFAULT_DATABASE_NAME} or first available database is used.
</use_case>
<workflow>
1. Creates a temporary branch
2. Applies the migration SQL in that branch
3. Returns migration details for verification
</workflow>
<important_notes>
After executing this tool, you MUST:
1. Test the migration in the temporary branch using the \`run_sql\` tool
2. Ask for confirmation before proceeding
3. Use \`complete_database_migration\` tool to apply changes to main branch
</important_notes>
<example>
For a migration like:
\`\`\`sql
ALTER TABLE users ADD COLUMN last_login TIMESTAMP;
\`\`\`
You should test it with:
\`\`\`sql
SELECT column_name, data_type
FROM information_schema.columns
WHERE table_name = 'users' AND column_name = 'last_login';
\`\`\`
You can use \`run_sql\` to test the migration in the temporary branch that this tool creates.
</example>
<next_steps>
After executing this tool, you MUST follow these steps:
1. Use \`run_sql\` to verify changes on temporary branch
2. Follow these instructions to respond to the client:
<response_instructions>
<instructions>
Provide a brief confirmation of the requested change and ask for migration commit approval.
You MUST include ALL of the following fields in your response:
- Migration ID (this is required for commit and must be shown first)
- Temporary Branch Name (always include exact branch name)
- Temporary Branch ID (always include exact ID)
- Migration Result (include brief success/failure status)
Even if some fields are missing from the tool's response, use placeholders like "not provided" rather than omitting fields.
</instructions>
<do_not_include>
IMPORTANT: Your response MUST NOT contain ANY technical implementation details such as:
- Data types (e.g., DO NOT mention if a column is boolean, varchar, timestamp, etc.)
- Column specifications or properties
- SQL syntax or statements
- Constraint definitions or rules
- Default values
- Index types
- Foreign key specifications
Keep the response focused ONLY on confirming the high-level change and requesting approval.
<example>
INCORRECT: "I've added a boolean \`is_published\` column to the \`posts\` table..."
CORRECT: "I've added the \`is_published\` column to the \`posts\` table..."
</example>
</do_not_include>
<example>
I've verified that [requested change] has been successfully applied to a temporary branch. Would you like to commit the migration \`[migration_id]\` to the main branch?
Migration Details:
- Migration ID (required for commit)
- Temporary Branch Name
- Temporary Branch ID
- Migration Result
</example>
</response_instructions>
3. If approved, use \`complete_database_migration\` tool with the \`migration_id\`
</next_steps>
<error_handling>
On error, the tool will:
1. Automatically attempt ONE retry of the exact same operation
2. If the retry fails:
- Terminate execution
- Return error details
- DO NOT attempt any other tools or alternatives
Error response will include:
- Original error details
- Confirmation that retry was attempted
- Final error state
Important: After a failed retry, you must terminate the current flow completely. Do not attempt to use alternative tools or workarounds.
</error_handling>`,
inputSchema: prepareDatabaseMigrationInputSchema,
},
{
name: 'complete_database_migration' as const,
description:
'Complete a database migration when the user confirms the migration is ready to be applied to the main branch. This tool also lets the client know that the temporary branch created by the `prepare_database_migration` tool has been deleted.',
inputSchema: completeDatabaseMigrationInputSchema,
},
{
name: 'describe_branch' as const,
description:
'Get a tree view of all objects in a branch, including databases, schemas, tables, views, and functions',
inputSchema: describeBranchInputSchema,
},
{
name: 'delete_branch' as const,
description: 'Delete a branch from a Neon project',
inputSchema: deleteBranchInputSchema,
},
{
name: 'reset_from_parent' as const,
description: `Resets a branch to match its parent's current state, effectively discarding all changes made on the branch. To avoid data loss, provide a name to preserve the changes in a new branch using \`preserveUnderName\` parameter. This tool is commonly used to create fresh development branches from updated parent branch, undo experimental changes, or restore a branch to a known good state. Warning: This operation will discard all changes if \`preserveUnderName\` is not provided.`,
inputSchema: resetFromParentInputSchema,
},
{
name: 'get_connection_string' as const,
description:
'Get a PostgreSQL connection string for a Neon database with all parameters being optional',
inputSchema: getConnectionStringInputSchema,
},
{
name: 'provision_neon_auth' as const,
inputSchema: provisionNeonAuthInputSchema,
description: `
This tool provisions authentication for a Neon project. It allows developers to easily setup authentication infrastructure by creating a integration with Stack Auth (\`@stackframe/stack\`). Stop using NextAuth.js! use \`@stackframe/stack\`.
Parameters:
- \`<project_id>\`: The Project ID of the Neon project to provision authentication for.
- \`[database]\`: The database name to setup Neon Auth for. If not provided, the default \`${NEON_DEFAULT_DATABASE_NAME}\` or first available database is used.
<workflow>
The tool will:
1. Establish a connection between your Neon Auth project and Stack Auth
2. Creates a dedicated authentication schema in your database (\`neon_auth\`)
3. Sets up the user table under the \`neon_auth\` schema. This table is synced with Stack Auth. It does not store user credentials or secrets.
4. Generates Client Key and Secret Key to connect your application with authentication provider.
Use the Stack Auth SDK (\`@stackframe/stack\`) on the frontend to connect your application with authentication provider. DO NOT use NextAuth.js! DO NOT use better-auth! Here's some documentation on Stack Auth:
</workflow>
<use_case>
Stack Auth Guidelines
<instructions>
If you're building an app with Next.js, to set up Neon Auth and Stack Auth, follow these steps:
1. Provision a Neon Auth project with this tool
2. Place the returned credentials in project's \`.env.local\` or \`.env\` file
- \`NEXT_PUBLIC_STACK_PROJECT_ID\`
- \`NEXT_PUBLIC_STACK_PUBLISHABLE_CLIENT_KEY\`
- \`STACK_SECRET_SERVER_KEY\`
3. To setup Stack Auth, run following command:
\`\`\`shell
npx @stackframe/init-stack . --no-browser
\`\`\`
This command will automaticallysetup the project with -
- It will add \`@stackframe/stack\` dependency to \`package.json\`
- It will create a \`stack.ts\` file in your project to setup \`StackServerApp\`.
- It will wrap the root layout with \`StackProvider\` and \`StackTheme\`
- It will create root Suspense boundary \`app/loading.tsx\` to handle loading state while Stack is fetching user data.
- It will also create \`app/handler/[...stack]/page.tsx\` file to handle auth routes like sign in, sign up, forgot password, etc.
4. Do not try to manually create any of these files or directories. Do not try to create SignIn, SignUp, or UserButton components manually, instead use the ones provided by \`@stackframe/stack\`.
</instructions>
<instructions>
Components Guidelines
- Use pre-built components from \`@stackframe/stack\` like \`<UserButton />\`, \`<SignIn />\`, and \`<SignUp />\` to quickly set up auth UI.
- You can also compose smaller pieces like \`<OAuthButtonGroup />\`, \`<MagicLinkSignIn />\`, and \`<CredentialSignIn />\` for custom flows.
<example>
\`\`\`tsx
import { SignIn } from '@stackframe/stack';
export default function Page() {
return <SignIn />;
}
\`\`\`
</example>
</instructions>
<instructions>
User Management Guidelines
- In Client Components, use the \`useUser()\` hook to retrieve the current user (it returns \`null\` when not signed in).
- Update user details using \`user.update({...})\` and sign out via \`user.signOut()\`.
- For pages that require a user, call \`useUser({ or: "redirect" })\` so unauthorized visitors are automatically redirected.
</instructions>
<instructions>
Client Component Guidelines
- Client Components rely on hooks like \`useUser()\` and \`useStackApp()\`.
<example>
\`\`\`tsx
"use client";
import { useUser } from "@stackframe/stack";
export function MyComponent() {
const user = useUser();
return <div>{user ? \`Hello, \${user.displayName}\` : "Not logged in"}</div>;
}
\`\`\`
</example>
</instructions>
<instructions>
Server Component Guidelines
- For Server Components, use \`stackServerApp.getUser()\` from your \`stack.ts\` file.
<example>
\`\`\`tsx
import { stackServerApp } from "@/stack";
export default async function ServerComponent() {
const user = await stackServerApp.getUser();
return <div>{user ? \`Hello, \${user.displayName}\` : "Not logged in"}</div>;
}
\`\`\`
</example>
</instructions>
<instructions>
Page Protection Guidelines
- Protect pages by:
- Using \`useUser({ or: "redirect" })\` in Client Components.
- Using \`await stackServerApp.getUser({ or: "redirect" })\` in Server Components.
- Implementing middleware that checks for a user and redirects to \`/handler/sign-in\` if not found.
<example>
Example middleware:
\`\`\`tsx
export async function middleware(request: NextRequest) {
const user = await stackServerApp.getUser();
if (!user) {
return NextResponse.redirect(new URL('/handler/sign-in', request.url));
}
return NextResponse.next();
}
export const config = { matcher: '/protected/:path*' };
\`\`\`
</example>
</instructions>
<workflow>
Example: custom-profile-page
<instructions>
Create a custom profile page that:
- Displays the user's avatar, display name, and email.
- Provides options to sign out.
- Uses Stack Auth components and hooks.
</instructions>
<example>
File: \`app/profile/page.tsx\`
\`\`\`tsx
'use client';
import { useUser, useStackApp, UserButton } from '@stackframe/stack';
export default function ProfilePage() {
const user = useUser({ or: "redirect" });
const app = useStackApp();
return (
<div>
<UserButton />
<h1>Welcome, {user.displayName || "User"}</h1>
<p>Email: {user.primaryEmail}</p>
<button onClick={() => user.signOut()}>Sign Out</button>
</div>
);
}
\`\`\`
</example>
</workflow>
</use_case>
`,
},
{
name: 'explain_sql_statement' as const,
description:
'Describe the PostgreSQL query execution plan for a query of SQL statement by running EXPLAIN (ANAYLZE...) in the database',
inputSchema: explainSqlStatementInputSchema,
},
{
name: 'prepare_query_tuning' as const,
description: `
<use_case>
This tool helps developers improve PostgreSQL query performance for slow queries or DML statements by analyzing execution plans and suggesting optimizations.
The tool will:
1. Create a temporary branch for testing optimizations and remember the branch ID
2. Extract and analyze the current query execution plan
3. Extract all fully qualified table names (\`schema.table\`) referenced in the plan
4. Gather detailed schema information for each referenced table using \`describe_table_schema\`
5. Suggest and implement improvements like:
- Adding or modifying indexes based on table schemas and query patterns
- Query structure modifications
- Identifying potential performance bottlenecks
6. Apply the changes to the temporary branch using \`run_sql\`
7. Compare performance before and after changes (but ONLY on the temporary branch passing branch ID to all tools)
8. Continue with next steps using \`complete_query_tuning\` tool (on \`main\` branch)
Project ID and database name will be automatically extracted from your request.
The temporary branch ID will be added when invoking other tools.
Default database is \`${NEON_DEFAULT_DATABASE_NAME}\` if not specified.
<important_notes>
This tool is part of the query tuning workflow. Any suggested changes (like creating indexes) must first be applied to the temporary branch using the \`run_sql\` tool.
And then to the main branch using the \`complete_query_tuning\` tool, NOT the \`prepare_database_migration\` tool.
To apply using the \`complete_query_tuning\` tool, you must pass the \`tuning_id\`, NOT the temporary branch ID to it.
</important_notes>
</use_case>
<workflow>
1. Creates a temporary branch
2. Analyzes current query performance and extracts table information
3. Implements and tests improvements (using tool \`run_sql\` for schema modifications and \`explain_sql_statement\` for performance analysis, but ONLY on the temporary branch created in step 1 passing the same branch ID to all tools)
4. Returns tuning details for verification
</workflow>
<important_notes>
After executing this tool, you MUST:
1. Review the suggested changes
2. Verify the performance improvements on temporary branch - by applying the changes with \`run_sql\` and running \`explain_sql_statement\` again)
3. Decide whether to keep or discard the changes
4. Use \`complete_query_tuning\` tool to apply or discard changes to the main branch
DO NOT use \`prepare_database_migration\` tool for applying query tuning changes.
Always use \`complete_query_tuning\` to ensure changes are properly tracked and applied.
Note:
- Some operations like creating indexes can take significant time on large tables
- Table statistics updates (ANALYZE) are NOT automatically performed as they can be long-running
- Table statistics maintenance should be handled by PostgreSQL auto-analyze or scheduled maintenance jobs
- If statistics are suspected to be stale, suggest running ANALYZE as a separate maintenance task
</important_notes>
<example>
For a query like:
\`\`\`sql
SELECT o.*, c.name
FROM orders o
JOIN customers c ON c.id = o.customer_id
WHERE o.status = 'pending'
AND o.created_at > '2024-01-01';
\`\`\`
The tool will:
1. Extract referenced tables: \`public.orders\`, \`public.customers\`
2. Gather schema information for both tables
3. Analyze the execution plan
4. Suggest improvements like:
- Creating a composite index on orders(status, created_at)
- Optimizing the join conditions
5. If confirmed, apply the suggested changes to the temporary branch using \`run_sql\`
6. Compare execution plans and performance before and after changes (but ONLY on the temporary branch passing branch ID to all tools)
</example>
<next_steps>
After executing this tool, you MUST follow these steps:
1. Review the execution plans and suggested changes
2. Follow these instructions to respond to the client:
<response_instructions>
<instructions>
Provide a brief summary of the performance analysis and ask for approval to apply changes on the temporary branch.
You MUST include ALL of the following fields in your response:
- Tuning ID (this is required for completion)
- Temporary Branch Name
- Temporary Branch ID
- Original Query Cost
- Improved Query Cost
- Referenced Tables (list all tables found in the plan)
- Suggested Changes
Even if some fields are missing from the tool's response, use placeholders like "not provided" rather than omitting fields.
</instructions>
<do_not_include>
IMPORTANT: Your response MUST NOT contain ANY technical implementation details such as:
- Exact index definitions
- Internal PostgreSQL settings
- Complex query rewrites
- Table partitioning details
Keep the response focused on high-level changes and performance metrics.
</do_not_include>
<example>
I've analyzed your query and found potential improvements that could reduce execution time by [X]%.
Would you like to apply these changes to improve performance?
Analysis Details:
- Tuning ID: [id]
- Temporary Branch: [name]
- Branch ID: [id]
- Original Cost: [cost]
- Improved Cost: [cost]
- Referenced Tables:
* public.orders
* public.customers
- Suggested Changes:
* Add index for frequently filtered columns
* Optimize join conditions
To apply these changes, I will use the \`complete_query_tuning\` tool after your approval and pass the \`tuning_id\`, NOT the temporary branch ID to it.
</example>
</response_instructions>
3. If approved, use ONLY the \`complete_query_tuning\` tool with the \`tuning_id\`
</next_steps>
<error_handling>
On error, the tool will:
1. Automatically attempt ONE retry of the exact same operation
2. If the retry fails:
- Terminate execution
- Return error details
- Clean up temporary branch
- DO NOT attempt any other tools or alternatives
Error response will include:
- Original error details
- Confirmation that retry was attempted
- Final error state
Important: After a failed retry, you must terminate the current flow completely.
</error_handling>
`,
inputSchema: prepareQueryTuningInputSchema,
},
{
name: 'complete_query_tuning' as const,
description: `Complete a query tuning session by either applying the changes to the main branch or discarding them.
<important_notes>
BEFORE RUNNING THIS TOOL: test out the changes in the temporary branch first by running
- \`run_sql\` with the suggested DDL statements.
- \`explain_sql_statement\` with the original query and the temporary branch.
This tool is the ONLY way to finally apply changes after the \`prepare_query_tuning\` tool to the main branch.
You MUST NOT use \`prepare_database_migration\` or other tools to apply query tuning changes.
You MUST pass the \`tuning_id\` obtained from the \`prepare_query_tuning\` tool, NOT the temporary branch ID as \`tuning_id\` to this tool.
You MUST pass the temporary branch ID used in the \`prepare_query_tuning\` tool as TEMPORARY branchId to this tool.
The tool OPTIONALLY receives a second branch ID or name which can be used instead of the main branch to apply the changes.
This tool MUST be called after tool \`prepare_query_tuning\` even when the user rejects the changes, to ensure proper cleanup of temporary branches.
</important_notes>
This tool:
1. Applies suggested changes (like creating indexes) to the main branch (or specified branch) if approved
2. Handles cleanup of temporary branch
3. Must be called even when changes are rejected to ensure proper cleanup
Workflow:
1. After \`prepare_query_tuning\` suggests changes
2. User reviews and approves/rejects changes
3. This tool is called to either:
- Apply approved changes to main branch and cleanup
- OR just cleanup if changes are rejected
`,
inputSchema: completeQueryTuningInputSchema,
},
{
name: 'list_slow_queries' as const,
description: `
<use_case>
Use this tool to list slow queries from your Neon database.
</use_case>
<important_notes>
This tool queries the pg_stat_statements extension to find queries that are taking longer than expected.
The tool will return queries sorted by execution time, with the slowest queries first.
</important_notes>`,
inputSchema: listSlowQueriesInputSchema,
},
{
name: 'list_branch_computes' as const,
description: 'Lists compute endpoints for a project or specific branch',
inputSchema: listBranchComputesInputSchema,
},
{
name: 'compare_database_schema' as const,
description: `
<use_case>
Use this tool to compare the schema of a database between two branches.
The output of the tool is a JSON object with one field: \`diff\`.
<example>
\`\`\`json
{
"diff": "--- a/neondb\n+++ b/neondb\n@@ -27,7 +27,10 @@\n \n CREATE TABLE public.users (\n id integer NOT NULL,\n- username character varying(50) NOT NULL\n+ username character varying(50) NOT NULL,\n+ is_deleted boolean DEFAULT false NOT NULL,\n+ created_at timestamp with time zone DEFAULT now() NOT NULL,\n+ updated_at timestamp with time zone\n );\n \n \n@@ -79,6 +82,13 @@\n \n \n --\n+-- Name: users_created_at_idx; Type: INDEX; Schema: public; Owner: neondb_owner\n+--\n+\n+CREATE INDEX users_created_at_idx ON public.users USING btree (created_at DESC) WHERE (is_deleted = false);\n+\n+\n+--\n -- Name: DEFAULT PRIVILEGES FOR SEQUENCES; Type: DEFAULT ACL; Schema: public; Owner: cloud_admin\n --\n \n"
}
\`\`\`
</example>
At this field you will find a difference between two schemas.
The diff represents the changes required to make the parent branch schema match the child branch schema.
The diff field contains a unified diff (git-style patch) as a string.
You MUST be able to generate a zero-downtime migration from the diff and apply it to the parent branch.
(This branch is a child and has a parent. You can get parent id just querying the branch details.)
</use_case>
<important_notes>
To generate schema diff, you MUST SPECIFY the \`database_name\`.
If \`database_name\` is not specified, you MUST fall back to the default database name: \`${NEON_DEFAULT_DATABASE_NAME}\`.
You MUST TAKE INTO ACCOUNT the PostgreSQL version. The PostgreSQL version is the same for both branches.
You MUST ASK user consent before running each generated SQL query.
You SHOULD USE \`run_sql\` tool to run each generated SQL query.
You SHOULD suggest creating a backup or point-in-time restore before running the migration.
Generated queries change the schema of the parent branch and MIGHT BE dangerous to execute.
Generated SQL migrations SHOULD be idempotent where possible (i.e., safe to run multiple times without failure) and include \`IF NOT EXISTS\` / \`IF EXISTS\` where applicable.
You SHOULD recommend including comments in generated SQL linking back to diff hunks (e.g., \`-- from diff @@ -27,7 +27,10 @@\`) to make audits easier.
Generated SQL should be reviewed for dependencies (e.g., foreign key order) before execution.
</important_notes>
<next_steps>
After executing this tool, you MUST follow these steps:
1. Review the schema diff and suggest generating a zero-downtime migration.
2. Follow these instructions to respond to the client:
<response_instructions>
<instructions>
Provide brief information about the changes:
* Tables
* Views
* Indexes
* Ownership
* Constraints
* Triggers
* Policies
* Extensions
* Schemas
* Sequences
* Tablespaces
* Users
* Roles
* Privileges
</instructions>
</response_instructions>
3. If a migration fails, you SHOULD guide the user on how to revert the schema changes, for example by using backups, point-in-time restore, or generating reverse SQL statements (if safe).
</next_steps>
This tool:
1. Generates a diff between the child branch and its parent.
2. Generates a SQL migration from the diff.
3. Suggest generating zero-downtime migration.
<workflow>
1. User asks you to generate a diff between two branches.
2. You suggest generating a SQL migration from the diff.
3. Ensure the generated migration is zero-downtime; otherwise, warn the user.
4. You ensure that your suggested migration is also matching the PostgreSQL version.
5. You use \`run_sql\` tool to run each generated SQL query and ask the user consent before running it.
Before requesting user consent, present a summary of all generated SQL statements along with their potential impact (e.g., table rewrites, lock risks, validation steps) so the user can make an informed decision.
6. Propose to rerun the schema diff tool one more time to ensure that the migration is applied correctly.
7. If the diff is empty, confirm that the parent schema now matches the child schema.
8. If the diff is not empty after migration, warn the user and assist in resolving the remaining differences.
</workflow>
<hints>
<hint>
Adding the column with a \`DEFAULT\` static value will not have any locks.
But if the function is called that is not deterministic, it will have locks.
<example>
\`\`\`sql
-- No table rewrite, minimal lock time
ALTER TABLE users ADD COLUMN status text DEFAULT 'active';
\`\`\`
</example>
There is an example of a case where the function is not deterministic and will have locks:
<example>
\`\`\`sql
-- Table rewrite, potentially longer lock time
ALTER TABLE users ADD COLUMN created_at timestamptz DEFAULT now();
\`\`\`
The fix for this is next:
\`\`\`sql
-- Adding a nullable column first
ALTER TABLE users ADD COLUMN created_at timestamptz;
-- Setting the default value because the rows are updated
UPDATE users SET created_at = now();
\`\`\`
</example>
</hint>
<hint>
Adding constraints in two phases (including foreign keys)
<example>
\`\`\`sql
-- Step 1: Add constraint without validating existing data
-- Fast - only blocks briefly to update catalog
ALTER TABLE users ADD CONSTRAINT users_age_positive
CHECK (age > 0) NOT VALID;
-- Step 2: Validate existing data (can take time but doesn't block writes)
-- Uses SHARE UPDATE EXCLUSIVE lock - allows reads/writes
ALTER TABLE users VALIDATE CONSTRAINT users_age_positive;
\`\`\`
</example>
<example>
\`\`\`sql
-- Step 1: Add foreign key without validation
-- Fast - only updates catalog, doesn't validate existing data
ALTER TABLE orders ADD CONSTRAINT orders_user_id_fk
FOREIGN KEY (user_id) REFERENCES users(id) NOT VALID;
-- Step 2: Validate existing relationships
-- Can take time but allows concurrent operations
ALTER TABLE orders VALIDATE CONSTRAINT orders_user_id_fk;
\`\`\`
</example>
</hint>
<hint>
Setting columns to NOT NULL
<example>
\`\`\`sql
-- Step 1: Add a check constraint (fast with NOT VALID)
ALTER TABLE users ADD CONSTRAINT users_email_not_null
CHECK (email IS NOT NULL) NOT VALID;
-- Step 2: Validate the constraint (allows concurrent operations)
ALTER TABLE users VALIDATE CONSTRAINT users_email_not_null;
-- Step 3: Set NOT NULL (fast since constraint guarantees no nulls)
ALTER TABLE users ALTER COLUMN email SET NOT NULL;
-- Step 4: Drop the redundant check constraint
ALTER TABLE users DROP CONSTRAINT users_email_not_null;
\`\`\`
</example>
<example>
For PostgreSQL v18+
(to get PostgreSQL version, you can use \`describe_project\` tool or \`run_sql\` tool and execute \`SELECT version();\` query)
\`\`\`sql
-- PostgreSQL 18+ - Simplified approach
ALTER TABLE users ALTER COLUMN email SET NOT NULL NOT VALID;
ALTER TABLE users VALIDATE CONSTRAINT users_email_not_null;
\`\`\`
</example>
</hint>
<hint>
In some cases, you need to combine two approaches to achieve a zero-downtime migration.
<example>
\`\`\`sql
-- Step 1: Adding a nullable column first
ALTER TABLE users ADD COLUMN created_at timestamptz;
-- Step 2: Updating the all rows with the default value
UPDATE users SET created_at = now() WHERE created_at IS NULL;
-- Step 3: Creating a not null constraint
ALTER TABLE users ADD CONSTRAINT users_created_at_not_null
CHECK (created_at IS NOT NULL) NOT VALID;
-- Step 4: Validating the constraint
ALTER TABLE users VALIDATE CONSTRAINT users_created_at_not_null;
-- Step 5: Setting the column to NOT NULL
ALTER TABLE users ALTER COLUMN created_at SET NOT NULL;
-- Step 6: Dropping the redundant NOT NULL constraint
ALTER TABLE users DROP CONSTRAINT users_created_at_not_null;
-- Step 7: Adding the default value
ALTER TABLE users ALTER COLUMN created_at SET DEFAULT now();
\`\`\`
</example>
For PostgreSQL v18+
<example>
\`\`\`sql
-- Step 1: Adding a nullable column first
ALTER TABLE users ADD COLUMN created_at timestamptz;
-- Step 2: Updating the all rows with the default value
UPDATE users SET created_at = now() WHERE created_at IS NULL;
-- Step 3: Creating a not null constraint
ALTER TABLE users ALTER COLUMN created_at SET NOT NULL NOT VALID;
-- Step 4: Validating the constraint
ALTER TABLE users VALIDATE CONSTRAINT users_created_at_not_null;
-- Step 5: Adding the default value
ALTER TABLE users ALTER COLUMN created_at SET DEFAULT now();
\`\`\`
</example>
</hint>
<hint>
Create index CONCURRENTLY
<example>
\`\`\`sql
CREATE INDEX CONCURRENTLY idx_users_email ON users (email);
\`\`\`
</example>
</hint>
<hint>
Drop index CONCURRENTLY
<example>
\`\`\`sql
DROP INDEX CONCURRENTLY idx_users_email;
\`\`\`
</example>
</hint>
<hint>
Create materialized view WITH NO DATA
<example>
\`\`\`sql
CREATE MATERIALIZED VIEW mv_users AS SELECT name FROM users WITH NO DATA;
\`\`\`
</example>
</hint>
<hint>
Refresh materialized view CONCURRENTLY
<example>
\`\`\`sql
REFRESH MATERIALIZED VIEW CONCURRENTLY mv_users;
\`\`\`
</example>
</hint>
</hints>
`,
inputSchema: compareDatabaseSchemaInputSchema,
},
];
```
--------------------------------------------------------------------------------
/src/tools/tools.ts:
--------------------------------------------------------------------------------
```typescript
import {
Api,
Branch,
EndpointType,
ListProjectsParams,
ListSharedProjectsParams,
GetProjectBranchSchemaComparisonParams,
Organization,
ProjectCreateRequest,
} from '@neondatabase/api-client';
import { neon } from '@neondatabase/serverless';
import crypto from 'crypto';
import { InvalidArgumentError, NotFoundError } from '../server/errors.js';
import { describeTable, formatTableDescription } from '../describeUtils.js';
import { handleProvisionNeonAuth } from './handlers/neon-auth.js';
import { getMigrationFromMemory, persistMigrationToMemory } from './state.js';
import {
DESCRIBE_DATABASE_STATEMENTS,
getDefaultDatabase,
splitSqlStatements,
getOrgByOrgIdOrDefault,
filterOrganizations,
resolveBranchId,
} from './utils.js';
import { startSpan } from '@sentry/node';
import { ToolHandlerExtraParams, ToolHandlers } from './types.js';
async function handleListProjects(
params: ListProjectsParams,
neonClient: Api<unknown>,
extra: ToolHandlerExtraParams,
) {
const organization = await getOrgByOrgIdOrDefault(params, neonClient, extra);
const response = await neonClient.listProjects({
...params,
org_id: organization?.id,
});
if (response.status !== 200) {
throw new Error(`Failed to list projects: ${response.statusText}`);
}
let projects = response.data.projects;
// If search is provided and no org_id specified, and no projects found in personal account,
// search across all user organizations
if (params.search && !params.org_id && projects.length === 0) {
const organizations = await handleListOrganizations(
neonClient,
extra.account,
);
// Search projects across all organizations
const allProjects = [];
for (const org of organizations) {
// Skip the default organization
if (organization?.id === org.id) {
continue;
}
const orgResponse = await neonClient.listProjects({
...params,
org_id: org.id,
});
if (orgResponse.status === 200) {
allProjects.push(...orgResponse.data.projects);
}
}
// If we found projects in other organizations, return them
if (allProjects.length > 0) {
projects = allProjects;
}
}
return projects;
}
async function handleCreateProject(
params: ProjectCreateRequest,
neonClient: Api<unknown>,
) {
const response = await neonClient.createProject(params);
if (response.status !== 201) {
throw new Error(`Failed to create project: ${JSON.stringify(response)}`);
}
return response.data;
}
async function handleDeleteProject(
projectId: string,
neonClient: Api<unknown>,
) {
const response = await neonClient.deleteProject(projectId);
if (response.status !== 200) {
throw new Error(`Failed to delete project: ${response.statusText}`);
}
return response.data;
}
async function handleDescribeProject(
projectId: string,
neonClient: Api<unknown>,
) {
const projectBranches = await neonClient.listProjectBranches({
projectId: projectId,
});
const projectDetails = await neonClient.getProject(projectId);
if (projectBranches.status !== 200) {
throw new Error(
`Failed to get project branches: ${projectBranches.statusText}`,
);
}
if (projectDetails.status !== 200) {
throw new Error(`Failed to get project: ${projectDetails.statusText}`);
}
return {
branches: projectBranches.data,
project: projectDetails.data,
};
}
async function handleRunSql(
{
sql,
databaseName,
projectId,
branchId,
}: {
sql: string;
databaseName?: string;
projectId: string;
branchId?: string;
},
neonClient: Api<unknown>,
extra: ToolHandlerExtraParams,
) {
return await startSpan({ name: 'run_sql' }, async () => {
const connectionString = await handleGetConnectionString(
{
projectId,
branchId,
databaseName,
},
neonClient,
extra,
);
const runQuery = neon(connectionString.uri);
const response = await runQuery.query(sql);
return response;
});
}
async function handleRunSqlTransaction(
{
sqlStatements,
databaseName,
projectId,
branchId,
}: {
sqlStatements: string[];
databaseName?: string;
projectId: string;
branchId?: string;
},
neonClient: Api<unknown>,
extra: ToolHandlerExtraParams,
) {
const connectionString = await handleGetConnectionString(
{
projectId,
branchId,
databaseName,
},
neonClient,
extra,
);
const runQuery = neon(connectionString.uri);
const response = await runQuery.transaction(
sqlStatements.map((sql) => runQuery.query(sql)),
);
return response;
}
async function handleGetDatabaseTables(
{
projectId,
databaseName,
branchId,
}: {
projectId: string;
databaseName?: string;
branchId?: string;
},
neonClient: Api<unknown>,
extra: ToolHandlerExtraParams,
) {
const connectionString = await handleGetConnectionString(
{
projectId,
branchId,
databaseName,
},
neonClient,
extra,
);
const runQuery = neon(connectionString.uri);
const query = `
SELECT
table_schema,
table_name,
table_type
FROM information_schema.tables
WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
ORDER BY table_schema, table_name;
`;
const tables = await runQuery.query(query);
return tables;
}
async function handleDescribeTableSchema(
{
projectId,
databaseName,
branchId,
tableName,
}: {
projectId: string;
databaseName?: string;
branchId?: string;
tableName: string;
},
neonClient: Api<unknown>,
extra: ToolHandlerExtraParams,
) {
const connectionString = await handleGetConnectionString(
{
projectId,
branchId,
databaseName,
},
neonClient,
extra,
);
// Extract table name without schema if schema-qualified
const tableNameParts = tableName.split('.');
const simpleTableName = tableNameParts[tableNameParts.length - 1];
const description = await describeTable(
connectionString.uri,
simpleTableName,
);
return {
raw: description,
formatted: formatTableDescription(description),
};
}
async function handleCreateBranch(
{
projectId,
branchName,
}: {
projectId: string;
branchName?: string;
},
neonClient: Api<unknown>,
) {
const response = await neonClient.createProjectBranch(projectId, {
branch: {
name: branchName,
},
endpoints: [
{
type: EndpointType.ReadWrite,
autoscaling_limit_min_cu: 0.25,
autoscaling_limit_max_cu: 0.25,
provisioner: 'k8s-neonvm',
},
],
});
if (response.status !== 201) {
throw new Error(`Failed to create branch: ${response.statusText}`);
}
return response.data;
}
async function handleDeleteBranch(
{
projectId,
branchId,
}: {
projectId: string;
branchId: string;
},
neonClient: Api<unknown>,
) {
const response = await neonClient.deleteProjectBranch(projectId, branchId);
return response.data;
}
async function handleResetFromParent(
{
projectId,
branchIdOrName,
preserveUnderName,
}: {
projectId: string;
branchIdOrName: string;
preserveUnderName?: string;
},
neonClient: Api<unknown>,
) {
// Resolve branch name or ID to actual branch ID and get all branches in one call
const { branchId: resolvedBranchId, branches } = await resolveBranchId(
branchIdOrName,
projectId,
neonClient,
);
const branch = branches.find((b) => b.id === resolvedBranchId);
if (!branch) {
throw new NotFoundError(
`Branch "${branchIdOrName}" not found in project ${projectId}`,
);
}
// Find the parent branch and validate it exists
const parentBranch = branch.parent_id
? branches.find((b) => b.id === branch.parent_id)
: undefined;
if (!parentBranch) {
throw new InvalidArgumentError(
`Branch "${branchIdOrName}" does not have a parent branch and cannot be reset`,
);
}
// Check if the branch has children
const hasChildren = branches.some((b) => b.parent_id === resolvedBranchId);
// Auto-generate preserve name if branch has children and none was provided
let finalPreserveName = preserveUnderName;
if (hasChildren && !preserveUnderName) {
const timestamp = new Date()
.toISOString()
.replace(/[:.]/g, '-')
.slice(0, -5);
finalPreserveName = `${branch.name}_old_${timestamp}`;
}
// Call the restoreProjectBranch API
const response = await neonClient.restoreProjectBranch(
projectId,
resolvedBranchId,
{
source_branch_id: parentBranch.id,
preserve_under_name: finalPreserveName,
},
);
return {
...response.data,
preservedBranchName: finalPreserveName,
parentBranch,
};
}
async function handleGetConnectionString(
{
projectId,
branchId,
computeId,
databaseName,
roleName,
}: {
projectId?: string;
branchId?: string;
computeId?: string;
databaseName?: string;
roleName?: string;
},
neonClient: Api<unknown>,
extra: ToolHandlerExtraParams,
) {
return await startSpan(
{
name: 'get_connection_string',
},
async () => {
// If projectId is not provided, get the first project but only if there is only one project
if (!projectId) {
const projects = await handleListProjects({}, neonClient, extra);
if (projects.length === 1) {
projectId = projects[0].id;
} else {
throw new NotFoundError(
'Please provide a project ID or ensure you have only one project in your account.',
);
}
}
if (!branchId) {
const branches = await neonClient.listProjectBranches({
projectId,
});
const defaultBranch = branches.data.branches.find(
(branch) => branch.default,
);
if (defaultBranch) {
branchId = defaultBranch.id;
} else {
throw new NotFoundError(
'No default branch found in this project. Please provide a branch ID.',
);
}
}
// If databaseName is not provided, use default `neondb` or first database
let dbObject;
if (!databaseName) {
dbObject = await getDefaultDatabase(
{
projectId,
branchId,
databaseName,
},
neonClient,
);
databaseName = dbObject.name;
if (!roleName) {
roleName = dbObject.owner_name;
}
} else if (!roleName) {
const { data } = await neonClient.getProjectBranchDatabase(
projectId,
branchId,
databaseName,
);
roleName = data.database.owner_name;
}
// Get connection URI with the provided parameters
const connectionString = await neonClient.getConnectionUri({
projectId,
role_name: roleName,
database_name: databaseName,
branch_id: branchId,
endpoint_id: computeId,
});
return {
uri: connectionString.data.uri,
projectId,
branchId,
databaseName,
roleName,
computeId,
};
},
);
}
async function handleSchemaMigration(
{
migrationSql,
databaseName,
projectId,
}: {
databaseName?: string;
projectId: string;
migrationSql: string;
},
neonClient: Api<unknown>,
extra: ToolHandlerExtraParams,
) {
return await startSpan({ name: 'prepare_schema_migration' }, async (span) => {
const newBranch = await handleCreateBranch({ projectId }, neonClient);
if (!databaseName) {
const dbObject = await getDefaultDatabase(
{
projectId,
branchId: newBranch.branch.id,
databaseName,
},
neonClient,
);
databaseName = dbObject.name;
}
const result = await handleRunSqlTransaction(
{
sqlStatements: splitSqlStatements(migrationSql),
databaseName,
projectId,
branchId: newBranch.branch.id,
},
neonClient,
extra,
);
const migrationId = crypto.randomUUID();
span.setAttributes({
projectId,
migrationId,
});
persistMigrationToMemory(migrationId, {
migrationSql,
databaseName,
appliedBranch: newBranch.branch,
});
return {
branch: newBranch.branch,
migrationId,
migrationResult: result,
};
});
}
async function handleCommitMigration(
{ migrationId }: { migrationId: string },
neonClient: Api<unknown>,
extra: ToolHandlerExtraParams,
) {
return await startSpan({ name: 'commit_schema_migration' }, async (span) => {
span.setAttributes({
migrationId,
});
const migration = getMigrationFromMemory(migrationId);
if (!migration) {
throw new Error(`Migration not found: ${migrationId}`);
}
span.setAttributes({
projectId: migration.appliedBranch.project_id,
});
const result = await handleRunSqlTransaction(
{
sqlStatements: splitSqlStatements(migration.migrationSql),
databaseName: migration.databaseName,
projectId: migration.appliedBranch.project_id,
branchId: migration.appliedBranch.parent_id,
},
neonClient,
extra,
);
await handleDeleteBranch(
{
projectId: migration.appliedBranch.project_id,
branchId: migration.appliedBranch.id,
},
neonClient,
);
return {
deletedBranch: migration.appliedBranch,
migrationResult: result,
};
});
}
async function handleDescribeBranch(
{
projectId,
databaseName,
branchId,
}: {
projectId: string;
databaseName?: string;
branchId?: string;
},
neonClient: Api<unknown>,
extra: ToolHandlerExtraParams,
) {
const connectionString = await handleGetConnectionString(
{
projectId,
branchId,
databaseName,
},
neonClient,
extra,
);
const runQuery = neon(connectionString.uri);
const response = await runQuery.transaction(
DESCRIBE_DATABASE_STATEMENTS.map((sql) => runQuery.query(sql)),
);
return response;
}
async function handleExplainSqlStatement(
{
params,
}: {
params: {
sql: string;
databaseName?: string;
projectId: string;
branchId?: string;
analyze: boolean;
};
},
neonClient: Api<unknown>,
extra: ToolHandlerExtraParams,
) {
const explainPrefix = params.analyze
? 'EXPLAIN (ANALYZE, VERBOSE, BUFFERS, FILECACHE, FORMAT JSON)'
: 'EXPLAIN (VERBOSE, FORMAT JSON)';
const explainSql = `${explainPrefix} ${params.sql}`;
const result = await handleRunSql(
{
sql: explainSql,
databaseName: params.databaseName,
projectId: params.projectId,
branchId: params.branchId,
},
neonClient,
extra,
);
return {
content: [
{
type: 'text' as const,
text: JSON.stringify(result, null, 2),
},
],
};
}
async function createTemporaryBranch(
projectId: string,
neonClient: Api<unknown>,
): Promise<{ branch: Branch }> {
const result = await handleCreateBranch({ projectId }, neonClient);
if (!result?.branch) {
throw new Error('Failed to create temporary branch');
}
return result;
}
type QueryTuningParams = {
sql: string;
databaseName: string;
projectId: string;
};
type CompleteTuningParams = {
suggestedSqlStatements?: string[];
applyChanges?: boolean;
tuningId: string;
databaseName: string;
projectId: string;
temporaryBranch: Branch;
shouldDeleteTemporaryBranch?: boolean;
branch?: Branch;
};
type QueryTuningResult = {
tuningId: string;
databaseName: string;
projectId: string;
temporaryBranch: Branch;
originalPlan: any;
tableSchemas: any[];
sql: string;
baselineMetrics: QueryMetrics;
};
type CompleteTuningResult = {
appliedChanges?: string[];
results?: any;
deletedBranches?: string[];
message: string;
};
async function handleQueryTuning(
params: QueryTuningParams,
neonClient: Api<unknown>,
extra: ToolHandlerExtraParams,
): Promise<QueryTuningResult> {
let tempBranch: Branch | undefined;
const tuningId = crypto.randomUUID();
try {
// Create temporary branch
const newBranch = await createTemporaryBranch(params.projectId, neonClient);
if (!newBranch.branch) {
throw new Error('Failed to create temporary branch: branch is undefined');
}
tempBranch = newBranch.branch;
// Ensure all operations use the temporary branch
const branchParams = {
...params,
branchId: tempBranch.id,
};
// First, get the execution plan with table information
const executionPlan = await handleExplainSqlStatement(
{
params: {
sql: branchParams.sql,
databaseName: branchParams.databaseName,
projectId: branchParams.projectId,
branchId: tempBranch.id,
analyze: true,
},
},
neonClient,
extra,
);
// Extract table names from the plan
const tableNames = extractTableNamesFromPlan(executionPlan);
if (tableNames.length === 0) {
throw new NotFoundError(
'No tables found in execution plan. Cannot proceed with optimization.',
);
}
// Get schema information for all referenced tables in parallel
const tableSchemas = await Promise.all(
tableNames.map(async (tableName) => {
try {
const schema = await handleDescribeTableSchema(
{
tableName,
databaseName: branchParams.databaseName,
projectId: branchParams.projectId,
branchId: newBranch.branch.id,
},
neonClient,
extra,
);
return {
tableName,
schema: schema.raw,
formatted: schema.formatted,
};
} catch (error) {
throw new Error(
`Failed to get schema for table ${tableName}: ${(error as Error).message}`,
);
}
}),
);
// Get the baseline execution metrics
const baselineMetrics = extractExecutionMetrics(executionPlan);
// Return the information for analysis
const result: QueryTuningResult = {
tuningId,
databaseName: params.databaseName,
projectId: params.projectId,
temporaryBranch: tempBranch,
originalPlan: executionPlan,
tableSchemas,
sql: params.sql,
baselineMetrics,
};
return result;
} catch (error) {
// Always attempt to clean up the temporary branch if it was created
if (tempBranch) {
try {
await handleDeleteBranch(
{
projectId: params.projectId,
branchId: tempBranch.id,
},
neonClient,
);
} catch {
// No need to handle cleanup error
}
}
throw error;
}
}
// Helper function to extract execution metrics from EXPLAIN output
function extractExecutionMetrics(plan: any): QueryMetrics {
try {
const planJson =
typeof plan.content?.[0]?.text === 'string'
? JSON.parse(plan.content[0].text)
: plan;
const metrics: QueryMetrics = {
executionTime: 0,
planningTime: 0,
totalCost: 0,
actualRows: 0,
bufferUsage: {
shared: { hit: 0, read: 0, written: 0, dirtied: 0 },
local: { hit: 0, read: 0, written: 0, dirtied: 0 },
},
};
// Extract planning and execution time if available
if (planJson?.[0]?.['Planning Time']) {
metrics.planningTime = planJson[0]['Planning Time'];
}
if (planJson?.[0]?.['Execution Time']) {
metrics.executionTime = planJson[0]['Execution Time'];
}
// Recursively process plan nodes to accumulate costs and buffer usage
function processNode(node: any) {
if (!node || typeof node !== 'object') return;
// Accumulate costs
if (node['Total Cost']) {
metrics.totalCost = Math.max(metrics.totalCost, node['Total Cost']);
}
if (node['Actual Rows']) {
// eslint-disable-next-line @typescript-eslint/restrict-plus-operands
metrics.actualRows += node['Actual Rows'];
}
if (node['Shared Hit Blocks'])
// eslint-disable-next-line @typescript-eslint/restrict-plus-operands
metrics.bufferUsage.shared.hit += node['Shared Hit Blocks'];
if (node['Shared Read Blocks'])
// eslint-disable-next-line @typescript-eslint/restrict-plus-operands
metrics.bufferUsage.shared.read += node['Shared Read Blocks'];
if (node['Shared Written Blocks'])
// eslint-disable-next-line @typescript-eslint/restrict-plus-operands
metrics.bufferUsage.shared.written += node['Shared Written Blocks'];
if (node['Shared Dirtied Blocks'])
// eslint-disable-next-line @typescript-eslint/restrict-plus-operands
metrics.bufferUsage.shared.dirtied += node['Shared Dirtied Blocks'];
if (node['Local Hit Blocks'])
// eslint-disable-next-line @typescript-eslint/restrict-plus-operands
metrics.bufferUsage.local.hit += node['Local Hit Blocks'];
if (node['Local Read Blocks'])
// eslint-disable-next-line @typescript-eslint/restrict-plus-operands
metrics.bufferUsage.local.read += node['Local Read Blocks'];
if (node['Local Written Blocks'])
// eslint-disable-next-line @typescript-eslint/restrict-plus-operands
metrics.bufferUsage.local.written += node['Local Written Blocks'];
if (node['Local Dirtied Blocks'])
// eslint-disable-next-line @typescript-eslint/restrict-plus-operands
metrics.bufferUsage.local.dirtied += node['Local Dirtied Blocks'];
// Process child nodes
if (Array.isArray(node.Plans)) {
node.Plans.forEach(processNode);
}
}
if (planJson?.[0]?.Plan) {
processNode(planJson[0].Plan);
}
return metrics;
} catch {
return {
executionTime: 0,
planningTime: 0,
totalCost: 0,
actualRows: 0,
bufferUsage: {
shared: { hit: 0, read: 0, written: 0, dirtied: 0 },
local: { hit: 0, read: 0, written: 0, dirtied: 0 },
},
};
}
}
// Types for query metrics
type BufferMetrics = {
hit: number;
read: number;
written: number;
dirtied: number;
};
type QueryMetrics = {
executionTime: number;
planningTime: number;
totalCost: number;
actualRows: number;
bufferUsage: {
shared: BufferMetrics;
local: BufferMetrics;
};
};
// Function to extract table names from an execution plan
function extractTableNamesFromPlan(planResult: any): string[] {
const tableNames = new Set<string>();
function recursivelyExtractFromNode(node: any) {
if (!node || typeof node !== 'object') return;
// Check if current node has relation information
if (node['Relation Name'] && node.Schema) {
const tableName = `${node.Schema}.${node['Relation Name']}`;
tableNames.add(tableName);
}
// Recursively process all object properties and array elements
if (Array.isArray(node)) {
node.forEach((item) => {
recursivelyExtractFromNode(item);
});
} else {
Object.values(node).forEach((value) => {
recursivelyExtractFromNode(value);
});
}
}
try {
// Start with the raw plan result
recursivelyExtractFromNode(planResult);
// If we have content[0].text, also parse and process that
if (planResult?.content?.[0]?.text) {
try {
const parsedContent = JSON.parse(planResult.content[0].text);
recursivelyExtractFromNode(parsedContent);
} catch {
// No need to handle parse error
}
}
} catch {
// No need to handle extraction error
}
const result = Array.from(tableNames);
return result;
}
async function handleCompleteTuning(
params: CompleteTuningParams,
neonClient: Api<unknown>,
extra: ToolHandlerExtraParams,
): Promise<CompleteTuningResult> {
let results;
const operationLog: string[] = [];
try {
// Validate branch information
if (!params.temporaryBranch) {
throw new Error(
'Branch information is required for completing query tuning',
);
}
// Only proceed with changes if we have both suggestedChanges and branch
if (
params.applyChanges &&
params.suggestedSqlStatements &&
params.suggestedSqlStatements.length > 0
) {
operationLog.push('Applying optimizations to main branch...');
results = await handleRunSqlTransaction(
{
sqlStatements: params.suggestedSqlStatements,
databaseName: params.databaseName,
projectId: params.projectId,
branchId: params.branch?.id,
},
neonClient,
extra,
);
operationLog.push('Successfully applied optimizations to main branch.');
} else {
operationLog.push(
'No changes were applied (either none suggested or changes were discarded).',
);
}
// Only delete branch if shouldDeleteTemporaryBranch is true
if (params.shouldDeleteTemporaryBranch && params.temporaryBranch) {
operationLog.push('Cleaning up temporary branch...');
await handleDeleteBranch(
{
projectId: params.projectId,
branchId: params.temporaryBranch.id,
},
neonClient,
);
operationLog.push('Successfully cleaned up temporary branch.');
}
const result: CompleteTuningResult = {
appliedChanges:
params.applyChanges && params.suggestedSqlStatements
? params.suggestedSqlStatements
: undefined,
results,
deletedBranches:
params.shouldDeleteTemporaryBranch && params.temporaryBranch
? [params.temporaryBranch.id]
: undefined,
message: operationLog.join('\n'),
};
return result;
} catch (error) {
throw new Error(
`Failed to complete query tuning: ${(error as Error).message}`,
);
}
}
async function handleListSlowQueries(
{
projectId,
branchId,
databaseName,
computeId,
limit = 10,
}: {
projectId: string;
branchId?: string;
databaseName?: string;
computeId?: string;
limit?: number;
},
neonClient: Api<unknown>,
extra: ToolHandlerExtraParams,
) {
// Get connection string
const connectionString = await handleGetConnectionString(
{
projectId,
branchId,
computeId,
databaseName,
},
neonClient,
extra,
);
// Connect to the database
const sql = neon(connectionString.uri);
// First, check if pg_stat_statements extension is installed
const checkExtensionQuery = `
SELECT EXISTS (
SELECT 1 FROM pg_extension WHERE extname = 'pg_stat_statements'
) as extension_exists;
`;
const extensionCheck = await sql.query(checkExtensionQuery);
const extensionExists = extensionCheck[0]?.extension_exists;
if (!extensionExists) {
throw new NotFoundError(
`pg_stat_statements extension is not installed on the database. Please install it using the following command: CREATE EXTENSION pg_stat_statements;`,
);
}
// Query to get slow queries
const slowQueriesQuery = `
SELECT
query,
calls,
total_exec_time,
mean_exec_time,
rows,
shared_blks_hit,
shared_blks_read,
shared_blks_written,
shared_blks_dirtied,
temp_blks_read,
temp_blks_written,
wal_records,
wal_fpi,
wal_bytes
FROM pg_stat_statements
WHERE query NOT LIKE '%pg_stat_statements%'
AND query NOT LIKE '%EXPLAIN%'
ORDER BY mean_exec_time DESC
LIMIT $1;
`;
const slowQueries = await sql.query(slowQueriesQuery, [limit]);
// Format the results
const formattedQueries = slowQueries.map((query: any) => {
return {
query: query.query,
calls: query.calls,
total_exec_time_ms: query.total_exec_time,
mean_exec_time_ms: query.mean_exec_time,
rows: query.rows,
shared_blocks: {
hit: query.shared_blks_hit,
read: query.shared_blks_read,
written: query.shared_blks_written,
dirtied: query.shared_blks_dirtied,
},
temp_blocks: {
read: query.temp_blks_read,
written: query.temp_blks_written,
},
io_time: {
read_ms: query.blk_read_time,
write_ms: query.blk_write_time,
},
wal: {
records: query.wal_records,
full_page_images: query.wal_fpi,
bytes: query.wal_bytes,
},
};
});
return {
slow_queries: formattedQueries,
total_queries_found: formattedQueries.length,
};
}
async function handleListBranchComputes(
{
projectId,
branchId,
}: {
projectId?: string;
branchId?: string;
},
neonClient: Api<unknown>,
extra: ToolHandlerExtraParams,
) {
// If projectId is not provided, get the first project but only if there is only one project
if (!projectId) {
const projects = await handleListProjects({}, neonClient, extra);
if (projects.length === 1) {
projectId = projects[0].id;
} else {
throw new InvalidArgumentError(
'Please provide a project ID or ensure you have only one project in your account.',
);
}
}
let endpoints;
if (branchId) {
const response = await neonClient.listProjectBranchEndpoints(
projectId,
branchId,
);
endpoints = response.data.endpoints;
} else {
const response = await neonClient.listProjectEndpoints(projectId);
endpoints = response.data.endpoints;
}
return endpoints.map((endpoint) => ({
compute_id: endpoint.id,
compute_type: endpoint.type,
compute_size:
endpoint.autoscaling_limit_min_cu !== endpoint.autoscaling_limit_max_cu
? `${endpoint.autoscaling_limit_min_cu}-${endpoint.autoscaling_limit_max_cu}`
: endpoint.autoscaling_limit_min_cu,
last_active: endpoint.last_active,
...endpoint,
}));
}
async function handleListOrganizations(
neonClient: Api<unknown>,
account: ToolHandlerExtraParams['account'],
search?: string,
): Promise<Organization[]> {
if (account.isOrg) {
const orgId = account.id;
const { data } = await neonClient.getOrganization(orgId);
return filterOrganizations([data], search);
}
const { data: response } = await neonClient.getCurrentUserOrganizations();
const organizations = response.organizations || [];
return filterOrganizations(organizations, search);
}
async function handleListSharedProjects(
params: ListSharedProjectsParams,
neonClient: Api<unknown>,
) {
const response = await neonClient.listSharedProjects(params);
return response.data.projects;
}
async function handleCompareDatabaseSchema(
params: GetProjectBranchSchemaComparisonParams,
neonClient: Api<unknown>,
) {
const response = await neonClient.getProjectBranchSchemaComparison(params);
return response.data;
}
export const NEON_HANDLERS = {
list_projects: async ({ params }, neonClient, extra) => {
const organization = await getOrgByOrgIdOrDefault(
params,
neonClient,
extra,
);
const projects = await handleListProjects(
{ ...params, org_id: organization?.id },
neonClient,
extra,
);
return {
content: [
{
type: 'text',
text: JSON.stringify(
{
organization: organization
? {
name: organization.name,
id: organization.id,
}
: undefined,
projects,
},
null,
2,
),
},
],
};
},
create_project: async ({ params }, neonClient, extra) => {
try {
const organization = await getOrgByOrgIdOrDefault(
params,
neonClient,
extra,
);
const result = await handleCreateProject(
{ project: { name: params.name, org_id: organization?.id } },
neonClient,
);
// Get the connection string for the newly created project
const connectionString = await handleGetConnectionString(
{
projectId: result.project.id,
branchId: result.branch.id,
databaseName: result.databases[0].name,
},
neonClient,
extra,
);
return {
content: [
{
type: 'text',
text: [
`Your Neon project is created ${organization ? `in organization "${organization.name}"` : ''} and is ready.`,
`The project_id is "${result.project.id}"`,
`The branch name is "${result.branch.name}" (ID: ${result.branch.id})`,
`There is one database available on this branch, called "${result.databases[0].name}",`,
'but you can create more databases using SQL commands.',
'',
'Connection string details:',
`URI: ${connectionString.uri}`,
`Project ID: ${connectionString.projectId}`,
`Branch ID: ${connectionString.branchId}`,
`Database: ${connectionString.databaseName}`,
`Role: ${connectionString.roleName}`,
'',
'You can use this connection string with any PostgreSQL client to connect to your Neon database.',
'For example, with psql:',
`psql "${connectionString.uri}"`,
].join('\n'),
},
],
};
} catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error';
return {
isError: true,
content: [
{
type: 'text',
text: [
'An error occurred while creating the project.',
'Error details:',
message,
'If you have reached the Neon project limit, please upgrade your account in this link: https://console.neon.tech/app/billing',
].join('\n'),
},
],
};
}
},
delete_project: async ({ params }, neonClient) => {
await handleDeleteProject(params.projectId, neonClient);
return {
content: [
{
type: 'text',
text: [
'Project deleted successfully.',
`Project ID: ${params.projectId}`,
].join('\n'),
},
],
};
},
describe_project: async ({ params }, neonClient) => {
const result = await handleDescribeProject(params.projectId, neonClient);
return {
content: [
{
type: 'text',
text: `This project is called ${result.project.project.name}.`,
},
{
type: 'text',
text: `It contains the following branches (use the describe branch tool to learn more about each branch): ${JSON.stringify(
result.branches,
null,
2,
)}`,
},
],
};
},
run_sql: async ({ params }, neonClient, extra) => {
const result = await handleRunSql(
{
sql: params.sql,
databaseName: params.databaseName,
projectId: params.projectId,
branchId: params.branchId,
},
neonClient,
extra,
);
return {
content: [{ type: 'text', text: JSON.stringify(result, null, 2) }],
};
},
run_sql_transaction: async ({ params }, neonClient, extra) => {
const result = await handleRunSqlTransaction(
{
sqlStatements: params.sqlStatements,
databaseName: params.databaseName,
projectId: params.projectId,
branchId: params.branchId,
},
neonClient,
extra,
);
return {
content: [{ type: 'text', text: JSON.stringify(result, null, 2) }],
};
},
describe_table_schema: async ({ params }, neonClient, extra) => {
const result = await handleDescribeTableSchema(
{
tableName: params.tableName,
databaseName: params.databaseName,
projectId: params.projectId,
branchId: params.branchId,
},
neonClient,
extra,
);
return {
content: [{ type: 'text', text: JSON.stringify(result, null, 2) }],
};
},
get_database_tables: async ({ params }, neonClient, extra) => {
const result = await handleGetDatabaseTables(
{
projectId: params.projectId,
branchId: params.branchId,
databaseName: params.databaseName,
},
neonClient,
extra,
);
return {
content: [
{
type: 'text',
text: JSON.stringify(result, null, 2),
},
],
};
},
create_branch: async ({ params }, neonClient) => {
const result = await handleCreateBranch(
{
projectId: params.projectId,
branchName: params.branchName,
},
neonClient,
);
return {
content: [
{
type: 'text',
text: [
'Branch created successfully.',
`Project ID: ${result.branch.project_id}`,
`Branch ID: ${result.branch.id}`,
`Branch name: ${result.branch.name}`,
`Parent branch: ${result.branch.parent_id}`,
].join('\n'),
},
],
};
},
prepare_database_migration: async ({ params }, neonClient, extra) => {
const result = await handleSchemaMigration(
{
migrationSql: params.migrationSql,
databaseName: params.databaseName,
projectId: params.projectId,
},
neonClient,
extra,
);
return {
content: [
{
type: 'text',
text: `
<status>Migration created successfully in temporary branch</status>
<details>
<migration_id>${result.migrationId}</migration_id>
<temporary_branch>
<name>${result.branch.name}</name>
<id>${result.branch.id}</id>
</temporary_branch>
</details>
<execution_result>${JSON.stringify(result.migrationResult, null, 2)}</execution_result>
<next_actions>
You MUST follow these steps:
1. Test this migration using \`run_sql\` tool on branch \`${result.branch.name}\`
2. Verify the changes meet your requirements
3. If satisfied, use \`complete_database_migration\` with migration_id: ${result.migrationId}
</next_actions>
`,
},
],
};
},
complete_database_migration: async ({ params }, neonClient, extra) => {
const result = await handleCommitMigration(
{
migrationId: params.migrationId,
},
neonClient,
extra,
);
return {
content: [
{
type: 'text',
text: `Result: ${JSON.stringify(
{
deletedBranch: result.deletedBranch,
migrationResult: result.migrationResult,
},
null,
2,
)}`,
},
],
};
},
describe_branch: async ({ params }, neonClient, extra) => {
const result = await handleDescribeBranch(
{
projectId: params.projectId,
branchId: params.branchId,
databaseName: params.databaseName,
},
neonClient,
extra,
);
return {
content: [
{
type: 'text',
text: ['Database Structure:', JSON.stringify(result, null, 2)].join(
'\n',
),
},
],
};
},
delete_branch: async ({ params }, neonClient) => {
await handleDeleteBranch(
{
projectId: params.projectId,
branchId: params.branchId,
},
neonClient,
);
return {
content: [
{
type: 'text',
text: [
'Branch deleted successfully.',
`Project ID: ${params.projectId}`,
`Branch ID: ${params.branchId}`,
].join('\n'),
},
],
};
},
reset_from_parent: async ({ params }, neonClient) => {
const result = await handleResetFromParent(
{
projectId: params.projectId,
branchIdOrName: params.branchIdOrName,
preserveUnderName: params.preserveUnderName,
},
neonClient,
);
const parentInfo = `${result.parentBranch.name} (${result.parentBranch.id})`;
const messages = [
'Branch reset from parent successfully.',
`Project: ${params.projectId}`,
`Branch: ${params.branchIdOrName}`,
`Reset to parent branch: ${parentInfo}`,
];
if (result.preservedBranchName) {
messages.push(
params.preserveUnderName
? `Previous state preserved as: ${params.preserveUnderName}`
: `Previous state auto-preserved as: ${result.preservedBranchName} (branch had children)`,
);
} else {
messages.push('Previous state was not preserved');
}
return {
content: [
{
type: 'text',
text: messages.join('\n'),
},
],
};
},
get_connection_string: async ({ params }, neonClient, extra) => {
const result = await handleGetConnectionString(
{
projectId: params.projectId,
branchId: params.branchId,
computeId: params.computeId,
databaseName: params.databaseName,
roleName: params.roleName,
},
neonClient,
extra,
);
return {
content: [
{
type: 'text',
text: [
'Connection string details:',
`URI: ${result.uri}`,
`Project ID: ${result.projectId}`,
`Database: ${result.databaseName}`,
`Role: ${result.roleName}`,
result.branchId
? `Branch ID: ${result.branchId}`
: 'Using default branch',
result.computeId
? `Compute ID: ${result.computeId}`
: 'Using default compute',
'',
'You can use this connection string with any PostgreSQL client to connect to your Neon database.',
].join('\n'),
},
],
};
},
provision_neon_auth: async ({ params }, neonClient) => {
const result = await handleProvisionNeonAuth(
{
projectId: params.projectId,
database: params.database,
},
neonClient,
);
return result;
},
explain_sql_statement: async ({ params }, neonClient, extra) => {
const result = await handleExplainSqlStatement(
{ params },
neonClient,
extra,
);
return result;
},
prepare_query_tuning: async ({ params }, neonClient, extra) => {
const result = await handleQueryTuning(
{
sql: params.sql,
databaseName: params.databaseName,
projectId: params.projectId,
},
neonClient,
extra,
);
return {
content: [
{
type: 'text',
text: JSON.stringify(
{
tuningId: result.tuningId,
databaseName: result.databaseName,
projectId: result.projectId,
temporaryBranch: result.temporaryBranch,
executionPlan: result.originalPlan,
tableSchemas: result.tableSchemas,
sql: result.sql,
},
null,
2,
),
},
],
};
},
complete_query_tuning: async ({ params }, neonClient, extra) => {
const result = await handleCompleteTuning(
{
suggestedSqlStatements: params.suggestedSqlStatements,
applyChanges: params.applyChanges,
tuningId: params.tuningId,
databaseName: params.databaseName,
projectId: params.projectId,
temporaryBranch: {
id: params.temporaryBranchId,
project_id: params.projectId,
} as Branch,
shouldDeleteTemporaryBranch: params.shouldDeleteTemporaryBranch,
branch: params.branchId
? ({ id: params.branchId, project_id: params.projectId } as Branch)
: undefined,
},
neonClient,
extra,
);
return {
content: [
{
type: 'text',
text: JSON.stringify(result, null, 2),
},
],
};
},
list_slow_queries: async ({ params }, neonClient, extra) => {
const result = await handleListSlowQueries(
{
projectId: params.projectId,
branchId: params.branchId,
databaseName: params.databaseName,
computeId: params.computeId,
limit: params.limit,
},
neonClient,
extra,
);
return {
content: [
{
type: 'text',
text: JSON.stringify(result, null, 2),
},
],
};
},
list_branch_computes: async ({ params }, neonClient, extra) => {
const result = await handleListBranchComputes(
{
projectId: params.projectId,
branchId: params.branchId,
},
neonClient,
extra,
);
return {
content: [{ type: 'text', text: JSON.stringify(result, null, 2) }],
};
},
list_organizations: async ({ params }, neonClient, extra) => {
const organizations = await handleListOrganizations(
neonClient,
extra.account,
params.search,
);
return {
content: [
{
type: 'text',
text: JSON.stringify(organizations, null, 2),
},
],
};
},
list_shared_projects: async ({ params }, neonClient) => {
const sharedProjects = await handleListSharedProjects(params, neonClient);
return {
content: [
{
type: 'text',
text: JSON.stringify(
{
shared_projects: sharedProjects,
count: sharedProjects.length,
},
null,
2,
),
},
],
};
},
compare_database_schema: async ({ params }, neonClient) => {
const result = await handleCompareDatabaseSchema(
{
projectId: params.projectId,
branchId: params.branchId,
db_name: params.databaseName,
},
neonClient,
);
return {
content: [{ type: 'text', text: JSON.stringify(result, null, 2) }],
};
},
} satisfies ToolHandlers;
```