This is page 2 of 3. Use http://codebase.md/supabase-community/supabase-mcp?page={x} to view the full context.
# Directory Structure
```
├── .github
│   └── workflows
│       └── tests.yml
├── .gitignore
├── .nvmrc
├── .vscode
│   └── settings.json
├── biome.json
├── CONTRIBUTING.md
├── docs
│   └── production.md
├── LICENSE
├── package.json
├── packages
│   ├── mcp-server-postgrest
│   │   ├── package.json
│   │   ├── README.md
│   │   ├── src
│   │   │   ├── index.ts
│   │   │   ├── server.test.ts
│   │   │   ├── server.ts
│   │   │   ├── stdio.ts
│   │   │   └── util.ts
│   │   ├── tsconfig.json
│   │   └── tsup.config.ts
│   ├── mcp-server-supabase
│   │   ├── .gitignore
│   │   ├── package.json
│   │   ├── scripts
│   │   │   └── registry
│   │   │       ├── login.sh
│   │   │       └── update-version.ts
│   │   ├── server.json
│   │   ├── src
│   │   │   ├── content-api
│   │   │   │   ├── graphql.test.ts
│   │   │   │   ├── graphql.ts
│   │   │   │   └── index.ts
│   │   │   ├── edge-function.test.ts
│   │   │   ├── edge-function.ts
│   │   │   ├── index.test.ts
│   │   │   ├── index.ts
│   │   │   ├── logs.ts
│   │   │   ├── management-api
│   │   │   │   ├── index.ts
│   │   │   │   └── types.ts
│   │   │   ├── password.test.ts
│   │   │   ├── password.ts
│   │   │   ├── pg-meta
│   │   │   │   ├── columns.sql
│   │   │   │   ├── extensions.sql
│   │   │   │   ├── index.ts
│   │   │   │   ├── tables.sql
│   │   │   │   └── types.ts
│   │   │   ├── platform
│   │   │   │   ├── api-platform.ts
│   │   │   │   ├── index.ts
│   │   │   │   └── types.ts
│   │   │   ├── pricing.ts
│   │   │   ├── regions.ts
│   │   │   ├── server.test.ts
│   │   │   ├── server.ts
│   │   │   ├── tools
│   │   │   │   ├── account-tools.ts
│   │   │   │   ├── branching-tools.ts
│   │   │   │   ├── database-operation-tools.ts
│   │   │   │   ├── debugging-tools.ts
│   │   │   │   ├── development-tools.ts
│   │   │   │   ├── docs-tools.ts
│   │   │   │   ├── edge-function-tools.ts
│   │   │   │   ├── storage-tools.ts
│   │   │   │   └── util.ts
│   │   │   ├── transports
│   │   │   │   ├── stdio.ts
│   │   │   │   ├── util.test.ts
│   │   │   │   └── util.ts
│   │   │   ├── types
│   │   │   │   └── sql.d.ts
│   │   │   ├── types.test.ts
│   │   │   ├── types.ts
│   │   │   ├── util.test.ts
│   │   │   └── util.ts
│   │   ├── test
│   │   │   ├── e2e
│   │   │   │   ├── functions.e2e.ts
│   │   │   │   ├── projects.e2e.ts
│   │   │   │   ├── prompt-injection.e2e.ts
│   │   │   │   ├── setup.ts
│   │   │   │   └── utils.ts
│   │   │   ├── extensions.d.ts
│   │   │   ├── extensions.ts
│   │   │   ├── mocks.ts
│   │   │   ├── plugins
│   │   │   │   └── text-loader.ts
│   │   │   └── stdio.integration.ts
│   │   ├── tsconfig.json
│   │   ├── tsup.config.ts
│   │   ├── vitest.config.ts
│   │   ├── vitest.setup.ts
│   │   └── vitest.workspace.ts
│   └── mcp-utils
│       ├── package.json
│       ├── README.md
│       ├── src
│       │   ├── index.ts
│       │   ├── server.test.ts
│       │   ├── server.ts
│       │   ├── stream-transport.ts
│       │   ├── types.ts
│       │   ├── util.test.ts
│       │   └── util.ts
│       ├── tsconfig.json
│       └── tsup.config.ts
├── pnpm-lock.yaml
├── pnpm-workspace.yaml
├── README.md
└── supabase
    ├── config.toml
    ├── migrations
    │   ├── 20241220232417_todos.sql
    │   └── 20250109000000_add_todo_policies.sql
    └── seed.sql
```
# Files
--------------------------------------------------------------------------------
/packages/mcp-utils/src/server.ts:
--------------------------------------------------------------------------------
```typescript
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import {
CallToolRequestSchema,
ListResourcesRequestSchema,
ListResourceTemplatesRequestSchema,
ListToolsRequestSchema,
ReadResourceRequestSchema,
type ClientCapabilities,
type Implementation,
type ListResourcesResult,
type ListResourceTemplatesResult,
type ReadResourceResult,
type ServerCapabilities,
type ListToolsResult,
} from '@modelcontextprotocol/sdk/types.js';
import type { z } from 'zod';
import zodToJsonSchema from 'zod-to-json-schema';
import type {
ExpandRecursively,
ExtractNotification,
ExtractParams,
ExtractRequest,
ExtractResult,
} from './types.js';
import { assertValidUri, compareUris, matchUriTemplate } from './util.js';
export type Scheme = string;
export type Annotations = NonNullable<
ListToolsResult['tools'][number]['annotations']
>;
export type Resource<Uri extends string = string, Result = unknown> = {
uri: Uri;
name: string;
description?: string;
mimeType?: string;
read(uri: `${Scheme}://${Uri}`): Promise<Result>;
};
export type ResourceTemplate<Uri extends string = string, Result = unknown> = {
uriTemplate: Uri;
name: string;
description?: string;
mimeType?: string;
read(
uri: `${Scheme}://${Uri}`,
params: {
[Param in ExtractParams<Uri>]: string;
}
): Promise<Result>;
};
export type Tool<
Params extends z.ZodObject<any> = z.ZodObject<any>,
Result = unknown,
> = {
description: Prop<string>;
annotations?: Annotations;
parameters: Params;
execute(params: z.infer<Params>): Promise<Result>;
};
/**
* Helper function to define an MCP resource while preserving type information.
*/
export function resource<Uri extends string, Result>(
uri: Uri,
resource: Omit<Resource<Uri, Result>, 'uri'>
): Resource<Uri, Result> {
return {
uri,
...resource,
};
}
/**
* Helper function to define an MCP resource with a URI template while preserving type information.
*/
export function resourceTemplate<Uri extends string, Result>(
uriTemplate: Uri,
resource: Omit<ResourceTemplate<Uri, Result>, 'uriTemplate'>
): ResourceTemplate<Uri, Result> {
return {
uriTemplate,
...resource,
};
}
/**
* Helper function to define a JSON resource while preserving type information.
*/
export function jsonResource<Uri extends string, Result>(
uri: Uri,
resource: Omit<Resource<Uri, Result>, 'uri' | 'mimeType'>
): Resource<Uri, Result> {
return {
uri,
mimeType: 'application/json' as const,
...resource,
};
}
/**
* Helper function to define a JSON resource with a URI template while preserving type information.
*/
export function jsonResourceTemplate<Uri extends string, Result>(
uriTemplate: Uri,
resource: Omit<ResourceTemplate<Uri, Result>, 'uriTemplate' | 'mimeType'>
): ResourceTemplate<Uri, Result> {
return {
uriTemplate,
mimeType: 'application/json' as const,
...resource,
};
}
/**
* Helper function to define a list of resources that share a common URI scheme.
*/
export function resources<Scheme extends string>(
scheme: Scheme,
resources: (Resource | ResourceTemplate)[]
): (
| Resource<`${Scheme}://${string}`>
| ResourceTemplate<`${Scheme}://${string}`>
)[] {
return resources.map((resource) => {
if ('uri' in resource) {
const url = new URL(resource.uri, `${scheme}://`);
const uri = decodeURI(url.href) as `${Scheme}://${typeof resource.uri}`;
return {
...resource,
uri,
};
}
const url = new URL(resource.uriTemplate, `${scheme}://`);
const uriTemplate = decodeURI(
url.href
) as `${Scheme}://${typeof resource.uriTemplate}`;
return {
...resource,
uriTemplate,
};
});
}
/**
* Helper function to create a JSON resource response.
*/
export function jsonResourceResponse<Uri extends string, Response>(
uri: Uri,
response: Response
) {
return {
uri,
mimeType: 'application/json',
text: JSON.stringify(response),
};
}
/**
* Helper function to define an MCP tool while preserving type information.
*/
export function tool<Params extends z.ZodObject<any>, Result>(
tool: Tool<Params, Result>
) {
return tool;
}
export type InitData = {
clientInfo: Implementation;
clientCapabilities: ClientCapabilities;
};
type ToolCallBaseDetails = {
name: string;
arguments: Record<string, unknown>;
annotations?: Annotations;
};
type ToolCallSuccessDetails = ToolCallBaseDetails & {
success: true;
data: unknown;
};
type ToolCallErrorDetails = ToolCallBaseDetails & {
success: false;
error: unknown;
};
export type ToolCallDetails = ToolCallSuccessDetails | ToolCallErrorDetails;
export type InitCallback = (initData: InitData) => void | Promise<void>;
export type ToolCallCallback = (details: ToolCallDetails) => void;
export type PropCallback<T> = () => T | Promise<T>;
export type Prop<T> = T | PropCallback<T>;
export type McpServerOptions = {
/**
* The name of the MCP server. This will be sent to the client as part of
* the initialization process.
*/
name: string;
/**
* The title of the MCP server. This is a human-readable name that can be
* displayed in the client UI.
*
* If not provided, the name will be used as the title.
*/
title?: string;
/**
* The version of the MCP server. This will be sent to the client as part of
* the initialization process.
*/
version: string;
/**
* Callback for when initialization has fully completed with the client.
*/
onInitialize?: InitCallback;
/**
* Callback for after a tool is called.
*/
onToolCall?: ToolCallCallback;
/**
* Resources to be served by the server. These can be defined as a static
* object or as a function that dynamically returns the object synchronously
* or asynchronously.
*
* If defined as a function, the function will be called whenever the client
* asks for the list of resources or reads a resource. This allows for dynamic
* resources that can change after the server has started.
*/
resources?: Prop<
(Resource<string, unknown> | ResourceTemplate<string, unknown>)[]
>;
/**
* Tools to be served by the server. These can be defined as a static object
* or as a function that dynamically returns the object synchronously or
* asynchronously.
*
* If defined as a function, the function will be called whenever the client
* asks for the list of tools or invokes a tool. This allows for dynamic tools
* that can change after the server has started.
*/
tools?: Prop<Record<string, Tool>>;
};
/**
* Creates an MCP server with the given options.
*
* Simplifies the process of creating an MCP server by providing a high-level
* API for defining resources and tools.
*/
export function createMcpServer(options: McpServerOptions) {
const capabilities: ServerCapabilities = {};
if (options.resources) {
capabilities.resources = {};
}
if (options.tools) {
capabilities.tools = {};
}
const server = new Server(
{
name: options.name,
title: options.title,
version: options.version,
},
{
capabilities,
}
);
async function getResources() {
if (!options.resources) {
throw new Error('resources not available');
}
return typeof options.resources === 'function'
? await options.resources()
: options.resources;
}
async function getTools() {
if (!options.tools) {
throw new Error('tools not available');
}
return typeof options.tools === 'function'
? await options.tools()
: options.tools;
}
server.oninitialized = async () => {
const clientInfo = server.getClientVersion();
const clientCapabilities = server.getClientCapabilities();
if (!clientInfo) {
throw new Error('client info not available after initialization');
}
if (!clientCapabilities) {
throw new Error('client capabilities not available after initialization');
}
const initData: InitData = {
clientInfo,
clientCapabilities,
};
await options.onInitialize?.(initData);
};
if (options.resources) {
server.setRequestHandler(
ListResourcesRequestSchema,
async (): Promise<ListResourcesResult> => {
const allResources = await getResources();
return {
resources: allResources
.filter((resource) => 'uri' in resource)
.map(({ uri, name, description, mimeType }) => {
return {
uri,
name,
description,
mimeType,
};
}),
};
}
);
server.setRequestHandler(
ListResourceTemplatesRequestSchema,
async (): Promise<ListResourceTemplatesResult> => {
const allResources = await getResources();
return {
resourceTemplates: allResources
.filter((resource) => 'uriTemplate' in resource)
.map(({ uriTemplate, name, description, mimeType }) => {
return {
uriTemplate,
name,
description,
mimeType,
};
}),
};
}
);
server.setRequestHandler(
ReadResourceRequestSchema,
async (request): Promise<ReadResourceResult> => {
try {
const allResources = await getResources();
const { uri } = request.params;
const resources = allResources.filter(
(resource) => 'uri' in resource
);
const resource = resources.find((resource) =>
compareUris(resource.uri, uri)
);
if (resource) {
const result = await resource.read(uri as `${string}://${string}`);
const contents = Array.isArray(result) ? result : [result];
return {
contents,
};
}
const resourceTemplates = allResources.filter(
(resource) => 'uriTemplate' in resource
);
const resourceTemplateUris = resourceTemplates.map(
({ uriTemplate }) => assertValidUri(uriTemplate)
);
const templateMatch = matchUriTemplate(uri, resourceTemplateUris);
if (!templateMatch) {
throw new Error('resource not found');
}
const resourceTemplate = resourceTemplates.find(
(r) => r.uriTemplate === templateMatch.uri
);
if (!resourceTemplate) {
throw new Error('resource not found');
}
const result = await resourceTemplate.read(
uri as `${string}://${string}`,
templateMatch.params
);
const contents = Array.isArray(result) ? result : [result];
return {
contents,
};
} catch (error) {
return {
isError: true,
content: [
{
type: 'text',
text: JSON.stringify({ error: enumerateError(error) }),
},
],
} as any;
}
}
);
}
if (options.tools) {
server.setRequestHandler(
ListToolsRequestSchema,
async (): Promise<ListToolsResult> => {
const tools = await getTools();
return {
tools: await Promise.all(
Object.entries(tools).map(
async ([name, { description, annotations, parameters }]) => {
const inputSchema = zodToJsonSchema(parameters);
if (!('properties' in inputSchema)) {
throw new Error('tool parameters must be a ZodObject');
}
return {
name,
description:
typeof description === 'function'
? await description()
: description,
annotations,
inputSchema,
};
}
)
),
} satisfies ListToolsResult;
}
);
server.setRequestHandler(CallToolRequestSchema, async (request) => {
try {
const tools = await getTools();
const toolName = request.params.name;
if (!(toolName in tools)) {
throw new Error('tool not found');
}
const tool = tools[toolName];
if (!tool) {
throw new Error('tool not found');
}
const args = tool.parameters
.strict()
.parse(request.params.arguments ?? {});
const executeWithCallback = async (tool: Tool) => {
// Wrap success or error in a result value
const res = await tool
.execute(args)
.then((data: unknown) => ({ success: true as const, data }))
.catch((error) => ({ success: false as const, error }));
try {
options.onToolCall?.({
name: toolName,
arguments: args,
annotations: tool.annotations,
...res,
});
} catch (error) {
// Don't fail the tool call if the callback fails
console.error('Failed to run tool callback', error);
}
// Unwrap result
if (!res.success) {
throw res.error;
}
return res.data;
};
const result = await executeWithCallback(tool);
const content = result
? [{ type: 'text', text: JSON.stringify(result) }]
: [];
return {
content,
};
} catch (error) {
return {
isError: true,
content: [
{
type: 'text',
text: JSON.stringify({ error: enumerateError(error) }),
},
],
};
}
});
}
// Expand types recursively for better intellisense
type Request = ExpandRecursively<ExtractRequest<typeof server>>;
type Notification = ExpandRecursively<ExtractNotification<typeof server>>;
type Result = ExpandRecursively<ExtractResult<typeof server>>;
return server as Server<Request, Notification, Result>;
}
function enumerateError(error: unknown) {
if (!error) {
return error;
}
if (typeof error !== 'object') {
return error;
}
const newError: Record<string, unknown> = {};
const errorProps = ['name', 'message'] as const;
for (const prop of errorProps) {
if (prop in error) {
newError[prop] = (error as Record<string, unknown>)[prop];
}
}
return newError;
}
```
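For context, here is a minimal usage sketch of the helpers defined above. It assumes `createMcpServer`, `resources`, `jsonResource`, `jsonResourceResponse`, and `tool` are re-exported from the `@supabase/mcp-utils` entry point; it is illustrative and not part of the repo.
```typescript
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import { z } from 'zod';
import {
  createMcpServer,
  jsonResource,
  jsonResourceResponse,
  resources,
  tool,
} from '@supabase/mcp-utils';

// A server with one static JSON resource and one tool. Both `resources` and
// `tools` could instead be (async) functions for dynamic lists.
const server = createMcpServer({
  name: 'example-server',
  version: '0.1.0',
  resources: resources('example', [
    jsonResource('/config', {
      name: 'Config',
      description: 'Example static configuration',
      read: async (uri) => jsonResourceResponse(uri, { hello: 'world' }),
    }),
  ]),
  tools: {
    add: tool({
      description: 'Adds two numbers',
      parameters: z.object({ a: z.number(), b: z.number() }),
      execute: async ({ a, b }) => ({ sum: a + b }),
    }),
  },
});

// Connect over stdio (any MCP transport works).
await server.connect(new StdioServerTransport());
```
Note how `resources('example', [...])` prefixes each URI with the scheme, so the resource above is served as `example:///config`.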
--------------------------------------------------------------------------------
/packages/mcp-server-postgrest/src/server.test.ts:
--------------------------------------------------------------------------------
```typescript
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { AuthClient } from '@supabase/auth-js';
import { StreamTransport } from '@supabase/mcp-utils';
import { describe, expect, test } from 'vitest';
import { createPostgrestMcpServer } from './server.js';
// Requires local Supabase stack running
const API_URL = 'http://127.0.0.1:54321';
const REST_API_URL = `${API_URL}/rest/v1`;
const AUTH_API_URL = `${API_URL}/auth/v1`;
/**
* Sets up a client and server for testing.
*/
async function setup() {
const clientTransport = new StreamTransport();
const serverTransport = new StreamTransport();
clientTransport.readable.pipeTo(serverTransport.writable);
serverTransport.readable.pipeTo(clientTransport.writable);
const client = new Client(
{
name: 'TestClient',
version: '0.1.0',
},
{
capabilities: {},
}
);
const authClient = new AuthClient({
url: AUTH_API_URL,
});
await authClient.signUp({
email: '[email protected]',
password: 'password',
});
const authResponse = await authClient.signInWithPassword({
email: '[email protected]',
password: 'password',
});
if (authResponse.error) {
throw new Error(authResponse.error.message);
}
const server = createPostgrestMcpServer({
apiUrl: REST_API_URL,
schema: 'public',
apiKey: authResponse.data.session.access_token,
});
await server.connect(serverTransport);
await client.connect(clientTransport);
// Clear existing todos
const deleteOutput = await client.callTool({
name: 'postgrestRequest',
arguments: {
method: 'DELETE',
path: '/todos?id=gt.0',
},
});
if (deleteOutput.isError) {
throw new Error(JSON.stringify(deleteOutput.content));
}
const todoSeeds = [
{
title: 'Buy groceries',
description: 'Purchase milk, eggs, and bread from the store',
due_date: '2023-10-15',
is_completed: false,
},
{
title: 'Complete project report',
description:
'Finalize and submit the project report by the end of the week',
due_date: '2023-10-20',
is_completed: false,
},
{
title: 'Doctor appointment',
description: 'Annual check-up with Dr. Smith at 10 AM',
due_date: '2023-10-18',
is_completed: false,
},
{
title: 'Call plumber',
description: 'Fix the leaking sink in the kitchen',
due_date: '2023-10-16',
is_completed: false,
},
{
title: 'Read book',
description: 'Finish reading "The Great Gatsby"',
due_date: '2023-10-22',
is_completed: false,
},
];
// Seed todos
const output = await client.callTool({
name: 'postgrestRequest',
arguments: {
method: 'POST',
path: '/todos',
body: todoSeeds,
},
});
if (output.isError) {
throw new Error(JSON.stringify(output.content));
}
return { client, clientTransport, server, serverTransport };
}
describe('resources', () => {
test('list', async () => {
const { client } = await setup();
const { resources } = await client.listResources();
expect(resources).toHaveLength(1);
const [firstResource] = resources;
if (!firstResource) {
throw new Error('no resources');
}
expect(firstResource).toMatchInlineSnapshot(`
{
"description": "OpenAPI spec for the PostgREST API",
"mimeType": "application/json",
"name": "OpenAPI spec",
"uri": "postgrest:///spec",
}
`);
});
test('read', async () => {
const { client } = await setup();
const { contents } = await client.readResource({
uri: 'postgrest:///spec',
});
const [firstContent] = contents;
expect(firstContent).toMatchInlineSnapshot(`
{
"mimeType": "application/json",
"text": "{"swagger":"2.0","info":{"description":"","title":"standard public schema","version":"12.2.0 (ec89f6b)"},"host":"0.0.0.0:3000","basePath":"/","schemes":["http"],"consumes":["application/json","application/vnd.pgrst.object+json;nulls=stripped","application/vnd.pgrst.object+json","text/csv"],"produces":["application/json","application/vnd.pgrst.object+json;nulls=stripped","application/vnd.pgrst.object+json","text/csv"],"paths":{"/":{"get":{"produces":["application/openapi+json","application/json"],"responses":{"200":{"description":"OK"}},"summary":"OpenAPI description (this document)","tags":["Introspection"]}},"/todos":{"get":{"parameters":[{"$ref":"#/parameters/rowFilter.todos.id"},{"$ref":"#/parameters/rowFilter.todos.title"},{"$ref":"#/parameters/rowFilter.todos.description"},{"$ref":"#/parameters/rowFilter.todos.due_date"},{"$ref":"#/parameters/rowFilter.todos.is_completed"},{"$ref":"#/parameters/rowFilter.todos.user_id"},{"$ref":"#/parameters/select"},{"$ref":"#/parameters/order"},{"$ref":"#/parameters/range"},{"$ref":"#/parameters/rangeUnit"},{"$ref":"#/parameters/offset"},{"$ref":"#/parameters/limit"},{"$ref":"#/parameters/preferCount"}],"responses":{"200":{"description":"OK","schema":{"items":{"$ref":"#/definitions/todos"},"type":"array"}},"206":{"description":"Partial Content"}},"summary":"Table to manage todo items with details such as title, description, due date, and completion status.","tags":["todos"]},"post":{"parameters":[{"$ref":"#/parameters/body.todos"},{"$ref":"#/parameters/select"},{"$ref":"#/parameters/preferPost"}],"responses":{"201":{"description":"Created"}},"summary":"Table to manage todo items with details such as title, description, due date, and completion status.","tags":["todos"]},"delete":{"parameters":[{"$ref":"#/parameters/rowFilter.todos.id"},{"$ref":"#/parameters/rowFilter.todos.title"},{"$ref":"#/parameters/rowFilter.todos.description"},{"$ref":"#/parameters/rowFilter.todos.due_date"},{"$ref":"#/parameters/rowFilter.todos.is_completed"},{"$ref":"#/parameters/rowFilter.todos.user_id"},{"$ref":"#/parameters/preferReturn"}],"responses":{"204":{"description":"No Content"}},"summary":"Table to manage todo items with details such as title, description, due date, and completion status.","tags":["todos"]},"patch":{"parameters":[{"$ref":"#/parameters/rowFilter.todos.id"},{"$ref":"#/parameters/rowFilter.todos.title"},{"$ref":"#/parameters/rowFilter.todos.description"},{"$ref":"#/parameters/rowFilter.todos.due_date"},{"$ref":"#/parameters/rowFilter.todos.is_completed"},{"$ref":"#/parameters/rowFilter.todos.user_id"},{"$ref":"#/parameters/body.todos"},{"$ref":"#/parameters/preferReturn"}],"responses":{"204":{"description":"No Content"}},"summary":"Table to manage todo items with details such as title, description, due date, and completion status.","tags":["todos"]}}},"definitions":{"todos":{"description":"Table to manage todo items with details such as title, description, due date, and completion status.","required":["id","title","user_id"],"properties":{"id":{"description":"Note:\\nThis is a Primary 
Key.<pk/>","format":"bigint","type":"integer"},"title":{"format":"text","type":"string"},"description":{"format":"text","type":"string"},"due_date":{"format":"date","type":"string"},"is_completed":{"default":false,"format":"boolean","type":"boolean"},"user_id":{"default":"auth.uid()","format":"uuid","type":"string"}},"type":"object"}},"parameters":{"preferParams":{"name":"Prefer","description":"Preference","required":false,"enum":["params=single-object"],"in":"header","type":"string"},"preferReturn":{"name":"Prefer","description":"Preference","required":false,"enum":["return=representation","return=minimal","return=none"],"in":"header","type":"string"},"preferCount":{"name":"Prefer","description":"Preference","required":false,"enum":["count=none"],"in":"header","type":"string"},"preferPost":{"name":"Prefer","description":"Preference","required":false,"enum":["return=representation","return=minimal","return=none","resolution=ignore-duplicates","resolution=merge-duplicates"],"in":"header","type":"string"},"select":{"name":"select","description":"Filtering Columns","required":false,"in":"query","type":"string"},"on_conflict":{"name":"on_conflict","description":"On Conflict","required":false,"in":"query","type":"string"},"order":{"name":"order","description":"Ordering","required":false,"in":"query","type":"string"},"range":{"name":"Range","description":"Limiting and Pagination","required":false,"in":"header","type":"string"},"rangeUnit":{"name":"Range-Unit","description":"Limiting and Pagination","required":false,"default":"items","in":"header","type":"string"},"offset":{"name":"offset","description":"Limiting and Pagination","required":false,"in":"query","type":"string"},"limit":{"name":"limit","description":"Limiting and Pagination","required":false,"in":"query","type":"string"},"body.todos":{"name":"todos","description":"todos","required":false,"in":"body","schema":{"$ref":"#/definitions/todos"}},"rowFilter.todos.id":{"name":"id","required":false,"format":"bigint","in":"query","type":"string"},"rowFilter.todos.title":{"name":"title","required":false,"format":"text","in":"query","type":"string"},"rowFilter.todos.description":{"name":"description","required":false,"format":"text","in":"query","type":"string"},"rowFilter.todos.due_date":{"name":"due_date","required":false,"format":"date","in":"query","type":"string"},"rowFilter.todos.is_completed":{"name":"is_completed","required":false,"format":"boolean","in":"query","type":"string"},"rowFilter.todos.user_id":{"name":"user_id","required":false,"format":"uuid","in":"query","type":"string"}},"externalDocs":{"description":"PostgREST Documentation","url":"https://postgrest.org/en/v12.2/api.html"}}",
"uri": "postgrest:///spec",
}
`);
});
});
describe('tools', () => {
test('list', async () => {
const { client } = await setup();
const { tools } = await client.listTools();
expect(tools).toHaveLength(2);
const [firstTool, secondTool] = tools;
if (!firstTool) {
throw new Error('no tools');
}
expect(firstTool).toMatchInlineSnapshot(`
{
"description": "Performs an HTTP request against the PostgREST API",
"inputSchema": {
"$schema": "http://json-schema.org/draft-07/schema#",
"additionalProperties": false,
"properties": {
"body": {
"anyOf": [
{
"additionalProperties": {},
"type": "object",
},
{
"items": {
"additionalProperties": {},
"type": "object",
},
"type": "array",
},
],
},
"method": {
"enum": [
"GET",
"POST",
"PUT",
"PATCH",
"DELETE",
],
"type": "string",
},
"path": {
"type": "string",
},
},
"required": [
"method",
"path",
],
"type": "object",
},
"name": "postgrestRequest",
}
`);
if (!secondTool) {
throw new Error('missing second tool');
}
expect(secondTool).toMatchInlineSnapshot(`
{
"description": "Converts SQL query to a PostgREST API request (method, path)",
"inputSchema": {
"$schema": "http://json-schema.org/draft-07/schema#",
"additionalProperties": false,
"properties": {
"sql": {
"type": "string",
},
},
"required": [
"sql",
],
"type": "object",
},
"name": "sqlToRest",
}
`);
});
test('execute', async () => {
const { client } = await setup();
const output = await client.callTool({
name: 'postgrestRequest',
arguments: {
method: 'GET',
path: '/todos?select=title,description,due_date,is_completed&order=id.asc',
},
});
const [firstContent] = output.content as any[];
if (!firstContent) {
throw new Error('no content');
}
const result = JSON.parse(firstContent.text);
expect(result).toMatchInlineSnapshot([
{
description: 'Purchase milk, eggs, and bread from the store',
due_date: '2023-10-15',
is_completed: false,
title: 'Buy groceries',
},
{
description:
'Finalize and submit the project report by the end of the week',
due_date: '2023-10-20',
is_completed: false,
title: 'Complete project report',
},
{
description: 'Annual check-up with Dr. Smith at 10 AM',
due_date: '2023-10-18',
is_completed: false,
title: 'Doctor appointment',
},
{
description: 'Fix the leaking sink in the kitchen',
due_date: '2023-10-16',
is_completed: false,
title: 'Call plumber',
},
{
description: 'Finish reading "The Great Gatsby"',
due_date: '2023-10-22',
is_completed: false,
title: 'Read book',
},
]);
});
test('execute with body', async () => {
const { client } = await setup();
const output = await client.callTool({
name: 'postgrestRequest',
arguments: {
method: 'POST',
path: '/todos',
body: {
title: 'Test',
description: 'Test',
due_date: '2023-10-15',
is_completed: false,
},
},
});
const [firstContent] = output.content as any[];
if (!firstContent) {
throw new Error('no content');
}
const [result] = JSON.parse(firstContent.text);
expect(result).toMatchObject({
title: 'Test',
description: 'Test',
due_date: '2023-10-15',
is_completed: false,
});
// Clean up
await client.callTool({
name: 'postgrestRequest',
arguments: {
method: 'DELETE',
path: `/todos?id=eq.${result.id}`,
},
});
});
test('sql-to-rest', async () => {
const { client } = await setup();
const output = await client.callTool({
name: 'sqlToRest',
arguments: {
sql: 'SELECT * FROM todos ORDER BY id ASC',
},
});
const [firstContent] = output.content as any[];
if (!firstContent) {
throw new Error('no content');
}
const result = JSON.parse(firstContent.text);
expect(result).toMatchInlineSnapshot(`
{
"method": "GET",
"path": "/todos?order=id.asc",
}
`);
});
});
```
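The setup above also doubles as a reference for wiring an MCP client and server together in memory. A distilled sketch of that pattern using the same packages as the test (the endpoint and key values are placeholders):
```typescript
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { StreamTransport } from '@supabase/mcp-utils';
import { createPostgrestMcpServer } from './server.js';

// Two in-memory transports, each readable piped into the other's writable,
// so client and server can talk without stdio or HTTP.
const clientTransport = new StreamTransport();
const serverTransport = new StreamTransport();
clientTransport.readable.pipeTo(serverTransport.writable);
serverTransport.readable.pipeTo(clientTransport.writable);

const server = createPostgrestMcpServer({
  apiUrl: 'http://127.0.0.1:54321/rest/v1', // local Supabase REST endpoint
  schema: 'public',
  apiKey: '<anon key or user access token>',
});
const client = new Client(
  { name: 'ExampleClient', version: '0.1.0' },
  { capabilities: {} }
);

await server.connect(serverTransport);
await client.connect(clientTransport);

// Translate SQL into a PostgREST request without executing it.
const output = await client.callTool({
  name: 'sqlToRest',
  arguments: { sql: 'SELECT * FROM todos ORDER BY id ASC' },
});
const [first] = output.content as Array<{ type: string; text: string }>;
console.log(first?.text); // {"method":"GET","path":"/todos?order=id.asc"}
```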
--------------------------------------------------------------------------------
/packages/mcp-server-supabase/src/platform/api-platform.ts:
--------------------------------------------------------------------------------
```typescript
import {
getMultipartBoundary,
parseMultipartStream,
} from '@mjackson/multipart-parser';
import type { InitData } from '@supabase/mcp-utils';
import { fileURLToPath } from 'node:url';
import packageJson from '../../package.json' with { type: 'json' };
import { getDeploymentId, normalizeFilename } from '../edge-function.js';
import { getLogQuery } from '../logs.js';
import {
assertSuccess,
createManagementApiClient,
} from '../management-api/index.js';
import { generatePassword } from '../password.js';
import {
applyMigrationOptionsSchema,
createBranchOptionsSchema,
createProjectOptionsSchema,
deployEdgeFunctionOptionsSchema,
executeSqlOptionsSchema,
getLogsOptionsSchema,
resetBranchOptionsSchema,
type AccountOperations,
type ApiKey,
type ApiKeyType,
type ApplyMigrationOptions,
type BranchingOperations,
type CreateBranchOptions,
type CreateProjectOptions,
type DatabaseOperations,
type DebuggingOperations,
type DeployEdgeFunctionOptions,
type DevelopmentOperations,
type SuccessResponse,
type EdgeFunction,
type EdgeFunctionsOperations,
type EdgeFunctionWithBody,
type ExecuteSqlOptions,
type GetLogsOptions,
type ResetBranchOptions,
type StorageConfig,
type StorageOperations,
type SupabasePlatform,
} from './index.js';
const { version } = packageJson;
const SUCCESS_RESPONSE: SuccessResponse = { success: true };
export type SupabaseApiPlatformOptions = {
/**
* The access token for the Supabase Management API.
*/
accessToken: string;
/**
* The API URL for the Supabase Management API.
*/
apiUrl?: string;
};
/**
* Creates a Supabase platform implementation using the Supabase Management API.
*/
export function createSupabaseApiPlatform(
options: SupabaseApiPlatformOptions
): SupabasePlatform {
const { accessToken, apiUrl } = options;
const managementApiUrl = apiUrl ?? 'https://api.supabase.com';
let managementApiClient = createManagementApiClient(
managementApiUrl,
accessToken
);
const account: AccountOperations = {
async listOrganizations() {
const response = await managementApiClient.GET('/v1/organizations');
assertSuccess(response, 'Failed to fetch organizations');
return response.data;
},
async getOrganization(organizationId: string) {
const response = await managementApiClient.GET(
'/v1/organizations/{slug}',
{
params: {
path: {
slug: organizationId,
},
},
}
);
assertSuccess(response, 'Failed to fetch organization');
return response.data;
},
async listProjects() {
const response = await managementApiClient.GET('/v1/projects');
assertSuccess(response, 'Failed to fetch projects');
return response.data;
},
async getProject(projectId: string) {
const response = await managementApiClient.GET('/v1/projects/{ref}', {
params: {
path: {
ref: projectId,
},
},
});
assertSuccess(response, 'Failed to fetch project');
return response.data;
},
async createProject(options: CreateProjectOptions) {
const { name, organization_id, region, db_pass } =
createProjectOptionsSchema.parse(options);
const response = await managementApiClient.POST('/v1/projects', {
body: {
name,
region,
organization_id,
db_pass:
db_pass ??
generatePassword({
length: 16,
numbers: true,
uppercase: true,
lowercase: true,
}),
},
});
assertSuccess(response, 'Failed to create project');
return response.data;
},
async pauseProject(projectId: string) {
const response = await managementApiClient.POST(
'/v1/projects/{ref}/pause',
{
params: {
path: {
ref: projectId,
},
},
}
);
assertSuccess(response, 'Failed to pause project');
},
async restoreProject(projectId: string) {
const response = await managementApiClient.POST(
'/v1/projects/{ref}/restore',
{
params: {
path: {
ref: projectId,
},
},
}
);
assertSuccess(response, 'Failed to restore project');
},
};
const database: DatabaseOperations = {
async executeSql<T>(projectId: string, options: ExecuteSqlOptions) {
const { query, parameters, read_only } =
executeSqlOptionsSchema.parse(options);
const response = await managementApiClient.POST(
'/v1/projects/{ref}/database/query',
{
params: {
path: {
ref: projectId,
},
},
body: {
query,
parameters,
read_only,
},
}
);
assertSuccess(response, 'Failed to execute SQL query');
return response.data as unknown as T[];
},
async listMigrations(projectId: string) {
const response = await managementApiClient.GET(
'/v1/projects/{ref}/database/migrations',
{
params: {
path: {
ref: projectId,
},
},
}
);
assertSuccess(response, 'Failed to fetch migrations');
return response.data;
},
async applyMigration(projectId: string, options: ApplyMigrationOptions) {
const { name, query } = applyMigrationOptionsSchema.parse(options);
const response = await managementApiClient.POST(
'/v1/projects/{ref}/database/migrations',
{
params: {
path: {
ref: projectId,
},
},
body: {
name,
query,
},
}
);
assertSuccess(response, 'Failed to apply migration');
// Intentionally don't return the result of the migration
// to avoid prompt injection attacks. If the migration failed,
// it will throw an error.
},
};
const debugging: DebuggingOperations = {
async getLogs(projectId: string, options: GetLogsOptions) {
const { service, iso_timestamp_start, iso_timestamp_end } =
getLogsOptionsSchema.parse(options);
const sql = getLogQuery(service);
const response = await managementApiClient.GET(
'/v1/projects/{ref}/analytics/endpoints/logs.all',
{
params: {
path: {
ref: projectId,
},
query: {
sql,
iso_timestamp_start,
iso_timestamp_end,
},
},
}
);
assertSuccess(response, 'Failed to fetch logs');
return response.data;
},
async getSecurityAdvisors(projectId: string) {
const response = await managementApiClient.GET(
'/v1/projects/{ref}/advisors/security',
{
params: {
path: {
ref: projectId,
},
},
}
);
assertSuccess(response, 'Failed to fetch security advisors');
return response.data;
},
async getPerformanceAdvisors(projectId: string) {
const response = await managementApiClient.GET(
'/v1/projects/{ref}/advisors/performance',
{
params: {
path: {
ref: projectId,
},
},
}
);
assertSuccess(response, 'Failed to fetch performance advisors');
return response.data;
},
};
const development: DevelopmentOperations = {
async getProjectUrl(projectId: string): Promise<string> {
const apiUrl = new URL(managementApiUrl);
return `https://${projectId}.${getProjectDomain(apiUrl.hostname)}`;
},
async getPublishableKeys(projectId: string): Promise<ApiKey[]> {
const response = await managementApiClient.GET(
'/v1/projects/{ref}/api-keys',
{
params: {
path: {
ref: projectId,
},
query: {
reveal: false,
},
},
}
);
assertSuccess(response, 'Failed to fetch API keys');
// Try to check if legacy JWT-based keys are enabled
// If this fails, we'll continue without the disabled field
let legacyKeysEnabled: boolean | undefined = undefined;
try {
const legacyKeysResponse = await managementApiClient.GET(
'/v1/projects/{ref}/api-keys/legacy',
{
params: {
path: {
ref: projectId,
},
},
}
);
if (legacyKeysResponse.response.ok) {
legacyKeysEnabled = legacyKeysResponse.data?.enabled ?? true;
}
} catch (error) {
// If we can't fetch legacy key status, continue without it
legacyKeysEnabled = undefined;
}
// Filter for client-safe keys: legacy 'anon' or publishable type
const clientKeys =
response.data?.filter(
(key) => key.name === 'anon' || key.type === 'publishable'
) ?? [];
if (clientKeys.length === 0) {
throw new Error(
'No client-safe API keys (anon or publishable) found. Please create a publishable key in your project settings.'
);
}
return clientKeys.map((key) => ({
api_key: key.api_key!,
name: key.name,
type: (key.type === 'publishable'
? 'publishable'
: 'legacy') satisfies ApiKeyType,
// Only include disabled field if we successfully fetched legacy key status
...(legacyKeysEnabled !== undefined && {
disabled: key.type === 'legacy' && !legacyKeysEnabled,
}),
description: key.description ?? undefined,
id: key.id ?? undefined,
}));
},
async generateTypescriptTypes(projectId: string) {
const response = await managementApiClient.GET(
'/v1/projects/{ref}/types/typescript',
{
params: {
path: {
ref: projectId,
},
},
}
);
assertSuccess(response, 'Failed to fetch TypeScript types');
return response.data;
},
};
const functions: EdgeFunctionsOperations = {
async listEdgeFunctions(projectId: string) {
const response = await managementApiClient.GET(
'/v1/projects/{ref}/functions',
{
params: {
path: {
ref: projectId,
},
},
}
);
assertSuccess(response, 'Failed to fetch Edge Functions');
return response.data.map((edgeFunction) => {
const deploymentId = getDeploymentId(
projectId,
edgeFunction.id,
edgeFunction.version
);
const entrypoint_path = edgeFunction.entrypoint_path
? normalizeFilename({
deploymentId,
filename: fileURLToPath(edgeFunction.entrypoint_path, {
windows: false,
}),
})
: undefined;
const import_map_path = edgeFunction.import_map_path
? normalizeFilename({
deploymentId,
filename: fileURLToPath(edgeFunction.import_map_path, {
windows: false,
}),
})
: undefined;
return {
...edgeFunction,
entrypoint_path,
import_map_path,
};
});
},
async getEdgeFunction(projectId: string, functionSlug: string) {
const functionResponse = await managementApiClient.GET(
'/v1/projects/{ref}/functions/{function_slug}',
{
params: {
path: {
ref: projectId,
function_slug: functionSlug,
},
},
}
);
if (functionResponse.error) {
throw functionResponse.error;
}
assertSuccess(functionResponse, 'Failed to fetch Edge Function');
const edgeFunction = functionResponse.data;
const deploymentId = getDeploymentId(
projectId,
edgeFunction.id,
edgeFunction.version
);
const entrypoint_path = edgeFunction.entrypoint_path
? normalizeFilename({
deploymentId,
filename: fileURLToPath(edgeFunction.entrypoint_path, {
windows: false,
}),
})
: undefined;
const import_map_path = edgeFunction.import_map_path
? normalizeFilename({
deploymentId,
filename: fileURLToPath(edgeFunction.import_map_path, {
windows: false,
}),
})
: undefined;
const bodyResponse = await managementApiClient.GET(
'/v1/projects/{ref}/functions/{function_slug}/body',
{
params: {
path: {
ref: projectId,
function_slug: functionSlug,
},
},
headers: {
Accept: 'multipart/form-data',
},
parseAs: 'stream',
}
);
assertSuccess(bodyResponse, 'Failed to fetch Edge Function files');
const contentType = bodyResponse.response.headers.get('content-type');
if (!contentType || !contentType.startsWith('multipart/form-data')) {
throw new Error(
`Unexpected content type: ${contentType}. Expected multipart/form-data.`
);
}
const boundary = getMultipartBoundary(contentType);
if (!boundary) {
throw new Error('No multipart boundary found in response headers');
}
if (!bodyResponse.data) {
throw new Error('No data received from Edge Function body');
}
const files: EdgeFunctionWithBody['files'] = [];
const parts = parseMultipartStream(bodyResponse.data, { boundary });
for await (const part of parts) {
if (part.isFile && part.filename) {
files.push({
name: normalizeFilename({
deploymentId,
filename: part.filename,
}),
content: part.text,
});
}
}
return {
...edgeFunction,
entrypoint_path,
import_map_path,
files,
};
},
async deployEdgeFunction(
projectId: string,
options: DeployEdgeFunctionOptions
) {
let {
name,
entrypoint_path,
import_map_path,
files: inputFiles,
} = deployEdgeFunctionOptionsSchema.parse(options);
let existingEdgeFunction: EdgeFunction | undefined;
try {
existingEdgeFunction = await functions.getEdgeFunction(projectId, name);
} catch (error) {} // ignore: the function may not exist yet; the deploy below will create it
const import_map_file = inputFiles.find((file) =>
['deno.json', 'import_map.json'].includes(file.name)
);
// Use existing import map path or file name heuristic if not provided
import_map_path ??=
existingEdgeFunction?.import_map_path ?? import_map_file?.name;
const response = await managementApiClient.POST(
'/v1/projects/{ref}/functions/deploy',
{
params: {
path: {
ref: projectId,
},
query: { slug: name },
},
body: {
metadata: {
name,
entrypoint_path,
import_map_path,
},
file: inputFiles as any, // We need to pass file name and content to our serializer
},
bodySerializer(body) {
const formData = new FormData();
const blob = new Blob([JSON.stringify(body.metadata)], {
type: 'application/json',
});
formData.append('metadata', blob);
body.file?.forEach((f: any) => {
const file: { name: string; content: string } = f;
const blob = new Blob([file.content], {
type: 'application/typescript',
});
formData.append('file', blob, file.name);
});
return formData;
},
}
);
assertSuccess(response, 'Failed to deploy Edge Function');
return response.data;
},
};
const branching: BranchingOperations = {
async listBranches(projectId: string) {
const response = await managementApiClient.GET(
'/v1/projects/{ref}/branches',
{
params: {
path: {
ref: projectId,
},
},
}
);
// There are no branches if branching is disabled
if (response.response.status === 422) return [];
assertSuccess(response, 'Failed to list branches');
return response.data;
},
async createBranch(projectId: string, options: CreateBranchOptions) {
const { name } = createBranchOptionsSchema.parse(options);
const createBranchResponse = await managementApiClient.POST(
'/v1/projects/{ref}/branches',
{
params: {
path: {
ref: projectId,
},
},
body: {
branch_name: name,
},
}
);
assertSuccess(createBranchResponse, 'Failed to create branch');
return createBranchResponse.data;
},
async deleteBranch(branchId: string) {
const response = await managementApiClient.DELETE(
'/v1/branches/{branch_id}',
{
params: {
path: {
branch_id: branchId,
},
},
}
);
assertSuccess(response, 'Failed to delete branch');
},
async mergeBranch(branchId: string) {
const response = await managementApiClient.POST(
'/v1/branches/{branch_id}/merge',
{
params: {
path: {
branch_id: branchId,
},
},
body: {},
}
);
assertSuccess(response, 'Failed to merge branch');
},
async resetBranch(branchId: string, options: ResetBranchOptions) {
const { migration_version } = resetBranchOptionsSchema.parse(options);
const response = await managementApiClient.POST(
'/v1/branches/{branch_id}/reset',
{
params: {
path: {
branch_id: branchId,
},
},
body: {
migration_version,
},
}
);
assertSuccess(response, 'Failed to reset branch');
},
async rebaseBranch(branchId: string) {
const response = await managementApiClient.POST(
'/v1/branches/{branch_id}/push',
{
params: {
path: {
branch_id: branchId,
},
},
body: {},
}
);
assertSuccess(response, 'Failed to rebase branch');
},
};
const storage: StorageOperations = {
// Storage methods
async listAllBuckets(project_id: string) {
const response = await managementApiClient.GET(
'/v1/projects/{ref}/storage/buckets',
{
params: {
path: {
ref: project_id,
},
},
}
);
assertSuccess(response, 'Failed to list storage buckets');
return response.data;
},
async getStorageConfig(project_id: string) {
const response = await managementApiClient.GET(
'/v1/projects/{ref}/config/storage',
{
params: {
path: {
ref: project_id,
},
},
}
);
assertSuccess(response, 'Failed to get storage config');
return response.data;
},
async updateStorageConfig(projectId: string, config: StorageConfig) {
const response = await managementApiClient.PATCH(
'/v1/projects/{ref}/config/storage',
{
params: {
path: {
ref: projectId,
},
},
body: {
fileSizeLimit: config.fileSizeLimit,
features: {
imageTransformation: {
enabled: config.features.imageTransformation.enabled,
},
s3Protocol: {
enabled: config.features.s3Protocol.enabled,
},
},
},
}
);
assertSuccess(response, 'Failed to update storage config');
},
};
const platform: SupabasePlatform = {
async init(info: InitData) {
const { clientInfo } = info;
if (!clientInfo) {
throw new Error('Client info is required');
}
// Re-initialize the management API client with the user agent
managementApiClient = createManagementApiClient(
managementApiUrl,
accessToken,
{
'User-Agent': `supabase-mcp/${version} (${clientInfo.name}/${clientInfo.version})`,
}
);
},
account,
database,
debugging,
development,
functions,
branching,
storage,
};
return platform;
}
function getProjectDomain(apiHostname: string) {
switch (apiHostname) {
case 'api.supabase.com':
return 'supabase.co';
case 'api.supabase.green':
return 'supabase.green';
default:
return 'supabase.red';
}
}
```
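A hedged usage sketch of the platform created above, calling it directly rather than through the MCP server. The import path, the environment variable, and the assumption that listed projects expose their ref as `id` are illustrative, based on the Management API types used in this file.
```typescript
import { createSupabaseApiPlatform } from './api-platform.js';

const platform = createSupabaseApiPlatform({
  // Personal access token for the Supabase Management API (assumed env var).
  accessToken: process.env.SUPABASE_ACCESS_TOKEN!,
});

// List projects, then run a read-only query against the first one.
const projects = await platform.account.listProjects();
const [project] = projects;
if (project) {
  const rows = await platform.database.executeSql(project.id, {
    query: 'select now()',
    read_only: true,
  });
  console.log(rows);
}
```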
--------------------------------------------------------------------------------
/packages/mcp-server-supabase/test/mocks.ts:
--------------------------------------------------------------------------------
```typescript
import { PGlite, type PGliteInterface } from '@electric-sql/pglite';
import { source } from 'common-tags';
import { format } from 'date-fns';
import { buildSchema, parse, validate } from 'graphql';
import { http, HttpResponse } from 'msw';
import { customAlphabet } from 'nanoid';
import { join } from 'node:path/posix';
import { expect } from 'vitest';
import { z } from 'zod';
import packageJson from '../package.json' with { type: 'json' };
import {
getQueryFields,
graphqlRequestSchema,
} from '../src/content-api/graphql.js';
import { getDeploymentId, getPathPrefix } from '../src/edge-function.js';
import type { components } from '../src/management-api/types.js';
const { version } = packageJson;
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz', 20);
export const API_URL = 'https://api.supabase.com';
export const CONTENT_API_URL = 'https://supabase.com/docs/api/graphql';
export const MCP_SERVER_NAME = 'supabase-mcp';
export const MCP_SERVER_VERSION = version;
export const MCP_CLIENT_NAME = 'test-client';
export const MCP_CLIENT_VERSION = '1.0.0';
export const ACCESS_TOKEN = 'dummy-token';
export const COUNTRY_CODE = 'US';
export const CLOSEST_REGION = 'us-east-2';
export const contentApiMockSchema = source`
schema {
query: RootQueryType
}
type RootQueryType {
"""Get the GraphQL schema for this endpoint"""
schema: String!
"""Search the Supabase docs for content matching a query string"""
searchDocs(query: String!, limit: Int): SearchResultCollection
}
"""Document that matches a search query"""
interface SearchResult {
"""The title of the matching result"""
title: String
"""The URL of the matching result"""
href: String
"""The full content of the matching result"""
content: String
}
"""A collection of search results containing content from Supabase docs"""
type SearchResultCollection {
"""A list of edges containing nodes in this collection"""
edges: [SearchResultEdge!]!
"""The nodes in this collection, directly accessible"""
nodes: [SearchResult!]!
}
"""An edge in a collection of SearchResults"""
type SearchResultEdge {
"""The SearchResult at the end of the edge"""
node: SearchResult!
}
`;
type Organization = components['schemas']['V1OrganizationSlugResponse'];
type Project = components['schemas']['V1ProjectWithDatabaseResponse'];
type Branch = components['schemas']['BranchResponse'];
export type Migration = {
version: string;
name: string;
query: string;
};
export const mockOrgs = new Map<string, Organization>();
export const mockProjects = new Map<string, MockProject>();
export const mockBranches = new Map<string, MockBranch>();
export const mockContentApiSchemaLoadCount = { value: 0 };
export const mockContentApi = [
http.post(CONTENT_API_URL, async ({ request }) => {
const json = await request.json();
const { query } = graphqlRequestSchema.parse(json);
const schema = buildSchema(contentApiMockSchema);
const document = parse(query);
const validationErrors = validate(schema, document);
const [queryName] = getQueryFields(document);
if (queryName === 'schema') {
mockContentApiSchemaLoadCount.value++;
return HttpResponse.json({
data: {
schema: contentApiMockSchema,
},
});
}
if (validationErrors.length > 0) {
throw Error('Invalid query made to Content API');
}
return HttpResponse.json({
data: {
dummy: true,
},
});
}),
];
export const mockManagementApi = [
/**
* Check authorization
*/
http.all(`${API_URL}/*`, ({ request }) => {
const authHeader = request.headers.get('Authorization');
const accessToken = authHeader?.replace('Bearer ', '');
if (accessToken !== ACCESS_TOKEN) {
return HttpResponse.json({ message: 'Unauthorized' }, { status: 401 });
}
}),
/**
* Check user agent
*/
http.all(`${API_URL}/*`, ({ request }) => {
const userAgent = request.headers.get('user-agent');
expect(userAgent).toBe(
`${MCP_SERVER_NAME}/${MCP_SERVER_VERSION} (${MCP_CLIENT_NAME}/${MCP_CLIENT_VERSION})`
);
}),
/**
* List all projects
*/
http.get(`${API_URL}/v1/projects`, () => {
return HttpResponse.json(
Array.from(mockProjects.values()).map((project) => project.details)
);
}),
/**
* Get details for a project
*/
http.get<{ projectId: string }>(
`${API_URL}/v1/projects/:projectId`,
({ params }) => {
const project = mockProjects.get(params.projectId);
if (!project) {
return HttpResponse.json(
{ message: 'Project not found' },
{ status: 404 }
);
}
return HttpResponse.json(project.details);
}
),
/**
* Create a new project
*/
http.post(`${API_URL}/v1/projects`, async ({ request }) => {
const bodySchema = z.object({
name: z.string(),
region: z.string(),
organization_id: z.string(),
db_pass: z.string(),
});
const body = await request.json();
const { name, region, organization_id } = bodySchema.parse(body);
const project = await createProject({
name,
region,
organization_id,
});
const { database, ...projectResponse } = project.details;
return HttpResponse.json(projectResponse);
}),
/**
* Pause a project
*/
http.post<{ projectId: string }>(
`${API_URL}/v1/projects/:projectId/pause`,
({ params }) => {
const project = mockProjects.get(params.projectId);
if (!project) {
return HttpResponse.json(
{ error: 'Project not found' },
{ status: 404 }
);
}
project.status = 'INACTIVE';
return HttpResponse.json(project.details);
}
),
/**
* Restore a project
*/
http.post<{ projectId: string }>(
`${API_URL}/v1/projects/:projectId/restore`,
({ params }) => {
const project = mockProjects.get(params.projectId);
if (!project) {
return HttpResponse.json(
{ error: 'Project not found' },
{ status: 404 }
);
}
project.status = 'ACTIVE_HEALTHY';
return HttpResponse.json(project.details);
}
),
/**
* List organizations
*/
http.get(`${API_URL}/v1/organizations`, () => {
return HttpResponse.json(
Array.from(mockOrgs.values()).map(({ id, name }) => ({ id, name }))
);
}),
/**
* Get details for an organization
*/
http.get(`${API_URL}/v1/organizations/:id`, ({ params }) => {
const organization = Array.from(mockOrgs.values()).find(
(org) => org.id === params.id
);
return HttpResponse.json(organization);
}),
/**
* Get the API keys for a project
*/
http.get(`${API_URL}/v1/projects/:projectId/api-keys`, ({ params }) => {
return HttpResponse.json([
{
name: 'anon',
api_key: 'dummy-anon-key',
type: 'legacy',
id: 'anon-key-id',
},
{
name: 'publishable-key-1',
api_key: 'sb_publishable_dummy_key_1',
type: 'publishable',
id: 'publishable-key-1-id',
description: 'Main publishable key',
},
]);
}),
/**
* Check if legacy API keys are enabled
*/
http.get(
`${API_URL}/v1/projects/:projectId/api-keys/legacy`,
({ params }) => {
return HttpResponse.json({ enabled: false });
}
),
/**
* Execute a SQL query on a project's database
*/
http.post<
{ projectId: string },
{ query: string; parameters?: unknown[]; read_only?: boolean }
>(
`${API_URL}/v1/projects/:projectId/database/query`,
async ({ params, request }) => {
const project = mockProjects.get(params.projectId);
if (!project) {
return HttpResponse.json(
{ message: 'Project not found' },
{ status: 404 }
);
}
const { db } = project;
const { query, parameters, read_only } = await request.json();
try {
// Use transaction to prevent race conditions if tests are parallelized
const result = await db.transaction(async (tx) => {
// Set role before executing query
await tx.exec(
`SET ROLE ${read_only ? 'supabase_read_only_role' : 'postgres'};`
);
// Use query() method with parameters if provided, otherwise use exec()
const queryResult =
parameters && parameters.length > 0
? await tx.query(query, parameters)
: await tx.exec(query);
// Reset role
await tx.exec('RESET ROLE;');
return queryResult;
});
// Handle different response formats
if (Array.isArray(result)) {
// exec() returns an array of results
const lastStatementResults = result.at(-1);
if (!lastStatementResults) {
return HttpResponse.json(
{ message: 'Failed to execute query' },
{ status: 500 }
);
}
return HttpResponse.json(lastStatementResults.rows);
} else {
// query() returns a single result object
return HttpResponse.json(result.rows);
}
} catch (error) {
throw error;
}
}
),
/**
* Lists all Edge Functions for a project
*/
http.get<{ projectId: string }>(
`${API_URL}/v1/projects/:projectId/functions`,
({ params }) => {
const project = mockProjects.get(params.projectId);
if (!project) {
return HttpResponse.json(
{ message: 'Project not found' },
{ status: 404 }
);
}
return HttpResponse.json(
Array.from(project.edge_functions.values()).map(
(edgeFunction) => edgeFunction.details
)
);
}
),
/**
* Get details for an Edge Function
*/
http.get<{ projectId: string; functionSlug: string }>(
`${API_URL}/v1/projects/:projectId/functions/:functionSlug`,
({ params }) => {
const project = mockProjects.get(params.projectId);
if (!project) {
return HttpResponse.json(
{ message: 'Project not found' },
{ status: 404 }
);
}
const edgeFunction = project.edge_functions.get(params.functionSlug);
if (!edgeFunction) {
return HttpResponse.json(
{ message: 'Edge Function not found' },
{ status: 404 }
);
}
return HttpResponse.json(edgeFunction.details);
}
),
/**
* Gets the files for an Edge Function
*/
http.get<{ projectId: string; functionSlug: string }>(
`${API_URL}/v1/projects/:projectId/functions/:functionSlug/body`,
({ params, request }) => {
if (request.headers.get('Accept') !== 'multipart/form-data') {
return HttpResponse.json(
{
message:
'Invalid Accept header. Must be multipart/form-data for testing',
},
{ status: 406 }
);
}
const project = mockProjects.get(params.projectId);
if (!project) {
return HttpResponse.json(
{ message: 'Project not found' },
{ status: 404 }
);
}
const edgeFunction = project.edge_functions.get(params.functionSlug);
if (!edgeFunction) {
return HttpResponse.json(
{ message: 'Edge Function not found' },
{ status: 404 }
);
}
const formData = new FormData();
for (const file of edgeFunction.files) {
formData.append('file', file, file.name);
}
return HttpResponse.formData(formData);
}
),
/**
* Deploys an Edge Function
*/
http.post<{ projectId: string }>(
`${API_URL}/v1/projects/:projectId/functions/deploy`,
async ({ params, request }) => {
const project = mockProjects.get(params.projectId);
if (!project) {
return HttpResponse.json(
{ message: 'Project not found' },
{ status: 404 }
);
}
const formData = await request.formData();
const metadataSchema = z.object({
name: z.string(),
entrypoint_path: z.string(),
import_map_path: z.string().optional(),
});
const metadataFormValue = formData.get('metadata');
const metadataString =
metadataFormValue instanceof File
? await metadataFormValue.text()
: (metadataFormValue ?? undefined);
if (!metadataString) {
throw new Error('Metadata is required');
}
const metadata = metadataSchema.parse(JSON.parse(metadataString));
const fileFormValues = formData.getAll('file');
const files = fileFormValues.map((file) => {
if (typeof file === 'string') {
throw new Error('Multipart file is a string instead of a File');
}
return file;
});
const edgeFunction = await project.deployEdgeFunction(metadata, files);
return HttpResponse.json(edgeFunction.details);
}
),
/**
* List migrations for a project
*/
http.get<{ projectId: string }>(
`${API_URL}/v1/projects/:projectId/database/migrations`,
async ({ params }) => {
const project = mockProjects.get(params.projectId);
if (!project) {
return HttpResponse.json(
{ message: 'Project not found' },
{ status: 404 }
);
}
const { migrations } = project;
const modified = migrations.map(({ version, name }) => ({
version,
name,
}));
return HttpResponse.json(modified);
}
),
/**
* Create a new migration for a project
*/
http.post<{ projectId: string }, { name: string; query: string }>(
`${API_URL}/v1/projects/:projectId/database/migrations`,
async ({ params, request }) => {
const project = mockProjects.get(params.projectId);
if (!project) {
return HttpResponse.json(
{ message: 'Project not found' },
{ status: 404 }
);
}
const { db, migrations } = project;
const { name, query } = await request.json();
const [results] = await db.exec(query);
if (!results) {
return HttpResponse.json(
{ message: 'Failed to execute query' },
{ status: 500 }
);
}
migrations.push({
version: format(new Date(), 'yyyyMMddHHmmss'),
name,
query,
});
return HttpResponse.json(results.rows);
}
),
/**
* Get logs for a project
*/
http.get<{ projectId: string }, { sql: string }>(
`${API_URL}/v1/projects/:projectId/analytics/endpoints/logs.all`,
async ({ params }) => {
const project = mockProjects.get(params.projectId);
if (!project) {
return HttpResponse.json(
{ message: 'Project not found' },
{ status: 404 }
);
}
return HttpResponse.json([]);
}
),
/**
* Get security advisors for a project
*/
http.get<{ projectId: string }, { sql: string }>(
`${API_URL}/v1/projects/:projectId/advisors/security`,
async ({ params }) => {
const project = mockProjects.get(params.projectId);
if (!project) {
return HttpResponse.json(
{ message: 'Project not found' },
{ status: 404 }
);
}
return HttpResponse.json({
lints: [],
});
}
),
/**
* Get performance advisors for a project
*/
http.get<{ projectId: string }, { sql: string }>(
`${API_URL}/v1/projects/:projectId/advisors/performance`,
async ({ params }) => {
const project = mockProjects.get(params.projectId);
if (!project) {
return HttpResponse.json(
{ message: 'Project not found' },
{ status: 404 }
);
}
return HttpResponse.json({
lints: [],
});
}
),
/**
* Create a new branch for a project
*/
http.post<{ projectId: string }, { branch_name: string }>(
`${API_URL}/v1/projects/:projectId/branches`,
async ({ params, request }) => {
const { branch_name } = await request.json();
const project = mockProjects.get(params.projectId);
if (!project) {
return HttpResponse.json(
{ message: 'Project not found' },
{ status: 404 }
);
}
const projectBranches = Array.from(mockBranches.values()).filter(
(branch) => branch.parent_project_ref === project.id
);
if (projectBranches.length === 0) {
// First branch for this project: also register a default branch record that points back at the production project itself
const defaultBranch = new MockBranch({
name: branch_name,
project_ref: project.id,
parent_project_ref: project.id,
is_default: true,
});
defaultBranch.status = 'MIGRATIONS_PASSED';
mockBranches.set(defaultBranch.id, defaultBranch);
}
const branch = await createBranch({
name: branch_name,
parent_project_ref: project.id,
});
return HttpResponse.json(branch.details);
}
),
/**
* List all branches for a project
*/
http.get<{ projectId: string }>(
`${API_URL}/v1/projects/:projectId/branches`,
async ({ params }) => {
const projectBranches = Array.from(mockBranches.values()).filter(
(branch) => branch.parent_project_ref === params.projectId
);
if (projectBranches.length === 0) {
return HttpResponse.json(
{ message: 'Preview branching is not enabled for this project.' },
{ status: 422 }
);
}
return HttpResponse.json(projectBranches.map((branch) => branch.details));
}
),
/**
* Deletes a branch
*/
http.delete<{ branchId: string }>(
`${API_URL}/v1/branches/:branchId`,
async ({ params }) => {
const branch = mockBranches.get(params.branchId);
if (!branch) {
return HttpResponse.json(
{ message: 'Branch not found' },
{ status: 404 }
);
}
// if default branch, return error
if (branch.is_default) {
return HttpResponse.json(
{ message: 'Cannot delete the default branch.' },
{ status: 422 }
);
}
const project = mockProjects.get(branch.project_ref);
if (!project) {
return HttpResponse.json(
{ message: 'Project not found' },
{ status: 404 }
);
}
await project.destroy();
mockProjects.delete(project.id);
mockBranches.delete(branch.id);
return HttpResponse.json({ message: 'ok' });
}
),
/**
* Merges migrations from a development branch to production
*/
http.post<{ branchId: string }>(
`${API_URL}/v1/branches/:branchId/merge`,
async ({ params }) => {
const branch = mockBranches.get(params.branchId);
if (!branch) {
return HttpResponse.json(
{ message: 'Branch not found' },
{ status: 404 }
);
}
const parentProject = mockProjects.get(branch.parent_project_ref);
if (!parentProject) {
return HttpResponse.json(
{ message: 'Parent project not found' },
{ status: 404 }
);
}
const project = mockProjects.get(branch.project_ref);
if (!project) {
return HttpResponse.json(
{ message: 'Project not found' },
{ status: 404 }
);
}
// Simulate merge by resetting the parent DB and running branch migrations
parentProject.migrations = [...project.migrations];
await parentProject.resetDb();
try {
await parentProject.applyMigrations();
} catch (error) {
return HttpResponse.json(
{ message: 'Failed to apply migrations' },
{ status: 500 }
);
}
return HttpResponse.json({ message: 'ok' });
}
),
/**
* Resets a branch and re-runs migrations
*/
http.post<{ branchId: string }, { migration_version?: string }>(
`${API_URL}/v1/branches/:branchId/reset`,
async ({ params, request }) => {
const branch = mockBranches.get(params.branchId);
if (!branch) {
return HttpResponse.json(
{ message: 'Branch not found' },
{ status: 404 }
);
}
const project = mockProjects.get(branch.project_ref);
if (!project) {
return HttpResponse.json(
{ message: 'Project not found' },
{ status: 404 }
);
}
// Roll back to the specified version by dropping any migrations newer than it
const body = await request.json();
if (body.migration_version) {
const target = body.migration_version;
project.migrations = project.migrations.filter(
(m) => m.version <= target
);
}
// Reset the DB and re-run the remaining migrations
await project.resetDb();
try {
await project.applyMigrations();
branch.status = 'MIGRATIONS_PASSED';
} catch (error) {
branch.status = 'MIGRATIONS_FAILED';
return HttpResponse.json(
{ message: 'Failed to apply migrations' },
{ status: 500 }
);
}
return HttpResponse.json({ message: 'ok' });
}
),
/**
* Rebase migrations from production on a development branch
*/
http.post<{ branchId: string }>(
`${API_URL}/v1/branches/:branchId/push`,
async ({ params }) => {
const branch = mockBranches.get(params.branchId);
if (!branch) {
return HttpResponse.json(
{ message: 'Branch not found' },
{ status: 404 }
);
}
const parentProject = mockProjects.get(branch.parent_project_ref);
if (!parentProject) {
return HttpResponse.json(
{ message: 'Parent project not found' },
{ status: 404 }
);
}
const project = mockProjects.get(branch.project_ref);
if (!project) {
return HttpResponse.json(
{ message: 'Project not found' },
{ status: 404 }
);
}
// Simulate rebase by resetting the branch DB and running production migrations
project.migrations = [...parentProject.migrations];
await project.resetDb();
try {
await project.applyMigrations();
branch.status = 'MIGRATIONS_PASSED';
} catch (error) {
branch.status = 'MIGRATIONS_FAILED';
return HttpResponse.json(
{ message: 'Failed to apply migrations' },
{ status: 500 }
);
}
return HttpResponse.json({ message: 'ok' });
}
),
/**
* List storage buckets
*/
http.get<{ ref: string }>(
`${API_URL}/v1/projects/:ref/storage/buckets`,
({ params }) => {
const project = mockProjects.get(params.ref);
if (!project) {
return HttpResponse.json(
{ message: 'Project not found' },
{ status: 404 }
);
}
const buckets = Array.from(project.storage_buckets.values()).map(
(bucket) => ({
id: bucket.id,
name: bucket.name,
public: bucket.public,
created_at: bucket.created_at.toISOString(),
updated_at: bucket.updated_at.toISOString(),
})
);
return HttpResponse.json(buckets);
}
),
/**
* Get storage config
*/
http.get<{ ref: string }>(
`${API_URL}/v1/projects/:ref/config/storage`,
({ params }) => {
const project = mockProjects.get(params.ref);
if (!project) {
return HttpResponse.json(
{ message: 'Project not found' },
{ status: 404 }
);
}
return HttpResponse.json({
fileSizeLimit: 50,
features: {
imageTransformation: { enabled: true },
s3Protocol: { enabled: false },
},
});
}
),
/**
* Update storage config
*/
http.patch<{ ref: string }>(
`${API_URL}/v1/projects/:ref/config/storage`,
async ({ params, request }) => {
const project = mockProjects.get(params.ref);
if (!project) {
return HttpResponse.json(
{ message: 'Project not found' },
{ status: 404 }
);
}
// Accept any valid config
try {
await request.json();
return new HttpResponse(null, { status: 204 });
} catch (e) {
return HttpResponse.json(
{ message: 'Invalid request body' },
{ status: 400 }
);
}
}
),
];
export async function createOrganization(options: MockOrganizationOptions) {
const org = new MockOrganization(options);
mockOrgs.set(org.id, org);
return org;
}
export async function createProject(options: MockProjectOptions) {
const project = new MockProject(options);
mockProjects.set(project.id, project);
// Flip the project status to ACTIVE_HEALTHY on the next event-loop tick to simulate asynchronous provisioning
setTimeout(async () => {
project.status = 'ACTIVE_HEALTHY';
}, 0);
return project;
}
export async function createBranch(options: {
name: string;
parent_project_ref: string;
}) {
const parentProject = mockProjects.get(options.parent_project_ref);
if (!parentProject) {
throw new Error(`Project with id ${options.parent_project_ref} not found`);
}
const project = new MockProject({
name: `${parentProject.name} - ${options.name}`,
region: parentProject.region,
organization_id: parentProject.organization_id,
});
const branch = new MockBranch({
name: options.name,
project_ref: project.id,
parent_project_ref: options.parent_project_ref,
is_default: false,
});
mockProjects.set(project.id, project);
mockBranches.set(branch.id, branch);
project.migrations = [...parentProject.migrations];
// Run migrations on the new branch in the background
setTimeout(async () => {
try {
await project.applyMigrations();
branch.status = 'MIGRATIONS_PASSED';
} catch (error) {
branch.status = 'MIGRATIONS_FAILED';
console.error('Migration error:', error);
}
}, 0);
return branch;
}
export type MockOrganizationOptions = {
name: Organization['name'];
plan: Organization['plan'];
allowed_release_channels: Organization['allowed_release_channels'];
opt_in_tags?: Organization['opt_in_tags'];
};
export class MockOrganization {
id: string;
name: Organization['name'];
plan: Organization['plan'];
allowed_release_channels: Organization['allowed_release_channels'];
opt_in_tags: Organization['opt_in_tags'];
get details(): Organization {
return {
id: this.id,
name: this.name,
plan: this.plan,
allowed_release_channels: this.allowed_release_channels,
opt_in_tags: this.opt_in_tags,
};
}
constructor(options: MockOrganizationOptions) {
this.id = nanoid();
this.name = options.name;
this.plan = options.plan;
this.allowed_release_channels = options.allowed_release_channels;
this.opt_in_tags = options.opt_in_tags ?? [];
}
}
export type MockEdgeFunctionOptions = {
name: string;
entrypoint_path: string;
import_map_path?: string;
};
export class MockEdgeFunction {
projectId: string;
id: string;
slug: string;
version: number;
name: string;
status: 'ACTIVE' | 'REMOVED' | 'THROTTLED';
entrypoint_path: string;
import_map_path?: string;
import_map: boolean;
verify_jwt: boolean;
created_at: Date;
updated_at: Date;
files: File[] = [];
async setFiles(files: File[]) {
this.files = [];
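// Re-wrap each file so its name is prefixed with this deployment's path, mirroring how deployed sources are addressed.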
for (const file of files) {
this.files.push(
new File([file], `${join(this.pathPrefix, file.name)}`, {
type: file.type,
})
);
}
}
get deploymentId() {
return getDeploymentId(this.projectId, this.id, this.version);
}
get pathPrefix() {
return getPathPrefix(this.deploymentId);
}
get details() {
return {
id: this.id,
slug: this.slug,
version: this.version,
name: this.name,
status: this.status,
entrypoint_path: this.entrypoint_path,
import_map_path: this.import_map_path,
import_map: this.import_map,
verify_jwt: this.verify_jwt,
created_at: this.created_at.toISOString(),
updated_at: this.updated_at.toISOString(),
};
}
constructor(
projectId: string,
{ name, entrypoint_path, import_map_path }: MockEdgeFunctionOptions
) {
this.projectId = projectId;
this.id = crypto.randomUUID();
this.slug = name;
this.version = 1;
this.name = name;
this.status = 'ACTIVE';
this.entrypoint_path = `file://${join(this.pathPrefix, entrypoint_path)}`;
this.import_map_path = import_map_path
? `file://${join(this.pathPrefix, import_map_path)}`
: undefined;
this.import_map = !!import_map_path;
this.verify_jwt = true;
this.created_at = new Date();
this.updated_at = new Date();
}
update({ name, entrypoint_path, import_map_path }: MockEdgeFunctionOptions) {
this.name = name;
this.version += 1;
this.entrypoint_path = `file://${join(this.pathPrefix, entrypoint_path)}`;
this.import_map_path = import_map_path
? `file://${join(this.pathPrefix, import_map_path)}`
: undefined;
this.import_map = !!import_map_path;
this.updated_at = new Date();
}
}
export type MockStorageBucketOptions = {
name: string;
isPublic: boolean;
};
export class MockStorageBucket {
id: string;
name: string;
public: boolean;
created_at: Date;
updated_at: Date;
constructor({ name, isPublic }: MockStorageBucketOptions) {
this.id = crypto.randomUUID();
this.name = name;
this.public = isPublic;
this.created_at = new Date();
this.updated_at = new Date();
}
}
export type MockProjectOptions = {
name: string;
region: string;
organization_id: string;
};
export class MockProject {
id: string;
organization_id: string;
name: string;
region: string;
created_at: Date;
status: Project['status'];
database: {
host: string;
version: string;
postgres_engine: string;
release_channel: string;
};
migrations: Migration[] = [];
edge_functions = new Map<string, MockEdgeFunction>();
storage_buckets = new Map<string, MockStorageBucket>();
#db?: PGliteInterface;
// Lazy load the database connection
get db() {
if (!this.#db) {
this.#db = new PGlite();
this.#db.waitReady.then(() => {
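// Provision a read-only role so queries executed in read-only mode behave like they would against a real project.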
this.#db!.exec(`
CREATE ROLE supabase_read_only_role;
GRANT pg_read_all_data TO supabase_read_only_role;
`);
});
}
return this.#db;
}
get details(): Project {
return {
id: this.id,
organization_id: this.organization_id,
name: this.name,
region: this.region,
created_at: this.created_at.toISOString(),
status: this.status,
database: this.database,
};
}
constructor({ name, region, organization_id }: MockProjectOptions) {
this.id = nanoid();
this.name = name;
this.region = region;
this.organization_id = organization_id;
this.created_at = new Date();
this.status = 'UNKNOWN';
this.database = {
host: `db.${this.id}.supabase.co`,
version: '15.1',
postgres_engine: '15',
release_channel: 'ga',
};
}
async applyMigrations() {
for (const migration of this.migrations) {
const [results] = await this.db.exec(migration.query);
if (!results) {
throw new Error(`Failed to execute migration ${migration.name}`);
}
}
}
async resetDb() {
if (this.#db) {
await this.#db.close();
}
this.#db = undefined;
return this.db;
}
async deployEdgeFunction(
options: MockEdgeFunctionOptions,
files: File[] = []
) {
const edgeFunction = new MockEdgeFunction(this.id, options);
const existingFunction = this.edge_functions.get(edgeFunction.slug);
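// Re-deploying an existing slug bumps its version in place rather than creating a new function.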
if (existingFunction) {
existingFunction.update(options);
await existingFunction.setFiles(files);
return existingFunction;
}
await edgeFunction.setFiles(files);
this.edge_functions.set(edgeFunction.slug, edgeFunction);
return edgeFunction;
}
async destroy() {
if (this.#db) {
await this.#db.close();
}
}
createStorageBucket(
name: string,
isPublic: boolean = false
): MockStorageBucket {
const id = nanoid();
const bucket: MockStorageBucket = {
id,
name,
public: isPublic,
created_at: new Date(),
updated_at: new Date(),
};
this.storage_buckets.set(id, bucket);
return bucket;
}
}
export type MockBranchOptions = {
name: string;
project_ref: string;
parent_project_ref: string;
is_default: boolean;
};
export class MockBranch {
id: string;
name: string;
project_ref: string;
parent_project_ref: string;
is_default: boolean;
persistent: boolean;
status: Branch['status'];
created_at: Date;
updated_at: Date;
get details(): Branch {
return {
id: this.id,
name: this.name,
project_ref: this.project_ref,
parent_project_ref: this.parent_project_ref,
is_default: this.is_default,
persistent: this.persistent,
status: this.status,
created_at: this.created_at.toISOString(),
updated_at: this.updated_at.toISOString(),
};
}
constructor({
name,
project_ref,
parent_project_ref,
is_default,
}: MockBranchOptions) {
this.id = nanoid();
this.name = name;
this.project_ref = project_ref;
this.parent_project_ref = parent_project_ref;
this.is_default = is_default;
this.persistent = false;
this.status = 'CREATING_PROJECT';
this.created_at = new Date();
this.updated_at = new Date();
}
}
```
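For orientation, here is a minimal sketch (not part of the repository) of how these mocks are wired into a test: the exported handler arrays are registered with MSW's node server, and the factory helpers seed an organization and a healthy project before any tools are exercised. It mirrors the `beforeEach` and setup code in `server.test.ts` below; the `seed` helper is illustrative only.

```typescript
import { setupServer } from 'msw/node';
import {
  createOrganization,
  createProject,
  mockContentApi,
  mockManagementApi,
} from '../test/mocks.js';

// Register every mocked Management API / Content API route with MSW.
const server = setupServer(...mockContentApi, ...mockManagementApi);
server.listen({ onUnhandledRequest: 'error' });

// Seed one organization and one healthy project that project-scoped tools can target.
async function seed() {
  const org = await createOrganization({
    name: 'My Org',
    plan: 'free',
    allowed_release_channels: ['ga'],
  });
  const project = await createProject({
    name: 'Project 1',
    region: 'us-east-1',
    organization_id: org.id,
  });
  project.status = 'ACTIVE_HEALTHY';
  return { org, project };
}
```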
--------------------------------------------------------------------------------
/packages/mcp-server-supabase/src/server.test.ts:
--------------------------------------------------------------------------------
```typescript
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import {
CallToolResultSchema,
type CallToolRequest,
} from '@modelcontextprotocol/sdk/types.js';
import { StreamTransport } from '@supabase/mcp-utils';
import { codeBlock, stripIndent } from 'common-tags';
import { setupServer } from 'msw/node';
import { beforeEach, describe, expect, test } from 'vitest';
import {
ACCESS_TOKEN,
API_URL,
contentApiMockSchema,
mockContentApiSchemaLoadCount,
createOrganization,
createProject,
createBranch,
MCP_CLIENT_NAME,
MCP_CLIENT_VERSION,
mockBranches,
mockContentApi,
mockManagementApi,
mockOrgs,
mockProjects,
} from '../test/mocks.js';
import { createSupabaseApiPlatform } from './platform/api-platform.js';
import { BRANCH_COST_HOURLY, PROJECT_COST_MONTHLY } from './pricing.js';
import { createSupabaseMcpServer } from './server.js';
import type { SupabasePlatform } from './platform/types.js';
beforeEach(async () => {
mockOrgs.clear();
mockProjects.clear();
mockBranches.clear();
mockContentApiSchemaLoadCount.value = 0;
const server = setupServer(...mockContentApi, ...mockManagementApi);
server.listen({ onUnhandledRequest: 'error' });
});
type SetupOptions = {
accessToken?: string;
projectId?: string;
platform?: SupabasePlatform;
readOnly?: boolean;
features?: string[];
};
/**
* Sets up an MCP client and server for testing.
*/
async function setup(options: SetupOptions = {}) {
const { accessToken = ACCESS_TOKEN, projectId, readOnly, features } = options;
const clientTransport = new StreamTransport();
const serverTransport = new StreamTransport();
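// Pipe each transport's output into the other so the client and server talk over in-memory streams.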
clientTransport.readable.pipeTo(serverTransport.writable);
serverTransport.readable.pipeTo(clientTransport.writable);
const client = new Client(
{
name: MCP_CLIENT_NAME,
version: MCP_CLIENT_VERSION,
},
{
capabilities: {},
}
);
const platform =
options.platform ??
createSupabaseApiPlatform({
accessToken,
apiUrl: API_URL,
});
const server = createSupabaseMcpServer({
platform,
projectId,
readOnly,
features,
});
await server.connect(serverTransport);
await client.connect(clientTransport);
/**
* Calls a tool with the given parameters.
*
* Wrapper around the `client.callTool` method to handle the response and errors.
*/
async function callTool(params: CallToolRequest['params']) {
const output = await client.callTool(params);
const { content } = CallToolResultSchema.parse(output);
const [textContent] = content;
if (!textContent) {
return undefined;
}
if (textContent.type !== 'text') {
throw new Error('tool result content is not text');
}
if (textContent.text === '') {
throw new Error('tool result content is empty');
}
const result = JSON.parse(textContent.text);
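// Tool failures are reported via `isError` with the error serialized in the text content, so re-throw them for simpler assertions.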
if (output.isError) {
throw new Error(result.error.message);
}
return result;
}
return { client, clientTransport, callTool, server, serverTransport };
}
describe('tools', () => {
test('list organizations', async () => {
const { callTool } = await setup();
const org1 = await createOrganization({
name: 'Org 1',
plan: 'free',
allowed_release_channels: ['ga'],
});
const org2 = await createOrganization({
name: 'Org 2',
plan: 'free',
allowed_release_channels: ['ga'],
});
const result = await callTool({
name: 'list_organizations',
arguments: {},
});
expect(result).toEqual([
{ id: org1.id, name: org1.name },
{ id: org2.id, name: org2.name },
]);
});
test('get organization', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const result = await callTool({
name: 'get_organization',
arguments: {
id: org.id,
},
});
expect(result).toEqual(org);
});
test('get next project cost for free org', async () => {
const { callTool } = await setup();
const freeOrg = await createOrganization({
name: 'Free Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const result = await callTool({
name: 'get_cost',
arguments: {
type: 'project',
organization_id: freeOrg.id,
},
});
expect(result).toEqual(
'The new project will cost $0 monthly. You must repeat this to the user and confirm their understanding.'
);
});
test('get next project cost for paid org with 0 projects', async () => {
const { callTool } = await setup();
const paidOrg = await createOrganization({
name: 'Paid Org',
plan: 'pro',
allowed_release_channels: ['ga'],
});
const result = await callTool({
name: 'get_cost',
arguments: {
type: 'project',
organization_id: paidOrg.id,
},
});
expect(result).toEqual(
'The new project will cost $0 monthly. You must repeat this to the user and confirm their understanding.'
);
});
test('get next project cost for paid org with > 0 active projects', async () => {
const { callTool } = await setup();
const paidOrg = await createOrganization({
name: 'Paid Org',
plan: 'pro',
allowed_release_channels: ['ga'],
});
const priorProject = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: paidOrg.id,
});
priorProject.status = 'ACTIVE_HEALTHY';
const result = await callTool({
name: 'get_cost',
arguments: {
type: 'project',
organization_id: paidOrg.id,
},
});
expect(result).toEqual(
`The new project will cost $${PROJECT_COST_MONTHLY} monthly. You must repeat this to the user and confirm their understanding.`
);
});
test('get next project cost for paid org with > 0 inactive projects', async () => {
const { callTool } = await setup();
const paidOrg = await createOrganization({
name: 'Paid Org',
plan: 'pro',
allowed_release_channels: ['ga'],
});
const priorProject = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: paidOrg.id,
});
priorProject.status = 'INACTIVE';
const result = await callTool({
name: 'get_cost',
arguments: {
type: 'project',
organization_id: paidOrg.id,
},
});
expect(result).toEqual(
`The new project will cost $0 monthly. You must repeat this to the user and confirm their understanding.`
);
});
test('get branch cost', async () => {
const { callTool } = await setup();
const paidOrg = await createOrganization({
name: 'Paid Org',
plan: 'pro',
allowed_release_channels: ['ga'],
});
const result = await callTool({
name: 'get_cost',
arguments: {
type: 'branch',
organization_id: paidOrg.id,
},
});
expect(result).toEqual(
`The new branch will cost $${BRANCH_COST_HOURLY} hourly. You must repeat this to the user and confirm their understanding.`
);
});
test('list projects', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project1 = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
const project2 = await createProject({
name: 'Project 2',
region: 'us-east-1',
organization_id: org.id,
});
const result = await callTool({
name: 'list_projects',
arguments: {},
});
expect(result).toEqual([project1.details, project2.details]);
});
test('get project', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
const result = await callTool({
name: 'get_project',
arguments: {
id: project.id,
},
});
expect(result).toEqual(project.details);
});
test('create project', async () => {
const { callTool } = await setup();
const freeOrg = await createOrganization({
name: 'Free Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const confirm_cost_id = await callTool({
name: 'confirm_cost',
arguments: {
type: 'project',
recurrence: 'monthly',
amount: 0,
},
});
const newProject = {
name: 'New Project',
region: 'us-east-1',
organization_id: freeOrg.id,
confirm_cost_id,
};
const result = await callTool({
name: 'create_project',
arguments: newProject,
});
const { confirm_cost_id: _, ...projectInfo } = newProject;
expect(result).toEqual({
...projectInfo,
id: expect.stringMatching(/^.+$/),
created_at: expect.stringMatching(
/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?Z$/
),
status: 'UNKNOWN',
});
});
test('create project in read-only mode throws an error', async () => {
const { callTool } = await setup({ readOnly: true });
const freeOrg = await createOrganization({
name: 'Free Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const confirm_cost_id = await callTool({
name: 'confirm_cost',
arguments: {
type: 'project',
recurrence: 'monthly',
amount: 0,
},
});
const newProject = {
name: 'New Project',
region: 'us-east-1',
organization_id: freeOrg.id,
confirm_cost_id,
};
const result = callTool({
name: 'create_project',
arguments: newProject,
});
await expect(result).rejects.toThrow(
'Cannot create a project in read-only mode.'
);
});
test('create project without region fails', async () => {
const { callTool } = await setup();
const freeOrg = await createOrganization({
name: 'Free Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const confirm_cost_id = await callTool({
name: 'confirm_cost',
arguments: {
type: 'project',
recurrence: 'monthly',
amount: 0,
},
});
const newProject = {
name: 'New Project',
organization_id: freeOrg.id,
confirm_cost_id,
};
const createProjectPromise = callTool({
name: 'create_project',
arguments: newProject,
});
await expect(createProjectPromise).rejects.toThrow();
});
test('create project without cost confirmation fails', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'Paid Org',
plan: 'pro',
allowed_release_channels: ['ga'],
});
const newProject = {
name: 'New Project',
region: 'us-east-1',
organization_id: org.id,
};
const createProjectPromise = callTool({
name: 'create_project',
arguments: newProject,
});
await expect(createProjectPromise).rejects.toThrow(
'User must confirm understanding of costs before creating a project.'
);
});
test('pause project', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
await callTool({
name: 'pause_project',
arguments: {
project_id: project.id,
},
});
expect(project.status).toEqual('INACTIVE');
});
test('pause project in read-only mode throws an error', async () => {
const { callTool } = await setup({ readOnly: true });
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const result = callTool({
name: 'pause_project',
arguments: {
project_id: project.id,
},
});
await expect(result).rejects.toThrow(
'Cannot pause a project in read-only mode.'
);
});
test('restore project', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'INACTIVE';
await callTool({
name: 'restore_project',
arguments: {
project_id: project.id,
},
});
expect(project.status).toEqual('ACTIVE_HEALTHY');
});
test('restore project in read-only mode throws an error', async () => {
const { callTool } = await setup({ readOnly: true });
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'INACTIVE';
const result = callTool({
name: 'restore_project',
arguments: {
project_id: project.id,
},
});
await expect(result).rejects.toThrow(
'Cannot restore a project in read-only mode.'
);
});
test('get project url', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const result = await callTool({
name: 'get_project_url',
arguments: {
project_id: project.id,
},
});
expect(result).toEqual(`https://${project.id}.supabase.co`);
});
test('get anon or publishable keys', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const result = await callTool({
name: 'get_publishable_keys',
arguments: {
project_id: project.id,
},
});
expect(result).toBeInstanceOf(Array);
expect(result.length).toBe(2);
// Check legacy anon key
const anonKey = result.find((key: any) => key.name === 'anon');
expect(anonKey).toBeDefined();
expect(anonKey.api_key).toEqual('dummy-anon-key');
expect(anonKey.type).toEqual('legacy');
expect(anonKey.id).toEqual('anon-key-id');
expect(anonKey.disabled).toBe(true);
// Check publishable key
const publishableKey = result.find(
(key: any) => key.type === 'publishable'
);
expect(publishableKey).toBeDefined();
expect(publishableKey.api_key).toEqual('sb_publishable_dummy_key_1');
expect(publishableKey.type).toEqual('publishable');
expect(publishableKey.description).toEqual('Main publishable key');
});
test('list storage buckets', async () => {
const { callTool } = await setup({ features: ['storage'] });
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
project.createStorageBucket('bucket1', true);
project.createStorageBucket('bucket2', false);
const result = await callTool({
name: 'list_storage_buckets',
arguments: {
project_id: project.id,
},
});
expect(Array.isArray(result)).toBe(true);
expect(result.length).toBe(2);
expect(result[0]).toEqual(
expect.objectContaining({
name: 'bucket1',
public: true,
created_at: expect.any(String),
updated_at: expect.any(String),
})
);
expect(result[1]).toEqual(
expect.objectContaining({
name: 'bucket2',
public: false,
created_at: expect.any(String),
updated_at: expect.any(String),
})
);
});
test('get storage config', async () => {
const { callTool } = await setup({ features: ['storage'] });
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const result = await callTool({
name: 'get_storage_config',
arguments: {
project_id: project.id,
},
});
expect(result).toEqual({
fileSizeLimit: expect.any(Number),
features: {
imageTransformation: { enabled: expect.any(Boolean) },
s3Protocol: { enabled: expect.any(Boolean) },
},
});
});
test('update storage config', async () => {
const { callTool } = await setup({ features: ['storage'] });
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const config = {
fileSizeLimit: 50,
features: {
imageTransformation: { enabled: true },
s3Protocol: { enabled: false },
},
};
const result = await callTool({
name: 'update_storage_config',
arguments: {
project_id: project.id,
config,
},
});
expect(result).toEqual({ success: true });
});
test('update storage config in read-only mode throws an error', async () => {
const { callTool } = await setup({ readOnly: true, features: ['storage'] });
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const config = {
fileSizeLimit: 50,
features: {
imageTransformation: { enabled: true },
s3Protocol: { enabled: false },
},
};
const result = callTool({
name: 'update_storage_config',
arguments: {
project_id: project.id,
config,
},
});
await expect(result).rejects.toThrow(
'Cannot update storage config in read-only mode.'
);
});
test('execute sql', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const query = 'select 1+1 as sum';
const result = await callTool({
name: 'execute_sql',
arguments: {
project_id: project.id,
query,
},
});
expect(result).toContain('untrusted user data');
expect(result).toMatch(/<untrusted-data-\w{8}-\w{4}-\w{4}-\w{4}-\w{12}>/);
expect(result).toContain(JSON.stringify([{ sum: 2 }]));
expect(result).toMatch(/<\/untrusted-data-\w{8}-\w{4}-\w{4}-\w{4}-\w{12}>/);
});
test('can run read queries in read-only mode', async () => {
const { callTool } = await setup({ readOnly: true });
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const query = 'select 1+1 as sum';
const result = await callTool({
name: 'execute_sql',
arguments: {
project_id: project.id,
query,
},
});
expect(result).toContain('untrusted user data');
expect(result).toMatch(/<untrusted-data-\w{8}-\w{4}-\w{4}-\w{4}-\w{12}>/);
expect(result).toContain(JSON.stringify([{ sum: 2 }]));
expect(result).toMatch(/<\/untrusted-data-\w{8}-\w{4}-\w{4}-\w{4}-\w{12}>/);
});
test('cannot run write queries in read-only mode', async () => {
const { callTool } = await setup({ readOnly: true });
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const query =
'create table test (id integer generated always as identity primary key)';
const resultPromise = callTool({
name: 'execute_sql',
arguments: {
project_id: project.id,
query,
},
});
await expect(resultPromise).rejects.toThrow(
'permission denied for schema public'
);
});
test('apply migration, list migrations, check tables', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const name = 'test_migration';
const query =
'create table test (id integer generated always as identity primary key)';
const result = await callTool({
name: 'apply_migration',
arguments: {
project_id: project.id,
name,
query,
},
});
expect(result).toEqual({ success: true });
const listMigrationsResult = await callTool({
name: 'list_migrations',
arguments: {
project_id: project.id,
},
});
expect(listMigrationsResult).toEqual([
{
name,
version: expect.stringMatching(/^\d{14}$/),
},
]);
const listTablesResult = await callTool({
name: 'list_tables',
arguments: {
project_id: project.id,
schemas: ['public'],
},
});
expect(listTablesResult).toEqual([
{
schema: 'public',
name: 'test',
rls_enabled: false,
rows: 0,
columns: [
{
name: 'id',
data_type: 'integer',
format: 'int4',
options: ['identity', 'updatable'],
identity_generation: 'ALWAYS',
},
],
primary_keys: ['id'],
},
]);
});
test('cannot apply migration in read-only mode', async () => {
const { callTool } = await setup({ readOnly: true });
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const name = 'test-migration';
const query =
'create table test (id integer generated always as identity primary key)';
const resultPromise = callTool({
name: 'apply_migration',
arguments: {
project_id: project.id,
name,
query,
},
});
await expect(resultPromise).rejects.toThrow(
'Cannot apply migration in read-only mode.'
);
});
test('list tables only under a specific schema', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
await project.db.exec('create schema test;');
await project.db.exec(
'create table public.test_1 (id serial primary key);'
);
await project.db.exec('create table test.test_2 (id serial primary key);');
const result = await callTool({
name: 'list_tables',
arguments: {
project_id: project.id,
schemas: ['test'],
},
});
expect(result).toEqual(
expect.arrayContaining([expect.objectContaining({ name: 'test_2' })])
);
expect(result).not.toEqual(
expect.arrayContaining([expect.objectContaining({ name: 'test_1' })])
);
});
test('listing all tables excludes system schemas', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const result = await callTool({
name: 'list_tables',
arguments: {
project_id: project.id,
},
});
expect(result).not.toEqual(
expect.arrayContaining([
expect.objectContaining({ schema: 'pg_catalog' }),
])
);
expect(result).not.toEqual(
expect.arrayContaining([
expect.objectContaining({ schema: 'information_schema' }),
])
);
expect(result).not.toEqual(
expect.arrayContaining([expect.objectContaining({ schema: 'pg_toast' })])
);
});
test('list_tables is not vulnerable to SQL injection via schemas parameter', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'SQLi Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'SQLi Project',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
// Attempt SQL injection via schemas parameter using payload from HackerOne report
// This payload attempts to break out of the string and inject a division by zero expression
// Reference: https://linear.app/supabase/issue/AI-139
const maliciousSchema = "public') OR (SELECT 1)=1/0--";
// With proper parameterization the payload is treated as a literal (non-existent) schema
// name and the call returns an empty array; without parameterization it would throw a
// "division by zero" error
const maliciousResult = await callTool({
name: 'list_tables',
arguments: {
project_id: project.id,
schemas: [maliciousSchema],
},
});
// Should return empty array without errors, proving the SQL injection was prevented
expect(maliciousResult).toEqual([]);
});
test('list extensions', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const result = await callTool({
name: 'list_extensions',
arguments: {
project_id: project.id,
},
});
expect(result).toMatchInlineSnapshot(`
[
{
"comment": "PL/pgSQL procedural language",
"default_version": "1.0",
"installed_version": "1.0",
"name": "plpgsql",
"schema": "pg_catalog",
},
]
`);
});
test('invalid access token', async () => {
const { callTool } = await setup({ accessToken: 'bad-token' });
const listOrganizationsPromise = callTool({
name: 'list_organizations',
arguments: {},
});
await expect(listOrganizationsPromise).rejects.toThrow('Unauthorized.');
});
test('invalid sql for apply_migration', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const name = 'test-migration';
const query = 'invalid sql';
const applyMigrationPromise = callTool({
name: 'apply_migration',
arguments: {
project_id: project.id,
name,
query,
},
});
await expect(applyMigrationPromise).rejects.toThrow(
'syntax error at or near "invalid"'
);
});
test('invalid sql for execute_sql', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const query = 'invalid sql';
const executeSqlPromise = callTool({
name: 'execute_sql',
arguments: {
project_id: project.id,
query,
},
});
await expect(executeSqlPromise).rejects.toThrow(
'syntax error at or near "invalid"'
);
});
test('get logs for each service type', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const services = [
'api',
'branch-action',
'postgres',
'edge-function',
'auth',
'storage',
'realtime',
] as const;
for (const service of services) {
const result = await callTool({
name: 'get_logs',
arguments: {
project_id: project.id,
service,
},
});
expect(result).toEqual([]);
}
});
test('get security advisors', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const result = await callTool({
name: 'get_advisors',
arguments: {
project_id: project.id,
type: 'security',
},
});
expect(result).toEqual({ lints: [] });
});
test('get performance advisors', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const result = await callTool({
name: 'get_advisors',
arguments: {
project_id: project.id,
type: 'performance',
},
});
expect(result).toEqual({ lints: [] });
});
test('get logs for invalid service type', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const invalidService = 'invalid-service';
const getLogsPromise = callTool({
name: 'get_logs',
arguments: {
project_id: project.id,
service: invalidService,
},
});
await expect(getLogsPromise).rejects.toThrow('Invalid enum value');
});
test('list edge functions', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const indexContent = codeBlock`
Deno.serve(async (req: Request) => {
return new Response('Hello world!', { headers: { 'Content-Type': 'text/plain' } })
});
`;
const edgeFunction = await project.deployEdgeFunction(
{
name: 'hello-world',
entrypoint_path: 'index.ts',
},
[
new File([indexContent], 'index.ts', {
type: 'application/typescript',
}),
]
);
const result = await callTool({
name: 'list_edge_functions',
arguments: {
project_id: project.id,
},
});
expect(result).toEqual([
{
id: edgeFunction.id,
slug: edgeFunction.slug,
version: edgeFunction.version,
name: edgeFunction.name,
status: edgeFunction.status,
entrypoint_path: 'index.ts',
import_map_path: undefined,
import_map: false,
verify_jwt: true,
created_at: expect.stringMatching(
/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?Z$/
),
updated_at: expect.stringMatching(
/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?Z$/
),
},
]);
});
test('get edge function', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const indexContent = codeBlock`
Deno.serve(async (req: Request) => {
return new Response('Hello world!', { headers: { 'Content-Type': 'text/plain' } })
});
`;
const edgeFunction = await project.deployEdgeFunction(
{
name: 'hello-world',
entrypoint_path: 'index.ts',
},
[
new File([indexContent], 'index.ts', {
type: 'application/typescript',
}),
]
);
const result = await callTool({
name: 'get_edge_function',
arguments: {
project_id: project.id,
function_slug: edgeFunction.slug,
},
});
expect(result).toEqual({
id: edgeFunction.id,
slug: edgeFunction.slug,
version: edgeFunction.version,
name: edgeFunction.name,
status: edgeFunction.status,
entrypoint_path: 'index.ts',
import_map_path: undefined,
import_map: false,
verify_jwt: true,
created_at: expect.stringMatching(
/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?Z$/
),
updated_at: expect.stringMatching(
/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?Z$/
),
files: [
{
name: 'index.ts',
content: indexContent,
},
],
});
});
test('deploy new edge function', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const functionName = 'hello-world';
const functionCode = 'console.log("Hello, world!");';
const result = await callTool({
name: 'deploy_edge_function',
arguments: {
project_id: project.id,
name: functionName,
files: [
{
name: 'index.ts',
content: functionCode,
},
],
},
});
expect(result).toEqual({
id: expect.stringMatching(/^.+$/),
slug: functionName,
version: 1,
name: functionName,
status: 'ACTIVE',
entrypoint_path: expect.stringMatching(/index\.ts$/),
import_map_path: undefined,
import_map: false,
verify_jwt: true,
created_at: expect.stringMatching(
/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?Z$/
),
updated_at: expect.stringMatching(
/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?Z$/
),
});
});
test('deploy edge function in read-only mode throws an error', async () => {
const { callTool } = await setup({ readOnly: true });
const org = await createOrganization({
name: 'test-org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'test-app',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const functionName = 'hello-world';
const functionCode = 'console.log("Hello, world!");';
const result = callTool({
name: 'deploy_edge_function',
arguments: {
project_id: project.id,
name: functionName,
files: [
{
name: 'index.ts',
content: functionCode,
},
],
},
});
await expect(result).rejects.toThrow(
'Cannot deploy an edge function in read-only mode.'
);
});
test('deploy new version of existing edge function', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const functionName = 'hello-world';
const edgeFunction = await project.deployEdgeFunction(
{
name: functionName,
entrypoint_path: 'index.ts',
},
[
new File(['console.log("Hello, world!");'], 'index.ts', {
type: 'application/typescript',
}),
]
);
expect(edgeFunction.version).toEqual(1);
const originalCreatedAt = edgeFunction.created_at.getTime();
const originalUpdatedAt = edgeFunction.updated_at.getTime();
const result = await callTool({
name: 'deploy_edge_function',
arguments: {
project_id: project.id,
name: functionName,
files: [
{
name: 'index.ts',
content: 'console.log("Hello, world! v2");',
},
],
},
});
expect(result).toEqual({
id: edgeFunction.id,
slug: functionName,
version: 2,
name: functionName,
status: 'ACTIVE',
entrypoint_path: expect.stringMatching(/index\.ts$/),
import_map_path: undefined,
import_map: false,
verify_jwt: true,
created_at: expect.stringMatching(
/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?Z$/
),
updated_at: expect.stringMatching(
/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?Z$/
),
});
expect(new Date(result.created_at).getTime()).toEqual(originalCreatedAt);
expect(new Date(result.updated_at).getTime()).toBeGreaterThan(
originalUpdatedAt
);
});
test('custom edge function import map', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
const functionName = 'hello-world';
const functionCode = 'console.log("Hello, world!");';
const result = await callTool({
name: 'deploy_edge_function',
arguments: {
project_id: project.id,
name: functionName,
import_map_path: 'custom-map.json',
files: [
{
name: 'index.ts',
content: functionCode,
},
{
name: 'custom-map.json',
content: '{}',
},
],
},
});
expect(result.import_map).toBe(true);
expect(result.import_map_path).toMatch(/custom-map\.json$/);
});
test('default edge function import map to deno.json', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
const functionName = 'hello-world';
const functionCode = 'console.log("Hello, world!");';
const result = await callTool({
name: 'deploy_edge_function',
arguments: {
project_id: project.id,
name: functionName,
files: [
{
name: 'index.ts',
content: functionCode,
},
{
name: 'deno.json',
content: '{}',
},
],
},
});
expect(result.import_map).toBe(true);
expect(result.import_map_path).toMatch(/deno\.json$/);
});
test('default edge function import map to import_map.json', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
const functionName = 'hello-world';
const functionCode = 'console.log("Hello, world!");';
const result = await callTool({
name: 'deploy_edge_function',
arguments: {
project_id: project.id,
name: functionName,
files: [
{
name: 'index.ts',
content: functionCode,
},
{
name: 'import_map.json',
content: '{}',
},
],
},
});
expect(result.import_map).toBe(true);
expect(result.import_map_path).toMatch(/import_map\.json$/);
});
test('updating edge function with missing import_map_path defaults to previous value', async () => {
const { callTool } = await setup();
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const functionName = 'hello-world';
const edgeFunction = await project.deployEdgeFunction(
{
name: functionName,
entrypoint_path: 'index.ts',
import_map_path: 'custom-map.json',
},
[
new File(['console.log("Hello, world!");'], 'index.ts', {
type: 'application/typescript',
}),
new File(['{}'], 'custom-map.json', {
type: 'application/json',
}),
]
);
const result = await callTool({
name: 'deploy_edge_function',
arguments: {
project_id: project.id,
name: functionName,
files: [
{
name: 'index.ts',
content: 'console.log("Hello, world! v2");',
},
{
name: 'custom-map.json',
content: '{}',
},
],
},
});
expect(result.import_map).toBe(true);
expect(result.import_map_path).toMatch(/custom-map\.json$/);
});
test('create branch', async () => {
const { callTool } = await setup({
features: ['account', 'branching'],
});
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const confirm_cost_id = await callTool({
name: 'confirm_cost',
arguments: {
type: 'branch',
recurrence: 'hourly',
amount: BRANCH_COST_HOURLY,
},
});
const branchName = 'test-branch';
const result = await callTool({
name: 'create_branch',
arguments: {
project_id: project.id,
name: branchName,
confirm_cost_id,
},
});
expect(result).toEqual({
id: expect.stringMatching(/^.+$/),
name: branchName,
project_ref: expect.stringMatching(/^.+$/),
parent_project_ref: project.id,
is_default: false,
persistent: false,
status: 'CREATING_PROJECT',
created_at: expect.stringMatching(
/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?Z$/
),
updated_at: expect.stringMatching(
/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?Z$/
),
});
});
test('create branch in read-only mode throws an error', async () => {
const { callTool } = await setup({
readOnly: true,
features: ['account', 'branching'],
});
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const confirm_cost_id = await callTool({
name: 'confirm_cost',
arguments: {
type: 'branch',
recurrence: 'hourly',
amount: BRANCH_COST_HOURLY,
},
});
const branchName = 'test-branch';
const result = callTool({
name: 'create_branch',
arguments: {
project_id: project.id,
name: branchName,
confirm_cost_id,
},
});
await expect(result).rejects.toThrow(
'Cannot create a branch in read-only mode.'
);
});
test('create branch without cost confirmation fails', async () => {
const { callTool } = await setup({ features: ['branching'] });
const org = await createOrganization({
name: 'Paid Org',
plan: 'pro',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const branchName = 'test-branch';
const createBranchPromise = callTool({
name: 'create_branch',
arguments: {
project_id: project.id,
name: branchName,
},
});
await expect(createBranchPromise).rejects.toThrow(
'User must confirm understanding of costs before creating a branch.'
);
});
test('delete branch', async () => {
const { callTool } = await setup({
features: ['account', 'branching'],
});
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const confirm_cost_id = await callTool({
name: 'confirm_cost',
arguments: {
type: 'branch',
recurrence: 'hourly',
amount: BRANCH_COST_HOURLY,
},
});
const branch = await callTool({
name: 'create_branch',
arguments: {
project_id: project.id,
name: 'test-branch',
confirm_cost_id,
},
});
const listBranchesResult = await callTool({
name: 'list_branches',
arguments: {
project_id: project.id,
},
});
expect(listBranchesResult).toContainEqual(
expect.objectContaining({ id: branch.id })
);
expect(listBranchesResult).toHaveLength(2);
await callTool({
name: 'delete_branch',
arguments: {
branch_id: branch.id,
},
});
const listBranchesResultAfterDelete = await callTool({
name: 'list_branches',
arguments: {
project_id: project.id,
},
});
expect(listBranchesResultAfterDelete).not.toContainEqual(
expect.objectContaining({ id: branch.id })
);
expect(listBranchesResultAfterDelete).toHaveLength(1);
const mainBranch = listBranchesResultAfterDelete[0];
const deleteBranchPromise = callTool({
name: 'delete_branch',
arguments: {
branch_id: mainBranch.id,
},
});
await expect(deleteBranchPromise).rejects.toThrow(
'Cannot delete the default branch.'
);
});
test('delete branch in read-only mode throws an error', async () => {
const { callTool } = await setup({
readOnly: true,
features: ['account', 'branching'],
});
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const branch = await createBranch({
name: 'test-branch',
parent_project_ref: project.id,
});
const listBranchesResult = await callTool({
name: 'list_branches',
arguments: {
project_id: project.id,
},
});
expect(listBranchesResult).toHaveLength(1);
expect(listBranchesResult).toContainEqual(
expect.objectContaining({ id: branch.id })
);
const result = callTool({
name: 'delete_branch',
arguments: {
branch_id: branch.id,
},
});
await expect(result).rejects.toThrow(
'Cannot delete a branch in read-only mode.'
);
});
test('list branches', async () => {
const { callTool } = await setup({ features: ['branching'] });
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const result = await callTool({
name: 'list_branches',
arguments: {
project_id: project.id,
},
});
expect(result).toStrictEqual([]);
});
test('merge branch', async () => {
const { callTool } = await setup({
features: ['account', 'branching', 'database'],
});
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const confirm_cost_id = await callTool({
name: 'confirm_cost',
arguments: {
type: 'branch',
recurrence: 'hourly',
amount: BRANCH_COST_HOURLY,
},
});
const branch = await callTool({
name: 'create_branch',
arguments: {
project_id: project.id,
name: 'test-branch',
confirm_cost_id,
},
});
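    // Apply a migration on the branch, then merge it into the parent project.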
const migrationName = 'sample_migration';
const migrationQuery =
'create table sample (id integer generated always as identity primary key)';
await callTool({
name: 'apply_migration',
arguments: {
project_id: branch.project_ref,
name: migrationName,
query: migrationQuery,
},
});
await callTool({
name: 'merge_branch',
arguments: {
branch_id: branch.id,
},
});
// Check that the migration was applied to the parent project
const listResult = await callTool({
name: 'list_migrations',
arguments: {
project_id: project.id,
},
});
expect(listResult).toContainEqual({
name: migrationName,
version: expect.stringMatching(/^\d{14}$/),
});
});
test('merge branch in read-only mode throws an error', async () => {
const { callTool } = await setup({
readOnly: true,
features: ['account', 'branching', 'database'],
});
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const branch = await createBranch({
name: 'test-branch',
parent_project_ref: project.id,
});
const result = callTool({
name: 'merge_branch',
arguments: {
branch_id: branch.id,
},
});
await expect(result).rejects.toThrow(
'Cannot merge a branch in read-only mode.'
);
});
test('reset branch', async () => {
const { callTool } = await setup({
features: ['account', 'branching', 'database'],
});
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const confirm_cost_id = await callTool({
name: 'confirm_cost',
arguments: {
type: 'branch',
recurrence: 'hourly',
amount: BRANCH_COST_HOURLY,
},
});
const branch = await callTool({
name: 'create_branch',
arguments: {
project_id: project.id,
name: 'test-branch',
confirm_cost_id,
},
});
// Create a table via execute_sql so that it is untracked
const query =
'create table test_untracked (id integer generated always as identity primary key)';
await callTool({
name: 'execute_sql',
arguments: {
project_id: branch.project_ref,
query,
},
});
const firstTablesResult = await callTool({
name: 'list_tables',
arguments: {
project_id: branch.project_ref,
},
});
expect(firstTablesResult).toContainEqual(
expect.objectContaining({ name: 'test_untracked' })
);
await callTool({
name: 'reset_branch',
arguments: {
branch_id: branch.id,
},
});
const secondTablesResult = await callTool({
name: 'list_tables',
arguments: {
project_id: branch.project_ref,
},
});
// Expect the untracked table to be removed after reset
expect(secondTablesResult).not.toContainEqual(
expect.objectContaining({ name: 'test_untracked' })
);
});
test('reset branch in read-only mode throws an error', async () => {
const { callTool } = await setup({
readOnly: true,
features: ['account', 'branching', 'database'],
});
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const branch = await createBranch({
name: 'test-branch',
parent_project_ref: project.id,
});
const result = callTool({
name: 'reset_branch',
arguments: {
branch_id: branch.id,
},
});
await expect(result).rejects.toThrow(
'Cannot reset a branch in read-only mode.'
);
});
test('revert migrations', async () => {
const { callTool } = await setup({
features: ['account', 'branching', 'database'],
});
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const confirm_cost_id = await callTool({
name: 'confirm_cost',
arguments: {
type: 'branch',
recurrence: 'hourly',
amount: BRANCH_COST_HOURLY,
},
});
const branch = await callTool({
name: 'create_branch',
arguments: {
project_id: project.id,
name: 'test-branch',
confirm_cost_id,
},
});
const migrationName = 'sample_migration';
const migrationQuery =
'create table sample (id integer generated always as identity primary key)';
await callTool({
name: 'apply_migration',
arguments: {
project_id: branch.project_ref,
name: migrationName,
query: migrationQuery,
},
});
// Check that migration has been applied to the branch
const firstListResult = await callTool({
name: 'list_migrations',
arguments: {
project_id: branch.project_ref,
},
});
expect(firstListResult).toContainEqual({
name: migrationName,
version: expect.stringMatching(/^\d{14}$/),
});
const firstTablesResult = await callTool({
name: 'list_tables',
arguments: {
project_id: branch.project_ref,
},
});
expect(firstTablesResult).toContainEqual(
expect.objectContaining({ name: 'sample' })
);
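    // Resetting to migration_version '0' reverts every migration on the branch.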
await callTool({
name: 'reset_branch',
arguments: {
branch_id: branch.id,
migration_version: '0',
},
});
// Check that all migrations have been reverted
const secondListResult = await callTool({
name: 'list_migrations',
arguments: {
project_id: branch.project_ref,
},
});
expect(secondListResult).toStrictEqual([]);
const secondTablesResult = await callTool({
name: 'list_tables',
arguments: {
project_id: branch.project_ref,
},
});
expect(secondTablesResult).not.toContainEqual(
expect.objectContaining({ name: 'sample' })
);
});
test('rebase branch', async () => {
const { callTool } = await setup({
features: ['account', 'branching', 'database'],
});
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const confirm_cost_id = await callTool({
name: 'confirm_cost',
arguments: {
type: 'branch',
recurrence: 'hourly',
amount: BRANCH_COST_HOURLY,
},
});
const branch = await callTool({
name: 'create_branch',
arguments: {
project_id: project.id,
name: 'test-branch',
confirm_cost_id,
},
});
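    // Apply a migration to the parent project so the branch falls behind and needs a rebase.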
const migrationName = 'sample_migration';
const migrationQuery =
'create table sample (id integer generated always as identity primary key)';
await callTool({
name: 'apply_migration',
arguments: {
project_id: project.id,
name: migrationName,
query: migrationQuery,
},
});
await callTool({
name: 'rebase_branch',
arguments: {
branch_id: branch.id,
},
});
// Check that the production migration was applied to the branch
const listResult = await callTool({
name: 'list_migrations',
arguments: {
project_id: branch.project_ref,
},
});
expect(listResult).toContainEqual({
name: migrationName,
version: expect.stringMatching(/^\d{14}$/),
});
});
test('rebase branch in read-only mode throws an error', async () => {
const { callTool } = await setup({
readOnly: true,
features: ['account', 'branching', 'database'],
});
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const branch = await createBranch({
name: 'test-branch',
parent_project_ref: project.id,
});
const result = callTool({
name: 'rebase_branch',
arguments: {
branch_id: branch.id,
},
});
await expect(result).rejects.toThrow(
'Cannot rebase a branch in read-only mode.'
);
});
// We use snake_case because it aligns better with most MCP clients
test('all tools follow snake_case naming convention', async () => {
const { client } = await setup();
const { tools } = await client.listTools();
for (const tool of tools) {
expect(tool.name, 'expected tool name to be snake_case').toMatch(
/^[a-z0-9_]+$/
);
const parameterNames = Object.keys(tool.inputSchema.properties ?? {});
for (const name of parameterNames) {
expect(name, 'expected parameter to be snake_case').toMatch(
/^[a-z0-9_]+$/
);
}
}
});
test('all tools provide annotations', async () => {
const { client } = await setup();
const { tools } = await client.listTools();
for (const tool of tools) {
expect(tool.annotations, `${tool.name} tool`).toBeDefined();
expect(tool.annotations!.title, `${tool.name} tool`).toBeDefined();
expect(tool.annotations!.readOnlyHint, `${tool.name} tool`).toBeDefined();
expect(
tool.annotations!.destructiveHint,
`${tool.name} tool`
).toBeDefined();
expect(
tool.annotations!.idempotentHint,
`${tool.name} tool`
).toBeDefined();
expect(
tool.annotations!.openWorldHint,
`${tool.name} tool`
).toBeDefined();
}
});
});
describe('feature groups', () => {
test('account tools', async () => {
const { client } = await setup({
features: ['account'],
});
const { tools } = await client.listTools();
const toolNames = tools.map((tool) => tool.name);
expect(toolNames).toEqual([
'list_organizations',
'get_organization',
'list_projects',
'get_project',
'get_cost',
'confirm_cost',
'create_project',
'pause_project',
'restore_project',
]);
});
test('database tools', async () => {
const { client } = await setup({
features: ['database'],
});
const { tools } = await client.listTools();
const toolNames = tools.map((tool) => tool.name);
expect(toolNames).toEqual([
'list_tables',
'list_extensions',
'list_migrations',
'apply_migration',
'execute_sql',
]);
});
test('debugging tools', async () => {
const { client } = await setup({
features: ['debugging'],
});
const { tools } = await client.listTools();
const toolNames = tools.map((tool) => tool.name);
expect(toolNames).toEqual(['get_logs', 'get_advisors']);
});
test('development tools', async () => {
const { client } = await setup({
features: ['development'],
});
const { tools } = await client.listTools();
const toolNames = tools.map((tool) => tool.name);
expect(toolNames).toEqual([
'get_project_url',
'get_publishable_keys',
'generate_typescript_types',
]);
});
test('docs tools', async () => {
const { client } = await setup({
features: ['docs'],
});
const { tools } = await client.listTools();
const toolNames = tools.map((tool) => tool.name);
expect(toolNames).toEqual(['search_docs']);
});
test('functions tools', async () => {
const { client } = await setup({
features: ['functions'],
});
const { tools } = await client.listTools();
const toolNames = tools.map((tool) => tool.name);
expect(toolNames).toEqual([
'list_edge_functions',
'get_edge_function',
'deploy_edge_function',
]);
});
test('branching tools', async () => {
const { client } = await setup({
features: ['branching'],
});
const { tools } = await client.listTools();
const toolNames = tools.map((tool) => tool.name);
expect(toolNames).toEqual([
'create_branch',
'list_branches',
'delete_branch',
'merge_branch',
'reset_branch',
'rebase_branch',
]);
});
test('storage tools', async () => {
const { client } = await setup({
features: ['storage'],
});
const { tools } = await client.listTools();
const toolNames = tools.map((tool) => tool.name);
expect(toolNames).toEqual([
'list_storage_buckets',
'get_storage_config',
'update_storage_config',
]);
});
test('invalid group fails', async () => {
const setupPromise = setup({
features: ['my-invalid-group'],
});
await expect(setupPromise).rejects.toThrow('Invalid enum value');
});
test('duplicate group behaves like single group', async () => {
const { client: duplicateClient } = await setup({
features: ['account', 'account'],
});
const { tools } = await duplicateClient.listTools();
const toolNames = tools.map((tool) => tool.name);
expect(toolNames).toEqual([
'list_organizations',
'get_organization',
'list_projects',
'get_project',
'get_cost',
'confirm_cost',
'create_project',
'pause_project',
'restore_project',
]);
});
test('tools filtered to available platform operations', async () => {
const platform: SupabasePlatform = {
database: {
executeSql() {
throw new Error('Not implemented');
},
listMigrations() {
throw new Error('Not implemented');
},
applyMigration() {
throw new Error('Not implemented');
},
},
};
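    // Only database operations are implemented, so besides docs only the database tools should be exposed.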
const { client } = await setup({ platform });
const { tools } = await client.listTools();
const toolNames = tools.map((tool) => tool.name);
expect(toolNames).toEqual([
'search_docs',
'list_tables',
'list_extensions',
'list_migrations',
'apply_migration',
'execute_sql',
]);
});
test('unimplemented feature group produces custom error message', async () => {
const platform: SupabasePlatform = {
database: {
executeSql() {
throw new Error('Not implemented');
},
listMigrations() {
throw new Error('Not implemented');
},
applyMigration() {
throw new Error('Not implemented');
},
},
};
const setupPromise = setup({ platform, features: ['account'] });
await expect(setupPromise).rejects.toThrow(
"This platform does not support the 'account' feature group"
);
});
});
describe('project scoped tools', () => {
test('no account level tools should exist', async () => {
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
const { client } = await setup({ projectId: project.id });
const result = await client.listTools();
const accountLevelToolNames = [
'list_organizations',
'get_organization',
'list_projects',
'get_project',
'get_cost',
'confirm_cost',
'create_project',
'pause_project',
'restore_project',
];
const toolNames = result.tools.map((tool) => tool.name);
for (const accountLevelToolName of accountLevelToolNames) {
expect(
toolNames,
`tool ${accountLevelToolName} should not be available in project scope`
).not.toContain(accountLevelToolName);
}
});
test('no tool should accept a project_id', async () => {
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
const { client } = await setup({ projectId: project.id });
const result = await client.listTools();
expect(result.tools).toBeDefined();
expect(Array.isArray(result.tools)).toBe(true);
for (const tool of result.tools) {
const schemaProperties = tool.inputSchema.properties ?? {};
expect(
'project_id' in schemaProperties,
`tool ${tool.name} should not accept a project_id`
).toBe(false);
}
});
test('invalid project ID should throw an error', async () => {
const { callTool } = await setup({ projectId: 'invalid-project-id' });
const listTablesPromise = callTool({
name: 'list_tables',
arguments: {
schemas: ['public'],
},
});
await expect(listTablesPromise).rejects.toThrow('Project not found');
});
test('passing project_id to a tool should throw an error', async () => {
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
const { callTool } = await setup({ projectId: project.id });
const listTablesPromise = callTool({
name: 'list_tables',
arguments: {
project_id: 'my-project-id',
schemas: ['public'],
},
});
await expect(listTablesPromise).rejects.toThrow('Unrecognized key');
});
test('listing tables implicitly uses the scoped project_id', async () => {
const org = await createOrganization({
name: 'My Org',
plan: 'free',
allowed_release_channels: ['ga'],
});
const project = await createProject({
name: 'Project 1',
region: 'us-east-1',
organization_id: org.id,
});
project.status = 'ACTIVE_HEALTHY';
    // Seed a table directly in the project's database, awaiting the query before listing tables.
    await project.db
      .sql`create table test (id integer generated always as identity primary key)`;
const { callTool } = await setup({ projectId: project.id });
const result = await callTool({
name: 'list_tables',
arguments: {
schemas: ['public'],
},
});
expect(result).toEqual([
expect.objectContaining({
name: 'test',
schema: 'public',
columns: [
expect.objectContaining({
name: 'id',
options: expect.arrayContaining(['identity']),
}),
],
}),
]);
});
});
describe('docs tools', () => {
test('gets content', async () => {
const { callTool } = await setup();
const query = stripIndent`
query ContentQuery {
searchDocs(query: "typescript") {
nodes {
title
href
}
}
}
`;
const result = await callTool({
name: 'search_docs',
arguments: {
graphql_query: query,
},
});
expect(result).toEqual({ dummy: true });
});
test('tool description contains schema', async () => {
const { client } = await setup();
const { tools } = await client.listTools();
const tool = tools.find((tool) => tool.name === 'search_docs');
if (!tool) {
throw new Error('tool not found');
}
if (!tool.description) {
throw new Error('tool description not found');
}
expect(tool.description.includes(contentApiMockSchema)).toBe(true);
});
test('schema is only loaded when listing tools', async () => {
const { client, callTool } = await setup();
expect(mockContentApiSchemaLoadCount.value).toBe(0);
// "tools/list" requests fetch the schema
await client.listTools();
expect(mockContentApiSchemaLoadCount.value).toBe(1);
// "tools/call" should not fetch the schema again
await callTool({
name: 'search_docs',
arguments: {
graphql_query: '{ searchDocs(query: "test") { nodes { title } } }',
},
});
expect(mockContentApiSchemaLoadCount.value).toBe(1);
// Additional "tools/list" requests fetch the schema again
await client.listTools();
expect(mockContentApiSchemaLoadCount.value).toBe(2);
});
});
```