This is page 2 of 7. Use http://codebase.md/bytebase/dbhub?lines=false&page={x} to view the full context.

# Directory Structure

```
├── .dockerignore
├── .env.example
├── .github
│   ├── CODEOWNERS
│   ├── copilot-instructions.md
│   └── workflows
│       ├── docker-publish.yml
│       ├── npm-publish.yml
│       └── run-tests.yml
├── .gitignore
├── .husky
│   └── pre-commit
├── .npmrc
├── .prettierrc.json
├── bun.lock
├── CLAUDE.md
├── Dockerfile
├── LICENSE
├── llms-full.txt
├── package.json
├── pnpm-lock.yaml
├── pnpm-workspace.yaml
├── README.md
├── resources
│   ├── employee-sqlite
│   │   ├── employee.sql
│   │   ├── load_department.sql
│   │   ├── load_dept_emp.sql
│   │   ├── load_dept_manager.sql
│   │   ├── load_employee.sql
│   │   ├── load_salary1.sql
│   │   ├── load_title.sql
│   │   ├── object.sql
│   │   ├── show_elapsed.sql
│   │   └── test_employee_md5.sql
│   └── images
│       ├── claude-desktop.webp
│       ├── cursor.webp
│       ├── logo-full.svg
│       ├── logo-full.webp
│       ├── logo-icon-only.svg
│       ├── logo-text-only.svg
│       └── mcp-inspector.webp
├── scripts
│   └── setup-husky.sh
├── src
│   ├── __tests__
│   │   └── json-rpc-integration.test.ts
│   ├── config
│   │   ├── __tests__
│   │   │   ├── env.test.ts
│   │   │   └── ssh-config-integration.test.ts
│   │   ├── demo-loader.ts
│   │   └── env.ts
│   ├── connectors
│   │   ├── __tests__
│   │   │   ├── mariadb.integration.test.ts
│   │   │   ├── mysql.integration.test.ts
│   │   │   ├── postgres-ssh.integration.test.ts
│   │   │   ├── postgres.integration.test.ts
│   │   │   ├── shared
│   │   │   │   └── integration-test-base.ts
│   │   │   ├── sqlite.integration.test.ts
│   │   │   └── sqlserver.integration.test.ts
│   │   ├── interface.ts
│   │   ├── manager.ts
│   │   ├── mariadb
│   │   │   └── index.ts
│   │   ├── mysql
│   │   │   └── index.ts
│   │   ├── postgres
│   │   │   └── index.ts
│   │   ├── sqlite
│   │   │   └── index.ts
│   │   └── sqlserver
│   │       └── index.ts
│   ├── index.ts
│   ├── prompts
│   │   ├── db-explainer.ts
│   │   ├── index.ts
│   │   └── sql-generator.ts
│   ├── resources
│   │   ├── index.ts
│   │   ├── indexes.ts
│   │   ├── procedures.ts
│   │   ├── schema.ts
│   │   ├── schemas.ts
│   │   └── tables.ts
│   ├── server.ts
│   ├── tools
│   │   ├── __tests__
│   │   │   └── execute-sql.test.ts
│   │   ├── execute-sql.ts
│   │   └── index.ts
│   ├── types
│   │   ├── sql.ts
│   │   └── ssh.ts
│   └── utils
│       ├── __tests__
│       │   ├── safe-url.test.ts
│       │   ├── ssh-config-parser.test.ts
│       │   └── ssh-tunnel.test.ts
│       ├── allowed-keywords.ts
│       ├── dsn-obfuscate.ts
│       ├── response-formatter.ts
│       ├── safe-url.ts
│       ├── sql-row-limiter.ts
│       ├── ssh-config-parser.ts
│       └── ssh-tunnel.ts
├── tsconfig.json
├── tsup.config.ts
└── vitest.config.ts
```

# Files

--------------------------------------------------------------------------------
/src/config/__tests__/env.test.ts:
--------------------------------------------------------------------------------

```typescript
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { buildDSNFromEnvParams, resolveDSN, resolveId } from '../env.js';

describe('Environment Configuration Tests', () => {
  // Store original env values to restore after tests
  const originalEnv = { ...process.env };

  beforeEach(() => {
    // Clear relevant environment variables before each test
    delete process.env.DB_TYPE;
    delete process.env.DB_HOST;
    delete process.env.DB_PORT;
    delete process.env.DB_USER;
    delete process.env.DB_PASSWORD;
    delete process.env.DB_NAME;
    delete process.env.DSN;
    delete process.env.ID;
  });

  afterEach(() => {
    // Restore original environment
    process.env = { ...originalEnv };
  });

  describe('buildDSNFromEnvParams', () => {
    it('should build PostgreSQL DSN with all parameters', () => {
      process.env.DB_TYPE = 'postgres';
      process.env.DB_HOST = 'localhost';
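      // Illustrative sketch of the mapping these tests exercise (the actual
      // implementation lives in ../env.ts):
      //   { DB_TYPE, DB_USER, DB_PASSWORD, DB_HOST, DB_PORT, DB_NAME }
      //     -> `${type}://${encodeURIComponent(user)}:${encodeURIComponent(pass)}@${host}:${port}/${encodeURIComponent(db)}`
      //   with default ports 5432 (postgres), 3306 (mysql/mariadb), 1433 (sqlserver),
      //   and `sqlite:///${DB_NAME}` for SQLite, which needs no host, port, or credentials.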
process.env.DB_PORT = '5432'; process.env.DB_USER = 'testuser'; process.env.DB_PASSWORD = 'testpass'; process.env.DB_NAME = 'testdb'; const result = buildDSNFromEnvParams(); expect(result).toEqual({ dsn: 'postgres://testuser:testpass@localhost:5432/testdb', source: 'individual environment variables' }); }); it('should build MySQL DSN with default port when port not specified', () => { process.env.DB_TYPE = 'mysql'; process.env.DB_HOST = 'mysql.example.com'; process.env.DB_USER = 'admin'; process.env.DB_PASSWORD = 'secret'; process.env.DB_NAME = 'myapp'; const result = buildDSNFromEnvParams(); expect(result).toEqual({ dsn: 'mysql://admin:[email protected]:3306/myapp', source: 'individual environment variables' }); }); it('should build MariaDB DSN with default port', () => { process.env.DB_TYPE = 'mariadb'; process.env.DB_HOST = 'mariadb.example.com'; process.env.DB_USER = 'user'; process.env.DB_PASSWORD = 'pass'; process.env.DB_NAME = 'database'; const result = buildDSNFromEnvParams(); expect(result).toEqual({ dsn: 'mariadb://user:[email protected]:3306/database', source: 'individual environment variables' }); }); it('should build SQL Server DSN with default port', () => { process.env.DB_TYPE = 'sqlserver'; process.env.DB_HOST = 'sqlserver.example.com'; process.env.DB_USER = 'sa'; process.env.DB_PASSWORD = 'strongpass'; process.env.DB_NAME = 'master'; const result = buildDSNFromEnvParams(); expect(result).toEqual({ dsn: 'sqlserver://sa:[email protected]:1433/master', source: 'individual environment variables' }); }); it('should build SQLite DSN with only DB_TYPE and DB_NAME', () => { process.env.DB_TYPE = 'sqlite'; process.env.DB_NAME = '/path/to/database.db'; const result = buildDSNFromEnvParams(); expect(result).toEqual({ dsn: 'sqlite:////path/to/database.db', source: 'individual environment variables' }); }); it('should handle postgresql type and normalize to postgres protocol', () => { process.env.DB_TYPE = 'postgresql'; process.env.DB_HOST = 'localhost'; process.env.DB_USER = 'user'; process.env.DB_PASSWORD = 'pass'; process.env.DB_NAME = 'db'; const result = buildDSNFromEnvParams(); expect(result?.dsn).toBe('postgres://user:pass@localhost:5432/db'); }); it('should properly encode special characters in password', () => { process.env.DB_TYPE = 'postgres'; process.env.DB_HOST = 'localhost'; process.env.DB_USER = 'user'; process.env.DB_PASSWORD = 'test@pass:with/special#chars&more=special'; process.env.DB_NAME = 'db'; const result = buildDSNFromEnvParams(); expect(result?.dsn).toBe( 'postgres://user:test%40pass%3Awith%2Fspecial%23chars%26more%3Dspecial@localhost:5432/db' ); }); it('should properly encode special characters in username', () => { process.env.DB_TYPE = 'postgres'; process.env.DB_HOST = 'localhost'; process.env.DB_USER = '[email protected]'; process.env.DB_PASSWORD = 'pass'; process.env.DB_NAME = 'db'; const result = buildDSNFromEnvParams(); expect(result?.dsn).toBe( 'postgres://user%40domain.com:pass@localhost:5432/db' ); }); it('should properly encode special characters in database name', () => { process.env.DB_TYPE = 'postgres'; process.env.DB_HOST = 'localhost'; process.env.DB_USER = 'user'; process.env.DB_PASSWORD = 'pass'; process.env.DB_NAME = 'my-db@test'; const result = buildDSNFromEnvParams(); expect(result?.dsn).toBe( 'postgres://user:pass@localhost:5432/my-db%40test' ); }); it('should handle SQLite with special characters in file path', () => { process.env.DB_TYPE = 'sqlite'; process.env.DB_NAME = '/tmp/test_db@#$.db'; const result = buildDSNFromEnvParams(); 
expect(result).toEqual({ dsn: 'sqlite:////tmp/test_db@#$.db', source: 'individual environment variables' }); }); it('should return null when required parameters are missing for non-SQLite databases', () => { process.env.DB_TYPE = 'postgres'; process.env.DB_HOST = 'localhost'; // Missing DB_USER, DB_PASSWORD, DB_NAME const result = buildDSNFromEnvParams(); expect(result).toBeNull(); }); it('should return null when DB_TYPE is missing', () => { process.env.DB_HOST = 'localhost'; process.env.DB_USER = 'user'; process.env.DB_PASSWORD = 'pass'; process.env.DB_NAME = 'db'; const result = buildDSNFromEnvParams(); expect(result).toBeNull(); }); it('should return null when SQLite is missing DB_NAME', () => { process.env.DB_TYPE = 'sqlite'; // Missing DB_NAME const result = buildDSNFromEnvParams(); expect(result).toBeNull(); }); it('should throw error for unsupported database type', () => { process.env.DB_TYPE = 'oracle'; process.env.DB_HOST = 'localhost'; process.env.DB_USER = 'user'; process.env.DB_PASSWORD = 'pass'; process.env.DB_NAME = 'db'; expect(() => buildDSNFromEnvParams()).toThrow( 'Unsupported DB_TYPE: oracle. Supported types: postgres, postgresql, mysql, mariadb, sqlserver, sqlite' ); }); it('should use custom port when provided', () => { process.env.DB_TYPE = 'postgres'; process.env.DB_HOST = 'localhost'; process.env.DB_PORT = '9999'; process.env.DB_USER = 'user'; process.env.DB_PASSWORD = 'pass'; process.env.DB_NAME = 'db'; const result = buildDSNFromEnvParams(); expect(result?.dsn).toBe('postgres://user:pass@localhost:9999/db'); }); it('should return null for empty password (required field)', () => { process.env.DB_TYPE = 'postgres'; process.env.DB_HOST = 'localhost'; process.env.DB_USER = 'user'; process.env.DB_PASSWORD = ''; process.env.DB_NAME = 'db'; const result = buildDSNFromEnvParams(); expect(result).toBeNull(); }); }); describe('resolveDSN integration with individual parameters', () => { it('should use DSN when both DSN and individual parameters are provided', () => { process.env.DSN = 'postgres://direct:dsn@localhost:5432/directdb'; process.env.DB_TYPE = 'mysql'; process.env.DB_HOST = 'localhost'; process.env.DB_USER = 'user'; process.env.DB_PASSWORD = 'pass'; process.env.DB_NAME = 'db'; const result = resolveDSN(); expect(result).toEqual({ dsn: 'postgres://direct:dsn@localhost:5432/directdb', source: 'environment variable' }); }); it('should fall back to individual parameters when DSN is not provided', () => { process.env.DB_TYPE = 'postgres'; process.env.DB_HOST = 'localhost'; process.env.DB_USER = 'user'; process.env.DB_PASSWORD = 'pass'; process.env.DB_NAME = 'db'; const result = resolveDSN(); expect(result).toEqual({ dsn: 'postgres://user:pass@localhost:5432/db', source: 'individual environment variables' }); }); it('should return null when neither DSN nor complete individual parameters are provided', () => { process.env.DB_TYPE = 'postgres'; process.env.DB_HOST = 'localhost'; // Missing required parameters const result = resolveDSN(); expect(result).toBeNull(); }); it('should handle SQLite individual parameters correctly', () => { process.env.DB_TYPE = 'sqlite'; process.env.DB_NAME = ':memory:'; const result = resolveDSN(); expect(result).toEqual({ dsn: 'sqlite:///:memory:', source: 'individual environment variables' }); }); }); describe('edge cases and complex scenarios', () => { it('should handle password with all special URL characters', () => { process.env.DB_TYPE = 'postgres'; process.env.DB_HOST = 'localhost'; process.env.DB_USER = 'user'; process.env.DB_PASSWORD 
= '!@#$%^&*()+={}[]|\\:";\'<>?,./~`'; process.env.DB_NAME = 'db'; const result = buildDSNFromEnvParams(); // Verify it builds without error and contains encoded characters expect(result).toBeTruthy(); // Note: encodeURIComponent doesn't encode ! so it remains as ! expect(result?.dsn).toContain('!'); // ! is not encoded expect(result?.dsn).toContain('%40'); // @ expect(result?.dsn).toContain('%23'); // # expect(result?.dsn).toContain('%24'); // $ expect(result?.dsn).toContain('%25'); // % }); it('should handle database names with Unicode characters', () => { process.env.DB_TYPE = 'postgres'; process.env.DB_HOST = 'localhost'; process.env.DB_USER = 'user'; process.env.DB_PASSWORD = 'pass'; process.env.DB_NAME = 'тест_база_данных'; // Cyrillic characters const result = buildDSNFromEnvParams(); expect(result).toBeTruthy(); expect(result?.dsn).toContain('%D1%82%D0%B5%D1%81%D1%82'); // Encoded Cyrillic }); it('should be case insensitive for database type', () => { process.env.DB_TYPE = 'POSTGRES'; process.env.DB_HOST = 'localhost'; process.env.DB_USER = 'user'; process.env.DB_PASSWORD = 'pass'; process.env.DB_NAME = 'db'; const result = buildDSNFromEnvParams(); expect(result?.dsn).toBe('postgres://user:pass@localhost:5432/db'); }); }); describe('resolveId', () => { it('should return null when ID is not provided', () => { const result = resolveId(); expect(result).toBeNull(); }); it('should resolve ID from environment variable', () => { process.env.ID = 'prod'; const result = resolveId(); expect(result).toEqual({ id: 'prod', source: 'environment variable' }); }); it('should handle different ID formats', () => { process.env.ID = 'staging-db-01'; const result = resolveId(); expect(result).toEqual({ id: 'staging-db-01', source: 'environment variable' }); }); it('should handle numeric IDs as strings', () => { process.env.ID = '123'; const result = resolveId(); expect(result).toEqual({ id: '123', source: 'environment variable' }); }); }); }); ``` -------------------------------------------------------------------------------- /src/__tests__/json-rpc-integration.test.ts: -------------------------------------------------------------------------------- ```typescript import { describe, it, expect, beforeAll, afterAll } from 'vitest'; import { spawn, ChildProcess } from 'child_process'; import fs from 'fs'; import path from 'path'; import os from 'os'; describe('JSON RPC Integration Tests', () => { let serverProcess: ChildProcess | null = null; let testDbPath: string; let baseUrl: string; const testPort = 3001; beforeAll(async () => { // Create a temporary SQLite database file const tempDir = os.tmpdir(); testDbPath = path.join(tempDir, `json_rpc_test_${Date.now()}_${Math.random().toString(36).substr(2, 9)}.db`); baseUrl = `http://localhost:${testPort}`; // Start the server as a child process serverProcess = spawn('pnpm', ['dev'], { env: { ...process.env, DSN: `sqlite://${testDbPath}`, TRANSPORT: 'http', PORT: testPort.toString(), NODE_ENV: 'test' }, stdio: 'pipe' }); // Handle server output serverProcess.stdout?.on('data', (data) => { console.log(`Server stdout: ${data}`); }); serverProcess.stderr?.on('data', (data) => { console.error(`Server stderr: ${data}`); }); // Wait for server to start up let serverReady = false; for (let i = 0; i < 20; i++) { try { await new Promise(resolve => setTimeout(resolve, 1000)); const response = await fetch(`${baseUrl}/message`, { method: 'POST', headers: { 'Content-Type': 'application/json', 'Accept': 'application/json, text/event-stream' }, body: JSON.stringify({ 
jsonrpc: '2.0', id: 'health-check', method: 'notifications/initialized' }) }); if (response.status < 500) { serverReady = true; break; } } catch (e) { // Server not ready yet, continue waiting } } if (!serverReady) { throw new Error('Server did not start within expected time'); } // Create test tables and data via HTTP request await makeJsonRpcCall('execute_sql', { sql: ` CREATE TABLE IF NOT EXISTS users ( id INTEGER PRIMARY KEY AUTOINCREMENT, name VARCHAR(100) NOT NULL, email VARCHAR(100) UNIQUE NOT NULL, age INTEGER ); CREATE TABLE IF NOT EXISTS orders ( id INTEGER PRIMARY KEY AUTOINCREMENT, user_id INTEGER REFERENCES users(id), total DECIMAL(10,2), created_at DATETIME DEFAULT CURRENT_TIMESTAMP ); INSERT INTO users (name, email, age) VALUES ('John Doe', '[email protected]', 30), ('Jane Smith', '[email protected]', 25), ('Bob Johnson', '[email protected]', 35); INSERT INTO orders (user_id, total) VALUES (1, 99.99), (1, 149.50), (2, 75.25); ` }); }, 30000); afterAll(async () => { // Kill the server process if it's still running if (serverProcess) { serverProcess.kill('SIGTERM'); // Wait for process to exit await new Promise((resolve) => { if (serverProcess) { serverProcess.on('exit', resolve); setTimeout(() => { if (serverProcess && !serverProcess.killed) { serverProcess.kill('SIGKILL'); } resolve(void 0); }, 5000); } else { resolve(void 0); } }); } // Clean up the test database file if (testDbPath && fs.existsSync(testDbPath)) { fs.unlinkSync(testDbPath); } }); async function makeJsonRpcCall(method: string, params: any): Promise<any> { const response = await fetch(`${baseUrl}/message`, { method: 'POST', headers: { 'Content-Type': 'application/json', 'Accept': 'application/json, text/event-stream', }, body: JSON.stringify({ jsonrpc: '2.0', id: Math.random().toString(36).substr(2, 9), method: 'tools/call', params: { name: method, arguments: params } }) }); if (!response.ok) { throw new Error(`HTTP ${response.status}: ${response.statusText}`); } // Handle different response types const contentType = response.headers.get('content-type'); if (contentType?.includes('text/event-stream')) { // Handle SSE response const text = await response.text(); const lines = text.split('\n').filter(line => line.startsWith('data: ')); if (lines.length > 0) { return JSON.parse(lines[0].substring(6)); // Remove 'data: ' prefix } throw new Error('No data in SSE response'); } else { return await response.json(); } } describe('execute_sql JSON RPC calls', () => { it('should execute a simple SELECT query successfully', async () => { const response = await makeJsonRpcCall('execute_sql', { sql: 'SELECT * FROM users WHERE age > 25 ORDER BY age' }); expect(response).toHaveProperty('result'); expect(response.result).toHaveProperty('content'); expect(Array.isArray(response.result.content)).toBe(true); const content = JSON.parse(response.result.content[0].text); expect(content.success).toBe(true); expect(content.data).toHaveProperty('rows'); expect(content.data).toHaveProperty('count'); expect(content.data.rows).toHaveLength(2); expect(content.data.rows[0].name).toBe('John Doe'); expect(content.data.rows[1].name).toBe('Bob Johnson'); }); it('should execute a JOIN query successfully', async () => { const response = await makeJsonRpcCall('execute_sql', { sql: ` SELECT u.name, u.email, o.total FROM users u JOIN orders o ON u.id = o.user_id WHERE u.age >= 30 ORDER BY o.total DESC ` }); expect(response).toHaveProperty('result'); const content = JSON.parse(response.result.content[0].text); expect(content.success).toBe(true); 
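      // Shape of the round trip exercised here, as built by makeJsonRpcCall above:
      //   request:  { jsonrpc: '2.0', id, method: 'tools/call',
      //               params: { name: 'execute_sql', arguments: { sql } } }
      //   response: { jsonrpc: '2.0', id, result: { content: [{ text, ... }] } }
      // where `text` is a JSON string that parses to { success: true, data: { rows, count } }
      // on success, or { success: false, error, code } on failure.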
expect(content.data.rows).toHaveLength(2); expect(content.data.rows[0].total).toBe(149.50); expect(content.data.rows[1].total).toBe(99.99); }); it('should execute aggregate queries successfully', async () => { const response = await makeJsonRpcCall('execute_sql', { sql: ` SELECT COUNT(*) as user_count, AVG(age) as avg_age, MIN(age) as min_age, MAX(age) as max_age FROM users ` }); expect(response).toHaveProperty('result'); const content = JSON.parse(response.result.content[0].text); expect(content.success).toBe(true); expect(content.data.rows).toHaveLength(1); expect(content.data.rows[0].user_count).toBe(3); expect(content.data.rows[0].avg_age).toBe(30); expect(content.data.rows[0].min_age).toBe(25); expect(content.data.rows[0].max_age).toBe(35); }); it('should handle multiple statements in a single call', async () => { const response = await makeJsonRpcCall('execute_sql', { sql: ` INSERT INTO users (name, email, age) VALUES ('Test User', '[email protected]', 28); SELECT COUNT(*) as total_users FROM users; ` }); expect(response).toHaveProperty('result'); const content = JSON.parse(response.result.content[0].text); expect(content.success).toBe(true); expect(content.data.rows).toHaveLength(1); expect(content.data.rows[0].total_users).toBe(4); }); it('should handle SQLite-specific functions', async () => { const response = await makeJsonRpcCall('execute_sql', { sql: ` SELECT sqlite_version() as version, datetime('now') as current_time, upper('hello world') as uppercase, length('test string') as str_length ` }); expect(response).toHaveProperty('result'); const content = JSON.parse(response.result.content[0].text); expect(content.success).toBe(true); expect(content.data.rows).toHaveLength(1); expect(content.data.rows[0].version).toBeDefined(); expect(content.data.rows[0].uppercase).toBe('HELLO WORLD'); expect(content.data.rows[0].str_length).toBe(11); }); it('should return error for invalid SQL', async () => { const response = await makeJsonRpcCall('execute_sql', { sql: 'SELECT * FROM non_existent_table' }); expect(response).toHaveProperty('result'); const content = JSON.parse(response.result.content[0].text); expect(content.success).toBe(false); expect(content.error).toContain('no such table'); expect(content.code).toBe('EXECUTION_ERROR'); }); it('should handle empty result sets', async () => { const response = await makeJsonRpcCall('execute_sql', { sql: 'SELECT * FROM users WHERE age > 100' }); expect(response).toHaveProperty('result'); const content = JSON.parse(response.result.content[0].text); expect(content.success).toBe(true); expect(content.data.rows).toHaveLength(0); expect(content.data.count).toBe(0); }); it('should work with SQLite transactions', async () => { const response = await makeJsonRpcCall('execute_sql', { sql: ` BEGIN TRANSACTION; INSERT INTO users (name, email, age) VALUES ('Transaction User', '[email protected]', 40); COMMIT; SELECT * FROM users WHERE email = '[email protected]'; ` }); expect(response).toHaveProperty('result'); const content = JSON.parse(response.result.content[0].text); expect(content.success).toBe(true); expect(content.data.rows).toHaveLength(1); expect(content.data.rows[0].name).toBe('Transaction User'); expect(content.data.rows[0].age).toBe(40); }); it('should handle PRAGMA statements', async () => { const response = await makeJsonRpcCall('execute_sql', { sql: 'PRAGMA table_info(users)' }); expect(response).toHaveProperty('result'); const content = JSON.parse(response.result.content[0].text); expect(content.success).toBe(true); 
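      // PRAGMA table_info(users) returns one row per column with fields such as
      // cid, name, type, notnull, dflt_value and pk, so the assertions below can
      // look up columns by their `name` field.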
expect(content.data.rows.length).toBeGreaterThan(0); expect(content.data.rows.some((row: any) => row.name === 'id')).toBe(true); expect(content.data.rows.some((row: any) => row.name === 'name')).toBe(true); }); }); describe('JSON RPC protocol compliance', () => { it('should return proper JSON RPC response structure', async () => { const requestId = Math.random().toString(36).substr(2, 9); const response = await makeJsonRpcCall('execute_sql', { sql: 'SELECT 1 as test' }); expect(response).toHaveProperty('jsonrpc', '2.0'); expect(response).toHaveProperty('id'); expect(response).toHaveProperty('result'); expect(response.result).toHaveProperty('content'); }); it('should handle malformed requests gracefully', async () => { const response = await fetch(`${baseUrl}/message`, { method: 'POST', headers: { 'Content-Type': 'application/json', 'Accept': 'application/json, text/event-stream', }, body: JSON.stringify({ // Missing required jsonrpc field id: 'test', method: 'tools/call', params: { name: 'execute_sql', arguments: { sql: 'SELECT 1' } } }) }); // The server should still respond, but with an error expect(response.status).toBeLessThan(500); }); }); }); ``` -------------------------------------------------------------------------------- /src/prompts/db-explainer.ts: -------------------------------------------------------------------------------- ```typescript import { z } from "zod"; import { ConnectorManager } from "../connectors/manager.js"; import { formatPromptSuccessResponse, formatPromptErrorResponse, } from "../utils/response-formatter.js"; // Schema for database explainer prompt export const dbExplainerSchema = { schema: z.string().optional().describe("Optional database schema to use"), table: z.string().optional().describe("Optional specific table to explain"), }; /** * Database Explainer Prompt Handler * Provides explanations about database elements */ export async function dbExplainerPromptHandler( { schema, table, }: { schema?: string; table?: string; }, _extra: any ): Promise<{ messages: { role: "assistant" | "user"; content: { type: "text"; text: string; }; }[]; references?: string[]; error?: string; code?: string; _meta?: Record<string, unknown>; [key: string]: unknown; }> { try { const connector = ConnectorManager.getCurrentConnector(); // Verify schema exists if provided if (schema) { const availableSchemas = await connector.getSchemas(); if (!availableSchemas.includes(schema)) { return formatPromptErrorResponse( `Schema '${schema}' does not exist or cannot be accessed. Available schemas: ${availableSchemas.join(", ")}`, "SCHEMA_NOT_FOUND" ); } } // Get list of available tables in the specified schema const tables = await connector.getTables(schema); // Process the table parameter if provided const normalizedTable = table?.toLowerCase() || ""; // Check if table parameter matches a table in the database const matchingTable = tables.find((t) => t.toLowerCase() === normalizedTable); if (matchingTable && table) { try { // Explain the table const columns = await connector.getTableSchema(matchingTable, schema); if (columns.length === 0) { return formatPromptErrorResponse( `Table '${matchingTable}' exists but has no columns or cannot be accessed.`, "EMPTY_TABLE_SCHEMA" ); } // Create a table structure description const schemaInfo = schema ? ` in schema '${schema}'` : ""; const tableDescription = `Table: ${matchingTable}${schemaInfo} Structure: ${columns.map((col) => `- ${col.column_name} (${col.data_type})${col.is_nullable === "YES" ? ", nullable" : ""}${col.column_default ? 
`, default: ${col.column_default}` : ""}`).join("\n")} Purpose: This table appears to store ${determineTablePurpose(matchingTable, columns)} Relationships: ${determineRelationships(matchingTable, columns)}`; return formatPromptSuccessResponse(tableDescription); } catch (error) { return formatPromptErrorResponse( `Error retrieving schema for table '${matchingTable}': ${(error as Error).message}`, "TABLE_SCHEMA_ERROR" ); } } // Check if table parameter has a table.column format if (table && table.includes(".")) { const [tableName, columnName] = table.split("."); const tableExists = tables.find((t) => t.toLowerCase() === tableName.toLowerCase()); if (!tableExists) { // Table part of table.column doesn't exist return formatPromptErrorResponse( `Table '${tableName}' does not exist in schema '${schema || "default"}'. Available tables: ${tables.slice(0, 10).join(", ")}${tables.length > 10 ? "..." : ""}`, "TABLE_NOT_FOUND" ); } try { // Get column info const columns = await connector.getTableSchema(tableName, schema); const column = columns.find( (c) => c.column_name.toLowerCase() === columnName.toLowerCase() ); if (column) { const columnDescription = `Column: ${tableName}.${column.column_name} Type: ${column.data_type} Nullable: ${column.is_nullable === "YES" ? "Yes" : "No"} Default: ${column.column_default || "None"} Purpose: ${determineColumnPurpose(column.column_name, column.data_type)}`; return formatPromptSuccessResponse(columnDescription); } else { // Column doesn't exist in the table return formatPromptErrorResponse( `Column '${columnName}' does not exist in table '${tableName}'. Available columns: ${columns.map((c) => c.column_name).join(", ")}`, "COLUMN_NOT_FOUND" ); } } catch (error) { return formatPromptErrorResponse( `Error accessing table schema: ${(error as Error).message}`, "SCHEMA_ACCESS_ERROR" ); } } // If no specific table was provided or the table refers to the database itself, // provide a database overview // This will trigger if: // 1. No table parameter was provided // 2. The table parameter is a database overview keyword if (!table || ["database", "db", "schema", "overview", "all"].includes(normalizedTable)) { const schemaInfo = schema ? `in schema '${schema}'` : "across all schemas"; let dbOverview = `Database Overview ${schemaInfo} Tables: ${tables.length} ${tables.map((t) => `- ${t}`).join("\n")} This database ${describeDatabasePurpose(tables)}`; return formatPromptSuccessResponse(dbOverview); } // If we get here and table was provided but not found, // check for partial matches to suggest alternatives if (table && !normalizedTable.includes(".")) { // Search for partial matches const possibleTableMatches = tables.filter( (t) => t.toLowerCase().includes(normalizedTable) || normalizedTable.includes(t.toLowerCase()) ); if (possibleTableMatches.length > 0) { // Found partial matches, suggest them to the user return formatPromptSuccessResponse( `Table "${table}" not found. Did you mean one of these tables?\n\n${possibleTableMatches.join("\n")}` ); } else { // No matches at all, return a clear error with available tables const schemaInfo = schema ? `in schema '${schema}'` : "in the database"; return formatPromptErrorResponse( `Table "${table}" does not exist ${schemaInfo}. Available tables: ${tables.slice(0, 10).join(", ")}${tables.length > 10 ? "..." 
: ""}`, "TABLE_NOT_FOUND" ); } } } catch (error) { return formatPromptErrorResponse( `Error explaining database: ${(error as Error).message}`, "EXPLANATION_ERROR" ); } // If no condition was met and no other return was triggered return formatPromptErrorResponse( `Unable to process request for schema: ${schema}, table: ${table}`, "UNHANDLED_REQUEST" ); } /** * Helper function to make an educated guess about the purpose of a table * based on its name and columns */ function determineTablePurpose(tableName: string, columns: any[]): string { const lowerTableName = tableName.toLowerCase(); const columnNames = columns.map((c) => c.column_name.toLowerCase()); // Check for common patterns if ( lowerTableName.includes("user") || columnNames.includes("username") || columnNames.includes("email") ) { return "user information and profiles"; } if (lowerTableName.includes("order") || lowerTableName.includes("purchase")) { return "order or purchase transactions"; } if (lowerTableName.includes("product") || lowerTableName.includes("item")) { return "product or item information"; } if (lowerTableName.includes("log") || columnNames.includes("timestamp")) { return "event or activity logs"; } if (columnNames.includes("created_at") && columnNames.includes("updated_at")) { return "tracking timestamped data records"; } // Default return "data related to " + tableName; } /** * Helper function to determine potential relationships based on column names */ function determineRelationships(tableName: string, columns: any[]): string { const potentialRelationships = []; // Look for _id columns which often indicate foreign keys const idColumns = columns.filter( (c) => c.column_name.toLowerCase().endsWith("_id") && !c.column_name.toLowerCase().startsWith(tableName.toLowerCase()) ); if (idColumns.length > 0) { idColumns.forEach((col) => { const referencedTable = col.column_name.toLowerCase().replace("_id", ""); potentialRelationships.push( `May have a relationship with the "${referencedTable}" table (via ${col.column_name})` ); }); } // Check if the table itself might be referenced by others if (columns.some((c) => c.column_name.toLowerCase() === "id")) { potentialRelationships.push( `May be referenced by other tables as "${tableName.toLowerCase()}_id"` ); } return potentialRelationships.length > 0 ? 
potentialRelationships.join("\n") : "No obvious relationships identified based on column names"; } /** * Helper function to determine the purpose of a column based on naming patterns */ function determineColumnPurpose(columnName: string, dataType: string): string { const lowerColumnName = columnName.toLowerCase(); if (lowerColumnName === "id") { return "Primary identifier for records in this table"; } if (lowerColumnName.endsWith("_id")) { const referencedTable = lowerColumnName.replace("_id", ""); return `Foreign key reference to the "${referencedTable}" table`; } if (lowerColumnName.includes("name")) { return "Stores name information"; } if (lowerColumnName.includes("email")) { return "Stores email address information"; } if (lowerColumnName.includes("password") || lowerColumnName.includes("hash")) { return "Stores security credential information (likely hashed)"; } if (lowerColumnName === "created_at" || lowerColumnName === "created_on") { return "Timestamp for when the record was created"; } if (lowerColumnName === "updated_at" || lowerColumnName === "modified_at") { return "Timestamp for when the record was last updated"; } if (lowerColumnName.includes("date") || lowerColumnName.includes("time")) { return "Stores date or time information"; } if ( lowerColumnName.includes("price") || lowerColumnName.includes("cost") || lowerColumnName.includes("amount") ) { return "Stores monetary value information"; } // Data type specific purposes if (dataType.includes("boolean")) { return "Stores a true/false flag"; } if (dataType.includes("json")) { return "Stores structured JSON data"; } if (dataType.includes("text") || dataType.includes("varchar") || dataType.includes("char")) { return "Stores text information"; } // Default return `Stores ${dataType} data`; } /** * Helper function to describe the overall database purpose based on tables */ function describeDatabasePurpose(tables: string[]): string { const tableNames = tables.map((t) => t.toLowerCase()); if (tableNames.some((t) => t.includes("user")) && tableNames.some((t) => t.includes("order"))) { return "appears to be an e-commerce or customer order management system"; } if ( tableNames.some((t) => t.includes("patient")) || tableNames.some((t) => t.includes("medical")) ) { return "appears to be related to healthcare or medical record management"; } if ( tableNames.some((t) => t.includes("student")) || tableNames.some((t) => t.includes("course")) ) { return "appears to be related to education or student management"; } if ( tableNames.some((t) => t.includes("employee")) || tableNames.some((t) => t.includes("payroll")) ) { return "appears to be related to HR or employee management"; } if ( tableNames.some((t) => t.includes("inventory")) || tableNames.some((t) => t.includes("stock")) ) { return "appears to be related to inventory or stock management"; } // Default return "contains multiple tables that store related information"; } ``` -------------------------------------------------------------------------------- /src/connectors/postgres/index.ts: -------------------------------------------------------------------------------- ```typescript import pg from "pg"; const { Pool } = pg; import { Connector, ConnectorType, ConnectorRegistry, DSNParser, SQLResult, TableColumn, TableIndex, StoredProcedure, ExecuteOptions, } from "../interface.js"; import { SafeURL } from "../../utils/safe-url.js"; import { obfuscateDSNPassword } from "../../utils/dsn-obfuscate.js"; import { SQLRowLimiter } from "../../utils/sql-row-limiter.js"; /** * PostgreSQL DSN Parser 
* Handles DSN strings like: postgres://user:password@localhost:5432/dbname?sslmode=disable * Supported SSL modes: * - sslmode=disable: No SSL * - sslmode=require: SSL connection without certificate verification * - Any other value: SSL with certificate verification */ class PostgresDSNParser implements DSNParser { async parse(dsn: string): Promise<pg.PoolConfig> { // Basic validation if (!this.isValidDSN(dsn)) { const obfuscatedDSN = obfuscateDSNPassword(dsn); const expectedFormat = this.getSampleDSN(); throw new Error( `Invalid PostgreSQL DSN format.\nProvided: ${obfuscatedDSN}\nExpected: ${expectedFormat}` ); } try { // Use the SafeURL helper instead of the built-in URL // This will handle special characters in passwords, etc. const url = new SafeURL(dsn); const config: pg.PoolConfig = { host: url.hostname, port: url.port ? parseInt(url.port) : 5432, database: url.pathname ? url.pathname.substring(1) : '', // Remove leading '/' if exists user: url.username, password: url.password, }; // Handle query parameters (like sslmode, etc.) url.forEachSearchParam((value, key) => { if (key === "sslmode") { if (value === "disable") { config.ssl = false; } else if (value === "require") { config.ssl = { rejectUnauthorized: false }; } else { config.ssl = true; } } // Add other parameters as needed }); return config; } catch (error) { throw new Error( `Failed to parse PostgreSQL DSN: ${error instanceof Error ? error.message : String(error)}` ); } } getSampleDSN(): string { return "postgres://postgres:password@localhost:5432/postgres?sslmode=require"; } isValidDSN(dsn: string): boolean { try { return dsn.startsWith('postgres://') || dsn.startsWith('postgresql://'); } catch (error) { return false; } } } /** * PostgreSQL Connector Implementation */ export class PostgresConnector implements Connector { id: ConnectorType = "postgres"; name = "PostgreSQL"; dsnParser = new PostgresDSNParser(); private pool: pg.Pool | null = null; async connect(dsn: string): Promise<void> { try { const config = await this.dsnParser.parse(dsn); this.pool = new Pool(config); // Test the connection const client = await this.pool.connect(); console.error("Successfully connected to PostgreSQL database"); client.release(); } catch (err) { console.error("Failed to connect to PostgreSQL database:", err); throw err; } } async disconnect(): Promise<void> { if (this.pool) { await this.pool.end(); this.pool = null; } } async getSchemas(): Promise<string[]> { if (!this.pool) { throw new Error("Not connected to database"); } const client = await this.pool.connect(); try { const result = await client.query(` SELECT schema_name FROM information_schema.schemata WHERE schema_name NOT IN ('pg_catalog', 'information_schema', 'pg_toast') ORDER BY schema_name `); return result.rows.map((row) => row.schema_name); } finally { client.release(); } } async getTables(schema?: string): Promise<string[]> { if (!this.pool) { throw new Error("Not connected to database"); } const client = await this.pool.connect(); try { // In PostgreSQL, use 'public' as the default schema if none specified // 'public' is the standard default schema in PostgreSQL databases const schemaToUse = schema || "public"; const result = await client.query( ` SELECT table_name FROM information_schema.tables WHERE table_schema = $1 ORDER BY table_name `, [schemaToUse] ); return result.rows.map((row) => row.table_name); } finally { client.release(); } } async tableExists(tableName: string, schema?: string): Promise<boolean> { if (!this.pool) { throw new Error("Not connected to database"); 
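    // For reference, the DSN parser above maps sslmode as follows (per parse()):
    //   sslmode=disable -> ssl: false
    //   sslmode=require -> ssl: { rejectUnauthorized: false }
    //   any other value -> ssl: true
    // e.g. postgres://user:pass@db:5432/app?sslmode=require
    //   -> { host: 'db', port: 5432, database: 'app', user: 'user', password: 'pass',
    //        ssl: { rejectUnauthorized: false } }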
} const client = await this.pool.connect(); try { // In PostgreSQL, use 'public' as the default schema if none specified const schemaToUse = schema || "public"; const result = await client.query( ` SELECT EXISTS ( SELECT FROM information_schema.tables WHERE table_schema = $1 AND table_name = $2 ) `, [schemaToUse, tableName] ); return result.rows[0].exists; } finally { client.release(); } } async getTableIndexes(tableName: string, schema?: string): Promise<TableIndex[]> { if (!this.pool) { throw new Error("Not connected to database"); } const client = await this.pool.connect(); try { // In PostgreSQL, use 'public' as the default schema if none specified const schemaToUse = schema || "public"; // Query to get all indexes for the table const result = await client.query( ` SELECT i.relname as index_name, array_agg(a.attname) as column_names, ix.indisunique as is_unique, ix.indisprimary as is_primary FROM pg_class t, pg_class i, pg_index ix, pg_attribute a, pg_namespace ns WHERE t.oid = ix.indrelid AND i.oid = ix.indexrelid AND a.attrelid = t.oid AND a.attnum = ANY(ix.indkey) AND t.relkind = 'r' AND t.relname = $1 AND ns.oid = t.relnamespace AND ns.nspname = $2 GROUP BY i.relname, ix.indisunique, ix.indisprimary ORDER BY i.relname `, [tableName, schemaToUse] ); return result.rows.map((row) => ({ index_name: row.index_name, column_names: row.column_names, is_unique: row.is_unique, is_primary: row.is_primary, })); } finally { client.release(); } } async getTableSchema(tableName: string, schema?: string): Promise<TableColumn[]> { if (!this.pool) { throw new Error("Not connected to database"); } const client = await this.pool.connect(); try { // In PostgreSQL, use 'public' as the default schema if none specified // Tables are created in the 'public' schema by default unless otherwise specified const schemaToUse = schema || "public"; // Get table columns const result = await client.query( ` SELECT column_name, data_type, is_nullable, column_default FROM information_schema.columns WHERE table_schema = $1 AND table_name = $2 ORDER BY ordinal_position `, [schemaToUse, tableName] ); return result.rows; } finally { client.release(); } } async getStoredProcedures(schema?: string): Promise<string[]> { if (!this.pool) { throw new Error("Not connected to database"); } const client = await this.pool.connect(); try { // In PostgreSQL, use 'public' as the default schema if none specified const schemaToUse = schema || "public"; // Get stored procedures and functions from PostgreSQL const result = await client.query( ` SELECT routine_name FROM information_schema.routines WHERE routine_schema = $1 ORDER BY routine_name `, [schemaToUse] ); return result.rows.map((row) => row.routine_name); } finally { client.release(); } } async getStoredProcedureDetail(procedureName: string, schema?: string): Promise<StoredProcedure> { if (!this.pool) { throw new Error("Not connected to database"); } const client = await this.pool.connect(); try { // In PostgreSQL, use 'public' as the default schema if none specified const schemaToUse = schema || "public"; // Get stored procedure details from PostgreSQL const result = await client.query( ` SELECT routine_name as procedure_name, routine_type, CASE WHEN routine_type = 'PROCEDURE' THEN 'procedure' ELSE 'function' END as procedure_type, external_language as language, data_type as return_type, routine_definition as definition, ( SELECT string_agg( parameter_name || ' ' || parameter_mode || ' ' || data_type, ', ' ) FROM information_schema.parameters WHERE specific_schema = $1 AND 
specific_name = $2 AND parameter_name IS NOT NULL ) as parameter_list FROM information_schema.routines WHERE routine_schema = $1 AND routine_name = $2 `, [schemaToUse, procedureName] ); if (result.rows.length === 0) { throw new Error(`Stored procedure '${procedureName}' not found in schema '${schemaToUse}'`); } const procedure = result.rows[0]; // If routine_definition is NULL, try to get the procedure body with pg_get_functiondef let definition = procedure.definition; try { // Get the OID for the procedure/function const oidResult = await client.query( ` SELECT p.oid, p.prosrc FROM pg_proc p JOIN pg_namespace n ON p.pronamespace = n.oid WHERE p.proname = $1 AND n.nspname = $2 `, [procedureName, schemaToUse] ); if (oidResult.rows.length > 0) { // If definition is still null, get the full definition if (!definition) { const oid = oidResult.rows[0].oid; const defResult = await client.query(`SELECT pg_get_functiondef($1)`, [oid]); if (defResult.rows.length > 0) { definition = defResult.rows[0].pg_get_functiondef; } else { // Fall back to prosrc if pg_get_functiondef fails definition = oidResult.rows[0].prosrc; } } } } catch (err) { // Ignore errors trying to get definition - it's optional console.error(`Error getting procedure definition: ${err}`); } return { procedure_name: procedure.procedure_name, procedure_type: procedure.procedure_type, language: procedure.language || "sql", parameter_list: procedure.parameter_list || "", return_type: procedure.return_type !== "void" ? procedure.return_type : undefined, definition: definition || undefined, }; } finally { client.release(); } } async executeSQL(sql: string, options: ExecuteOptions): Promise<SQLResult> { if (!this.pool) { throw new Error("Not connected to database"); } const client = await this.pool.connect(); try { // Check if this is a multi-statement query const statements = sql.split(';') .map(statement => statement.trim()) .filter(statement => statement.length > 0); if (statements.length === 1) { // Single statement - apply maxRows if applicable const processedStatement = SQLRowLimiter.applyMaxRows(statements[0], options.maxRows); return await client.query(processedStatement); } else { // Multiple statements - execute all in same session for transaction consistency let allRows: any[] = []; // Execute within a transaction to ensure session consistency await client.query('BEGIN'); try { for (let statement of statements) { // Apply maxRows limit to SELECT queries if specified const processedStatement = SQLRowLimiter.applyMaxRows(statement, options.maxRows); const result = await client.query(processedStatement); // Collect rows from SELECT/WITH/EXPLAIN statements if (result.rows && result.rows.length > 0) { allRows.push(...result.rows); } } await client.query('COMMIT'); } catch (error) { await client.query('ROLLBACK'); throw error; } return { rows: allRows }; } } finally { client.release(); } } } // Create and register the connector const postgresConnector = new PostgresConnector(); ConnectorRegistry.register(postgresConnector); ``` -------------------------------------------------------------------------------- /resources/images/logo-icon-only.svg: -------------------------------------------------------------------------------- ``` <?xml version="1.0" encoding="utf-8" ?> <svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="1726" height="1726"> <path transform="scale(3.37017 3.37017)" d="M254.218 57.5893C271.394 55.6769 272.54 75.2483 275.511 87.1378L295.294 165.462C299.884 183.301 307.629 197.039 
328.502 199.183C334.057 199.754 339.715 198.929 345.246 198.378L412.049 191.183C425.378 189.601 438.767 187.757 452.145 186.658C468.778 185.292 477.924 203.888 465.058 215.324C461.545 218.446 456.446 220.607 452.34 222.893L433.42 233.654C409.633 247.433 383.743 265.504 362.153 282.306C335.092 303.364 338.726 310.498 351.273 342.507C360.435 365.88 370.358 388.992 380.434 411.991C385.123 422.693 395.262 438.162 385.686 449.038C372.682 463.808 357.348 444.473 348.315 436.069C330.537 419.527 278.769 366.218 257.218 367.308C243.678 361.753 220.536 385.323 211.706 392.946C192.763 409.301 174.154 426.001 155.998 443.222C152.993 446.072 149.925 449.376 146.372 451.53C135.603 458.059 120.717 450.01 122.345 436.978C122.781 433.48 124.842 429.998 126.198 426.784L143.092 386.939C148.482 374.162 153.896 361.423 159.017 348.53L165.816 331.022C171.383 316.316 173.417 305.043 161.809 291.308C147.414 274.274 77.2887 232.452 57.6713 221.362C50.2817 217.184 41.9927 214.05 40.663 204.506C39.4719 195.956 45.9445 187.566 54.5122 186.467C59.8027 185.789 73.9458 188.172 80.0187 188.887L158.966 197.829C181 200.286 199.279 201.349 212.689 178.85C216.312 172.771 217.358 166.288 218.867 159.511L235.986 86.0643C238.769 74.094 239.159 59.403 254.218 57.5893Z"/> <path fill="#FD8D31" transform="scale(3.37017 3.37017)" d="M128.679 438.854C129.843 434.72 132.026 430.714 133.69 426.744L151.444 385.252C159.251 366.672 168.271 345.946 174.752 326.903C179.159 313.954 176.797 300.279 168.956 289.184C156.929 272.165 89.5353 231.76 71.2351 221.298C66.6739 218.69 50.3012 210.101 48.5148 206.191C46.3001 201.344 48.6188 194.222 54.3268 193.23C58.3095 192.538 75.2131 195.312 80.2394 195.943L159.41 205.008C183.62 207.554 205.326 206.532 219.076 181.689C223.022 174.56 224.443 166.481 226.263 158.63L230.959 138.031C234.847 120.83 238.661 103.575 242.918 86.464C245.024 78.0039 245.663 59.2654 260.185 65.3291C266.746 69.5161 266.869 80.971 268.774 88.3972L287.296 162.02C289.244 169.794 290.747 177.7 294.78 184.737C306.318 204.874 324.863 207.248 345.284 205.09L435.232 195.685C440.299 195.173 453.658 192.907 457.726 193.917C463.485 195.348 465.613 202.997 462.601 207.725C460.243 211.426 444.677 219.051 440.304 221.589C422.184 232.106 353.665 274.442 341.87 291.207C328.523 310.18 338.108 327.74 345.485 346.332C354.151 368.169 363.222 389.841 372.623 411.367L379.697 427.504C382.7 434.386 387.272 445.297 374.948 447.426C369.533 448.362 362.597 439.929 359.11 436.705L327.814 407.663C312.391 393.306 296.527 378.648 278.702 367.308C263.115 359.278 248.666 355.809 233.635 366.23C222.889 373.68 210.985 384.81 200.988 393.604C187.077 405.84 173.076 417.862 159.555 430.535C152.284 437.349 135.402 458.902 128.679 438.854Z"/> <path fill="#F55E24" transform="scale(3.37017 3.37017)" d="M128.679 438.854C129.843 434.72 132.026 430.714 133.69 426.744L151.444 385.252C159.251 366.672 168.271 345.946 174.752 326.903C179.159 313.954 176.797 300.279 168.956 289.184C156.929 272.165 89.5353 231.76 71.2351 221.298C66.6739 218.69 50.3012 210.101 48.5148 206.191C46.3001 201.344 48.6188 194.222 54.3268 193.23C58.3095 192.538 75.2131 195.312 80.2394 195.943L159.41 205.008C183.62 207.554 205.326 206.532 219.076 181.689C223.022 174.56 224.443 166.481 226.263 158.63L230.959 138.031C234.847 120.83 238.661 103.575 242.918 86.464C245.024 78.0039 245.663 59.2654 260.185 65.3291C258.694 72.6523 255.427 87.0523 254.961 94.1515C245.625 97.2213 244.814 106.899 251.781 112.618C250.683 115.05 247.36 126.823 248.272 128.937C242.859 133.294 241.835 138.071 243.938 144.466C242.729 
146.635 237.711 172.197 236.832 176.25C233.107 193.426 230.169 208.706 211.605 215.254C195.421 220.963 165.798 214.52 148.819 211.572C145.716 211.033 122.203 205.926 120.744 208.802C126.902 212.518 128.4 220.072 123.81 225.802C124.799 227.092 132.843 230.927 134.603 230.638L134.837 230.302C137.019 241.771 147.78 245.323 156.716 237.444C158.114 238.968 171.257 246.089 172.958 245.471C176.478 251.699 178.359 250.829 183.447 252.937C184.27 255.93 189.006 258.655 191.123 260.923C203.085 273.735 207.503 291.18 200.797 307.642C195.6 320.403 185.921 331.387 179.91 343.939C178.465 346.957 166.545 374.201 167.808 376.509C165.229 379.996 164.088 383.07 164.911 387.349C162.952 389.167 161.488 396.264 159.058 399.323L159.069 399.641L160.386 399.904C164.72 400.733 167.29 402.169 169.163 406.133C172.706 403.402 177.469 397.673 181.141 394.343C184.207 391.561 187.74 389.291 190.902 386.602C211.079 369.446 233.151 344.441 262.273 356.373C266.72 358.194 276.837 362.856 278.702 367.308C263.115 359.278 248.666 355.809 233.635 366.23C222.889 373.68 210.985 384.81 200.988 393.604C187.077 405.84 173.076 417.862 159.555 430.535C152.284 437.349 135.402 458.902 128.679 438.854Z"/> <path transform="scale(3.37017 3.37017)" d="M123.81 225.802C120.192 228.905 115.27 230.658 110.623 228.739C97.3721 223.266 106.039 201.28 120.744 208.802C126.902 212.518 128.4 220.072 123.81 225.802Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M113.582 211.793C122.878 211.04 124.738 221.388 116.656 224.681C107.385 225.491 105.746 215.161 113.582 211.793Z"/> <path transform="scale(3.37017 3.37017)" d="M160.386 399.904C164.72 400.733 167.29 402.169 169.163 406.133C170.345 415.432 162.49 424.4 154.345 418.198C147.229 412.78 150.881 400.54 160.386 399.904Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M160.386 404.507C167.432 404.743 165.93 413.303 160.84 415.456C153.817 416.64 154.106 406.554 160.386 404.507Z"/> <path transform="scale(3.37017 3.37017)" d="M81.2319 200.032C94.4824 199.513 97.401 215.81 84.8788 219.423C71.6412 222.251 68.8434 203.193 81.2319 200.032Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M82.0675 204.52C89.5931 204.287 90.3433 212.812 84.0014 215.317C76.4218 215.821 75.7216 206.921 82.0675 204.52Z"/> <path transform="scale(3.37017 3.37017)" d="M172.958 245.471C168.569 231.986 180.189 222.032 191.606 228.114C204.617 235.045 197.944 253.887 183.447 252.937C178.359 250.829 176.478 251.699 172.958 245.471Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M183.447 231.229C195.032 229.114 197.396 245.746 186.605 248.485C174.966 249.156 172.2 233.935 183.447 231.229Z"/> <path transform="scale(3.37017 3.37017)" d="M134.837 230.302C134.436 207.522 170.976 216.896 156.716 237.444C147.78 245.323 137.019 241.771 134.837 230.302Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M146.741 221.289C157.687 221.279 158.046 234.484 148.062 237.444C137.68 237.077 136.363 223.803 146.741 221.289Z"/> <path transform="scale(3.37017 3.37017)" d="M248.272 128.937C259.864 122.951 271.645 132.32 267.345 143.497C263.287 154.044 249.351 153.017 243.938 144.466C241.835 138.071 242.859 133.294 248.272 128.937Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M253.602 131.553C264.29 130.46 267.592 143.105 257.218 146.617C247.185 147.799 243.64 134.972 253.602 131.553Z"/> <path transform="scale(3.37017 3.37017)" d="M167.808 376.509C186.465 361.929 192.265 398.337 172.403 394.166C168.692 393.386 166.436 390.594 164.911 387.349C164.088 383.07 165.229 379.996 
167.808 376.509Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M175.458 378.289C182.179 378.368 183.539 387.689 175.458 390.334C167.325 390.219 166.529 380.489 175.458 378.289Z"/> <path transform="scale(3.37017 3.37017)" d="M254.961 94.1515C274.48 90.3718 270.386 122.153 251.781 112.618C244.814 106.899 245.625 97.2213 254.961 94.1515Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M257.218 98.5842C261.322 98.8268 262.358 100.553 263.379 104.431C262.444 107.456 261.392 109.009 258.073 109.733C250.274 109.883 249.68 99.8028 257.218 98.5842Z"/> <path transform="scale(3.37017 3.37017)" d="M220.337 242.087C234.571 236.695 246.057 264.337 224.515 270.263C202.975 271.058 202.904 243.408 220.337 242.087Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M220.337 246.783C232.189 243.105 237.056 261.195 224.515 265.519C210.695 267.904 207.419 248.962 220.337 246.783Z"/> <path transform="scale(3.37017 3.37017)" d="M254.218 208.023C256.404 206.512 260.392 207.787 262.506 208.89C274.603 215.202 271.686 231.897 258.073 235.831C238.43 238.855 234.863 207.989 254.218 208.023Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M254.218 211.793C267.272 211.256 269.682 227.847 257.218 231.229C243.991 232.795 241.667 214.917 254.218 211.793Z"/> <path transform="scale(3.37017 3.37017)" d="M286.716 242.087C304.499 240.139 308.829 266.307 290.29 270.263C269.843 271.053 267.795 244.847 286.716 242.087Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M286.716 246.783C298.801 245.49 302.05 261.574 290.29 265.519C277.21 267.497 273.392 250.215 286.716 246.783Z"/> <path transform="scale(3.37017 3.37017)" d="M229.475 285.32C251.081 287.455 248.211 310.226 233.006 313.199C215.396 314.413 209.73 291.958 229.475 285.32Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M229.475 290.551C241.836 289.75 244.195 304.57 233.006 308.536C220.418 310.273 217.025 293.587 229.475 290.551Z"/> <path transform="scale(3.37017 3.37017)" d="M278.217 285.32C295.473 284.095 299.853 307.798 282.178 312.186C263.316 314.789 259.234 288.092 278.217 285.32Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M278.217 289.793C289.964 289.013 292.951 303.076 282.178 307.605C268.937 308.925 265.976 293.469 278.217 289.793Z"/> <path transform="scale(3.37017 3.37017)" d="M325.556 230.302C327.808 229.278 331.813 231.475 333.529 232.844C343.52 240.808 337.282 255.342 325.556 256.34C306.916 256.153 306.956 230.547 325.556 230.302Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M323.846 234.788C335.462 233.63 336.894 248.528 326.818 251.688C315.296 253.664 311.989 238.071 323.846 234.788Z"/> <path transform="scale(3.37017 3.37017)" d="M254.218 163.805C272.189 162.481 275.373 184.857 258.943 189.299C241.66 191.811 236.859 168.488 254.218 163.805Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M254.961 168.287C266.149 167.698 268.61 181.915 258.073 184.807C246.411 185.755 243.759 171.423 254.961 168.287Z"/> <path transform="scale(3.37017 3.37017)" d="M208.3 319.345C224.317 320.298 224.324 342.747 208.3 343.829C192.019 343.756 189.857 320.834 208.3 319.345Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M206.072 323.566C216.489 322.726 220.013 335.518 209.319 339.369C198.414 340.457 196.156 327.32 206.072 323.566Z"/> <path transform="scale(3.37017 3.37017)" d="M302.592 319.345C318.341 318.177 321.79 340.072 305.194 343.829C288.526 343.866 286.041 321.806 302.592 319.345Z"/> <path fill="#7BBCDE" transform="scale(3.37017 
3.37017)" d="M303.383 323.566C313.6 324.03 314.762 336.316 305.194 339.369C293.925 339.039 292.925 325.568 303.383 323.566Z"/> <path transform="scale(3.37017 3.37017)" d="M254.218 260.832C269.91 259.811 273.26 280.946 257.218 285.32C240.632 286.406 237.547 263.035 254.218 260.832Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M254.961 265.519C264.049 264.222 266.296 277.959 256.648 280.756C245.961 280.906 244.025 267.38 254.961 265.519Z"/> <path transform="scale(3.37017 3.37017)" d="M353.69 221.289C367.23 218.813 374.012 240.198 356.1 245.471C338.48 244.15 338.66 222.674 353.69 221.289Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M353.69 225.802C363.729 224.935 365.661 238.305 356.1 240.434C346.144 241.246 343.812 229.245 353.69 225.802Z"/> <path transform="scale(3.37017 3.37017)" d="M320.007 346.795C335.795 345.639 338.265 366.031 323.846 369.465C309.207 371.849 304.931 350.352 320.007 346.795Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M320.901 351.553C330.747 351.303 331.471 362.536 322.894 365.121C313.635 365.623 312.185 353.699 320.901 351.553Z"/> <path transform="scale(3.37017 3.37017)" d="M189.426 345.81C202.095 344.264 207.386 363.619 192.228 368.086C177.656 370.311 172.849 348.486 189.426 345.81Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M189.426 350.31C197.807 350.382 199.953 360.231 192.228 363.718C182.294 365.326 180.35 352.439 189.426 350.31Z"/> <path transform="scale(3.37017 3.37017)" d="M381.943 213.792C397.351 212.902 400.265 230.436 386.409 235.831C371.33 236.875 367.895 218.105 381.943 213.792Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M383.387 218.059C392.487 218.77 392.912 228.856 384.802 231.229C376.13 231.467 375.25 219.935 383.387 218.059Z"/> <path transform="scale(3.37017 3.37017)" d="M335.987 375.31C349.382 374.117 351.073 391.56 338.494 394.505C325.002 395.546 322.662 377.936 335.987 375.31Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M335.987 379.776C343.115 378.749 344.033 387.391 338.494 390.334C331.336 390.898 328.983 382.608 335.987 379.776Z"/> <path transform="scale(3.37017 3.37017)" d="M409.503 208.802C421.599 207.954 424.356 223.488 412.779 227.01C400.21 228.762 397.155 211.746 409.503 208.802Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M410.369 213.792C416.604 213.292 417.502 220.572 411.682 222.692C405.388 222.866 403.965 214.971 410.369 213.792Z"/> <path transform="scale(3.37017 3.37017)" d="M351.509 400.782C363.61 399.999 366.545 415.826 354.825 418.946C341.645 419.89 339.897 403.753 351.509 400.782Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M352.292 405.258C358.624 405.033 359.119 412.574 353.69 414.528C347.125 414.8 346.894 407.664 352.292 405.258Z"/> <path transform="scale(3.37017 3.37017)" d="M432.748 203.591C443.007 201.734 447.103 215.722 435.865 219.423C425.245 219.591 421.212 205.704 432.748 203.591Z"/> <path fill="#7BBCDE" transform="scale(3.37017 3.37017)" d="M432.748 208.023C437.64 207.603 439.89 211.851 435.865 215.317C431.352 214.911 428.02 210.741 432.748 208.023Z"/> </svg> ``` -------------------------------------------------------------------------------- /src/connectors/sqlite/index.ts: -------------------------------------------------------------------------------- ```typescript /** * SQLite Connector Implementation * * Implements SQLite database connectivity for DBHub using better-sqlite3 * To use this connector: Set DSN=sqlite:///path/to/database.db in your .env file */ import { 
Connector, ConnectorType, ConnectorRegistry, DSNParser, SQLResult, TableColumn, TableIndex, StoredProcedure, ExecuteOptions, } from "../interface.js"; import Database from "better-sqlite3"; import { SafeURL } from "../../utils/safe-url.js"; import { obfuscateDSNPassword } from "../../utils/dsn-obfuscate.js"; import { SQLRowLimiter } from "../../utils/sql-row-limiter.js"; /** * SQLite DSN Parser * Handles DSN strings like: * - sqlite:///path/to/database.db (absolute path) * - sqlite://./relative/path/to/database.db (relative path) * - sqlite:///:memory: (in-memory database) */ class SQLiteDSNParser implements DSNParser { async parse(dsn: string): Promise<{ dbPath: string }> { // Basic validation if (!this.isValidDSN(dsn)) { const obfuscatedDSN = obfuscateDSNPassword(dsn); const expectedFormat = this.getSampleDSN(); throw new Error( `Invalid SQLite DSN format.\nProvided: ${obfuscatedDSN}\nExpected: ${expectedFormat}` ); } try { // Use SafeURL helper to handle special characters properly const url = new SafeURL(dsn); let dbPath: string; // Handle in-memory database if (url.hostname === "" && url.pathname === "/:memory:") { dbPath = ":memory:"; } // Handle file paths else { // Get the path part, handling both relative and absolute paths if (url.pathname.startsWith("//")) { // Absolute path: sqlite:///path/to/db.sqlite dbPath = url.pathname.substring(2); // Remove leading // } else { // Relative path: sqlite://./path/to/db.sqlite dbPath = url.pathname; } } return { dbPath }; } catch (error) { throw new Error( `Failed to parse SQLite DSN: ${error instanceof Error ? error.message : String(error)}` ); } } getSampleDSN(): string { return "sqlite:///path/to/database.db"; } isValidDSN(dsn: string): boolean { try { return dsn.startsWith('sqlite://'); } catch (error) { return false; } } } interface SQLiteTableInfo { name: string; type: string; notnull: number; dflt_value: string | null; pk: number; } interface SQLiteTableNameRow { name: string; } export class SQLiteConnector implements Connector { id: ConnectorType = "sqlite"; name = "SQLite"; dsnParser = new SQLiteDSNParser(); private db: Database.Database | null = null; private dbPath: string = ":memory:"; // Default to in-memory database async connect(dsn: string, initScript?: string): Promise<void> { const config = await this.dsnParser.parse(dsn); this.dbPath = config.dbPath; try { this.db = new Database(this.dbPath); console.error("Successfully connected to SQLite database"); // If an initialization script is provided, run it if (initScript) { this.db.exec(initScript); console.error("Successfully initialized database with script"); } } catch (error) { console.error("Failed to connect to SQLite database:", error); throw error; } } async disconnect(): Promise<void> { if (this.db) { try { // Check if the database is still open before attempting to close if (!this.db.inTransaction) { this.db.close(); } else { // If in transaction, try to rollback first try { this.db.exec('ROLLBACK'); } catch (rollbackError) { // Ignore rollback errors, proceed with close } this.db.close(); } this.db = null; } catch (error) { // Log the error but don't throw to prevent test failures console.error('Error during SQLite disconnect:', error); this.db = null; } } return Promise.resolve(); } async getSchemas(): Promise<string[]> { if (!this.db) { throw new Error("Not connected to SQLite database"); } // SQLite doesn't have the concept of schemas like PostgreSQL or MySQL // It has a concept of "attached databases" where each database has a name // The default database is 
called 'main', and others can be attached with names // We always return 'main' as the default schema name return ["main"]; } async getTables(schema?: string): Promise<string[]> { if (!this.db) { throw new Error("Not connected to SQLite database"); } // In SQLite, schema parameter is ignored since SQLite doesn't have schemas like PostgreSQL // SQLite has a single namespace for tables within a database file // You could use 'schema.table' syntax if you have attached databases, but we're // accessing the 'main' database by default try { const rows = this.db .prepare( ` SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' ORDER BY name ` ) .all() as SQLiteTableNameRow[]; return rows.map((row) => row.name); } catch (error) { throw error; } } async tableExists(tableName: string, schema?: string): Promise<boolean> { if (!this.db) { throw new Error("Not connected to SQLite database"); } // In SQLite, schema parameter is ignored since there's only one schema per database file // All tables exist in a single namespace within the SQLite database try { const row = this.db .prepare( ` SELECT name FROM sqlite_master WHERE type='table' AND name = ? ` ) .get(tableName) as SQLiteTableNameRow | undefined; return !!row; } catch (error) { throw error; } } async getTableIndexes(tableName: string, schema?: string): Promise<TableIndex[]> { if (!this.db) { throw new Error("Not connected to SQLite database"); } // In SQLite, schema parameter is ignored (no schema concept) try { // Get all indexes for the specified table const indexInfoRows = this.db .prepare( ` SELECT name as index_name, 0 as is_unique FROM sqlite_master WHERE type = 'index' AND tbl_name = ? ` ) .all(tableName) as { index_name: string; is_unique: number }[]; // Get unique info from PRAGMA index_list which provides the unique flag const indexListRows = this.db .prepare(`PRAGMA index_list(${tableName})`) .all() as { name: string; unique: number }[]; // Create a map of index names to unique status const indexUniqueMap = new Map<string, boolean>(); for (const indexListRow of indexListRows) { indexUniqueMap.set(indexListRow.name, indexListRow.unique === 1); } // Get the primary key info const tableInfo = this.db .prepare(`PRAGMA table_info(${tableName})`) .all() as SQLiteTableInfo[]; // Find primary key columns const pkColumns = tableInfo.filter((col) => col.pk > 0).map((col) => col.name); const results: TableIndex[] = []; // Add regular indexes for (const indexInfo of indexInfoRows) { // Get the columns for this index const indexDetailRows = this.db .prepare(`PRAGMA index_info(${indexInfo.index_name})`) .all() as { name: string; }[]; const columnNames = indexDetailRows.map((row) => row.name); results.push({ index_name: indexInfo.index_name, column_names: columnNames, is_unique: indexUniqueMap.get(indexInfo.index_name) || false, is_primary: false, }); } // Add primary key if it exists if (pkColumns.length > 0) { results.push({ index_name: "PRIMARY", column_names: pkColumns, is_unique: true, is_primary: true, }); } return results; } catch (error) { throw error; } } async getTableSchema(tableName: string, schema?: string): Promise<TableColumn[]> { if (!this.db) { throw new Error("Not connected to SQLite database"); } // In SQLite, schema parameter is ignored for the following reasons: // 1. SQLite doesn't have schemas in the same way as PostgreSQL or MySQL // 2. Each SQLite database file is its own separate namespace // 3. 
The PRAGMA commands operate on the current database connection try { const rows = this.db.prepare(`PRAGMA table_info(${tableName})`).all() as SQLiteTableInfo[]; // Convert SQLite schema format to our standard TableColumn format const columns = rows.map((row) => ({ column_name: row.name, data_type: row.type, // In SQLite, primary key columns are automatically NOT NULL even if notnull=0 is_nullable: (row.notnull === 1 || row.pk > 0) ? "NO" : "YES", column_default: row.dflt_value, })); return columns; } catch (error) { throw error; } } async getStoredProcedures(schema?: string): Promise<string[]> { if (!this.db) { throw new Error("Not connected to SQLite database"); } // SQLite doesn't have built-in stored procedures like other databases. // While SQLite does support user-defined functions, these are registered through // the C/C++ API or language bindings and cannot be introspected through SQL. // Triggers exist in SQLite but they're not the same as stored procedures. // // We return an empty array because: // 1. SQLite has no native stored procedure concept // 2. User-defined functions cannot be listed via SQL queries // 3. We don't want to misrepresent triggers as stored procedures return []; } async getStoredProcedureDetail(procedureName: string, schema?: string): Promise<StoredProcedure> { if (!this.db) { throw new Error("Not connected to SQLite database"); } // SQLite doesn't have true stored procedures: // 1. SQLite doesn't support the CREATE PROCEDURE syntax // 2. User-defined functions are created programmatically, not stored in the DB // 3. Cannot introspect program-defined functions through SQL // Throw an error since SQLite doesn't support stored procedures throw new Error( "SQLite does not support stored procedures. Functions are defined programmatically through the SQLite API, not stored in the database." 
); } async executeSQL(sql: string, options: ExecuteOptions): Promise<SQLResult> { if (!this.db) { throw new Error("Not connected to SQLite database"); } try { // Check if this is a multi-statement query const statements = sql.split(';') .map(statement => statement.trim()) .filter(statement => statement.length > 0); if (statements.length === 1) { // Single statement - determine if it returns data let processedStatement = statements[0]; const trimmedStatement = statements[0].toLowerCase().trim(); const isReadStatement = trimmedStatement.startsWith('select') || trimmedStatement.startsWith('with') || trimmedStatement.startsWith('explain') || trimmedStatement.startsWith('analyze') || (trimmedStatement.startsWith('pragma') && (trimmedStatement.includes('table_info') || trimmedStatement.includes('index_info') || trimmedStatement.includes('index_list') || trimmedStatement.includes('foreign_key_list'))); // Apply maxRows limit to SELECT queries if specified (not PRAGMA/ANALYZE) if (options.maxRows) { processedStatement = SQLRowLimiter.applyMaxRows(processedStatement, options.maxRows); } if (isReadStatement) { const rows = this.db.prepare(processedStatement).all(); return { rows }; } else { // Use run() for statements that don't return data this.db.prepare(processedStatement).run(); return { rows: [] }; } } else { // Multiple statements - use native .exec() for optimal performance // Note: .exec() doesn't return results, so we need to handle SELECT statements differently const readStatements = []; const writeStatements = []; // Separate read and write operations for (const statement of statements) { const trimmedStatement = statement.toLowerCase().trim(); if (trimmedStatement.startsWith('select') || trimmedStatement.startsWith('with') || trimmedStatement.startsWith('explain') || trimmedStatement.startsWith('analyze') || (trimmedStatement.startsWith('pragma') && (trimmedStatement.includes('table_info') || trimmedStatement.includes('index_info') || trimmedStatement.includes('index_list') || trimmedStatement.includes('foreign_key_list')))) { readStatements.push(statement); } else { writeStatements.push(statement); } } // Execute write statements using native .exec() for optimal performance if (writeStatements.length > 0) { this.db.exec(writeStatements.join('; ')); } // Execute read statements individually to collect results let allRows: any[] = []; for (let statement of readStatements) { // Apply maxRows limit to SELECT queries if specified statement = SQLRowLimiter.applyMaxRows(statement, options.maxRows); const result = this.db.prepare(statement).all(); allRows.push(...result); } return { rows: allRows }; } } catch (error) { throw error; } } } // Register the SQLite connector const sqliteConnector = new SQLiteConnector(); ConnectorRegistry.register(sqliteConnector); ``` -------------------------------------------------------------------------------- /src/connectors/__tests__/mysql.integration.test.ts: -------------------------------------------------------------------------------- ```typescript import { describe, it, expect, beforeAll, afterAll } from 'vitest'; import { MySqlContainer, StartedMySqlContainer } from '@testcontainers/mysql'; import { MySQLConnector } from '../mysql/index.js'; import { IntegrationTestBase, type TestContainer, type DatabaseTestConfig } from './shared/integration-test-base.js'; import type { Connector } from '../interface.js'; class MySQLTestContainer implements TestContainer { constructor(private container: StartedMySqlContainer) {} getConnectionUri(): string { return 
this.container.getConnectionUri(); } async stop(): Promise<void> { await this.container.stop(); } } class MySQLIntegrationTest extends IntegrationTestBase<MySQLTestContainer> { constructor() { const config: DatabaseTestConfig = { expectedSchemas: ['testdb', 'information_schema'], expectedTables: ['users', 'orders', 'products'], supportsStoredProcedures: false // Disabled due to container privilege restrictions }; super(config); } async createContainer(): Promise<MySQLTestContainer> { const container = await new MySqlContainer('mysql:8.0') .withDatabase('testdb') .withRootPassword('rootpass') .start(); return new MySQLTestContainer(container); } createConnector(): Connector { return new MySQLConnector(); } createSSLTests(): void { describe('SSL Connection Tests', () => { it('should handle SSL mode disable connection', async () => { const baseUri = this.connectionString; const sslDisabledUri = baseUri.includes('?') ? `${baseUri}&sslmode=disable` : `${baseUri}?sslmode=disable`; const sslDisabledConnector = new MySQLConnector(); // Should connect successfully with sslmode=disable await expect(sslDisabledConnector.connect(sslDisabledUri)).resolves.not.toThrow(); // Check SSL status - cipher should be empty when SSL is disabled const result = await sslDisabledConnector.executeSQL("SHOW SESSION STATUS LIKE 'Ssl_cipher'", {}); expect(result.rows).toHaveLength(1); expect(result.rows[0].Variable_name).toBe('Ssl_cipher'); expect(result.rows[0].Value).toBe(''); await sslDisabledConnector.disconnect(); }); it('should handle SSL mode require connection', async () => { const baseUri = this.connectionString; const sslRequiredUri = baseUri.includes('?') ? `${baseUri}&sslmode=require` : `${baseUri}?sslmode=require`; const sslRequiredConnector = new MySQLConnector(); // In test containers, SSL may not be supported, so we expect either success or SSL not supported error try { await sslRequiredConnector.connect(sslRequiredUri); // If connection succeeds, check SSL status - cipher should be non-empty when SSL is enabled const result = await sslRequiredConnector.executeSQL("SHOW SESSION STATUS LIKE 'Ssl_cipher'", {}); expect(result.rows).toHaveLength(1); expect(result.rows[0].Variable_name).toBe('Ssl_cipher'); expect(result.rows[0].Value).not.toBe(''); expect(result.rows[0].Value).toBeTruthy(); await sslRequiredConnector.disconnect(); } catch (error) { // If SSL is not supported by the test container, that's expected expect(error instanceof Error).toBe(true); expect((error as Error).message).toMatch(/SSL|does not support SSL/); } }); }); } async setupTestData(connector: Connector): Promise<void> { // Create users table await connector.executeSQL(` CREATE TABLE IF NOT EXISTS users ( id INT AUTO_INCREMENT PRIMARY KEY, name VARCHAR(100) NOT NULL, email VARCHAR(100) UNIQUE NOT NULL, age INT ) `, {}); // Create orders table await connector.executeSQL(` CREATE TABLE IF NOT EXISTS orders ( id INT AUTO_INCREMENT PRIMARY KEY, user_id INT, total DECIMAL(10,2), created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY (user_id) REFERENCES users(id) ) `, {}); // Create products table in main database await connector.executeSQL(` CREATE TABLE IF NOT EXISTS products ( id INT AUTO_INCREMENT PRIMARY KEY, name VARCHAR(100) NOT NULL, price DECIMAL(10,2) ) `, {}); // Insert test data await connector.executeSQL(` INSERT IGNORE INTO users (name, email, age) VALUES ('John Doe', '[email protected]', 30), ('Jane Smith', '[email protected]', 25), ('Bob Johnson', '[email protected]', 35) `, {}); await connector.executeSQL(` INSERT 
IGNORE INTO orders (user_id, total) VALUES (1, 99.99), (1, 149.50), (2, 75.25) `, {}); await connector.executeSQL(` INSERT IGNORE INTO products (name, price) VALUES ('Widget A', 19.99), ('Widget B', 29.99) `, {}); // Note: Stored procedures/functions are skipped in tests due to container privilege restrictions } } // Create the test suite const mysqlTest = new MySQLIntegrationTest(); describe('MySQL Connector Integration Tests', () => { beforeAll(async () => { await mysqlTest.setup(); }, 120000); afterAll(async () => { await mysqlTest.cleanup(); }); // Include all common tests mysqlTest.createConnectionTests(); mysqlTest.createSchemaTests(); mysqlTest.createTableTests(); mysqlTest.createSQLExecutionTests(); if (mysqlTest.config.supportsStoredProcedures) { mysqlTest.createStoredProcedureTests(); } mysqlTest.createErrorHandlingTests(); mysqlTest.createSSLTests(); describe('MySQL-specific Features', () => { it('should execute multiple statements with native support', async () => { // First insert the test data await mysqlTest.connector.executeSQL(` INSERT INTO users (name, email, age) VALUES ('Multi User 1', '[email protected]', 30); INSERT INTO users (name, email, age) VALUES ('Multi User 2', '[email protected]', 35); `, {}); // Then check the count const result = await mysqlTest.connector.executeSQL( "SELECT COUNT(*) as total FROM users WHERE email LIKE 'multi%'", {} ); expect(result.rows).toHaveLength(1); expect(Number(result.rows[0].total)).toBe(2); }); it('should handle MySQL-specific data types', async () => { await mysqlTest.connector.executeSQL(` CREATE TABLE IF NOT EXISTS mysql_types_test ( id INT AUTO_INCREMENT PRIMARY KEY, json_data JSON, timestamp_val TIMESTAMP DEFAULT CURRENT_TIMESTAMP, enum_val ENUM('small', 'medium', 'large') DEFAULT 'medium' ) `, {}); await mysqlTest.connector.executeSQL(` INSERT INTO mysql_types_test (json_data, enum_val) VALUES ('{"key": "value"}', 'large') `, {}); const result = await mysqlTest.connector.executeSQL( 'SELECT * FROM mysql_types_test WHERE id = LAST_INSERT_ID()', {} ); expect(result.rows).toHaveLength(1); expect(result.rows[0].enum_val).toBe('large'); expect(result.rows[0].json_data).toBeDefined(); }); it('should handle MySQL auto-increment properly', async () => { const insertResult = await mysqlTest.connector.executeSQL( "INSERT INTO users (name, email, age) VALUES ('Auto Inc Test', '[email protected]', 40)", {} ); expect(insertResult).toBeDefined(); const selectResult = await mysqlTest.connector.executeSQL( 'SELECT LAST_INSERT_ID() as last_id', {} ); expect(selectResult.rows).toHaveLength(1); expect(Number(selectResult.rows[0].last_id)).toBeGreaterThan(0); }); it('should work with MySQL-specific functions', async () => { const result = await mysqlTest.connector.executeSQL(` SELECT VERSION() as mysql_version, DATABASE() as current_db, USER() as current_user_info, NOW() as timestamp_val `, {}); expect(result.rows).toHaveLength(1); expect(result.rows[0].mysql_version).toBeDefined(); expect(result.rows[0].current_db).toBe('testdb'); expect(result.rows[0].current_user_info).toBeDefined(); expect(result.rows[0].timestamp_val).toBeDefined(); }); it('should handle MySQL transactions correctly', async () => { // Test explicit transaction await mysqlTest.connector.executeSQL(` START TRANSACTION; INSERT INTO users (name, email, age) VALUES ('Transaction Test 1', '[email protected]', 45); INSERT INTO users (name, email, age) VALUES ('Transaction Test 2', '[email protected]', 50); COMMIT; `, {}); const result = await mysqlTest.connector.executeSQL( 
"SELECT COUNT(*) as count FROM users WHERE email LIKE 'trans%@example.com'", {} ); expect(Number(result.rows[0].count)).toBe(2); }); it('should handle MySQL rollback correctly', async () => { // Get initial count const beforeResult = await mysqlTest.connector.executeSQL( "SELECT COUNT(*) as count FROM users WHERE email = '[email protected]'", {} ); const beforeCount = Number(beforeResult.rows[0].count); // Test rollback await mysqlTest.connector.executeSQL(` START TRANSACTION; INSERT INTO users (name, email, age) VALUES ('Rollback Test', '[email protected]', 55); ROLLBACK; `, {}); const afterResult = await mysqlTest.connector.executeSQL( "SELECT COUNT(*) as count FROM users WHERE email = '[email protected]'", {} ); const afterCount = Number(afterResult.rows[0].count); expect(afterCount).toBe(beforeCount); }); it('should respect maxRows limit for SELECT queries', async () => { // Test basic SELECT with maxRows limit const result = await mysqlTest.connector.executeSQL( 'SELECT * FROM users ORDER BY id', { maxRows: 2 } ); expect(result.rows).toHaveLength(2); expect(result.rows[0]).toHaveProperty('name'); expect(result.rows[1]).toHaveProperty('name'); }); it('should respect existing LIMIT clause when lower than maxRows', async () => { // Test when existing LIMIT is lower than maxRows const result = await mysqlTest.connector.executeSQL( 'SELECT * FROM users ORDER BY id LIMIT 1', { maxRows: 3 } ); expect(result.rows).toHaveLength(1); expect(result.rows[0]).toHaveProperty('name'); }); it('should use maxRows when existing LIMIT is higher', async () => { // Test when existing LIMIT is higher than maxRows const result = await mysqlTest.connector.executeSQL( 'SELECT * FROM users ORDER BY id LIMIT 10', { maxRows: 2 } ); expect(result.rows).toHaveLength(2); expect(result.rows[0]).toHaveProperty('name'); expect(result.rows[1]).toHaveProperty('name'); }); it('should not affect non-SELECT queries', async () => { // Test that maxRows doesn't affect INSERT/UPDATE/DELETE const insertResult = await mysqlTest.connector.executeSQL( "INSERT INTO users (name, email, age) VALUES ('MaxRows Test', '[email protected]', 25)", { maxRows: 1 } ); expect(insertResult.rows).toHaveLength(0); // INSERTs don't return rows by default // Verify the insert worked const selectResult = await mysqlTest.connector.executeSQL( "SELECT * FROM users WHERE email = '[email protected]'", {} ); expect(selectResult.rows).toHaveLength(1); expect(selectResult.rows[0].name).toBe('MaxRows Test'); }); it('should handle maxRows with complex queries', async () => { // Test maxRows with JOIN queries const result = await mysqlTest.connector.executeSQL(` SELECT u.name, o.total FROM users u JOIN orders o ON u.id = o.user_id ORDER BY o.total DESC `, { maxRows: 2 }); expect(result.rows.length).toBeLessThanOrEqual(2); expect(result.rows.length).toBeGreaterThan(0); expect(result.rows[0]).toHaveProperty('name'); expect(result.rows[0]).toHaveProperty('total'); }); it('should not apply maxRows to CTE queries (WITH clause)', async () => { // Test that maxRows is not applied to CTE queries (WITH clause) try { const result = await mysqlTest.connector.executeSQL(` WITH user_summary AS ( SELECT name, age FROM users WHERE age IS NOT NULL ) SELECT * FROM user_summary ORDER BY age `, { maxRows: 2 }); // Should return all rows since WITH queries are not limited expect(result.rows.length).toBeGreaterThan(2); expect(result.rows[0]).toHaveProperty('name'); expect(result.rows[0]).toHaveProperty('age'); } catch (error) { // Some MySQL versions might not support CTE, that's 
okay console.log('CTE not supported in this MySQL version, skipping test'); } }); it('should handle maxRows with multiple SELECT statements', async () => { // Test maxRows with multiple SELECT statements only const result = await mysqlTest.connector.executeSQL(` SELECT name FROM users WHERE age > 20 ORDER BY name LIMIT 10; SELECT name FROM users WHERE age > 25 ORDER BY name LIMIT 10; `, { maxRows: 1 }); // Should return only 1 row from each SELECT statement (due to maxRows limit) // MySQL multi-statement may return more complex results, so we check that maxRows was applied expect(result.rows.length).toBeGreaterThan(0); expect(result.rows.length).toBeLessThanOrEqual(2); // At most 1 from each SELECT if (result.rows.length > 0) { expect(result.rows[0]).toHaveProperty('name'); } }); it('should ignore maxRows when not specified', async () => { // Test without maxRows - should return all rows const result = await mysqlTest.connector.executeSQL( 'SELECT * FROM users ORDER BY id', {} ); // Should return all users (at least the original 3 plus any added in previous tests) expect(result.rows.length).toBeGreaterThanOrEqual(3); }); }); }); ``` -------------------------------------------------------------------------------- /src/config/env.ts: -------------------------------------------------------------------------------- ```typescript import dotenv from "dotenv"; import path from "path"; import fs from "fs"; import { fileURLToPath } from "url"; import { homedir } from "os"; import type { SSHTunnelConfig } from "../types/ssh.js"; import { parseSSHConfig, looksLikeSSHAlias } from "../utils/ssh-config-parser.js"; // Create __dirname equivalent for ES modules const __filename = fileURLToPath(import.meta.url); const __dirname = path.dirname(__filename); // Parse command line arguments export function parseCommandLineArgs() { // Check if any args start with '--' (the way tsx passes them) const args = process.argv.slice(2); const parsedManually: Record<string, string> = {}; for (let i = 0; i < args.length; i++) { const arg = args[i]; if (arg.startsWith("--")) { const [key, value] = arg.substring(2).split("="); if (value) { // Handle --key=value format parsedManually[key] = value; } else if (i + 1 < args.length && !args[i + 1].startsWith("--")) { // Handle --key value format parsedManually[key] = args[i + 1]; i++; // Skip the next argument as it's the value } else { // Handle --key format (boolean flag) parsedManually[key] = "true"; } } } // Just use the manually parsed args - removed parseArgs dependency for Node.js <18.3.0 compatibility return parsedManually; } /** * Load environment files from various locations * Returns the name of the file that was loaded, or null if none was found */ export function loadEnvFiles(): string | null { // Determine if we're in development or production mode const isDevelopment = process.env.NODE_ENV === "development" || process.argv[1]?.includes("tsx"); // Select environment file names based on environment const envFileNames = isDevelopment ? 
[".env.local", ".env"] // In development, try .env.local first, then .env : [".env"]; // In production, only look for .env // Build paths to check for environment files const envPaths = []; for (const fileName of envFileNames) { envPaths.push( fileName, // Current working directory path.join(__dirname, "..", "..", fileName), // Two levels up (src/config -> src -> root) path.join(process.cwd(), fileName) // Explicit current working directory ); } // Try to load the first env file found from the prioritized locations for (const envPath of envPaths) { console.error(`Checking for env file: ${envPath}`); if (fs.existsSync(envPath)) { dotenv.config({ path: envPath }); // Return the name of the file that was loaded return path.basename(envPath); } } return null; } /** * Check if demo mode is enabled from command line args * Returns true if --demo flag is provided */ export function isDemoMode(): boolean { const args = parseCommandLineArgs(); return args.demo === "true"; } /** * Check if readonly mode is enabled from command line args or environment * Returns true if --readonly flag is provided */ export function isReadOnlyMode(): boolean { const args = parseCommandLineArgs(); // Check command line args first if (args.readonly !== undefined) { return args.readonly === "true"; } // Check environment variable if (process.env.READONLY !== undefined) { return process.env.READONLY === "true"; } // Default to false return false; } /** * Build DSN from individual environment variables * Returns the constructed DSN or null if required variables are missing */ export function buildDSNFromEnvParams(): { dsn: string; source: string } | null { // Check for required environment variables const dbType = process.env.DB_TYPE; const dbHost = process.env.DB_HOST; const dbUser = process.env.DB_USER; const dbPassword = process.env.DB_PASSWORD; const dbName = process.env.DB_NAME; const dbPort = process.env.DB_PORT; // For SQLite, only DB_TYPE and DB_NAME are required if (dbType?.toLowerCase() === 'sqlite') { if (!dbName) { return null; } } else { // For other databases, require all essential parameters if (!dbType || !dbHost || !dbUser || !dbPassword || !dbName) { return null; } } // Validate supported database types const supportedTypes = ['postgres', 'postgresql', 'mysql', 'mariadb', 'sqlserver', 'sqlite']; if (!supportedTypes.includes(dbType.toLowerCase())) { throw new Error(`Unsupported DB_TYPE: ${dbType}. Supported types: ${supportedTypes.join(', ')}`); } // Determine default port based on database type let port = dbPort; if (!port) { switch (dbType.toLowerCase()) { case 'postgres': case 'postgresql': port = '5432'; break; case 'mysql': case 'mariadb': port = '3306'; break; case 'sqlserver': port = '1433'; break; case 'sqlite': // SQLite doesn't use host/port, handle differently return { dsn: `sqlite:///${dbName}`, source: 'individual environment variables' }; default: throw new Error(`Unknown database type for port determination: ${dbType}`); } } // At this point, dbUser, dbPassword, and dbName are guaranteed to be non-null due to earlier checks. const user: string = dbUser as string; const password: string = dbPassword as string; const dbNameStr: string = dbName as string; const encodedUser = encodeURIComponent(user); const encodedPassword = encodeURIComponent(password); const encodedDbName = encodeURIComponent(dbNameStr); // Construct DSN const protocol = dbType.toLowerCase() === 'postgresql' ? 
'postgres' : dbType.toLowerCase(); const dsn = `${protocol}://${encodedUser}:${encodedPassword}@${dbHost}:${port}/${encodedDbName}`; return { dsn, source: 'individual environment variables' }; } /** * Resolve DSN from command line args, environment variables, or .env files * Returns the DSN and its source, or null if not found */ export function resolveDSN(): { dsn: string; source: string; isDemo?: boolean } | null { // Get command line arguments const args = parseCommandLineArgs(); // Check for demo mode first (highest priority) if (isDemoMode()) { // Will use in-memory SQLite with demo data return { dsn: "sqlite:///:memory:", source: "demo mode", isDemo: true, }; } // 1. Check command line arguments if (args.dsn) { return { dsn: args.dsn, source: "command line argument" }; } // 2. Check environment variables before loading .env if (process.env.DSN) { return { dsn: process.env.DSN, source: "environment variable" }; } // 3. Check for individual DB parameters from environment const envParamsResult = buildDSNFromEnvParams(); if (envParamsResult) { return envParamsResult; } // 4. Try loading from .env files const loadedEnvFile = loadEnvFiles(); // 5. Check for DSN in .env file if (loadedEnvFile && process.env.DSN) { return { dsn: process.env.DSN, source: `${loadedEnvFile} file` }; } // 6. Check for individual DB parameters from .env file if (loadedEnvFile) { const envFileParamsResult = buildDSNFromEnvParams(); if (envFileParamsResult) { return { dsn: envFileParamsResult.dsn, source: `${loadedEnvFile} file (individual parameters)` }; } } return null; } /** * Resolve transport type from command line args or environment variables * Returns 'stdio' or 'http' (streamable HTTP), with 'stdio' as the default */ export function resolveTransport(): { type: "stdio" | "http"; source: string } { // Get command line arguments const args = parseCommandLineArgs(); // 1. Check command line arguments first (highest priority) if (args.transport) { const type = args.transport === "http" ? "http" : "stdio"; return { type, source: "command line argument" }; } // 2. Check environment variables if (process.env.TRANSPORT) { const type = process.env.TRANSPORT === "http" ? "http" : "stdio"; return { type, source: "environment variable" }; } // 3. Default to stdio return { type: "stdio", source: "default" }; } /** * Resolve max rows from command line args * Returns max rows value or null if not specified */ export function resolveMaxRows(): { maxRows: number; source: string } | null { // Get command line arguments const args = parseCommandLineArgs(); // Check command line arguments if (args["max-rows"]) { const maxRows = parseInt(args["max-rows"], 10); if (isNaN(maxRows) || maxRows <= 0) { throw new Error(`Invalid --max-rows value: ${args["max-rows"]}. Must be a positive integer.`); } return { maxRows, source: "command line argument" }; } return null; } /** * Resolve port from command line args or environment variables * Returns port number with 8080 as the default * * Note: The port option is only applicable when using --transport=http * as it controls the HTTP server port for streamable HTTP connections. */ export function resolvePort(): { port: number; source: string } { // Get command line arguments const args = parseCommandLineArgs(); // 1. Check command line arguments first (highest priority) if (args.port) { const port = parseInt(args.port, 10); return { port, source: "command line argument" }; } // 2. 
Check environment variables if (process.env.PORT) { const port = parseInt(process.env.PORT, 10); return { port, source: "environment variable" }; } // 3. Default to 8080 return { port: 8080, source: "default" }; } /** * Redact sensitive information from a DSN string * Replaces the password with asterisks * @param dsn - The DSN string to redact * @returns The sanitized DSN string */ export function redactDSN(dsn: string): string { try { // Create a URL object to parse the DSN const url = new URL(dsn); // Replace the password with asterisks if (url.password) { url.password = "*******"; } // Return the sanitized DSN return url.toString(); } catch (error) { // If parsing fails, do basic redaction with regex return dsn.replace(/\/\/([^:]+):([^@]+)@/, "//$1:***@"); } } /** * Resolve ID from command line args or environment variables * Returns ID or null if not provided */ export function resolveId(): { id: string; source: string } | null { // Get command line arguments const args = parseCommandLineArgs(); // 1. Check command line arguments first (highest priority) if (args.id) { return { id: args.id, source: "command line argument" }; } // 2. Check environment variables if (process.env.ID) { return { id: process.env.ID, source: "environment variable" }; } return null; } /** * Resolve SSH tunnel configuration from command line args or environment variables * Returns SSH config or null if no SSH options are provided */ export function resolveSSHConfig(): { config: SSHTunnelConfig; source: string } | null { // Get command line arguments const args = parseCommandLineArgs(); // Check if any SSH options are provided const hasSSHArgs = args["ssh-host"] || process.env.SSH_HOST; if (!hasSSHArgs) { return null; } // Build SSH config from command line and environment variables let config: Partial<SSHTunnelConfig> = {}; let sources: string[] = []; let sshConfigHost: string | undefined; // SSH Host (required) if (args["ssh-host"]) { sshConfigHost = args["ssh-host"]; config.host = args["ssh-host"]; sources.push("ssh-host from command line"); } else if (process.env.SSH_HOST) { sshConfigHost = process.env.SSH_HOST; config.host = process.env.SSH_HOST; sources.push("SSH_HOST from environment"); } // Check if the host looks like an SSH config alias if (sshConfigHost && looksLikeSSHAlias(sshConfigHost)) { // Try to parse SSH config for this host, default to ~/.ssh/config const sshConfigPath = path.join(homedir(), '.ssh', 'config'); console.error(`Attempting to parse SSH config for host '${sshConfigHost}' from: ${sshConfigPath}`); const sshConfigData = parseSSHConfig(sshConfigHost, sshConfigPath); if (sshConfigData) { // Use SSH config as base, but allow command line/env to override config = { ...sshConfigData }; sources.push(`SSH config for host '${sshConfigHost}'`); // The host from SSH config has already been set, no need to override } } // SSH Port (optional, default: 22) if (args["ssh-port"]) { config.port = parseInt(args["ssh-port"], 10); sources.push("ssh-port from command line"); } else if (process.env.SSH_PORT) { config.port = parseInt(process.env.SSH_PORT, 10); sources.push("SSH_PORT from environment"); } // SSH User (required) if (args["ssh-user"]) { config.username = args["ssh-user"]; sources.push("ssh-user from command line"); } else if (process.env.SSH_USER) { config.username = process.env.SSH_USER; sources.push("SSH_USER from environment"); } // SSH Password (optional) if (args["ssh-password"]) { config.password = args["ssh-password"]; sources.push("ssh-password from command line"); } else if 
(process.env.SSH_PASSWORD) { config.password = process.env.SSH_PASSWORD; sources.push("SSH_PASSWORD from environment"); } // SSH Private Key (optional) if (args["ssh-key"]) { config.privateKey = args["ssh-key"]; // Expand ~ to home directory if (config.privateKey.startsWith("~/")) { config.privateKey = path.join(process.env.HOME || "", config.privateKey.substring(2)); } sources.push("ssh-key from command line"); } else if (process.env.SSH_KEY) { config.privateKey = process.env.SSH_KEY; // Expand ~ to home directory if (config.privateKey.startsWith("~/")) { config.privateKey = path.join(process.env.HOME || "", config.privateKey.substring(2)); } sources.push("SSH_KEY from environment"); } // SSH Key Passphrase (optional) if (args["ssh-passphrase"]) { config.passphrase = args["ssh-passphrase"]; sources.push("ssh-passphrase from command line"); } else if (process.env.SSH_PASSPHRASE) { config.passphrase = process.env.SSH_PASSPHRASE; sources.push("SSH_PASSPHRASE from environment"); } // Validate required fields if (!config.host || !config.username) { throw new Error("SSH tunnel configuration requires at least --ssh-host and --ssh-user"); } // Validate authentication method if (!config.password && !config.privateKey) { throw new Error("SSH tunnel configuration requires either --ssh-password or --ssh-key for authentication"); } return { config: config as SSHTunnelConfig, source: sources.join(", ") }; } ``` -------------------------------------------------------------------------------- /src/connectors/__tests__/sqlite.integration.test.ts: -------------------------------------------------------------------------------- ```typescript import { describe, it, expect, beforeAll, afterAll } from 'vitest'; import { SQLiteConnector } from '../sqlite/index.js'; import { IntegrationTestBase, type TestContainer, type DatabaseTestConfig } from './shared/integration-test-base.js'; import type { Connector } from '../interface.js'; import Database from 'better-sqlite3'; import fs from 'fs'; import path from 'path'; import os from 'os'; class SQLiteTestContainer implements TestContainer { constructor(private dbPath: string) {} getConnectionUri(): string { return `sqlite://${this.dbPath}`; } async stop(): Promise<void> { // Clean up the temporary database file if (this.dbPath !== ':memory:' && fs.existsSync(this.dbPath)) { try { // Add a small delay to ensure any file handles are fully released await new Promise(resolve => setTimeout(resolve, 10)); fs.unlinkSync(this.dbPath); } catch (error) { // Log but don't throw - cleanup failures shouldn't break tests console.warn(`Failed to cleanup database file ${this.dbPath}:`, error); } } } } class SQLiteIntegrationTest extends IntegrationTestBase<SQLiteTestContainer> { constructor() { const config: DatabaseTestConfig = { expectedSchemas: ['main'], // SQLite uses 'main' as the default schema name expectedTables: ['users', 'orders'], supportsStoredProcedures: false // SQLite doesn't support stored procedures }; super(config); } async createContainer(): Promise<SQLiteTestContainer> { // Create a temporary database file const tempDir = os.tmpdir(); const dbPath = path.join(tempDir, `test_${Date.now()}_${Math.random().toString(36).substr(2, 9)}.db`); return new SQLiteTestContainer(dbPath); } createConnector(): Connector { return new SQLiteConnector(); } async setupTestData(connector: Connector): Promise<void> { // Create users table await connector.executeSQL(` CREATE TABLE IF NOT EXISTS users ( id INTEGER PRIMARY KEY AUTOINCREMENT, name VARCHAR(100) NOT NULL, email 
VARCHAR(100) UNIQUE NOT NULL, age INTEGER ) `, {}); // Create orders table await connector.executeSQL(` CREATE TABLE IF NOT EXISTS orders ( id INTEGER PRIMARY KEY AUTOINCREMENT, user_id INTEGER REFERENCES users(id), total DECIMAL(10,2), created_at DATETIME DEFAULT CURRENT_TIMESTAMP ) `, {}); // Insert test data await connector.executeSQL(` INSERT INTO users (name, email, age) VALUES ('John Doe', '[email protected]', 30), ('Jane Smith', '[email protected]', 25), ('Bob Johnson', '[email protected]', 35) `, {}); await connector.executeSQL(` INSERT INTO orders (user_id, total) VALUES (1, 99.99), (1, 149.50), (2, 75.25) `, {}); } } // Create the test suite const sqliteTest = new SQLiteIntegrationTest(); describe('SQLite Connector Integration Tests', () => { beforeAll(async () => { await sqliteTest.setup(); }, 120000); afterAll(async () => { await sqliteTest.cleanup(); }); // Include all common tests sqliteTest.createConnectionTests(); sqliteTest.createSchemaTests(); sqliteTest.createTableTests(); sqliteTest.createSQLExecutionTests(); sqliteTest.createErrorHandlingTests(); describe('SQLite-specific Features', () => { it('should handle SQLite data types correctly', async () => { await sqliteTest.connector.executeSQL(` CREATE TABLE IF NOT EXISTS types_test ( id INTEGER PRIMARY KEY, text_val TEXT, int_val INTEGER, real_val REAL, blob_val BLOB, null_val TEXT ) `, {}); await sqliteTest.connector.executeSQL(` INSERT INTO types_test (text_val, int_val, real_val, blob_val, null_val) VALUES ('test string', 42, 3.14159, X'48656C6C6F', NULL) `, {}); const result = await sqliteTest.connector.executeSQL( 'SELECT * FROM types_test ORDER BY id DESC LIMIT 1', {} ); expect(result.rows).toHaveLength(1); expect(result.rows[0].text_val).toBe('test string'); expect(result.rows[0].int_val).toBe(42); expect(result.rows[0].real_val).toBe(3.14159); expect(result.rows[0].null_val).toBeNull(); }); it('should work with SQLite-specific functions', async () => { const result = await sqliteTest.connector.executeSQL(` SELECT sqlite_version() as sqlite_version, datetime('now') as current_time, hex(randomblob(16)) as random_hex, upper('hello world') as uppercase_text, length('test string') as string_length `, {}); expect(result.rows).toHaveLength(1); expect(result.rows[0].sqlite_version).toBeDefined(); expect(result.rows[0].current_time).toBeDefined(); expect(result.rows[0].random_hex).toBeDefined(); expect(result.rows[0].uppercase_text).toBe('HELLO WORLD'); expect(result.rows[0].string_length).toBe(11); }); it('should handle SQLite transactions correctly', async () => { // Test successful transaction await sqliteTest.connector.executeSQL(` BEGIN TRANSACTION; INSERT INTO users (name, email, age) VALUES ('Transaction User 1', '[email protected]', 28); INSERT INTO users (name, email, age) VALUES ('Transaction User 2', '[email protected]', 32); COMMIT; `, {}); const successResult = await sqliteTest.connector.executeSQL( "SELECT COUNT(*) as count FROM users WHERE email LIKE 'trans%@example.com'", {} ); expect(Number(successResult.rows[0].count)).toBe(2); // Test manual rollback await sqliteTest.connector.executeSQL(` BEGIN TRANSACTION; INSERT INTO users (name, email, age) VALUES ('Transaction User 3', '[email protected]', 40); ROLLBACK; `, {}); // Verify rollback worked - should still be 2 transaction users const rollbackResult = await sqliteTest.connector.executeSQL( "SELECT COUNT(*) as count FROM users WHERE email LIKE 'trans%@example.com'", {} ); expect(Number(rollbackResult.rows[0].count)).toBe(2); }); it('should handle SQLite 
pragma statements', async () => { const result = await sqliteTest.connector.executeSQL(` PRAGMA table_info(users); `, {}); expect(result.rows.length).toBeGreaterThan(0); expect(result.rows.some(row => row.name === 'id')).toBe(true); expect(result.rows.some(row => row.name === 'name')).toBe(true); expect(result.rows.some(row => row.name === 'email')).toBe(true); }); it('should support SQLite window functions', async () => { const result = await sqliteTest.connector.executeSQL(` SELECT name, age, ROW_NUMBER() OVER (ORDER BY age DESC) as age_rank, AVG(age) OVER () as avg_age FROM users WHERE age IS NOT NULL ORDER BY age DESC `, {}); expect(result.rows.length).toBeGreaterThan(0); expect(result.rows[0]).toHaveProperty('age_rank'); expect(result.rows[0]).toHaveProperty('avg_age'); }); it('should handle SQLite JSON functions (if available)', async () => { // SQLite 3.38+ has JSON support, but we'll make this test conditional try { await sqliteTest.connector.executeSQL(` CREATE TABLE IF NOT EXISTS json_test ( id INTEGER PRIMARY KEY, data TEXT ) `, {}); await sqliteTest.connector.executeSQL(` INSERT INTO json_test (data) VALUES ('{"name": "John", "age": 30, "tags": ["admin", "user"]}'), ('{"name": "Jane", "age": 25, "tags": ["user"]}') `, {}); // Try to use json_extract (available in newer SQLite versions) const result = await sqliteTest.connector.executeSQL(` SELECT json_extract(data, '$.name') as name, json_extract(data, '$.age') as age FROM json_test WHERE json_extract(data, '$.age') > 27 `, {}); expect(result.rows).toHaveLength(1); expect(result.rows[0].name).toBe('John'); expect(Number(result.rows[0].age)).toBe(30); } catch (error) { // JSON functions not available in this SQLite version, skip this test console.log('JSON functions not available in this SQLite version, skipping JSON test'); } }); it('should handle multiple statements correctly', async () => { const result = await sqliteTest.connector.executeSQL(` INSERT INTO users (name, email, age) VALUES ('Multi User 1', '[email protected]', 30); INSERT INTO users (name, email, age) VALUES ('Multi User 2', '[email protected]', 35); SELECT COUNT(*) as total FROM users WHERE email LIKE 'multi%'; `, {}); expect(result.rows).toHaveLength(1); expect(Number(result.rows[0].total)).toBe(2); }); it('should handle SQLite foreign key constraints', async () => { // Enable foreign key constraints await sqliteTest.connector.executeSQL('PRAGMA foreign_keys = ON', {}); // Try to insert an order with non-existent user_id await expect( sqliteTest.connector.executeSQL('INSERT INTO orders (user_id, total) VALUES (9999, 100.00)', {}) ).rejects.toThrow(); // Verify foreign key is working by inserting valid order await sqliteTest.connector.executeSQL('INSERT INTO orders (user_id, total) VALUES (1, 200.00)', {}); const result = await sqliteTest.connector.executeSQL( 'SELECT COUNT(*) as count FROM orders WHERE total = 200.00', {} ); expect(Number(result.rows[0].count)).toBe(1); }); it('should work with SQLite virtual tables (FTS)', async () => { try { // Create an FTS (Full-Text Search) virtual table if FTS is available await sqliteTest.connector.executeSQL(` CREATE VIRTUAL TABLE IF NOT EXISTS docs_fts USING fts5(title, content) `, {}); await sqliteTest.connector.executeSQL(` INSERT INTO docs_fts (title, content) VALUES ('First Document', 'This is the content of the first document'), ('Second Document', 'This document contains different content'), ('Third Document', 'Another document with more content') `, {}); const result = await sqliteTest.connector.executeSQL(` 
SELECT title FROM docs_fts WHERE docs_fts MATCH 'content' ORDER BY title `, {}); expect(result.rows.length).toBeGreaterThan(0); expect(result.rows.some(row => row.title.includes('Document'))).toBe(true); } catch (error) { // FTS not available in this SQLite build, skip this test console.log('FTS extension not available in this SQLite build, skipping FTS test'); } }); it('should respect maxRows limit for SELECT queries', async () => { // Test basic SELECT with maxRows limit const result1 = await sqliteTest.connector.executeSQL( 'SELECT * FROM users ORDER BY id', { maxRows: 2 } ); expect(result1.rows).toHaveLength(2); expect(result1.rows[0].name).toBe('John Doe'); expect(result1.rows[1].name).toBe('Jane Smith'); }); it('should respect existing LIMIT clause when lower than maxRows', async () => { // Test when existing LIMIT is lower than maxRows const result = await sqliteTest.connector.executeSQL( 'SELECT * FROM users ORDER BY id LIMIT 1', { maxRows: 3 } ); expect(result.rows).toHaveLength(1); expect(result.rows[0].name).toBe('John Doe'); }); it('should use maxRows when existing LIMIT is higher', async () => { // Test when existing LIMIT is higher than maxRows const result = await sqliteTest.connector.executeSQL( 'SELECT * FROM users ORDER BY id LIMIT 10', { maxRows: 2 } ); expect(result.rows).toHaveLength(2); expect(result.rows[0].name).toBe('John Doe'); expect(result.rows[1].name).toBe('Jane Smith'); }); it('should not affect non-SELECT queries', async () => { // Test that maxRows doesn't affect INSERT/UPDATE/DELETE const insertResult = await sqliteTest.connector.executeSQL( "INSERT INTO users (name, email, age) VALUES ('MaxRows Test', '[email protected]', 25)", { maxRows: 1 } ); expect(insertResult.rows).toHaveLength(0); // INSERTs don't return rows // Verify the insert worked const selectResult = await sqliteTest.connector.executeSQL( "SELECT * FROM users WHERE email = '[email protected]'", {} ); expect(selectResult.rows).toHaveLength(1); expect(selectResult.rows[0].name).toBe('MaxRows Test'); }); it('should handle maxRows with complex queries', async () => { // Test maxRows with JOIN queries const result = await sqliteTest.connector.executeSQL(` SELECT u.name, o.total FROM users u JOIN orders o ON u.id = o.user_id ORDER BY o.total DESC `, { maxRows: 2 }); expect(result.rows).toHaveLength(2); expect(result.rows[0]).toHaveProperty('name'); expect(result.rows[0]).toHaveProperty('total'); }); it('should not apply maxRows to CTE queries (WITH clause)', async () => { // Test that maxRows is not applied to CTE queries (WITH clause) const result = await sqliteTest.connector.executeSQL(` WITH user_summary AS ( SELECT name, age FROM users WHERE age IS NOT NULL ) SELECT * FROM user_summary ORDER BY age `, { maxRows: 2 }); // Should return all rows since WITH queries are not limited anymore expect(result.rows.length).toBeGreaterThan(2); expect(result.rows[0]).toHaveProperty('name'); expect(result.rows[0]).toHaveProperty('age'); }); it('should handle maxRows in multi-statement execution', async () => { // Test maxRows with multiple statements where some are SELECT const result = await sqliteTest.connector.executeSQL(` INSERT INTO users (name, email, age) VALUES ('Multi Test 1', '[email protected]', 30); SELECT name FROM users WHERE email LIKE '%@test.com' ORDER BY name; INSERT INTO users (name, email, age) VALUES ('Multi Test 2', '[email protected]', 35); `, { maxRows: 1 }); // Should return only 1 row from the SELECT statement expect(result.rows).toHaveLength(1); 
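      // Why only one row comes back: the SQLite connector (src/connectors/sqlite/index.ts above)
      // splits multi-statement batches, runs the INSERTs through db.exec(), and runs each SELECT
      // through prepare().all() with the maxRows limit applied, so { maxRows: 1 } trims the SELECT
      // to a single row while the surrounding INSERTs contribute no rows.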
expect(result.rows[0].name).toBe('Multi Test 1'); }); it('should ignore maxRows when not specified', async () => { // Test without maxRows - should return all rows const result = await sqliteTest.connector.executeSQL( 'SELECT * FROM users ORDER BY id', {} ); // Should return all users (at least the original 3 plus any added in previous tests) expect(result.rows.length).toBeGreaterThanOrEqual(3); }); }); }); ``` -------------------------------------------------------------------------------- /src/connectors/sqlserver/index.ts: -------------------------------------------------------------------------------- ```typescript import sql from "mssql"; import { Connector, ConnectorType, ConnectorRegistry, DSNParser, SQLResult, TableColumn, TableIndex, StoredProcedure, ExecuteOptions, } from "../interface.js"; import { DefaultAzureCredential } from "@azure/identity"; import { SafeURL } from "../../utils/safe-url.js"; import { obfuscateDSNPassword } from "../../utils/dsn-obfuscate.js"; import { SQLRowLimiter } from "../../utils/sql-row-limiter.js"; /** * SQL Server DSN parser * Expected format: mssql://username:password@host:port/database */ export class SQLServerDSNParser implements DSNParser { async parse(dsn: string): Promise<sql.config> { // Basic validation if (!this.isValidDSN(dsn)) { const obfuscatedDSN = obfuscateDSNPassword(dsn); const expectedFormat = this.getSampleDSN(); throw new Error( `Invalid SQL Server DSN format.\nProvided: ${obfuscatedDSN}\nExpected: ${expectedFormat}` ); } try { // Use the SafeURL helper to parse DSNs with special characters const url = new SafeURL(dsn); // Parse additional options from query parameters const options: Record<string, any> = {}; // Process query parameters url.forEachSearchParam((value, key) => { if (key === "connectTimeout") { options.connectTimeout = parseInt(value, 10); } else if (key === "requestTimeout") { options.requestTimeout = parseInt(value, 10); } else if (key === "authentication") { options.authentication = value; } else if (key === "sslmode") { options.sslmode = value; } }); // Handle sslmode parameter similar to PostgreSQL and MySQL if (options.sslmode) { if (options.sslmode === "disable") { options.encrypt = false; options.trustServerCertificate = false; } else if (options.sslmode === "require") { options.encrypt = true; options.trustServerCertificate = true; } // Default behavior (certificate verification) is handled by the default values below } // Base configuration without authentication first const config: sql.config = { user: url.username, password: url.password, server: url.hostname, port: url.port ? parseInt(url.port) : 1433, // Default SQL Server port database: url.pathname ? url.pathname.substring(1) : '', // Remove leading slash options: { encrypt: options.encrypt ?? false, // Default to unencrypted for development trustServerCertificate: options.trustServerCertificate ?? false, connectTimeout: options.connectTimeout ?? 15000, requestTimeout: options.requestTimeout ?? 15000, }, }; // Handle Azure Active Directory authentication with access token if (options.authentication === "azure-active-directory-access-token") { try { // Create a credential instance const credential = new DefaultAzureCredential(); // Get token for SQL Server resource const token = await credential.getToken("https://database.windows.net/"); // Set the token in the config config.authentication = { type: "azure-active-directory-access-token", options: { token: token.token, }, }; } catch (error: unknown) { const errorMessage = error instanceof Error ? 
error.message : String(error); throw new Error(`Failed to get Azure AD token: ${errorMessage}`); } } return config; } catch (error) { throw new Error( `Failed to parse SQL Server DSN: ${error instanceof Error ? error.message : String(error)}` ); } } getSampleDSN(): string { return "sqlserver://username:password@localhost:1433/database?sslmode=disable"; } isValidDSN(dsn: string): boolean { try { return dsn.startsWith('sqlserver://'); } catch (error) { return false; } } } /** * SQL Server connector */ export class SQLServerConnector implements Connector { id: ConnectorType = "sqlserver"; name = "SQL Server"; dsnParser = new SQLServerDSNParser(); private connection?: sql.ConnectionPool; private config?: sql.config; async connect(dsn: string): Promise<void> { try { this.config = await this.dsnParser.parse(dsn); if (!this.config.options) { this.config.options = {}; } this.connection = await new sql.ConnectionPool(this.config).connect(); } catch (error) { throw error; } } async disconnect(): Promise<void> { if (this.connection) { await this.connection.close(); this.connection = undefined; } } async getSchemas(): Promise<string[]> { if (!this.connection) { throw new Error("Not connected to SQL Server database"); } try { const result = await this.connection.request().query(` SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA ORDER BY SCHEMA_NAME `); return result.recordset.map((row: { SCHEMA_NAME: any }) => row.SCHEMA_NAME); } catch (error) { throw new Error(`Failed to get schemas: ${(error as Error).message}`); } } async getTables(schema?: string): Promise<string[]> { if (!this.connection) { throw new Error("Not connected to SQL Server database"); } try { // In SQL Server, use 'dbo' as the default schema if none specified // This is the default schema for SQL Server databases const schemaToUse = schema || "dbo"; const request = this.connection.request().input("schema", sql.VarChar, schemaToUse); const query = ` SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = @schema ORDER BY TABLE_NAME `; const result = await request.query(query); return result.recordset.map((row: { TABLE_NAME: any }) => row.TABLE_NAME); } catch (error) { throw new Error(`Failed to get tables: ${(error as Error).message}`); } } async tableExists(tableName: string, schema?: string): Promise<boolean> { if (!this.connection) { throw new Error("Not connected to SQL Server database"); } try { // In SQL Server, use 'dbo' as the default schema if none specified const schemaToUse = schema || "dbo"; const request = this.connection .request() .input("tableName", sql.VarChar, tableName) .input("schema", sql.VarChar, schemaToUse); const query = ` SELECT COUNT(*) as count FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = @tableName AND TABLE_SCHEMA = @schema `; const result = await request.query(query); return result.recordset[0].count > 0; } catch (error) { throw new Error(`Failed to check if table exists: ${(error as Error).message}`); } } async getTableIndexes(tableName: string, schema?: string): Promise<TableIndex[]> { if (!this.connection) { throw new Error("Not connected to SQL Server database"); } try { // In SQL Server, use 'dbo' as the default schema if none specified const schemaToUse = schema || "dbo"; const request = this.connection .request() .input("tableName", sql.VarChar, tableName) .input("schema", sql.VarChar, schemaToUse); // This gets all indexes including primary keys const query = ` SELECT i.name AS index_name, i.is_unique, i.is_primary_key, c.name AS column_name, ic.key_ordinal FROM sys.indexes i 
INNER JOIN sys.index_columns ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id INNER JOIN sys.columns c ON ic.object_id = c.object_id AND ic.column_id = c.column_id INNER JOIN sys.tables t ON i.object_id = t.object_id INNER JOIN sys.schemas s ON t.schema_id = s.schema_id WHERE t.name = @tableName AND s.name = @schema ORDER BY i.name, ic.key_ordinal `; const result = await request.query(query); // Group by index name to collect all columns for each index const indexMap = new Map< string, { columns: string[]; is_unique: boolean; is_primary: boolean; } >(); for (const row of result.recordset) { const indexName = row.index_name; const columnName = row.column_name; const isUnique = !!row.is_unique; const isPrimary = !!row.is_primary_key; if (!indexMap.has(indexName)) { indexMap.set(indexName, { columns: [], is_unique: isUnique, is_primary: isPrimary, }); } const indexInfo = indexMap.get(indexName)!; indexInfo.columns.push(columnName); } // Convert Map to array of TableIndex objects const indexes: TableIndex[] = []; indexMap.forEach((info, name) => { indexes.push({ index_name: name, column_names: info.columns, is_unique: info.is_unique, is_primary: info.is_primary, }); }); return indexes; } catch (error) { throw new Error(`Failed to get indexes for table ${tableName}: ${(error as Error).message}`); } } async getTableSchema(tableName: string, schema?: string): Promise<TableColumn[]> { if (!this.connection) { throw new Error("Not connected to SQL Server database"); } try { // In SQL Server, use 'dbo' as the default schema if none specified const schemaToUse = schema || "dbo"; const request = this.connection .request() .input("tableName", sql.VarChar, tableName) .input("schema", sql.VarChar, schemaToUse); const query = ` SELECT COLUMN_NAME as column_name, DATA_TYPE as data_type, IS_NULLABLE as is_nullable, COLUMN_DEFAULT as column_default FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = @tableName AND TABLE_SCHEMA = @schema ORDER BY ORDINAL_POSITION `; const result = await request.query(query); return result.recordset; } catch (error) { throw new Error(`Failed to get schema for table ${tableName}: ${(error as Error).message}`); } } async getStoredProcedures(schema?: string): Promise<string[]> { if (!this.connection) { throw new Error("Not connected to SQL Server database"); } try { // In SQL Server, use 'dbo' as the default schema if none specified const schemaToUse = schema || "dbo"; const request = this.connection.request().input("schema", sql.VarChar, schemaToUse); const query = ` SELECT ROUTINE_NAME FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_SCHEMA = @schema AND (ROUTINE_TYPE = 'PROCEDURE' OR ROUTINE_TYPE = 'FUNCTION') ORDER BY ROUTINE_NAME `; const result = await request.query(query); return result.recordset.map((row: { ROUTINE_NAME: any }) => row.ROUTINE_NAME); } catch (error) { throw new Error(`Failed to get stored procedures: ${(error as Error).message}`); } } async getStoredProcedureDetail(procedureName: string, schema?: string): Promise<StoredProcedure> { if (!this.connection) { throw new Error("Not connected to SQL Server database"); } try { // In SQL Server, use 'dbo' as the default schema if none specified const schemaToUse = schema || "dbo"; const request = this.connection .request() .input("procedureName", sql.VarChar, procedureName) .input("schema", sql.VarChar, schemaToUse); // First, get basic procedure information const routineQuery = ` SELECT ROUTINE_NAME as procedure_name, ROUTINE_TYPE, DATA_TYPE as return_data_type FROM INFORMATION_SCHEMA.ROUTINES WHERE 
ROUTINE_NAME = @procedureName AND ROUTINE_SCHEMA = @schema `; const routineResult = await request.query(routineQuery); if (routineResult.recordset.length === 0) { throw new Error(`Stored procedure '${procedureName}' not found in schema '${schemaToUse}'`); } const routine = routineResult.recordset[0]; // Next, get parameter information const parameterQuery = ` SELECT PARAMETER_NAME, PARAMETER_MODE, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH, ORDINAL_POSITION FROM INFORMATION_SCHEMA.PARAMETERS WHERE SPECIFIC_NAME = @procedureName AND SPECIFIC_SCHEMA = @schema ORDER BY ORDINAL_POSITION `; const parameterResult = await request.query(parameterQuery); // Format the parameter list let parameterList = ""; if (parameterResult.recordset.length > 0) { parameterList = parameterResult.recordset .map( (param: { CHARACTER_MAXIMUM_LENGTH: number; PARAMETER_NAME: any; PARAMETER_MODE: any; DATA_TYPE: any; }) => { const lengthStr = param.CHARACTER_MAXIMUM_LENGTH > 0 ? `(${param.CHARACTER_MAXIMUM_LENGTH})` : ""; return `${param.PARAMETER_NAME} ${param.PARAMETER_MODE} ${param.DATA_TYPE}${lengthStr}`; } ) .join(", "); } // Get the procedure definition from sys.sql_modules const definitionQuery = ` SELECT definition FROM sys.sql_modules sm JOIN sys.objects o ON sm.object_id = o.object_id JOIN sys.schemas s ON o.schema_id = s.schema_id WHERE o.name = @procedureName AND s.name = @schema `; const definitionResult = await request.query(definitionQuery); let definition = undefined; if (definitionResult.recordset.length > 0) { definition = definitionResult.recordset[0].definition; } return { procedure_name: routine.procedure_name, procedure_type: routine.ROUTINE_TYPE === "PROCEDURE" ? "procedure" : "function", language: "sql", // SQL Server procedures are typically in T-SQL parameter_list: parameterList, return_type: routine.ROUTINE_TYPE === "FUNCTION" ? routine.return_data_type : undefined, definition: definition, }; } catch (error) { throw new Error(`Failed to get stored procedure details: ${(error as Error).message}`); } } async executeSQL(sql: string, options: ExecuteOptions): Promise<SQLResult> { if (!this.connection) { throw new Error("Not connected to SQL Server database"); } try { // Apply maxRows limit to SELECT queries if specified let processedSQL = sql; if (options.maxRows) { processedSQL = SQLRowLimiter.applyMaxRowsForSQLServer(sql, options.maxRows); } const result = await this.connection.request().query(processedSQL); return { rows: result.recordset || [], fields: result.recordset && result.recordset.length > 0 ? 
Object.keys(result.recordset[0]).map((key) => ({ name: key, })) : [], rowCount: result.rowsAffected[0] || 0, }; } catch (error) { throw new Error(`Failed to execute query: ${(error as Error).message}`); } } } // Create and register the connector const sqlServerConnector = new SQLServerConnector(); ConnectorRegistry.register(sqlServerConnector); ``` -------------------------------------------------------------------------------- /src/connectors/mariadb/index.ts: -------------------------------------------------------------------------------- ```typescript import mariadb from "mariadb"; import { Connector, ConnectorType, ConnectorRegistry, DSNParser, SQLResult, TableColumn, TableIndex, StoredProcedure, ExecuteOptions, } from "../interface.js"; import { SafeURL } from "../../utils/safe-url.js"; import { obfuscateDSNPassword } from "../../utils/dsn-obfuscate.js"; import { SQLRowLimiter } from "../../utils/sql-row-limiter.js"; /** * MariaDB DSN Parser * Handles DSN strings like: mariadb://user:password@localhost:3306/dbname?sslmode=require * Supported SSL modes: * - sslmode=disable: No SSL connection * - sslmode=require: SSL connection without certificate verification * - Any other value: Standard SSL connection with certificate verification */ class MariadbDSNParser implements DSNParser { async parse(dsn: string): Promise<mariadb.ConnectionConfig> { // Basic validation if (!this.isValidDSN(dsn)) { const obfuscatedDSN = obfuscateDSNPassword(dsn); const expectedFormat = this.getSampleDSN(); throw new Error( `Invalid MariaDB DSN format.\nProvided: ${obfuscatedDSN}\nExpected: ${expectedFormat}` ); } try { // Use the SafeURL helper instead of the built-in URL // This will handle special characters in passwords, etc. const url = new SafeURL(dsn); const config: mariadb.ConnectionConfig = { host: url.hostname, port: url.port ? parseInt(url.port) : 3306, database: url.pathname ? url.pathname.substring(1) : '', // Remove leading '/' if exists user: url.username, password: url.password, multipleStatements: true, // Enable native multi-statement support connectTimeout: 5000, // 5 second timeout for connections }; // Handle query parameters url.forEachSearchParam((value, key) => { if (key === "sslmode") { if (value === "disable") { config.ssl = undefined; } else if (value === "require") { config.ssl = { rejectUnauthorized: false }; } else { config.ssl = {}; } } // Add other parameters as needed }); return config; } catch (error) { throw new Error( `Failed to parse MariaDB DSN: ${error instanceof Error ? 
error.message : String(error)}` ); } } getSampleDSN(): string { return "mariadb://root:password@localhost:3306/db?sslmode=require"; } isValidDSN(dsn: string): boolean { try { return dsn.startsWith('mariadb://'); } catch (error) { return false; } } } /** * MariaDB Connector Implementation */ export class MariaDBConnector implements Connector { id: ConnectorType = "mariadb"; name = "MariaDB"; dsnParser = new MariadbDSNParser(); private pool: mariadb.Pool | null = null; async connect(dsn: string): Promise<void> { try { const config = await this.dsnParser.parse(dsn); this.pool = mariadb.createPool(config); // Test the connection console.error("Testing connection to MariaDB..."); await this.pool.query("SELECT 1"); console.error("Successfully connected to MariaDB database"); } catch (err) { console.error("Failed to connect to MariaDB database:", err); throw err; } } async disconnect(): Promise<void> { if (this.pool) { await this.pool.end(); this.pool = null; } } async getSchemas(): Promise<string[]> { if (!this.pool) { throw new Error("Not connected to database"); } try { // In MariaDB, schemas are equivalent to databases const rows = await this.pool.query(` SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA ORDER BY SCHEMA_NAME `) as any[]; return rows.map((row) => row.SCHEMA_NAME); } catch (error) { console.error("Error getting schemas:", error); throw error; } } async getTables(schema?: string): Promise<string[]> { if (!this.pool) { throw new Error("Not connected to database"); } try { // In MariaDB, if no schema is provided, use the current active database (DATABASE()) // MariaDB uses the terms 'database' and 'schema' interchangeably // The DATABASE() function returns the current database context const schemaClause = schema ? "WHERE TABLE_SCHEMA = ?" : "WHERE TABLE_SCHEMA = DATABASE()"; const queryParams = schema ? [schema] : []; // Get all tables from the specified schema or current database const rows = await this.pool.query( ` SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES ${schemaClause} ORDER BY TABLE_NAME `, queryParams ) as any[]; return rows.map((row) => row.TABLE_NAME); } catch (error) { console.error("Error getting tables:", error); throw error; } } async tableExists(tableName: string, schema?: string): Promise<boolean> { if (!this.pool) { throw new Error("Not connected to database"); } try { // In MariaDB, if no schema is provided, use the current active database // DATABASE() function returns the name of the current database const schemaClause = schema ? "WHERE TABLE_SCHEMA = ?" : "WHERE TABLE_SCHEMA = DATABASE()"; const queryParams = schema ? [schema, tableName] : [tableName]; const rows = await this.pool.query( ` SELECT COUNT(*) AS COUNT FROM INFORMATION_SCHEMA.TABLES ${schemaClause} AND TABLE_NAME = ? `, queryParams ) as any[]; return rows[0].COUNT > 0; } catch (error) { console.error("Error checking if table exists:", error); throw error; } } async getTableIndexes(tableName: string, schema?: string): Promise<TableIndex[]> { if (!this.pool) { throw new Error("Not connected to database"); } try { // In MariaDB, if no schema is provided, use the current active database const schemaClause = schema ? "TABLE_SCHEMA = ?" : "TABLE_SCHEMA = DATABASE()"; const queryParams = schema ? [schema, tableName] : [tableName]; // Get information about indexes const indexRows = await this.pool.query( ` SELECT INDEX_NAME, COLUMN_NAME, NON_UNIQUE, SEQ_IN_INDEX FROM INFORMATION_SCHEMA.STATISTICS WHERE ${schemaClause} AND TABLE_NAME = ? 
ORDER BY INDEX_NAME, SEQ_IN_INDEX `, queryParams ) as any[]; // Process the results to group columns by index const indexMap = new Map< string, { columns: string[]; is_unique: boolean; is_primary: boolean; } >(); for (const row of indexRows) { const indexName = row.INDEX_NAME; const columnName = row.COLUMN_NAME; const isUnique = row.NON_UNIQUE === 0; // In MariaDB, NON_UNIQUE=0 means the index is unique const isPrimary = indexName === "PRIMARY"; if (!indexMap.has(indexName)) { indexMap.set(indexName, { columns: [], is_unique: isUnique, is_primary: isPrimary, }); } const indexInfo = indexMap.get(indexName)!; indexInfo.columns.push(columnName); } // Convert the map to the expected TableIndex format const results: TableIndex[] = []; indexMap.forEach((indexInfo, indexName) => { results.push({ index_name: indexName, column_names: indexInfo.columns, is_unique: indexInfo.is_unique, is_primary: indexInfo.is_primary, }); }); return results; } catch (error) { console.error("Error getting table indexes:", error); throw error; } } async getTableSchema(tableName: string, schema?: string): Promise<TableColumn[]> { if (!this.pool) { throw new Error("Not connected to database"); } try { // In MariaDB, schema is synonymous with database // If no schema is provided, use the current database context via DATABASE() function // This means tables will be retrieved from whatever database the connection is currently using const schemaClause = schema ? "WHERE TABLE_SCHEMA = ?" : "WHERE TABLE_SCHEMA = DATABASE()"; const queryParams = schema ? [schema, tableName] : [tableName]; // Get table columns const rows = await this.pool.query( ` SELECT COLUMN_NAME as column_name, DATA_TYPE as data_type, IS_NULLABLE as is_nullable, COLUMN_DEFAULT as column_default FROM INFORMATION_SCHEMA.COLUMNS ${schemaClause} AND TABLE_NAME = ? ORDER BY ORDINAL_POSITION `, queryParams ) as any[]; return rows; } catch (error) { console.error("Error getting table schema:", error); throw error; } } async getStoredProcedures(schema?: string): Promise<string[]> { if (!this.pool) { throw new Error("Not connected to database"); } try { // In MariaDB, if no schema is provided, use the current database context const schemaClause = schema ? "WHERE ROUTINE_SCHEMA = ?" : "WHERE ROUTINE_SCHEMA = DATABASE()"; const queryParams = schema ? [schema] : []; // Get all stored procedures and functions const rows = await this.pool.query( ` SELECT ROUTINE_NAME FROM INFORMATION_SCHEMA.ROUTINES ${schemaClause} ORDER BY ROUTINE_NAME `, queryParams ) as any[]; return rows.map((row) => row.ROUTINE_NAME); } catch (error) { console.error("Error getting stored procedures:", error); throw error; } } async getStoredProcedureDetail(procedureName: string, schema?: string): Promise<StoredProcedure> { if (!this.pool) { throw new Error("Not connected to database"); } try { // In MariaDB, if no schema is provided, use the current database context const schemaClause = schema ? "WHERE r.ROUTINE_SCHEMA = ?" : "WHERE r.ROUTINE_SCHEMA = DATABASE()"; const queryParams = schema ? 
[schema, procedureName] : [procedureName]; // Get details of the stored procedure const rows = await this.pool.query( ` SELECT r.ROUTINE_NAME AS procedure_name, CASE WHEN r.ROUTINE_TYPE = 'PROCEDURE' THEN 'procedure' ELSE 'function' END AS procedure_type, LOWER(r.ROUTINE_TYPE) AS routine_type, r.ROUTINE_DEFINITION, r.DTD_IDENTIFIER AS return_type, ( SELECT GROUP_CONCAT( CONCAT(p.PARAMETER_NAME, ' ', p.PARAMETER_MODE, ' ', p.DATA_TYPE) ORDER BY p.ORDINAL_POSITION SEPARATOR ', ' ) FROM INFORMATION_SCHEMA.PARAMETERS p WHERE p.SPECIFIC_SCHEMA = r.ROUTINE_SCHEMA AND p.SPECIFIC_NAME = r.ROUTINE_NAME AND p.PARAMETER_NAME IS NOT NULL ) AS parameter_list FROM INFORMATION_SCHEMA.ROUTINES r ${schemaClause} AND r.ROUTINE_NAME = ? `, queryParams ) as any[]; if (rows.length === 0) { const schemaName = schema || "current schema"; throw new Error(`Stored procedure '${procedureName}' not found in ${schemaName}`); } const procedure = rows[0]; // If ROUTINE_DEFINITION is NULL, try to get the procedure body from mariadb.proc let definition = procedure.ROUTINE_DEFINITION; try { const schemaValue = schema || (await this.getCurrentSchema()); // For full definition - different approaches based on type if (procedure.procedure_type === "procedure") { // Try to get the definition from SHOW CREATE PROCEDURE try { const defRows = await this.pool.query(` SHOW CREATE PROCEDURE ${schemaValue}.${procedureName} `) as any[]; if (defRows && defRows.length > 0) { definition = defRows[0]["Create Procedure"]; } } catch (err) { console.error(`Error getting procedure definition with SHOW CREATE: ${err}`); } } else { // Try to get the definition for functions try { const defRows = await this.pool.query(` SHOW CREATE FUNCTION ${schemaValue}.${procedureName} `) as any[]; if (defRows && defRows.length > 0) { definition = defRows[0]["Create Function"]; } } catch (innerErr) { console.error(`Error getting function definition with SHOW CREATE: ${innerErr}`); } } // Last attempt - try to get from information_schema.routines if not found yet if (!definition) { const bodyRows = await this.pool.query( ` SELECT ROUTINE_DEFINITION, ROUTINE_BODY FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_SCHEMA = ? AND ROUTINE_NAME = ? `, [schemaValue, procedureName] ) as any[]; if (bodyRows && bodyRows.length > 0) { if (bodyRows[0].ROUTINE_DEFINITION) { definition = bodyRows[0].ROUTINE_DEFINITION; } else if (bodyRows[0].ROUTINE_BODY) { definition = bodyRows[0].ROUTINE_BODY; } } } } catch (error) { // Ignore errors when getting definition - it's optional console.error(`Error getting procedure/function details: ${error}`); } return { procedure_name: procedure.procedure_name, procedure_type: procedure.procedure_type, language: "sql", // MariaDB procedures are generally in SQL parameter_list: procedure.parameter_list || "", return_type: procedure.routine_type === "function" ? 
procedure.return_type : undefined, definition: definition || undefined, }; } catch (error) { console.error("Error getting stored procedure detail:", error); throw error; } } // Helper method to get current schema (database) name private async getCurrentSchema(): Promise<string> { const rows = await this.pool!.query("SELECT DATABASE() AS DB") as any[]; return rows[0].DB; } async executeSQL(sql: string, options: ExecuteOptions): Promise<SQLResult> { if (!this.pool) { throw new Error("Not connected to database"); } try { // Apply maxRows limit to SELECT queries if specified let processedSQL = sql; if (options.maxRows) { // Handle multi-statement SQL by processing each statement individually const statements = sql.split(';') .map(statement => statement.trim()) .filter(statement => statement.length > 0); const processedStatements = statements.map(statement => SQLRowLimiter.applyMaxRows(statement, options.maxRows) ); processedSQL = processedStatements.join('; '); if (sql.trim().endsWith(';')) { processedSQL += ';'; } } // Use pool.query - MariaDB driver returns rows directly for single statements const results = await this.pool.query(processedSQL) as any; // MariaDB driver returns different formats: // - Single statement: returns rows array directly // - Multiple statements: returns array of results (when multipleStatements is true) if (Array.isArray(results)) { // Check if this looks like multiple statement results // Multiple statements return an array where each element might be an array of results if (results.length > 0 && Array.isArray(results[0]) && results[0].length > 0) { // This might be multiple statement results - flatten them let allRows: any[] = []; for (const result of results) { if (Array.isArray(result)) { allRows.push(...result); } } return { rows: allRows }; } else { // Single statement result - results is the rows array return { rows: results }; } } else { // Non-array result (like for INSERT/UPDATE/DELETE without RETURNING) return { rows: [] }; } } catch (error) { console.error("Error executing query:", error); throw error; } } } // Create and register the connector const mariadbConnector = new MariaDBConnector(); ConnectorRegistry.register(mariadbConnector); ``` -------------------------------------------------------------------------------- /src/connectors/__tests__/mariadb.integration.test.ts: -------------------------------------------------------------------------------- ```typescript import { describe, it, expect, beforeAll, afterAll } from 'vitest'; import { MariaDbContainer, StartedMariaDbContainer } from '@testcontainers/mariadb'; import { MariaDBConnector } from '../mariadb/index.js'; import { IntegrationTestBase, type TestContainer, type DatabaseTestConfig } from './shared/integration-test-base.js'; import type { Connector } from '../interface.js'; class MariaDBTestContainer implements TestContainer { constructor(private container: StartedMariaDbContainer) {} getConnectionUri(): string { return this.container.getConnectionUri(); } async stop(): Promise<void> { await this.container.stop(); } } class MariaDBIntegrationTest extends IntegrationTestBase<MariaDBTestContainer> { constructor() { const config: DatabaseTestConfig = { expectedSchemas: ['testdb', 'information_schema'], expectedTables: ['users', 'orders', 'products'], supportsStoredProcedures: false // Disabled due to container privilege restrictions }; super(config); } async createContainer(): Promise<MariaDBTestContainer> { const container = await new MariaDbContainer('mariadb:10.11') .withDatabase('testdb') 
.withRootPassword('rootpass') .start(); return new MariaDBTestContainer(container); } createConnector(): Connector { return new MariaDBConnector(); } createSSLTests(): void { describe('SSL Connection Tests', () => { it('should handle SSL mode disable connection', async () => { const baseUri = this.connectionString; const sslDisabledUri = baseUri.includes('?') ? `${baseUri}&sslmode=disable` : `${baseUri}?sslmode=disable`; const sslDisabledConnector = new MariaDBConnector(); // Should connect successfully with sslmode=disable await expect(sslDisabledConnector.connect(sslDisabledUri)).resolves.not.toThrow(); // Check SSL status - cipher should be empty when SSL is disabled const result = await sslDisabledConnector.executeSQL("SHOW SESSION STATUS LIKE 'Ssl_cipher'", {}); expect(result.rows).toHaveLength(1); expect(result.rows[0].Variable_name).toBe('Ssl_cipher'); expect(result.rows[0].Value).toBe(''); await sslDisabledConnector.disconnect(); }); it('should handle SSL mode require connection', async () => { const baseUri = this.connectionString; const sslRequiredUri = baseUri.includes('?') ? `${baseUri}&sslmode=require` : `${baseUri}?sslmode=require`; const sslRequiredConnector = new MariaDBConnector(); // In test containers, SSL may not be supported, so we expect either success or SSL not supported error try { // Add our own timeout to prevent test hanging const connectionPromise = sslRequiredConnector.connect(sslRequiredUri); const timeoutPromise = new Promise((_, reject) => setTimeout(() => reject(new Error('Connection timeout')), 3000) ); await Promise.race([connectionPromise, timeoutPromise]); // If connection succeeds, check SSL status - cipher should be non-empty when SSL is enabled const result = await sslRequiredConnector.executeSQL("SHOW SESSION STATUS LIKE 'Ssl_cipher'", {}); expect(result.rows).toHaveLength(1); expect(result.rows[0].Variable_name).toBe('Ssl_cipher'); expect(result.rows[0].Value).not.toBe(''); expect(result.rows[0].Value).toBeTruthy(); await sslRequiredConnector.disconnect(); } catch (error) { // If SSL is not supported by the test container, that's expected expect(error instanceof Error).toBe(true); expect((error as Error).message).toMatch(/SSL|does not support SSL|timeout|ETIMEDOUT|Connection timeout/); } }); }); } async setupTestData(connector: Connector): Promise<void> { // Create users table await connector.executeSQL(` CREATE TABLE IF NOT EXISTS users ( id INT AUTO_INCREMENT PRIMARY KEY, name VARCHAR(100) NOT NULL, email VARCHAR(100) UNIQUE NOT NULL, age INT ) `, {}); // Create orders table await connector.executeSQL(` CREATE TABLE IF NOT EXISTS orders ( id INT AUTO_INCREMENT PRIMARY KEY, user_id INT, total DECIMAL(10,2), created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY (user_id) REFERENCES users(id) ) `, {}); // Create products table in main database await connector.executeSQL(` CREATE TABLE IF NOT EXISTS products ( id INT AUTO_INCREMENT PRIMARY KEY, name VARCHAR(100) NOT NULL, price DECIMAL(10,2) ) `, {}); // Insert test data await connector.executeSQL(` INSERT IGNORE INTO users (name, email, age) VALUES ('John Doe', '[email protected]', 30), ('Jane Smith', '[email protected]', 25), ('Bob Johnson', '[email protected]', 35) `, {}); await connector.executeSQL(` INSERT IGNORE INTO orders (user_id, total) VALUES (1, 99.99), (1, 149.50), (2, 75.25) `, {}); await connector.executeSQL(` INSERT IGNORE INTO products (name, price) VALUES ('Widget A', 19.99), ('Widget B', 29.99) `, {}); // Note: Stored procedures/functions are skipped in tests due to 
container privilege restrictions } } // Create the test suite const mariadbTest = new MariaDBIntegrationTest(); describe('MariaDB Connector Integration Tests', () => { beforeAll(async () => { await mariadbTest.setup(); }, 120000); afterAll(async () => { await mariadbTest.cleanup(); }); // Include all common tests mariadbTest.createConnectionTests(); mariadbTest.createSchemaTests(); mariadbTest.createTableTests(); mariadbTest.createSQLExecutionTests(); if (mariadbTest.config.supportsStoredProcedures) { mariadbTest.createStoredProcedureTests(); } mariadbTest.createErrorHandlingTests(); mariadbTest.createSSLTests(); describe('MariaDB-specific Features', () => { it('should execute multiple statements with native support', async () => { // First insert the test data await mariadbTest.connector.executeSQL(` INSERT INTO users (name, email, age) VALUES ('Multi User 1', '[email protected]', 30); INSERT INTO users (name, email, age) VALUES ('Multi User 2', '[email protected]', 35); `, {}); // Then check the count const result = await mariadbTest.connector.executeSQL( "SELECT COUNT(*) as total FROM users WHERE email LIKE 'multi%'", {} ); expect(result.rows).toHaveLength(1); expect(Number(result.rows[0].total)).toBe(2); }); it('should handle MariaDB-specific data types', async () => { await mariadbTest.connector.executeSQL(` CREATE TABLE IF NOT EXISTS mariadb_types_test ( id INT AUTO_INCREMENT PRIMARY KEY, json_data JSON, timestamp_val TIMESTAMP DEFAULT CURRENT_TIMESTAMP, enum_val ENUM('small', 'medium', 'large') DEFAULT 'medium', bit_val BIT(8) DEFAULT b'00000001' ) `, {}); await mariadbTest.connector.executeSQL(` INSERT INTO mariadb_types_test (json_data, enum_val, bit_val) VALUES ('{"key": "value"}', 'large', b'11110000') `, {}); // Use a different approach to get the inserted row const result = await mariadbTest.connector.executeSQL( "SELECT * FROM mariadb_types_test WHERE enum_val = 'large' ORDER BY id DESC LIMIT 1", {} ); expect(result.rows).toHaveLength(1); expect(result.rows[0].enum_val).toBe('large'); expect(result.rows[0].json_data).toBeDefined(); expect(result.rows[0].bit_val).toBeDefined(); }); it('should handle MariaDB auto-increment properly', async () => { const insertResult = await mariadbTest.connector.executeSQL( "INSERT INTO users (name, email, age) VALUES ('Auto Inc Test', '[email protected]', 40)", {} ); expect(insertResult).toBeDefined(); const selectResult = await mariadbTest.connector.executeSQL( 'SELECT LAST_INSERT_ID() as last_id', {} ); expect(selectResult.rows).toHaveLength(1); expect(Number(selectResult.rows[0].last_id)).toBeGreaterThan(0); }); it('should work with MariaDB-specific functions', async () => { const result = await mariadbTest.connector.executeSQL(` SELECT VERSION() as mariadb_version, DATABASE() as current_db, USER() as current_user_info, NOW() as timestamp_val `, {}); expect(result.rows).toHaveLength(1); expect(result.rows[0].mariadb_version).toContain('MariaDB'); expect(result.rows[0].current_db).toBe('testdb'); expect(result.rows[0].current_user_info).toBeDefined(); expect(result.rows[0].timestamp_val).toBeDefined(); }); it('should handle MariaDB transactions correctly', async () => { // Test explicit transaction await mariadbTest.connector.executeSQL(` START TRANSACTION; INSERT INTO users (name, email, age) VALUES ('Transaction Test 1', '[email protected]', 45); INSERT INTO users (name, email, age) VALUES ('Transaction Test 2', '[email protected]', 50); COMMIT; `, {}); const result = await mariadbTest.connector.executeSQL( "SELECT COUNT(*) as count FROM 
users WHERE email LIKE 'trans%@example.com'", {} ); expect(Number(result.rows[0].count)).toBe(2); }); it('should handle MariaDB rollback correctly', async () => { // Get initial count const beforeResult = await mariadbTest.connector.executeSQL( "SELECT COUNT(*) as count FROM users WHERE email = '[email protected]'", {} ); const beforeCount = Number(beforeResult.rows[0].count); // Test rollback await mariadbTest.connector.executeSQL(` START TRANSACTION; INSERT INTO users (name, email, age) VALUES ('Rollback Test', '[email protected]', 55); ROLLBACK; `, {}); const afterResult = await mariadbTest.connector.executeSQL( "SELECT COUNT(*) as count FROM users WHERE email = '[email protected]'", {} ); const afterCount = Number(afterResult.rows[0].count); expect(afterCount).toBe(beforeCount); }); it('should handle MariaDB-specific storage engines', async () => { await mariadbTest.connector.executeSQL(` CREATE TABLE IF NOT EXISTS engine_test ( id INT AUTO_INCREMENT PRIMARY KEY, data VARCHAR(100) ) ENGINE=InnoDB `, {}); await mariadbTest.connector.executeSQL(` INSERT INTO engine_test (data) VALUES ('InnoDB test data') `, {}); const result = await mariadbTest.connector.executeSQL(` SELECT TABLE_NAME, ENGINE FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME = 'engine_test' `, {}); expect(result.rows).toHaveLength(1); expect(result.rows[0].ENGINE).toBe('InnoDB'); }); it('should handle MariaDB virtual and computed columns', async () => { await mariadbTest.connector.executeSQL(` CREATE TABLE IF NOT EXISTS computed_test ( id INT AUTO_INCREMENT PRIMARY KEY, first_name VARCHAR(50), last_name VARCHAR(50), full_name VARCHAR(101) AS (CONCAT(first_name, ' ', last_name)) VIRTUAL ) `, {}); await mariadbTest.connector.executeSQL(` INSERT INTO computed_test (first_name, last_name) VALUES ('John', 'Doe'), ('Jane', 'Smith') `, {}); const result = await mariadbTest.connector.executeSQL(` SELECT first_name, last_name, full_name FROM computed_test ORDER BY id `, {}); expect(result.rows).toHaveLength(2); expect(result.rows[0].full_name).toBe('John Doe'); expect(result.rows[1].full_name).toBe('Jane Smith'); }); it('should handle MariaDB sequence functionality', async () => { // MariaDB supports sequences similar to PostgreSQL await mariadbTest.connector.executeSQL(` CREATE SEQUENCE IF NOT EXISTS test_seq START WITH 100 INCREMENT BY 5 MAXVALUE 1000 `, {}); const result = await mariadbTest.connector.executeSQL(` SELECT NEXT VALUE FOR test_seq as next_val1, NEXT VALUE FOR test_seq as next_val2 `, {}); expect(result.rows).toHaveLength(1); expect(Number(result.rows[0].next_val1)).toBe(100); expect(Number(result.rows[0].next_val2)).toBe(105); }); it('should respect maxRows limit for SELECT queries', async () => { // Test basic SELECT with maxRows limit const result = await mariadbTest.connector.executeSQL( 'SELECT * FROM users ORDER BY id', { maxRows: 2 } ); expect(result.rows).toHaveLength(2); expect(result.rows[0]).toHaveProperty('name'); expect(result.rows[1]).toHaveProperty('name'); }); it('should respect existing LIMIT clause when lower than maxRows', async () => { // Test when existing LIMIT is lower than maxRows const result = await mariadbTest.connector.executeSQL( 'SELECT * FROM users ORDER BY id LIMIT 1', { maxRows: 3 } ); expect(result.rows).toHaveLength(1); expect(result.rows[0]).toHaveProperty('name'); }); it('should use maxRows when existing LIMIT is higher', async () => { // Test when existing LIMIT is higher than maxRows const result = await mariadbTest.connector.executeSQL( 'SELECT * 
FROM users ORDER BY id LIMIT 10', { maxRows: 2 } ); expect(result.rows).toHaveLength(2); expect(result.rows[0]).toHaveProperty('name'); expect(result.rows[1]).toHaveProperty('name'); }); it('should not affect non-SELECT queries', async () => { // Test that maxRows doesn't affect INSERT/UPDATE/DELETE const insertResult = await mariadbTest.connector.executeSQL( "INSERT INTO users (name, email, age) VALUES ('MaxRows Test', '[email protected]', 25)", { maxRows: 1 } ); expect(insertResult.rows).toHaveLength(0); // INSERTs don't return rows by default // Verify the insert worked const selectResult = await mariadbTest.connector.executeSQL( "SELECT * FROM users WHERE email = '[email protected]'", {} ); expect(selectResult.rows).toHaveLength(1); expect(selectResult.rows[0].name).toBe('MaxRows Test'); }); it('should handle maxRows with complex queries', async () => { // Test maxRows with JOIN queries const result = await mariadbTest.connector.executeSQL(` SELECT u.name, o.total FROM users u JOIN orders o ON u.id = o.user_id ORDER BY o.total DESC `, { maxRows: 2 }); expect(result.rows.length).toBeLessThanOrEqual(2); expect(result.rows.length).toBeGreaterThan(0); expect(result.rows[0]).toHaveProperty('name'); expect(result.rows[0]).toHaveProperty('total'); }); it('should handle maxRows with multiple SELECT statements', async () => { // Test maxRows with multiple SELECT statements only const result = await mariadbTest.connector.executeSQL(` SELECT name FROM users WHERE age > 20 ORDER BY name LIMIT 10; SELECT name FROM users WHERE age > 25 ORDER BY name LIMIT 10; `, { maxRows: 1 }); // Should return only 1 row from each SELECT statement (due to maxRows limit) // MariaDB multi-statement may return more complex results, so we check that maxRows was applied expect(result.rows.length).toBeGreaterThan(0); expect(result.rows.length).toBeLessThanOrEqual(2); // At most 1 from each SELECT if (result.rows.length > 0) { expect(result.rows[0]).toHaveProperty('name'); } }); it('should ignore maxRows when not specified', async () => { // Test without maxRows - should return all rows const result = await mariadbTest.connector.executeSQL( 'SELECT * FROM users ORDER BY id', {} ); // Should return all users (at least the original 3 plus any added in previous tests) expect(result.rows.length).toBeGreaterThanOrEqual(3); }); }); }); ``` -------------------------------------------------------------------------------- /src/connectors/mysql/index.ts: -------------------------------------------------------------------------------- ```typescript import mysql from "mysql2/promise"; import { Connector, ConnectorType, ConnectorRegistry, DSNParser, SQLResult, TableColumn, TableIndex, StoredProcedure, ExecuteOptions, } from "../interface.js"; import { SafeURL } from "../../utils/safe-url.js"; import { obfuscateDSNPassword } from "../../utils/dsn-obfuscate.js"; import { SQLRowLimiter } from "../../utils/sql-row-limiter.js"; /** * MySQL DSN Parser * Handles DSN strings like: mysql://user:password@localhost:3306/dbname?sslmode=require * Supported SSL modes: * - sslmode=disable: No SSL connection * - sslmode=require: SSL connection without certificate verification * - Any other value: Standard SSL connection with certificate verification */ class MySQLDSNParser implements DSNParser { async parse(dsn: string): Promise<mysql.ConnectionOptions> { // Basic validation if (!this.isValidDSN(dsn)) { const obfuscatedDSN = obfuscateDSNPassword(dsn); const expectedFormat = this.getSampleDSN(); throw new Error( `Invalid MySQL DSN format.\nProvided: 
${obfuscatedDSN}\nExpected: ${expectedFormat}` ); } try { // Use the SafeURL helper instead of the built-in URL // This will handle special characters in passwords, etc. const url = new SafeURL(dsn); const config: mysql.ConnectionOptions = { host: url.hostname, port: url.port ? parseInt(url.port) : 3306, database: url.pathname ? url.pathname.substring(1) : '', // Remove leading '/' if exists user: url.username, password: url.password, multipleStatements: true, // Enable native multi-statement support }; // Handle query parameters url.forEachSearchParam((value, key) => { if (key === "sslmode") { if (value === "disable") { config.ssl = undefined; } else if (value === "require") { config.ssl = { rejectUnauthorized: false }; } else { config.ssl = {}; } } // Add other parameters as needed }); return config; } catch (error) { throw new Error( `Failed to parse MySQL DSN: ${error instanceof Error ? error.message : String(error)}` ); } } getSampleDSN(): string { return "mysql://root:password@localhost:3306/mysql?sslmode=require"; } isValidDSN(dsn: string): boolean { try { return dsn.startsWith('mysql://'); } catch (error) { return false; } } } /** * MySQL Connector Implementation */ export class MySQLConnector implements Connector { id: ConnectorType = "mysql"; name = "MySQL"; dsnParser = new MySQLDSNParser(); private pool: mysql.Pool | null = null; async connect(dsn: string): Promise<void> { try { const config = await this.dsnParser.parse(dsn); this.pool = mysql.createPool(config); // Test the connection const [rows] = await this.pool.query("SELECT 1"); console.error("Successfully connected to MySQL database"); } catch (err) { console.error("Failed to connect to MySQL database:", err); throw err; } } async disconnect(): Promise<void> { if (this.pool) { await this.pool.end(); this.pool = null; } } async getSchemas(): Promise<string[]> { if (!this.pool) { throw new Error("Not connected to database"); } try { // In MySQL, schemas are equivalent to databases const [rows] = (await this.pool.query(` SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA ORDER BY SCHEMA_NAME `)) as [any[], any]; return rows.map((row) => row.SCHEMA_NAME); } catch (error) { console.error("Error getting schemas:", error); throw error; } } async getTables(schema?: string): Promise<string[]> { if (!this.pool) { throw new Error("Not connected to database"); } try { // In MySQL, if no schema is provided, use the current active database (DATABASE()) // MySQL uses the terms 'database' and 'schema' interchangeably // The DATABASE() function returns the current database context const schemaClause = schema ? "WHERE TABLE_SCHEMA = ?" : "WHERE TABLE_SCHEMA = DATABASE()"; const queryParams = schema ? [schema] : []; // Get all tables from the specified schema or current database const [rows] = (await this.pool.query( ` SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES ${schemaClause} ORDER BY TABLE_NAME `, queryParams )) as [any[], any]; return rows.map((row) => row.TABLE_NAME); } catch (error) { console.error("Error getting tables:", error); throw error; } } async tableExists(tableName: string, schema?: string): Promise<boolean> { if (!this.pool) { throw new Error("Not connected to database"); } try { // In MySQL, if no schema is provided, use the current active database // DATABASE() function returns the name of the current database const schemaClause = schema ? "WHERE TABLE_SCHEMA = ?" : "WHERE TABLE_SCHEMA = DATABASE()"; const queryParams = schema ? 
[schema, tableName] : [tableName]; const [rows] = (await this.pool.query( ` SELECT COUNT(*) AS COUNT FROM INFORMATION_SCHEMA.TABLES ${schemaClause} AND TABLE_NAME = ? `, queryParams )) as [any[], any]; return rows[0].COUNT > 0; } catch (error) { console.error("Error checking if table exists:", error); throw error; } } async getTableIndexes(tableName: string, schema?: string): Promise<TableIndex[]> { if (!this.pool) { throw new Error("Not connected to database"); } try { // In MySQL, if no schema is provided, use the current active database const schemaClause = schema ? "TABLE_SCHEMA = ?" : "TABLE_SCHEMA = DATABASE()"; const queryParams = schema ? [schema, tableName] : [tableName]; // Get information about indexes const [indexRows] = (await this.pool.query( ` SELECT INDEX_NAME, COLUMN_NAME, NON_UNIQUE, SEQ_IN_INDEX FROM INFORMATION_SCHEMA.STATISTICS WHERE ${schemaClause} AND TABLE_NAME = ? ORDER BY INDEX_NAME, SEQ_IN_INDEX `, queryParams )) as [any[], any]; // Process the results to group columns by index const indexMap = new Map< string, { columns: string[]; is_unique: boolean; is_primary: boolean; } >(); for (const row of indexRows) { const indexName = row.INDEX_NAME; const columnName = row.COLUMN_NAME; const isUnique = row.NON_UNIQUE === 0; // In MySQL, NON_UNIQUE=0 means the index is unique const isPrimary = indexName === "PRIMARY"; if (!indexMap.has(indexName)) { indexMap.set(indexName, { columns: [], is_unique: isUnique, is_primary: isPrimary, }); } const indexInfo = indexMap.get(indexName)!; indexInfo.columns.push(columnName); } // Convert the map to the expected TableIndex format const results: TableIndex[] = []; indexMap.forEach((indexInfo, indexName) => { results.push({ index_name: indexName, column_names: indexInfo.columns, is_unique: indexInfo.is_unique, is_primary: indexInfo.is_primary, }); }); return results; } catch (error) { console.error("Error getting table indexes:", error); throw error; } } async getTableSchema(tableName: string, schema?: string): Promise<TableColumn[]> { if (!this.pool) { throw new Error("Not connected to database"); } try { // In MySQL, schema is synonymous with database // If no schema is provided, use the current database context via DATABASE() function // This means tables will be retrieved from whatever database the connection is currently using const schemaClause = schema ? "WHERE TABLE_SCHEMA = ?" : "WHERE TABLE_SCHEMA = DATABASE()"; const queryParams = schema ? [schema, tableName] : [tableName]; // Get table columns const [rows] = (await this.pool.query( ` SELECT COLUMN_NAME as column_name, DATA_TYPE as data_type, IS_NULLABLE as is_nullable, COLUMN_DEFAULT as column_default FROM INFORMATION_SCHEMA.COLUMNS ${schemaClause} AND TABLE_NAME = ? ORDER BY ORDINAL_POSITION `, queryParams )) as [any[], any]; return rows; } catch (error) { console.error("Error getting table schema:", error); throw error; } } async getStoredProcedures(schema?: string): Promise<string[]> { if (!this.pool) { throw new Error("Not connected to database"); } try { // In MySQL, if no schema is provided, use the current database context const schemaClause = schema ? "WHERE ROUTINE_SCHEMA = ?" : "WHERE ROUTINE_SCHEMA = DATABASE()"; const queryParams = schema ? 
[schema] : []; // Get all stored procedures and functions const [rows] = (await this.pool.query( ` SELECT ROUTINE_NAME FROM INFORMATION_SCHEMA.ROUTINES ${schemaClause} ORDER BY ROUTINE_NAME `, queryParams )) as [any[], any]; return rows.map((row) => row.ROUTINE_NAME); } catch (error) { console.error("Error getting stored procedures:", error); throw error; } } async getStoredProcedureDetail(procedureName: string, schema?: string): Promise<StoredProcedure> { if (!this.pool) { throw new Error("Not connected to database"); } try { // In MySQL, if no schema is provided, use the current database context const schemaClause = schema ? "WHERE r.ROUTINE_SCHEMA = ?" : "WHERE r.ROUTINE_SCHEMA = DATABASE()"; const queryParams = schema ? [schema, procedureName] : [procedureName]; // Get details of the stored procedure const [rows] = (await this.pool.query( ` SELECT r.ROUTINE_NAME AS procedure_name, CASE WHEN r.ROUTINE_TYPE = 'PROCEDURE' THEN 'procedure' ELSE 'function' END AS procedure_type, LOWER(r.ROUTINE_TYPE) AS routine_type, r.ROUTINE_DEFINITION, r.DTD_IDENTIFIER AS return_type, ( SELECT GROUP_CONCAT( CONCAT(p.PARAMETER_NAME, ' ', p.PARAMETER_MODE, ' ', p.DATA_TYPE) ORDER BY p.ORDINAL_POSITION SEPARATOR ', ' ) FROM INFORMATION_SCHEMA.PARAMETERS p WHERE p.SPECIFIC_SCHEMA = r.ROUTINE_SCHEMA AND p.SPECIFIC_NAME = r.ROUTINE_NAME AND p.PARAMETER_NAME IS NOT NULL ) AS parameter_list FROM INFORMATION_SCHEMA.ROUTINES r ${schemaClause} AND r.ROUTINE_NAME = ? `, queryParams )) as [any[], any]; if (rows.length === 0) { const schemaName = schema || "current schema"; throw new Error(`Stored procedure '${procedureName}' not found in ${schemaName}`); } const procedure = rows[0]; // If ROUTINE_DEFINITION is NULL, try to get the procedure body from mysql.proc let definition = procedure.ROUTINE_DEFINITION; try { const schemaValue = schema || (await this.getCurrentSchema()); // For full definition - different approaches based on type if (procedure.procedure_type === "procedure") { // Try to get the definition from SHOW CREATE PROCEDURE try { const [defRows] = (await this.pool.query(` SHOW CREATE PROCEDURE ${schemaValue}.${procedureName} `)) as [any[], any]; if (defRows && defRows.length > 0) { definition = defRows[0]["Create Procedure"]; } } catch (err) { console.error(`Error getting procedure definition with SHOW CREATE: ${err}`); } } else { // Try to get the definition for functions try { const [defRows] = (await this.pool.query(` SHOW CREATE FUNCTION ${schemaValue}.${procedureName} `)) as [any[], any]; if (defRows && defRows.length > 0) { definition = defRows[0]["Create Function"]; } } catch (innerErr) { console.error(`Error getting function definition with SHOW CREATE: ${innerErr}`); } } // Last attempt - try to get from information_schema.routines if not found yet if (!definition) { const [bodyRows] = (await this.pool.query( ` SELECT ROUTINE_DEFINITION, ROUTINE_BODY FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_SCHEMA = ? AND ROUTINE_NAME = ? 
`, [schemaValue, procedureName] )) as [any[], any]; if (bodyRows && bodyRows.length > 0) { if (bodyRows[0].ROUTINE_DEFINITION) { definition = bodyRows[0].ROUTINE_DEFINITION; } else if (bodyRows[0].ROUTINE_BODY) { definition = bodyRows[0].ROUTINE_BODY; } } } } catch (error) { // Ignore errors when getting definition - it's optional console.error(`Error getting procedure/function details: ${error}`); } return { procedure_name: procedure.procedure_name, procedure_type: procedure.procedure_type, language: "sql", // MySQL procedures are generally in SQL parameter_list: procedure.parameter_list || "", return_type: procedure.routine_type === "function" ? procedure.return_type : undefined, definition: definition || undefined, }; } catch (error) { console.error("Error getting stored procedure detail:", error); throw error; } } // Helper method to get current schema (database) name private async getCurrentSchema(): Promise<string> { const [rows] = (await this.pool!.query("SELECT DATABASE() AS DB")) as [any[], any]; return rows[0].DB; } async executeSQL(sql: string, options: ExecuteOptions): Promise<SQLResult> { if (!this.pool) { throw new Error("Not connected to database"); } try { // Apply maxRows limit to SELECT queries if specified let processedSQL = sql; if (options.maxRows) { // Handle multi-statement SQL by processing each statement individually const statements = sql.split(';') .map(statement => statement.trim()) .filter(statement => statement.length > 0); const processedStatements = statements.map(statement => SQLRowLimiter.applyMaxRows(statement, options.maxRows) ); processedSQL = processedStatements.join('; '); if (sql.trim().endsWith(';')) { processedSQL += ';'; } } // Use pool.query with multipleStatements: true support const results = await this.pool.query(processedSQL) as any; // MySQL2 with multipleStatements returns: // - Single statement: [rows, fields] // - Multiple statements: [array_of_results, fields] where array_of_results contains [rows, fields] for each statement const [firstResult] = results; // Check if this is a multi-statement result if (Array.isArray(firstResult) && firstResult.length > 0 && Array.isArray(firstResult[0])) { // Multiple statements - firstResult is an array of results let allRows: any[] = []; for (const result of firstResult) { // Each result is either a ResultSetHeader object (for INSERT/UPDATE/DELETE) // or an array of rows (for SELECT) if (Array.isArray(result)) { // This is a rows array from a SELECT query allRows.push(...result); } // Skip non-array results (ResultSetHeader objects) } return { rows: allRows }; } else { // Single statement - firstResult is the rows array directly return { rows: Array.isArray(firstResult) ? firstResult : [] }; } } catch (error) { console.error("Error executing query:", error); throw error; } } } // Create and register the connector const mysqlConnector = new MySQLConnector(); ConnectorRegistry.register(mysqlConnector); ```
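For orientation, the sketch below shows how one of the connectors defined above might be driven end to end: construct it, connect with a DSN (the `sslmode` query parameter is interpreted by the DSN parsers as documented in their headers), run a capped SELECT via `executeSQL` with `ExecuteOptions.maxRows`, and disconnect. This is a minimal illustration only; the import path, DSN, and table name are assumptions, not part of the repository.

```typescript
// Minimal usage sketch (assumptions: the import path, DSN, and table name are illustrative).
import { MySQLConnector } from "./src/connectors/mysql/index.js";

async function main(): Promise<void> {
  const connector = new MySQLConnector();

  // DSN format follows MySQLDSNParser.getSampleDSN():
  //   sslmode=disable -> no SSL, sslmode=require -> SSL without certificate verification.
  await connector.connect("mysql://root:password@localhost:3306/mysql?sslmode=disable");

  try {
    // The schema argument is optional; without it the connector falls back to DATABASE().
    const tables = await connector.getTables();
    console.log("tables:", tables);

    // maxRows caps SELECT statements by rewriting them through SQLRowLimiter.
    const result = await connector.executeSQL("SELECT * FROM users ORDER BY id", { maxRows: 10 });
    console.log("rows:", result.rows);
  } finally {
    await connector.disconnect();
  }
}

main().catch((error) => {
  console.error(error);
  process.exit(1);
});
```

The same shape applies to `MariaDBConnector` and `SQLServerConnector`, since each implements the shared `Connector` interface from `src/connectors/interface.ts` and is registered with `ConnectorRegistry` at module load.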