# bytebase/dbhub
This is page 1 of 7. Use http://codebase.md/bytebase/dbhub?lines=false&page={x} to view the full context.

# Directory Structure

```
├── .dockerignore
├── .env.example
├── .github
│   ├── CODEOWNERS
│   ├── copilot-instructions.md
│   └── workflows
│       ├── docker-publish.yml
│       ├── npm-publish.yml
│       └── run-tests.yml
├── .gitignore
├── .husky
│   └── pre-commit
├── .npmrc
├── .prettierrc.json
├── bun.lock
├── CLAUDE.md
├── Dockerfile
├── LICENSE
├── llms-full.txt
├── package.json
├── pnpm-lock.yaml
├── pnpm-workspace.yaml
├── README.md
├── resources
│   ├── employee-sqlite
│   │   ├── employee.sql
│   │   ├── load_department.sql
│   │   ├── load_dept_emp.sql
│   │   ├── load_dept_manager.sql
│   │   ├── load_employee.sql
│   │   ├── load_salary1.sql
│   │   ├── load_title.sql
│   │   ├── object.sql
│   │   ├── show_elapsed.sql
│   │   └── test_employee_md5.sql
│   └── images
│       ├── claude-desktop.webp
│       ├── cursor.webp
│       ├── logo-full.svg
│       ├── logo-full.webp
│       ├── logo-icon-only.svg
│       ├── logo-text-only.svg
│       └── mcp-inspector.webp
├── scripts
│   └── setup-husky.sh
├── src
│   ├── __tests__
│   │   └── json-rpc-integration.test.ts
│   ├── config
│   │   ├── __tests__
│   │   │   ├── env.test.ts
│   │   │   └── ssh-config-integration.test.ts
│   │   ├── demo-loader.ts
│   │   └── env.ts
│   ├── connectors
│   │   ├── __tests__
│   │   │   ├── mariadb.integration.test.ts
│   │   │   ├── mysql.integration.test.ts
│   │   │   ├── postgres-ssh.integration.test.ts
│   │   │   ├── postgres.integration.test.ts
│   │   │   ├── shared
│   │   │   │   └── integration-test-base.ts
│   │   │   ├── sqlite.integration.test.ts
│   │   │   └── sqlserver.integration.test.ts
│   │   ├── interface.ts
│   │   ├── manager.ts
│   │   ├── mariadb
│   │   │   └── index.ts
│   │   ├── mysql
│   │   │   └── index.ts
│   │   ├── postgres
│   │   │   └── index.ts
│   │   ├── sqlite
│   │   │   └── index.ts
│   │   └── sqlserver
│   │       └── index.ts
│   ├── index.ts
│   ├── prompts
│   │   ├── db-explainer.ts
│   │   ├── index.ts
│   │   └── sql-generator.ts
│   ├── resources
│   │   ├── index.ts
│   │   ├── indexes.ts
│   │   ├── procedures.ts
│   │   ├── schema.ts
│   │   ├── schemas.ts
│   │   └── tables.ts
│   ├── server.ts
│   ├── tools
│   │   ├── __tests__
│   │   │   └── execute-sql.test.ts
│   │   ├── execute-sql.ts
│   │   └── index.ts
│   ├── types
│   │   ├── sql.ts
│   │   └── ssh.ts
│   └── utils
│       ├── __tests__
│       │   ├── safe-url.test.ts
│       │   ├── ssh-config-parser.test.ts
│       │   └── ssh-tunnel.test.ts
│       ├── allowed-keywords.ts
│       ├── dsn-obfuscate.ts
│       ├── response-formatter.ts
│       ├── safe-url.ts
│       ├── sql-row-limiter.ts
│       ├── ssh-config-parser.ts
│       └── ssh-tunnel.ts
├── tsconfig.json
├── tsup.config.ts
└── vitest.config.ts
```

# Files

--------------------------------------------------------------------------------
/.npmrc:
--------------------------------------------------------------------------------

```
# Skip husky install when installing in CI or production environments
ignore-scripts=false
engine-strict=true
hoist=true
enable-pre-post-scripts=true
auto-install-peers=true
```

--------------------------------------------------------------------------------
/.prettierrc.json:
--------------------------------------------------------------------------------

```json
{
  "printWidth": 100,
  "tabWidth": 2,
  "useTabs": false,
  "semi": true,
  "singleQuote": false,
  "trailingComma": "es5",
  "bracketSpacing": true,
  "arrowParens": "always"
}

```

--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------

```
# Git
.git
.gitignore
.github

# Node.js
node_modules
npm-debug.log
yarn-debug.log
yarn-error.log
pnpm-debug.log

# Build output
dist
node_modules

# Environment
.env
.env.*
!.env.example

# Editor directories and files
.vscode
.idea
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

# OS generated files
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db

# Logs
logs
*.log

# Docker
Dockerfile
.dockerignore

# Project specific
*.md
!README.md
LICENSE
CLAUDE.md
```

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------

```
# Dependencies
node_modules/
.pnp/
.pnp.js
vendor/
jspm_packages/
bower_components/

# Build outputs
build/
dist/
out/
*.min.js
*.min.css

# Environment & config
.env
.env.local
.env.development.local
.env.test.local
.env.production.local
.venv
env/
venv/
ENV/
config.local.js
*.local.json

# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*

# Cache and temp
.npm
.eslintcache
.stylelintcache
.cache/
.parcel-cache/
.next/
.nuxt/
.vuepress/dist
.serverless/
.fusebox/
.dynamodb/
.grunt
.temp
.tmp
.sass-cache/
__pycache__/
*.py[cod]
*$py.class
.pytest_cache/
.coverage
htmlcov/
coverage/
.nyc_output/

# OS files
.DS_Store
Thumbs.db
ehthumbs.db
Desktop.ini
$RECYCLE.BIN/
*.lnk

# Editor directories and files
.idea/
.vscode/
*.swp
*.swo
*~
.*.sw[a-p]
*.sublime-workspace
*.sublime-project

# Compiled binaries
*.com
*.class
*.dll
*.exe
*.o
*.so 
```

--------------------------------------------------------------------------------
/.env.example:
--------------------------------------------------------------------------------

```
# DBHub Configuration

# Method 1: Connection String (DSN)
# Use one of these DSN formats:
# DSN=postgres://user:password@localhost:5432/dbname
# DSN=sqlite:///path/to/database.db
# DSN=sqlite::memory:
# DSN=sqlserver://user:password@localhost:1433/dbname
# DSN=mysql://user:password@localhost:3306/dbname
DSN=

# Method 2: Individual Database Parameters
# Use this method if your password contains special characters like @, :, /, #, etc.
# that would break URL parsing in the DSN format above
# DB_TYPE=postgres
# DB_HOST=localhost
# DB_PORT=5432
# DB_USER=postgres
# DB_PASSWORD=my@password:with/special#chars
# DB_NAME=mydatabase

# Supported DB_TYPE values: postgres, mysql, mariadb, sqlserver, sqlite
# DB_PORT is optional - defaults to standard port for each database type
# For SQLite: only DB_TYPE and DB_NAME are required (DB_NAME is the file path)

# Transport configuration
# --transport=stdio (default) for stdio transport
# --transport=sse for SSE transport with HTTP server
TRANSPORT=stdio

# Server port for SSE transport (default: 3000)
PORT=3000

# SSH Tunnel Configuration (optional)
# Use these settings to connect through an SSH bastion host
# SSH_HOST=bastion.example.com
# SSH_PORT=22
# SSH_USER=ubuntu
# SSH_PASSWORD=mypassword
# SSH_KEY=~/.ssh/id_rsa
# SSH_PASSPHRASE=mykeypassphrase

# Read-only mode (optional)
# Set to true to restrict SQL execution to read-only operations
# READONLY=false
```

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------

```markdown
> [!NOTE]  
> Brought to you by [Bytebase](https://www.bytebase.com/), the open-source database DevSecOps platform.

<p align="center">
<a href="https://dbhub.ai/" target="_blank">
<picture>
  <img src="https://raw.githubusercontent.com/bytebase/dbhub/main/resources/images/logo-full.webp" width="50%">
</picture>
</a>
</p>

<p align="center">
  <a href="https://discord.gg/BjEkZpsJzn"><img src="https://img.shields.io/badge/%20-Hang%20out%20on%20Discord-5865F2?style=for-the-badge&logo=discord&labelColor=EEEEEE" alt="Join our Discord" height="32" /></a>
</p>

<p>
Add to Cursor by copying the link below into your browser:

```text
cursor://anysphere.cursor-deeplink/mcp/install?name=dbhub&config=eyJjb21tYW5kIjoibnB4IEBieXRlYmFzZS9kYmh1YiIsImVudiI6eyJUUkFOU1BPUlQiOiJzdGRpbyIsIkRTTiI6InBvc3RncmVzOi8vdXNlcjpwYXNzd29yZEBsb2NhbGhvc3Q6NTQzMi9kYm5hbWU%2Fc3NsbW9kZT1kaXNhYmxlIiwiUkVBRE9OTFkiOiJ0cnVlIn19
```

</p>

DBHub is a universal database gateway implementing the Model Context Protocol (MCP) server interface. This gateway allows MCP-compatible clients to connect to and explore different databases.

```text
 +------------------+    +--------------+    +------------------+
 |                  |    |              |    |                  |
 |                  |    |              |    |                  |
 |  Claude Desktop  +--->+              +--->+    PostgreSQL    |
 |                  |    |              |    |                  |
 |  Claude Code     +--->+              +--->+    SQL Server    |
 |                  |    |              |    |                  |
 |  Cursor          +--->+    DBHub     +--->+    SQLite        |
 |                  |    |              |    |                  |
 |  Other Clients   +--->+              +--->+    MySQL         |
 |                  |    |              |    |                  |
 |                  |    |              +--->+    MariaDB       |
 |                  |    |              |    |                  |
 |                  |    |              |    |                  |
 +------------------+    +--------------+    +------------------+
      MCP Clients           MCP Server             Databases
```
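
For a sense of the client side, here is a minimal sketch that spawns DBHub over stdio with the official MCP TypeScript SDK and calls the `execute_sql` tool. It assumes demo mode and its bundled `employee` table; the client name and query are illustrative.

```typescript
import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js";

async function main() {
  // Spawn DBHub in demo mode as a child process speaking MCP over stdio.
  const transport = new StdioClientTransport({
    command: "npx",
    args: ["-y", "@bytebase/dbhub", "--transport", "stdio", "--demo"],
  });
  const client = new Client({ name: "dbhub-example", version: "0.0.1" });
  await client.connect(transport);

  // Discover the exposed tools (execute_sql by default).
  const { tools } = await client.listTools();
  console.log(tools.map((t) => t.name));

  // Run a query against the bundled demo database.
  const result = await client.callTool({
    name: "execute_sql",
    arguments: { sql: "SELECT COUNT(*) AS n FROM employee;" },
  });
  console.log(result.content);

  await client.close();
}

main().catch(console.error);
```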

## Supported Matrix

### Database Resources

| Resource Name               | URI Format                                             | PostgreSQL | MySQL | MariaDB | SQL Server | SQLite |
| --------------------------- | ------------------------------------------------------ | :--------: | :---: | :-----: | :--------: | :----: |
| schemas                     | `db://schemas`                                         |     ✅     |  ✅   |   ✅    |     ✅     |   ✅   |
| tables_in_schema            | `db://schemas/{schemaName}/tables`                     |     ✅     |  ✅   |   ✅    |     ✅     |   ✅   |
| table_structure_in_schema   | `db://schemas/{schemaName}/tables/{tableName}`         |     ✅     |  ✅   |   ✅    |     ✅     |   ✅   |
| indexes_in_table            | `db://schemas/{schemaName}/tables/{tableName}/indexes` |     ✅     |  ✅   |   ✅    |     ✅     |   ✅   |
| procedures_in_schema        | `db://schemas/{schemaName}/procedures`                 |     ✅     |  ✅   |   ✅    |     ✅     |   ❌   |
| procedure_details_in_schema | `db://schemas/{schemaName}/procedures/{procedureName}` |     ✅     |  ✅   |   ✅    |     ✅     |   ❌   |

### Database Tools

| Tool        | Command Name  | Description                                                         | PostgreSQL | MySQL | MariaDB | SQL Server | SQLite |
| ----------- | ------------- | ------------------------------------------------------------------- | :--------: | :---: | :-----: | :--------: | :----: |
| Execute SQL | `execute_sql` | Execute single or multiple SQL statements (separated by semicolons) |     ✅     |  ✅   |   ✅    |     ✅     | ✅     |

### Prompt Capabilities

| Prompt              | Command Name   | PostgreSQL | MySQL | MariaDB | SQL Server | SQLite |
| ------------------- | -------------- | :--------: | :---: | :-----: | :--------: | :----: |
| Generate SQL        | `generate_sql` |     ✅     |  ✅   |   ✅    |     ✅     | ✅     |
| Explain DB Elements | `explain_db`   |     ✅     |  ✅   |   ✅    |     ✅     | ✅     |

## Installation

### Docker

```bash
# PostgreSQL example
docker run --rm --init \
   --name dbhub \
   --publish 8080:8080 \
   bytebase/dbhub \
   --transport http \
   --port 8080 \
   --dsn "postgres://user:password@localhost:5432/dbname?sslmode=disable"
```

```bash
# Demo mode with SQLite sample employee database
docker run --rm --init \
   --name dbhub \
   --publish 8080:8080 \
   bytebase/dbhub \
   --transport http \
   --port 8080 \
   --demo
```

**Docker Compose Setup:**

If you're using Docker Compose for development, add DBHub to your `docker-compose.yml`:

```yaml
dbhub:
  image: bytebase/dbhub:latest
  container_name: dbhub
  ports:
    - "8080:8080"
  environment:
    - DBHUB_LOG_LEVEL=info
  command:
    - --transport
    - http
    - --port
    - "8080"
    - --dsn
    - "postgres://user:password@database:5432/dbname"
  depends_on:
    - database
```

### NPM

```bash
# PostgreSQL example
npx @bytebase/dbhub --transport http --port 8080 --dsn "postgres://user:password@localhost:5432/dbname?sslmode=disable"

# Demo mode with SQLite sample employee database
npx @bytebase/dbhub --transport http --port 8080 --demo
```

> Note: The demo mode includes a bundled SQLite sample "employee" database with tables for employees, departments, salaries, and more.

### Claude Desktop

![claude-desktop](https://raw.githubusercontent.com/bytebase/dbhub/main/resources/images/claude-desktop.webp)

- Claude Desktop only supports the `stdio` transport; see https://github.com/orgs/modelcontextprotocol/discussions/16

```json
// claude_desktop_config.json
{
  "mcpServers": {
    "dbhub-postgres-docker": {
      "command": "docker",
      "args": [
        "run",
        "-i",
        "--rm",
        "bytebase/dbhub",
        "--transport",
        "stdio",
        "--dsn",
        // Use host.docker.internal as the host if connecting to the local db
        "postgres://user:[email protected]:5432/dbname?sslmode=disable"
      ]
    },
    "dbhub-postgres-npx": {
      "command": "npx",
      "args": [
        "-y",
        "@bytebase/dbhub",
        "--transport",
        "stdio",
        "--dsn",
        "postgres://user:password@localhost:5432/dbname?sslmode=disable"
      ]
    },
    "dbhub-demo": {
      "command": "npx",
      "args": ["-y", "@bytebase/dbhub", "--transport", "stdio", "--demo"]
    }
  }
}
```

### Claude Code

Check https://docs.anthropic.com/en/docs/claude-code/mcp

### Cursor

<p>
Add to Cursor by copying the link below into your browser:

```text
cursor://anysphere.cursor-deeplink/mcp/install?name=dbhub&config=eyJjb21tYW5kIjoibnB4IEBieXRlYmFzZS9kYmh1YiIsImVudiI6eyJUUkFOU1BPUlQiOiJzdGRpbyIsIkRTTiI6InBvc3RncmVzOi8vdXNlcjpwYXNzd29yZEBsb2NhbGhvc3Q6NTQzMi9kYm5hbWU%2Fc3NsbW9kZT1kaXNhYmxlIiwiUkVBRE9OTFkiOiJ0cnVlIn19
```

</p>

![cursor](https://raw.githubusercontent.com/bytebase/dbhub/main/resources/images/cursor.webp)

- Cursor supports both `stdio` and `http`.
- Follow [Cursor MCP guide](https://docs.cursor.com/context/model-context-protocol) and make sure to use [Agent](https://docs.cursor.com/chat/agent) mode.

### VSCode + Copilot

Check https://code.visualstudio.com/docs/copilot/customization/mcp-servers

VSCode with GitHub Copilot can connect to DBHub via both `stdio` and `http` transports. This enables AI agents to interact with your development database through a secure interface.

Configure the MCP server in `.vscode/mcp.json`:

**Stdio Transport:**

```json
{
  "servers": {
    "dbhub": {
      "command": "npx",
      "args": [
        "-y",
        "@bytebase/dbhub",
        "--transport",
        "stdio",
        "--dsn",
        "postgres://user:password@localhost:5432/dbname"
      ]
    }
  },
  "inputs": []
}
```

**HTTP Transport:**

```json
{
  "servers": {
    "dbhub": {
      "url": "http://localhost:8080/message",
      "type": "http"
    }
  },
  "inputs": []
}
```

**Copilot Instructions:**

You can provide Copilot with context by creating `.github/copilot-instructions.md`:

```markdown
## Database Access

This project provides an MCP server (DBHub) for secure SQL access to the development database.

AI agents can execute SQL queries. In read-only mode (recommended for production):

- `SELECT * FROM users LIMIT 5;`
- `SHOW TABLES;`
- `DESCRIBE table_name;`

In read-write mode (development/testing):

- `INSERT INTO users (name, email) VALUES ('John', 'john@example.com');`
- `UPDATE users SET status = 'active' WHERE id = 1;`
- `CREATE TABLE test_table (id INT PRIMARY KEY);`

Use `--readonly` flag to restrict to read-only operations for safety.
```

## Usage

### Read-only Mode

You can run DBHub in read-only mode, which restricts SQL query execution to read-only operations:

```bash
# Enable read-only mode
npx @bytebase/dbhub --readonly --dsn "postgres://user:password@localhost:5432/dbname"
```

In read-only mode, only [readonly SQL operations](https://github.com/bytebase/dbhub/blob/main/src/utils/allowed-keywords.ts) are allowed.

This provides an additional layer of security when connecting to production databases.
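
The check boils down to an allow-list of leading keywords per connector (see `src/utils/allowed-keywords.ts`, reproduced later on this page). A simplified sketch of that style of validation, not the exact DBHub implementation:

```typescript
// Allow-list of leading keywords per connector (excerpt mirroring
// src/utils/allowed-keywords.ts).
const allowedKeywords: Record<string, string[]> = {
  postgres: ["select", "with", "explain", "analyze", "show"],
  sqlite: ["select", "with", "explain", "analyze", "pragma"],
};

function isReadOnlySql(sql: string, connector: string): boolean {
  // Check every semicolon-separated statement, not just the first.
  return sql
    .split(";")
    .map((s) => s.trim())
    .filter((s) => s.length > 0)
    .every((statement) => {
      const firstWord = statement.split(/\s+/)[0]?.toLowerCase() ?? "";
      return (allowedKeywords[connector] ?? []).includes(firstWord);
    });
}

isReadOnlySql("SELECT * FROM employee; EXPLAIN SELECT 1;", "postgres"); // true
isReadOnlySql("DROP TABLE employee;", "postgres"); // false
```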

### Suffix Tool Names with ID

You can suffix tool names with a custom ID using the `--id` flag or `ID` environment variable. This is useful when running multiple DBHub instances (e.g., in Cursor) to allow the client to route queries to the correct database.

**Example configuration for multiple databases in Cursor:**

```json
{
  "mcpServers": {
    "dbhub-prod": {
      "command": "npx",
      "args": ["-y", "@bytebase/dbhub"],
      "env": {
        "ID": "prod",
        "DSN": "postgres://user:password@prod-host:5432/dbname"
      }
    },
    "dbhub-staging": {
      "command": "npx",
      "args": ["-y", "@bytebase/dbhub"],
      "env": {
        "ID": "staging",
        "DSN": "mysql://user:password@staging-host:3306/dbname"
      }
    }
  }
}
```

With this configuration:

- Production database tools: `execute_sql_prod`
- Staging database tools: `execute_sql_staging`

### Row Limiting

You can limit the number of rows returned from SELECT queries using the `--max-rows` parameter. This helps prevent accidentally retrieving too much data from large tables:

```bash
# Limit SELECT queries to return at most 1000 rows
npx @bytebase/dbhub --dsn "postgres://user:password@localhost:5432/dbname" --max-rows 1000
```

- Row limiting is only applied to SELECT statements, not INSERT/UPDATE/DELETE
- If your query already has a `LIMIT` or `TOP` clause, DBHub uses the smaller value (see the sketch below)
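
A simplified sketch of that "smaller value" rule (DBHub's real logic lives in `src/utils/sql-row-limiter.ts` and also handles SQL Server's `TOP`; this sketch covers `LIMIT` only):

```typescript
function applyRowLimit(sql: string, maxRows: number): string {
  // Only SELECT statements are limited.
  if (!/^\s*select\b/i.test(sql)) return sql;

  const limitMatch = sql.match(/\blimit\s+(\d+)/i);
  if (limitMatch) {
    const existing = parseInt(limitMatch[1], 10);
    // Keep the smaller of the existing LIMIT and --max-rows.
    return existing <= maxRows
      ? sql
      : sql.replace(/\blimit\s+\d+/i, `LIMIT ${maxRows}`);
  }
  // No LIMIT present: append one.
  return `${sql.trim().replace(/;$/, "")} LIMIT ${maxRows}`;
}

applyRowLimit("SELECT * FROM employee", 1000); // ... LIMIT 1000
applyRowLimit("SELECT * FROM employee LIMIT 50", 1000); // unchanged
```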

### SSL Connections

You can specify the SSL mode using the `sslmode` parameter in your DSN string:

| Database   | `sslmode=disable` | `sslmode=require` |   Default SSL Behavior   |
| ---------- | :---------------: | :---------------: | :----------------------: |
| PostgreSQL |        ✅         |        ✅         | Certificate verification |
| MySQL      |        ✅         |        ✅         | Certificate verification |
| MariaDB    |        ✅         |        ✅         | Certificate verification |
| SQL Server |        ✅         |        ✅         | Certificate verification |
| SQLite     |        ❌         |        ❌         |     N/A (file-based)     |

**SSL Mode Options:**

- `sslmode=disable`: All SSL/TLS encryption is turned off. Data is transmitted in plaintext.
- `sslmode=require`: Connection is encrypted, but the server's certificate is not verified. This protects against packet sniffing but not against man-in-the-middle attacks. You may use it when you trust a self-signed certificate.

Without specifying `sslmode`, most databases default to certificate verification, which provides the highest level of security.

Example usage:

```bash
# Disable SSL
postgres://user:password@localhost:5432/dbname?sslmode=disable

# Require SSL without certificate verification
postgres://user:password@localhost:5432/dbname?sslmode=require

# Standard SSL with certificate verification (default)
postgres://user:password@localhost:5432/dbname
```

### SSH Tunnel Support

DBHub supports connecting to databases through SSH tunnels, enabling secure access to databases in private networks or behind firewalls.

#### Using SSH Config File (Recommended)

DBHub can read SSH connection settings from your `~/.ssh/config` file. Simply use the host alias from your SSH config:

```bash
# If you have this in ~/.ssh/config:
# Host mybastion
#   HostName bastion.example.com
#   User ubuntu
#   IdentityFile ~/.ssh/id_rsa

npx @bytebase/dbhub \
  --dsn "postgres://dbuser:[email protected]:5432/mydb" \
  --ssh-host mybastion
```

DBHub will automatically use the settings from your SSH config, including hostname, user, port, and identity file. If no identity file is specified in the config, DBHub will try common default locations (`~/.ssh/id_rsa`, `~/.ssh/id_ed25519`, etc.).
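
Under the hood, DBHub resolves the alias with the `ssh-config` package. A minimal sketch of that resolution (field names follow the library's `compute()` output; error handling and the multi-key fallback are elided):

```typescript
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
import SSHConfig from "ssh-config";

function resolveSshAlias(alias: string) {
  const raw = fs.readFileSync(path.join(os.homedir(), ".ssh", "config"), "utf8");
  // compute() merges every matching Host block for the alias.
  const computed = SSHConfig.parse(raw).compute(alias);
  return {
    host: computed.HostName ?? alias,
    user: computed.User,
    port: computed.Port ? Number(computed.Port) : 22,
    // IdentityFile comes back as an array; when absent, DBHub falls back to
    // common defaults such as ~/.ssh/id_rsa and ~/.ssh/id_ed25519.
    identityFile: Array.isArray(computed.IdentityFile)
      ? computed.IdentityFile[0]
      : computed.IdentityFile,
  };
}
```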

#### SSH with Password Authentication

```bash
npx @bytebase/dbhub \
  --dsn "postgres://dbuser:[email protected]:5432/mydb" \
  --ssh-host bastion.example.com \
  --ssh-user ubuntu \
  --ssh-password mypassword
```

#### SSH with Private Key Authentication

```bash
npx @bytebase/dbhub \
  --dsn "postgres://dbuser:[email protected]:5432/mydb" \
  --ssh-host bastion.example.com \
  --ssh-user ubuntu \
  --ssh-key ~/.ssh/id_rsa
```

#### SSH with Private Key and Passphrase

```bash
npx @bytebase/dbhub \
  --dsn "postgres://dbuser:[email protected]:5432/mydb" \
  --ssh-host bastion.example.com \
  --ssh-port 2222 \
  --ssh-user ubuntu \
  --ssh-key ~/.ssh/id_rsa \
  --ssh-passphrase mykeypassphrase
```

#### Using Environment Variables

```bash
export SSH_HOST=bastion.example.com
export SSH_USER=ubuntu
export SSH_KEY=~/.ssh/id_rsa
npx @bytebase/dbhub --dsn "postgres://dbuser:password@database.internal:5432/mydb"
```

**Note**: When using SSH tunnels, the database host in your DSN should be the hostname/IP as seen from the SSH server (bastion host), not from your local machine.
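
For reference, here is a rough sketch of how such a tunnel is established with the `ssh2` package (the library DBHub uses; see `src/utils/ssh-tunnel.ts` for the real implementation). Hostnames and the key path are placeholders, and error handling is elided:

```typescript
import fs from "node:fs";
import net from "node:net";
import os from "node:os";
import path from "node:path";
import { Client } from "ssh2";

// targetHost/targetPort are the database coordinates as seen from the bastion.
function openTunnel(targetHost: string, targetPort: number): void {
  const ssh = new Client();
  ssh.on("ready", () => {
    const server = net.createServer((socket) => {
      // Forward each local connection through the bastion to the database.
      ssh.forwardOut("127.0.0.1", 0, targetHost, targetPort, (err, stream) => {
        if (err) return void socket.destroy();
        socket.pipe(stream).pipe(socket);
      });
    });
    // Port 0 lets the OS pick a free local port; the DSN then points at it.
    server.listen(0, "127.0.0.1", () => {
      const { port } = server.address() as net.AddressInfo;
      console.log(`tunnel listening on 127.0.0.1:${port}`);
    });
  });
  ssh.connect({
    host: "bastion.example.com", // --ssh-host
    username: "ubuntu", // --ssh-user
    privateKey: fs.readFileSync(path.join(os.homedir(), ".ssh", "id_rsa")), // --ssh-key
  });
}
```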

### Configure your database connection

You can use DBHub in demo mode with a sample employee database for testing:

```bash
npx @bytebase/dbhub --demo
```

> [!WARNING]
> If your user/password contains special characters, you have two options:
>
> 1. Escape them in the DSN (e.g. `pass#word` should be escaped as `pass%23word`; see the sketch below)
> 2. Use the individual database parameters method below (recommended)
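
For option 1, Node's built-in `encodeURIComponent` produces the escaped form:

```typescript
// Percent-encode credentials before embedding them in a DSN.
const password = "pass#word";
const dsn = `postgres://user:${encodeURIComponent(password)}@localhost:5432/dbname`;
// -> postgres://user:pass%23word@localhost:5432/dbname
```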

For real databases, you can configure the database connection in two ways:

#### Method 1: Database Source Name (DSN)

- **Command line argument** (highest priority):

  ```bash
  npx @bytebase/dbhub --dsn "postgres://user:password@localhost:5432/dbname?sslmode=disable"
  ```

- **Environment variable** (second priority):

  ```bash
  export DSN="postgres://user:password@localhost:5432/dbname?sslmode=disable"
  npx @bytebase/dbhub
  ```

- **Environment file** (third priority):
  - For development: Create `.env.local` with your DSN
  - For production: Create `.env` with your DSN
  ```
  DSN=postgres://user:password@localhost:5432/dbname?sslmode=disable
  ```

#### Method 2: Individual Database Parameters

If your password contains special characters that would break URL parsing, use individual environment variables instead:

- **Environment variables**:

  ```bash
  export DB_TYPE=postgres
  export DB_HOST=localhost
  export DB_PORT=5432
  export DB_USER=myuser
  export DB_PASSWORD='my@complex:password/with#special&chars'
  export DB_NAME=mydatabase
  npx @bytebase/dbhub
  ```

- **Environment file**:
  ```
  DB_TYPE=postgres
  DB_HOST=localhost
  DB_PORT=5432
  DB_USER=myuser
  DB_PASSWORD=my@complex:password/with#special&chars
  DB_NAME=mydatabase
  ```

**Supported DB_TYPE values**: `postgres`, `mysql`, `mariadb`, `sqlserver`, `sqlite`

**Default ports** (when DB_PORT is omitted):

- PostgreSQL: `5432`
- MySQL/MariaDB: `3306`
- SQL Server: `1433`

**For SQLite**: Only `DB_TYPE=sqlite` and `DB_NAME=/path/to/database.db` are required.

> [!TIP]
> Use the individual parameter method when your password contains special characters like `@`, `:`, `/`, `#`, `&`, `=` that would break DSN parsing.

> [!WARNING]
> When running in Docker, use `host.docker.internal` instead of `localhost` to connect to databases running on your host machine. For example: `mysql://user:password@host.docker.internal:3306/dbname`

DBHub supports the following database connection string formats:

| Database   | DSN Format                                               | Example                                                                                                        |
| ---------- | -------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- |
| MySQL      | `mysql://[user]:[password]@[host]:[port]/[database]`     | `mysql://user:password@localhost:3306/dbname?sslmode=disable`                                                  |
| MariaDB    | `mariadb://[user]:[password]@[host]:[port]/[database]`   | `mariadb://user:password@localhost:3306/dbname?sslmode=disable`                                                |
| PostgreSQL | `postgres://[user]:[password]@[host]:[port]/[database]`  | `postgres://user:password@localhost:5432/dbname?sslmode=disable`                                               |
| SQL Server | `sqlserver://[user]:[password]@[host]:[port]/[database]` | `sqlserver://user:password@localhost:1433/dbname?sslmode=disable`                                              |
| SQLite     | `sqlite:///[path/to/file]` or `sqlite:///:memory:`       | `sqlite:///path/to/database.db`, `sqlite:C:/Users/YourName/data/database.db` (Windows), or `sqlite:///:memory:` |

#### SQL Server

Extra query parameters:

##### `authentication`

- `authentication=azure-active-directory-access-token`. Only applicable when running from Azure. See [DefaultAzureCredential](https://learn.microsoft.com/en-us/azure/developer/javascript/sdk/authentication/credential-chains#use-defaultazurecredential-for-flexibility). A sketch of this flow follows.
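
A hedged sketch of this flow using `@azure/identity` and `mssql` (both DBHub dependencies); the server name is a placeholder, and DBHub's internal wiring may differ:

```typescript
import { DefaultAzureCredential } from "@azure/identity";
import sql from "mssql";

async function connectWithAadToken() {
  // DefaultAzureCredential walks the credential chain
  // (managed identity, environment, Azure CLI, ...).
  const credential = new DefaultAzureCredential();
  const token = await credential.getToken("https://database.windows.net/.default");

  return sql.connect({
    server: "myserver.database.windows.net", // placeholder
    database: "dbname",
    options: { encrypt: true },
    authentication: {
      type: "azure-active-directory-access-token",
      options: { token: token.token },
    },
  });
}
```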

### Transport

- **stdio** (default) - for direct integration with tools like Claude Desktop:

  ```bash
  npx @bytebase/dbhub --transport stdio --dsn "postgres://user:password@localhost:5432/dbname?sslmode=disable"
  ```

- **http** - for browser and network clients:
  ```bash
  npx @bytebase/dbhub --transport http --port 5678 --dsn "postgres://user:password@localhost:5432/dbname?sslmode=disable"
  ```

### Command line options

| Option         | Environment Variable | Description                                                           | Default                      |
| -------------- | -------------------- | --------------------------------------------------------------------- | ---------------------------- |
| dsn            | `DSN`                | Database connection string                                            | Required if not in demo mode |
| N/A            | `DB_TYPE`            | Database type: `postgres`, `mysql`, `mariadb`, `sqlserver`, `sqlite`  | N/A                          |
| N/A            | `DB_HOST`            | Database server hostname (not needed for SQLite)                      | N/A                          |
| N/A            | `DB_PORT`            | Database server port (uses default if omitted, not needed for SQLite) | N/A                          |
| N/A            | `DB_USER`            | Database username (not needed for SQLite)                             | N/A                          |
| N/A            | `DB_PASSWORD`        | Database password (not needed for SQLite)                             | N/A                          |
| N/A            | `DB_NAME`            | Database name or SQLite file path                                     | N/A                          |
| transport      | `TRANSPORT`          | Transport mode: `stdio` or `http`                                     | `stdio`                      |
| port           | `PORT`               | HTTP server port (only applicable when using `--transport=http`)      | `8080`                       |
| readonly       | `READONLY`           | Restrict SQL execution to read-only operations                        | `false`                      |
| max-rows       | N/A                  | Limit the number of rows returned from SELECT queries                 | No limit                     |
| demo           | N/A                  | Run in demo mode with sample employee database                        | `false`                      |
| id             | `ID`                 | Instance identifier to suffix tool names (for multi-instance)         | N/A                          |
| ssh-host       | `SSH_HOST`           | SSH server hostname for tunnel connection                             | N/A                          |
| ssh-port       | `SSH_PORT`           | SSH server port                                                       | `22`                         |
| ssh-user       | `SSH_USER`           | SSH username                                                          | N/A                          |
| ssh-password   | `SSH_PASSWORD`       | SSH password (for password authentication)                            | N/A                          |
| ssh-key        | `SSH_KEY`            | Path to SSH private key file                                          | N/A                          |
| ssh-passphrase | `SSH_PASSPHRASE`     | Passphrase for SSH private key                                        | N/A                          |

The demo mode uses an in-memory SQLite database loaded with the [sample employee database](https://github.com/bytebase/dbhub/tree/main/resources/employee-sqlite) that includes tables for employees, departments, titles, salaries, department employees, and department managers. The sample database includes SQL scripts for table creation, data loading, and testing.

## Development

1. Install dependencies:

   ```bash
   pnpm install
   ```

1. Run in development mode:

   ```bash
   pnpm dev
   ```

1. Build for production:
   ```bash
   pnpm build
   pnpm start --transport stdio --dsn "postgres://user:password@localhost:5432/dbname?sslmode=disable"
   ```

### Testing

The project uses Vitest for comprehensive unit and integration testing:

- **Run all tests**: `pnpm test`
- **Run tests in watch mode**: `pnpm test:watch`
- **Run integration tests**: `pnpm test:integration`

#### Integration Tests

DBHub includes comprehensive integration tests for all supported database connectors using [Testcontainers](https://testcontainers.com/). These tests run against real database instances in Docker containers, ensuring full compatibility and feature coverage.

##### Prerequisites

- **Docker**: Ensure Docker is installed and running on your machine
- **Docker Resources**: Allocate sufficient memory (recommended: 4GB+) for multiple database containers
- **Network Access**: Ability to pull Docker images from registries

##### Running Integration Tests

**Note**: This command runs all integration tests in parallel, which may take 5-15 minutes depending on your system resources and network speed.

```bash
# Run all database integration tests
pnpm test:integration
```

```bash
# Run only PostgreSQL integration tests
pnpm test src/connectors/__tests__/postgres.integration.test.ts
# Run only MySQL integration tests
pnpm test src/connectors/__tests__/mysql.integration.test.ts
# Run only MariaDB integration tests
pnpm test src/connectors/__tests__/mariadb.integration.test.ts
# Run only SQL Server integration tests
pnpm test src/connectors/__tests__/sqlserver.integration.test.ts
# Run only SQLite integration tests
pnpm test src/connectors/__tests__/sqlite.integration.test.ts
# Run JSON RPC integration tests
pnpm test src/__tests__/json-rpc-integration.test.ts
```

All integration tests follow these patterns (a condensed sketch follows the list):

1. **Container Lifecycle**: Start database container → Connect → Setup test data → Run tests → Cleanup
2. **Shared Test Utilities**: Common test patterns implemented in `IntegrationTestBase` class
3. **Database-Specific Features**: Each database includes tests for unique features and capabilities
4. **Error Handling**: Comprehensive testing of connection errors, invalid SQL, and edge cases
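
A condensed vitest sketch of this lifecycle using `@testcontainers/postgresql` (a dev dependency); the table and query are illustrative, not taken from the actual suite:

```typescript
import { PostgreSqlContainer, StartedPostgreSqlContainer } from "@testcontainers/postgresql";
import { afterAll, beforeAll, expect, test } from "vitest";
import pg from "pg";

let container: StartedPostgreSqlContainer;
let client: pg.Client;

// Start container -> connect -> set up test data.
beforeAll(async () => {
  container = await new PostgreSqlContainer("postgres:15-alpine").start();
  client = new pg.Client({ connectionString: container.getConnectionUri() });
  await client.connect();
  await client.query("CREATE TABLE t (id INT PRIMARY KEY)");
}, 120_000);

// Run tests -> clean up.
afterAll(async () => {
  await client?.end();
  await container?.stop();
});

test("queries the containerized database", async () => {
  const res = await client.query("SELECT COUNT(*)::int AS n FROM t");
  expect(res.rows[0].n).toBe(0);
});
```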

##### Troubleshooting Integration Tests

**Container Startup Issues:**

```bash
# Check Docker is running
docker ps

# Check available memory
docker system df

# Pull images manually if needed
docker pull postgres:15-alpine
docker pull mysql:8.0
docker pull mariadb:10.11
docker pull mcr.microsoft.com/mssql/server:2019-latest
```

**SQL Server Timeout Issues:**

- SQL Server containers require significant startup time (3-5 minutes)
- Ensure Docker has sufficient memory allocated (4GB+ recommended)
- Consider running SQL Server tests separately if experiencing timeouts

**Network/Resource Issues:**

```bash
# Run tests with verbose output
pnpm test:integration --reporter=verbose

# Run single database test to isolate issues
pnpm test:integration -- --testNamePattern="PostgreSQL"

# Check Docker container logs if tests fail
docker logs <container_id>
```

#### Pre-commit Hooks (for Developers)

The project includes pre-commit hooks to run tests automatically before each commit:

1. After cloning the repository, set up the pre-commit hooks:

   ```bash
   ./scripts/setup-husky.sh
   ```

2. This ensures the test suite runs automatically whenever you create a commit, preventing commits that would break tests.

### Debug with [MCP Inspector](https://github.com/modelcontextprotocol/inspector)

![mcp-inspector](https://raw.githubusercontent.com/bytebase/dbhub/main/resources/images/mcp-inspector.webp)

#### stdio

```bash
# PostgreSQL example
TRANSPORT=stdio DSN="postgres://user:password@localhost:5432/dbname?sslmode=disable" npx @modelcontextprotocol/inspector node /path/to/dbhub/dist/index.js
```

#### HTTP

```bash
# Start DBHub with HTTP transport
pnpm dev --transport=http --port=8080

# Start the MCP Inspector in another terminal
npx @modelcontextprotocol/inspector
```

Then connect the Inspector to the DBHub server's `/message` endpoint.

## Contributors

<a href="https://github.com/bytebase/dbhub/graphs/contributors">
  <img src="https://contrib.rocks/image?repo=bytebase/dbhub" />
</a>

## Star History

[![Star History Chart](https://api.star-history.com/svg?repos=bytebase/dbhub&type=Date)](https://www.star-history.com/#bytebase/dbhub&Date)

```

--------------------------------------------------------------------------------
/CLAUDE.md:
--------------------------------------------------------------------------------

```markdown
# CLAUDE.md

This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.

# DBHub Development Guidelines

DBHub is a Universal Database Gateway implementing the Model Context Protocol (MCP) server interface. It bridges MCP-compatible clients (Claude Desktop, Claude Code, Cursor) with various database systems.

## Commands

- Build: `pnpm run build` - Compiles TypeScript to JavaScript using tsup
- Start: `pnpm run start` - Runs the compiled server
- Dev: `pnpm run dev` - Runs server with tsx (no compilation needed)
- Test: `pnpm test` - Run all tests
- Test Watch: `pnpm test:watch` - Run tests in watch mode
- Integration Tests: `pnpm test:integration` - Run database integration tests (requires Docker)
- Pre-commit: `./scripts/setup-husky.sh` - Setup git hooks for automated testing

## Architecture Overview

The codebase follows a modular architecture centered around the MCP protocol:

```
src/
├── connectors/          # Database-specific implementations
│   ├── postgres/        # PostgreSQL connector
│   ├── mysql/           # MySQL connector
│   ├── mariadb/         # MariaDB connector
│   ├── sqlserver/       # SQL Server connector
│   └── sqlite/          # SQLite connector
├── resources/           # MCP resource handlers (DB exploration)
│   ├── schemas.ts       # Schema listing
│   ├── tables.ts        # Table exploration
│   ├── indexes.ts       # Index information
│   └── procedures.ts    # Stored procedures
├── tools/               # MCP tool handlers
│   └── execute-sql.ts   # SQL execution handler
├── prompts/             # AI prompt handlers
│   ├── sql-generator.ts # SQL generation
│   └── db-explainer.ts  # Database explanation
├── utils/               # Shared utilities
│   ├── dsn-obfuscate.ts # DSN security
│   ├── response-formatter.ts # Output formatting
│   └── allowed-keywords.ts  # Read-only SQL validation
└── index.ts             # Entry point with transport handling
```

Key architectural patterns:
- **Connector Registry**: Dynamic registration system for database connectors
- **Transport Abstraction**: Support for both stdio (desktop tools) and HTTP (network clients)
- **Resource/Tool/Prompt Handlers**: Clean separation of MCP protocol concerns
- **Integration Test Base**: Shared test utilities for consistent connector testing

## Environment

- Copy `.env.example` to `.env` and configure for your database connection
- Two ways to configure:
  - Set `DSN` to a full connection string (recommended)
  - Set `DB_CONNECTOR_TYPE` to select a connector with its default DSN
- Transport options:
  - Set `--transport=stdio` (default) for stdio transport
  - Set `--transport=http` for streamable HTTP transport with HTTP server
- Demo mode: Use `--demo` flag for bundled SQLite employee database
- Read-only mode: Use `--readonly` flag to restrict to read-only SQL operations

## Database Connectors

- Add new connectors in `src/connectors/{db-type}/index.ts`
- Implement the `Connector` and `DSNParser` interfaces from `src/connectors/interface.ts`
- Register connector with `ConnectorRegistry.register(connector)` (sketched below)
- DSN Examples:
  - PostgreSQL: `postgres://user:password@localhost:5432/dbname?sslmode=disable`
  - MySQL: `mysql://user:password@localhost:3306/dbname?sslmode=disable`
  - MariaDB: `mariadb://user:password@localhost:3306/dbname?sslmode=disable`
  - SQL Server: `sqlserver://user:password@localhost:1433/dbname?sslmode=disable`
  - SQLite: `sqlite:///path/to/database.db` or `sqlite:///:memory:`
- SSL modes: `sslmode=disable` (no SSL) or `sslmode=require` (SSL without cert verification)
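
An abbreviated sketch of this registration pattern (the real `Connector` interface in `src/connectors/interface.ts` has more members):

```typescript
// Connectors self-register at import time; the manager looks them up by DSN
// scheme. This is why src/index.ts imports each connector module.
interface Connector {
  id: string; // e.g. "postgres"
  dsnPrefixes: string[]; // e.g. ["postgres://", "postgresql://"]
  connect(dsn: string): Promise<void>;
  getSchemas(): Promise<string[]>;
}

class ConnectorRegistry {
  private static connectors = new Map<string, Connector>();

  static register(connector: Connector): void {
    ConnectorRegistry.connectors.set(connector.id, connector);
  }

  static forDsn(dsn: string): Connector | undefined {
    for (const c of ConnectorRegistry.connectors.values()) {
      if (c.dsnPrefixes.some((p) => dsn.startsWith(p))) return c;
    }
    return undefined;
  }
}

ConnectorRegistry.register({
  id: "postgres",
  dsnPrefixes: ["postgres://", "postgresql://"],
  async connect() {
    /* open a pg pool */
  },
  async getSchemas() {
    return ["public"];
  },
});
```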

## Testing Approach

- Unit tests for individual components and utilities
- Integration tests using Testcontainers for real database testing
- All connectors have comprehensive integration test coverage
- Pre-commit hooks run related tests automatically
- Test specific databases: `pnpm test src/connectors/__tests__/{db-type}.integration.test.ts`
- SSH tunnel tests: `pnpm test src/connectors/__tests__/postgres-ssh.integration.test.ts`

## SSH Tunnel Support

DBHub supports SSH tunnels for secure database connections through bastion hosts:

- Configuration via command-line options: `--ssh-host`, `--ssh-port`, `--ssh-user`, `--ssh-password`, `--ssh-key`, `--ssh-passphrase`
- Configuration via environment variables: `SSH_HOST`, `SSH_PORT`, `SSH_USER`, `SSH_PASSWORD`, `SSH_KEY`, `SSH_PASSPHRASE`
- SSH config file support: Automatically reads from `~/.ssh/config` when using host aliases
- Implementation in `src/utils/ssh-tunnel.ts` using the `ssh2` library
- SSH config parsing in `src/utils/ssh-config-parser.ts` using the `ssh-config` library
- Automatic tunnel establishment when SSH config is detected
- Support for both password and key-based authentication
- Default SSH key detection (tries `~/.ssh/id_rsa`, `~/.ssh/id_ed25519`, etc.)
- Tunnel lifecycle managed by `ConnectorManager`

## Code Style

- TypeScript with strict mode enabled
- ES modules with `.js` extension in imports
- Group imports: Node.js core modules → third-party → local modules
- Use camelCase for variables/functions, PascalCase for classes/types
- Include explicit type annotations for function parameters/returns
- Use try/finally blocks with DB connections (always release clients)
- Prefer async/await over callbacks and Promise chains
- Format error messages consistently
- Use parameterized queries for DB operations
- Validate inputs with zod schemas (see the sketch after this list)
- Include fallbacks for environment variables
- Use descriptive variable/function names
- Keep functions focused and single-purpose
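
For example, a minimal zod shape in the spirit of the `execute_sql` tool input (the real schema lives in `src/tools/execute-sql.ts`):

```typescript
import { z } from "zod";

// Raw shape passed to server.tool(); the field name matches the execute_sql
// tool, the description is illustrative.
export const executeSqlSchema = {
  sql: z.string().describe("SQL statement(s) to execute, separated by semicolons"),
};

// Parsing rejects malformed input before it reaches a connector.
z.object(executeSqlSchema).parse({ sql: "SELECT 1;" });
```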

```

--------------------------------------------------------------------------------
/src/types/sql.ts:
--------------------------------------------------------------------------------

```typescript
/**
 * SQL dialect types supported by the application
 */
export type SQLDialect = "postgres" | "sqlite" | "mysql" | "mariadb" | "mssql" | "ansi";

```

--------------------------------------------------------------------------------
/pnpm-workspace.yaml:
--------------------------------------------------------------------------------

```yaml
packages:
  - '.'

approvedBuilds:
  - better-sqlite3
ignoredBuiltDependencies:
  - cpu-features
  - esbuild
  - protobufjs
  - ssh2
onlyBuiltDependencies:
  - better-sqlite3

```

--------------------------------------------------------------------------------
/resources/employee-sqlite/show_elapsed.sql:
--------------------------------------------------------------------------------

```sql
-- SQLite doesn't have information_schema like MySQL
-- This is a simpler version that just shows when the script was run

SELECT 'Database loaded at ' || datetime('now', 'localtime') AS completion_time;
```

--------------------------------------------------------------------------------
/resources/employee-sqlite/load_department.sql:
--------------------------------------------------------------------------------

```sql
INSERT INTO department VALUES 
('d001','Marketing'),
('d002','Finance'),
('d003','Human Resources'),
('d004','Production'),
('d005','Development'),
('d006','Quality Management'),
('d007','Sales'),
('d008','Research'),
('d009','Customer Service');
```

--------------------------------------------------------------------------------
/vitest.config.ts:
--------------------------------------------------------------------------------

```typescript
import { defineConfig } from 'vitest/config';

export default defineConfig({
  test: {
    globals: true,
    environment: 'node',
    include: ['src/**/*.{test,spec}.ts'],
    coverage: {
      provider: 'v8',
      reporter: ['text', 'lcov'],
    },
  },
});
```

--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------

```json
{
  "compilerOptions": {
    "target": "ES2020",
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "esModuleInterop": true,
    "outDir": "./dist",
    "strict": true,
    "lib": ["ES2020", "ES2021.Promise", "ES2022.Error"]
  },
  "include": ["src/**/*"]
}

```

--------------------------------------------------------------------------------
/scripts/setup-husky.sh:
--------------------------------------------------------------------------------

```bash
#!/bin/bash

# This script is used to set up Husky for development
# It should be run manually, not as part of production builds

echo "Setting up Husky for the project..."
npx husky init

# Create the pre-commit hook
cat > .husky/pre-commit << 'EOL'
#!/usr/bin/env sh

# Run lint-staged to check only the files that are being committed
pnpm lint-staged

# Run the test suite to ensure everything passes
pnpm test
EOL

chmod +x .husky/pre-commit

echo "Husky setup complete!"
```

--------------------------------------------------------------------------------
/.github/copilot-instructions.md:
--------------------------------------------------------------------------------

```markdown
## Database Access

This project provides an MCP server (DBHub) for secure SQL access to the development database.

AI agents can execute SQL queries. In read-only mode (recommended for production):

- `SELECT * FROM users LIMIT 5;`
- `SHOW TABLES;`
- `DESCRIBE table_name;`

In read-write mode (development/testing):

- `INSERT INTO users (name, email) VALUES ('John', 'john@example.com');`
- `UPDATE users SET status = 'active' WHERE id = 1;`
- `CREATE TABLE test_table (id INT PRIMARY KEY);`

Use `--readonly` flag to restrict to read-only operations for safety.

```

--------------------------------------------------------------------------------
/src/utils/allowed-keywords.ts:
--------------------------------------------------------------------------------

```typescript
import { ConnectorType } from "../connectors/interface.js";

/**
 * List of allowed keywords for SQL queries
 * Not only SELECT queries are allowed,
 * but also other queries that are not destructive
 */
export const allowedKeywords: Record<ConnectorType, string[]> = {
  postgres: ["select", "with", "explain", "analyze", "show"],
  mysql: ["select", "with", "explain", "analyze", "show", "describe", "desc"],
  mariadb: ["select", "with", "explain", "analyze", "show", "describe", "desc"],
  sqlite: ["select", "with", "explain", "analyze", "pragma"],
  sqlserver: ["select", "with", "explain", "showplan"],
};

```

--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------

```typescript
#!/usr/bin/env node

// Import connector modules to register them
import "./connectors/postgres/index.js"; // Register PostgreSQL connector
import "./connectors/sqlserver/index.js"; // Register SQL Server connector
import "./connectors/sqlite/index.js"; // SQLite connector
import "./connectors/mysql/index.js"; // MySQL connector
import "./connectors/mariadb/index.js"; // MariaDB connector

// Import main function from server.ts
import { main } from "./server.js";

/**
 * Entry point for the DBHub MCP Server
 * Handles top-level exceptions and starts the server
 */
main().catch((error) => {
  console.error("Fatal error:", error);
  process.exit(1);
});

```

--------------------------------------------------------------------------------
/src/tools/index.ts:
--------------------------------------------------------------------------------

```typescript
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
import { executeSqlToolHandler, executeSqlSchema } from "./execute-sql.js";
/**
 * Register all tool handlers with the MCP server
 * @param server - The MCP server instance
 * @param id - Optional ID to suffix tool names (for Cursor multi-instance support)
 */
export function registerTools(server: McpServer, id?: string): void {
  // Build tool name with optional suffix
  const toolName = id ? `execute_sql_${id}` : "execute_sql";

  // Tool to run a SQL query (read-only for safety)
  server.tool(
    toolName,
    "Execute a SQL query on the current database",
    executeSqlSchema,
    executeSqlToolHandler
  );
}

```

--------------------------------------------------------------------------------
/resources/employee-sqlite/load_dept_manager.sql:
--------------------------------------------------------------------------------

```sql
INSERT INTO dept_manager VALUES 
(10002,'d001','1985-01-01','1991-10-01'),
(10039,'d001','1991-10-01','9999-01-01'),
(10085,'d002','1985-01-01','1989-12-17'),
(10114,'d002','1989-12-17','9999-01-01'),
(10183,'d003','1985-01-01','1992-03-21'),
(10228,'d003','1992-03-21','9999-01-01'),
(10303,'d004','1985-01-01','1988-09-09'),
(10344,'d004','1988-09-09','1992-08-02'),
(10386,'d004','1992-08-02','1996-08-30'),
(10420,'d004','1996-08-30','9999-01-01'),
(10511,'d005','1985-01-01','1992-04-25'),
(10567,'d005','1992-04-25','9999-01-01'),
(10725,'d006','1985-01-01','1989-05-06'),
(10765,'d006','1989-05-06','1991-09-12'),
(10800,'d006','1991-09-12','1994-06-28'),
(10854,'d006','1994-06-28','9999-01-01');
```

--------------------------------------------------------------------------------
/src/prompts/index.ts:
--------------------------------------------------------------------------------

```typescript
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
import { sqlGeneratorPromptHandler, sqlGeneratorSchema } from "./sql-generator.js";
import { dbExplainerPromptHandler, dbExplainerSchema } from "./db-explainer.js";

/**
 * Register all prompt handlers with the MCP server
 */
export function registerPrompts(server: McpServer): void {
  // Register SQL Generator prompt
  server.prompt(
    "generate_sql",
    "Generate SQL queries from natural language descriptions",
    sqlGeneratorSchema,
    sqlGeneratorPromptHandler
  );

  // Register Database Explainer prompt
  server.prompt(
    "explain_db",
    "Get explanations about database tables, columns, and structures",
    dbExplainerSchema,
    dbExplainerPromptHandler
  );
}

```

--------------------------------------------------------------------------------
/src/resources/schemas.ts:
--------------------------------------------------------------------------------

```typescript
import { ConnectorManager } from "../connectors/manager.js";
import {
  createResourceSuccessResponse,
  createResourceErrorResponse,
} from "../utils/response-formatter.js";

/**
 * Schemas resource handler
 * Returns a list of all schemas in the database
 */
export async function schemasResourceHandler(uri: URL, _extra: any) {
  const connector = ConnectorManager.getCurrentConnector();

  try {
    const schemas = await connector.getSchemas();

    // Prepare response data
    const responseData = {
      schemas: schemas,
      count: schemas.length,
    };

    // Use the utility to create a standardized response
    return createResourceSuccessResponse(uri.href, responseData);
  } catch (error) {
    return createResourceErrorResponse(
      uri.href,
      `Error retrieving database schemas: ${(error as Error).message}`,
      "SCHEMAS_RETRIEVAL_ERROR"
    );
  }
}

```

--------------------------------------------------------------------------------
/src/types/ssh.ts:
--------------------------------------------------------------------------------

```typescript
/**
 * SSH Tunnel Configuration Types
 */

export interface SSHTunnelConfig {
  /** SSH server hostname */
  host: string;
  
  /** SSH server port (default: 22) */
  port?: number;
  
  /** SSH username */
  username: string;
  
  /** SSH password (for password authentication) */
  password?: string;
  
  /** Path to SSH private key file */
  privateKey?: string;
  
  /** Passphrase for SSH private key */
  passphrase?: string;
}

export interface SSHTunnelOptions {
  /** Target database host (as seen from SSH server) */
  targetHost: string;
  
  /** Target database port */
  targetPort: number;
  
  /** Local port to bind the tunnel (0 for dynamic allocation) */
  localPort?: number;
}

export interface SSHTunnelInfo {
  /** Local port where the tunnel is listening */
  localPort: number;
  
  /** Original target host */
  targetHost: string;
  
  /** Original target port */
  targetPort: number;
}
```

--------------------------------------------------------------------------------
/tsup.config.ts:
--------------------------------------------------------------------------------

```typescript
import { defineConfig } from 'tsup';
import fs from 'fs';
import path from 'path';

export default defineConfig({
  entry: ['src/index.ts'],
  format: ['esm'],
  dts: true,
  clean: true,
  outDir: 'dist',
  // Copy the employee-sqlite resources to dist
  async onSuccess() {
    // Create target directory
    const targetDir = path.join('dist', 'resources', 'employee-sqlite');
    fs.mkdirSync(targetDir, { recursive: true });

    // Copy all SQL files from resources/employee-sqlite to dist/resources/employee-sqlite
    const sourceDir = path.join('resources', 'employee-sqlite');
    const files = fs.readdirSync(sourceDir);

    for (const file of files) {
      if (file.endsWith('.sql')) {
        const sourcePath = path.join(sourceDir, file);
        const targetPath = path.join(targetDir, file);
        fs.copyFileSync(sourcePath, targetPath);
        console.log(`Copied ${sourcePath} to ${targetPath}`);
      }
    }
  },
});

```

--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------

```dockerfile
FROM node:22-alpine AS builder

WORKDIR /app

# Copy package.json and pnpm-lock.yaml
COPY package.json pnpm-lock.yaml ./

# Install pnpm
RUN corepack enable && corepack prepare pnpm@latest --activate

# Install dependencies
RUN pnpm install

# Copy source code
COPY . .

# Build the application
RUN pnpm run build

# Production stage
FROM node:22-alpine

WORKDIR /app

# Copy only production files
COPY --from=builder /app/package.json /app/pnpm-lock.yaml ./

# Install pnpm
RUN corepack enable && corepack prepare pnpm@latest --activate

RUN pnpm pkg set pnpm.onlyBuiltDependencies[0]=better-sqlite3
RUN pnpm add better-sqlite3
RUN node -e 'new require("better-sqlite3")(":memory:")'

# Install production dependencies only
RUN pnpm install --prod

# Copy built application from builder stage
COPY --from=builder /app/dist ./dist

# Expose ports
EXPOSE 8080

# Set environment variables
ENV NODE_ENV=production

# Run the server
ENTRYPOINT ["node", "dist/index.js"]

```

--------------------------------------------------------------------------------
/.github/workflows/run-tests.yml:
--------------------------------------------------------------------------------

```yaml
name: Run Tests

on:
  pull_request:
    branches: [ main ]
    # Run when PR is opened, synchronized, or reopened
    types: [opened, synchronize, reopened]
  # Also allow manual triggering
  workflow_dispatch:

jobs:
  test:
    name: Run Test Suite
    runs-on: ubuntu-latest
    
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
      
      - name: Install pnpm
        uses: pnpm/action-setup@v2
        with:
          version: 8
          run_install: false

      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '20'
          cache: 'pnpm'
      
      - name: Get pnpm store directory
        id: pnpm-cache
        shell: bash
        run: |
          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
      
      - name: Setup pnpm cache
        uses: actions/cache@v3
        with:
          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-
      
      - name: Install dependencies
        run: pnpm install
      
      - name: Run tests
        run: pnpm test
```

--------------------------------------------------------------------------------
/src/resources/tables.ts:
--------------------------------------------------------------------------------

```typescript
import { ConnectorManager } from "../connectors/manager.js";
import {
  createResourceSuccessResponse,
  createResourceErrorResponse,
} from "../utils/response-formatter.js";

/**
 * Tables resource handler
 * Returns a list of all tables in the database or within a specific schema
 */
export async function tablesResourceHandler(uri: URL, variables: any, _extra: any) {
  const connector = ConnectorManager.getCurrentConnector();

  // Extract the schema name from URL variables if present
  const schemaName =
    variables && variables.schemaName
      ? Array.isArray(variables.schemaName)
        ? variables.schemaName[0]
        : variables.schemaName
      : undefined;

  try {
    // If a schema name was provided, verify that it exists
    if (schemaName) {
      const availableSchemas = await connector.getSchemas();
      if (!availableSchemas.includes(schemaName)) {
        return createResourceErrorResponse(
          uri.href,
          `Schema '${schemaName}' does not exist or cannot be accessed`,
          "SCHEMA_NOT_FOUND"
        );
      }
    }

    // Get tables with optional schema filter
    const tableNames = await connector.getTables(schemaName);

    // Prepare response data
    const responseData = {
      tables: tableNames,
      count: tableNames.length,
      schema: schemaName,
    };

    // Use the utility to create a standardized response
    return createResourceSuccessResponse(uri.href, responseData);
  } catch (error) {
    return createResourceErrorResponse(
      uri.href,
      `Error retrieving tables: ${(error as Error).message}`,
      "TABLES_RETRIEVAL_ERROR"
    );
  }
}

```

--------------------------------------------------------------------------------
/resources/employee-sqlite/object.sql:
--------------------------------------------------------------------------------

```sql
-- SQLite implementation of views and functions
-- This is simplified compared to the MySQL version

-- Drop views if they exist
DROP VIEW IF EXISTS v_full_employee;
DROP VIEW IF EXISTS v_full_department;
DROP VIEW IF EXISTS emp_dept_current;

-- Create helper view to get current department for employees
CREATE VIEW emp_dept_current AS
SELECT 
    e.emp_no,
    de.dept_no
FROM 
    employee e
JOIN 
    dept_emp de ON e.emp_no = de.emp_no
JOIN (
    SELECT 
        emp_no, 
        MAX(from_date) AS max_from_date
    FROM 
        dept_emp
    GROUP BY 
        emp_no
) latest ON de.emp_no = latest.emp_no AND de.from_date = latest.max_from_date;

-- View that shows employee with their current department name
CREATE VIEW v_full_employee AS
SELECT
    e.emp_no,
    e.first_name, 
    e.last_name,
    e.birth_date, 
    e.gender,
    e.hire_date,
    d.dept_name AS department
FROM
    employee e
LEFT JOIN
    emp_dept_current edc ON e.emp_no = edc.emp_no
LEFT JOIN
    department d ON edc.dept_no = d.dept_no;

-- View to get current managers for departments
CREATE VIEW current_managers AS
SELECT
    d.dept_no,
    d.dept_name,
    e.first_name || ' ' || e.last_name AS manager
FROM
    department d
LEFT JOIN
    dept_manager dm ON d.dept_no = dm.dept_no
JOIN (
    SELECT
        dept_no,
        MAX(from_date) AS max_from_date
    FROM
        dept_manager
    GROUP BY
        dept_no
) latest ON dm.dept_no = latest.dept_no AND dm.from_date = latest.max_from_date
LEFT JOIN
    employee e ON dm.emp_no = e.emp_no;

-- Create a view showing departments with their managers
CREATE VIEW v_full_department AS
SELECT
    dept_no,
    dept_name,
    manager
FROM
    current_managers;
```
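
Once loaded, these views behave like ordinary tables. A quick illustrative query against `v_full_employee`:

```sql
-- Employees with the name of their current department
SELECT emp_no, first_name, last_name, department
FROM v_full_employee
ORDER BY emp_no
LIMIT 5;
```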

--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------

```json
{
  "name": "dbhub",
  "version": "0.11.5",
  "description": "Universal Database MCP Server",
  "repository": {
    "type": "git",
    "url": "https://github.com/bytebase/dbhub.git"
  },
  "main": "dist/index.js",
  "type": "module",
  "bin": {
    "dbhub": "dist/index.js"
  },
  "files": [
    "dist",
    "LICENSE",
    "README.md"
  ],
  "scripts": {
    "build": "tsup",
    "start": "node dist/index.js",
    "dev": "NODE_ENV=development tsx src/index.ts",
    "crossdev": "cross-env NODE_ENV=development tsx src/index.ts",
    "test": "vitest run",
    "test:watch": "vitest",
    "test:integration": "vitest run --testNamePattern='Integration Tests'",
    "prepare": "[[ \"$NODE_ENV\" != \"production\" ]] && husky || echo \"Skipping husky in production\"",
    "pre-commit": "lint-staged"
  },
  "keywords": [],
  "author": "",
  "license": "MIT",
  "dependencies": {
    "@azure/identity": "^4.8.0",
    "@modelcontextprotocol/sdk": "^1.12.1",
    "better-sqlite3": "^11.9.0",
    "dotenv": "^16.4.7",
    "express": "^4.18.2",
    "mariadb": "^3.4.0",
    "mssql": "^11.0.1",
    "mysql2": "^3.13.0",
    "pg": "^8.13.3",
    "ssh-config": "^5.0.3",
    "ssh2": "^1.16.0",
    "zod": "^3.24.2"
  },
  "devDependencies": {
    "@testcontainers/mariadb": "^11.0.3",
    "@testcontainers/mssqlserver": "^11.0.3",
    "@testcontainers/mysql": "^11.0.3",
    "@testcontainers/postgresql": "^11.0.3",
    "@types/better-sqlite3": "^7.6.12",
    "@types/express": "^4.17.21",
    "@types/mssql": "^9.1.7",
    "@types/node": "^22.13.10",
    "@types/pg": "^8.11.11",
    "@types/ssh2": "^1.15.5",
    "cross-env": "^7.0.3",
    "husky": "^9.0.11",
    "lint-staged": "^15.2.2",
    "prettier": "^3.5.3",
    "testcontainers": "^11.0.3",
    "ts-node": "^10.9.2",
    "tsup": "^8.4.0",
    "tsx": "^4.19.3",
    "typescript": "^5.8.2",
    "vitest": "^1.6.1"
  },
  "compilerOptions": {
    "target": "ES2020",
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "esModuleInterop": true,
    "strict": true,
    "outDir": "dist",
    "rootDir": "src"
  },
  "include": [
    "src/**/*"
  ],
  "lint-staged": {
    "*.{js,ts}": "vitest related --run"
  }
}

```

--------------------------------------------------------------------------------
/src/resources/index.ts:
--------------------------------------------------------------------------------

```typescript
import { McpServer, ResourceTemplate } from "@modelcontextprotocol/sdk/server/mcp.js";
import { tablesResourceHandler } from "./tables.js";
import { tableStructureResourceHandler } from "./schema.js";
import { schemasResourceHandler } from "./schemas.js";
import { indexesResourceHandler } from "./indexes.js";
import { proceduresResourceHandler, procedureDetailResourceHandler } from "./procedures.js";

// Export all resource handlers
export { tablesResourceHandler } from "./tables.js";
export { tableStructureResourceHandler } from "./schema.js";
export { schemasResourceHandler } from "./schemas.js";
export { indexesResourceHandler } from "./indexes.js";
export { proceduresResourceHandler, procedureDetailResourceHandler } from "./procedures.js";

/**
 * Register all resource handlers with the MCP server
 */
export function registerResources(server: McpServer): void {
  // Resource for listing all schemas
  server.resource("schemas", "db://schemas", schemasResourceHandler);

  // Allow listing tables within a specific schema
  server.resource(
    "tables_in_schema",
    new ResourceTemplate("db://schemas/{schemaName}/tables", { list: undefined }),
    tablesResourceHandler
  );

  // Resource for getting table structure within a specific database schema
  server.resource(
    "table_structure_in_schema",
    new ResourceTemplate("db://schemas/{schemaName}/tables/{tableName}", { list: undefined }),
    tableStructureResourceHandler
  );

  // Resource for getting indexes for a table within a specific database schema
  server.resource(
    "indexes_in_table",
    new ResourceTemplate("db://schemas/{schemaName}/tables/{tableName}/indexes", {
      list: undefined,
    }),
    indexesResourceHandler
  );

  // Resource for listing stored procedures within a schema
  server.resource(
    "procedures_in_schema",
    new ResourceTemplate("db://schemas/{schemaName}/procedures", { list: undefined }),
    proceduresResourceHandler
  );

  // Resource for getting procedure detail within a schema
  server.resource(
    "procedure_detail_in_schema",
    new ResourceTemplate("db://schemas/{schemaName}/procedures/{procedureName}", {
      list: undefined,
    }),
    procedureDetailResourceHandler
  );
}

```
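
A minimal sketch of how an MCP client would read one of these templated resources, assuming the standard SDK `Client` with an already-connected transport (the helper name is illustrative):

```typescript
import { Client } from "@modelcontextprotocol/sdk/client/index.js";

// Matches the "tables_in_schema" template registered above.
async function listTablesInSchema(client: Client, schema: string) {
  const result = await client.readResource({
    uri: `db://schemas/${schema}/tables`,
  });
  // Each content item carries the JSON string produced by the resource handler.
  return JSON.parse((result.contents[0] as { text: string }).text);
}
```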

--------------------------------------------------------------------------------
/src/resources/indexes.ts:
--------------------------------------------------------------------------------

```typescript
import { ConnectorManager } from "../connectors/manager.js";
import {
  createResourceSuccessResponse,
  createResourceErrorResponse,
} from "../utils/response-formatter.js";

/**
 * Indexes resource handler
 * Returns information about indexes on a table
 */
export async function indexesResourceHandler(uri: URL, variables: any, _extra: any) {
  const connector = ConnectorManager.getCurrentConnector();

  // Extract schema and table names from URL variables
  const schemaName =
    variables && variables.schemaName
      ? Array.isArray(variables.schemaName)
        ? variables.schemaName[0]
        : variables.schemaName
      : undefined;

  const tableName =
    variables && variables.tableName
      ? Array.isArray(variables.tableName)
        ? variables.tableName[0]
        : variables.tableName
      : undefined;

  if (!tableName) {
    return createResourceErrorResponse(uri.href, "Table name is required", "MISSING_TABLE_NAME");
  }

  try {
    // If a schema name was provided, verify that it exists
    if (schemaName) {
      const availableSchemas = await connector.getSchemas();
      if (!availableSchemas.includes(schemaName)) {
        return createResourceErrorResponse(
          uri.href,
          `Schema '${schemaName}' does not exist or cannot be accessed`,
          "SCHEMA_NOT_FOUND"
        );
      }
    }

    // Check if table exists
    const tableExists = await connector.tableExists(tableName, schemaName);
    if (!tableExists) {
      return createResourceErrorResponse(
        uri.href,
        `Table '${tableName}' does not exist in schema '${schemaName || "default"}'`,
        "TABLE_NOT_FOUND"
      );
    }

    // Get indexes for the table
    const indexes = await connector.getTableIndexes(tableName, schemaName);

    // Prepare response data
    const responseData = {
      table: tableName,
      schema: schemaName,
      indexes: indexes,
      count: indexes.length,
    };

    // Use the utility to create a standardized response
    return createResourceSuccessResponse(uri.href, responseData);
  } catch (error) {
    return createResourceErrorResponse(
      uri.href,
      `Error retrieving indexes: ${(error as Error).message}`,
      "INDEXES_RETRIEVAL_ERROR"
    );
  }
}

```

--------------------------------------------------------------------------------
/src/config/demo-loader.ts:
--------------------------------------------------------------------------------

```typescript
/**
 * Demo data loader for SQLite in-memory database
 *
 * This module loads the sample employee database into the SQLite in-memory database
 * when the --demo flag is specified.
 */
import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";

// Create __dirname equivalent for ES modules
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Path to sample data files - will be bundled with the package
// Try different paths to find the SQL files in development or production
let DEMO_DATA_DIR: string;
const projectRootPath = path.join(__dirname, "..", "..", "..");
const projectResourcesPath = path.join(projectRootPath, "resources", "employee-sqlite");
const distPath = path.join(__dirname, "resources", "employee-sqlite");

// First try the project root resources directory (for development)
if (fs.existsSync(projectResourcesPath)) {
  DEMO_DATA_DIR = projectResourcesPath;
}
// Then try dist directory (for production)
else if (fs.existsSync(distPath)) {
  DEMO_DATA_DIR = distPath;
}
// Fallback to a relative path from the current directory
else {
  DEMO_DATA_DIR = path.join(process.cwd(), "resources", "employee-sqlite");
  if (!fs.existsSync(DEMO_DATA_DIR)) {
    throw new Error(`Could not find employee-sqlite resources in any of the expected locations: 
      - ${projectResourcesPath}
      - ${distPath}
      - ${DEMO_DATA_DIR}`);
  }
}

/**
 * Load SQL file contents
 */
export function loadSqlFile(fileName: string): string {
  const filePath = path.join(DEMO_DATA_DIR, fileName);
  return fs.readFileSync(filePath, "utf8");
}

/**
 * Get SQLite DSN for in-memory database
 */
export function getInMemorySqliteDSN(): string {
  return "sqlite:///:memory:";
}

/**
 * Load SQL files sequentially
 */
export function getSqliteInMemorySetupSql(): string {
  // First, load the schema
  let sql = loadSqlFile("employee.sql");

  // Replace .read directives with the actual file contents
  // This is necessary because in-memory SQLite can't use .read
  const readRegex = /\.read\s+([a-zA-Z0-9_]+\.sql)/g;
  let match;

  while ((match = readRegex.exec(sql)) !== null) {
    const includePath = match[1];
    const includeContent = loadSqlFile(includePath);

    // Replace the .read line with the file contents
    sql = sql.replace(match[0], includeContent);

    // `sql` was just reassigned, so the global regex's saved lastIndex would
    // resume at an arbitrary offset in the new string; rescan from the start
    // so no remaining (or nested) .read directive is skipped.
    readRegex.lastIndex = 0;
  }

  return sql;
}

```
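
A minimal sketch of how these helpers fit together with `better-sqlite3` (already a project dependency); the query at the end is just an illustrative smoke test:

```typescript
import Database from "better-sqlite3";
import { getSqliteInMemorySetupSql } from "./demo-loader.js";

// The setup SQL already has every .read directive inlined,
// so a single exec() loads both schema and data.
const db = new Database(":memory:");
db.exec(getSqliteInMemorySetupSql());

const row = db.prepare("SELECT COUNT(*) AS n FROM employee").get() as { n: number };
console.log(`Loaded ${row.n} employees`);
```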

--------------------------------------------------------------------------------
/src/utils/__tests__/ssh-tunnel.test.ts:
--------------------------------------------------------------------------------

```typescript
import { describe, it, expect } from 'vitest';
import { SSHTunnel } from '../ssh-tunnel.js';
import type { SSHTunnelConfig } from '../../types/ssh.js';

describe('SSHTunnel', () => {
  describe('Initial State', () => {
    it('should have initial state as disconnected', () => {
      const tunnel = new SSHTunnel();
      expect(tunnel.getIsConnected()).toBe(false);
      expect(tunnel.getTunnelInfo()).toBeNull();
    });
  });

  describe('Tunnel State Management', () => {
    it('should prevent establishing multiple tunnels', async () => {
      const tunnel = new SSHTunnel();
      
      // Set tunnel as connected (simulating a connected state)
      (tunnel as any).isConnected = true;

      const config: SSHTunnelConfig = {
        host: 'ssh.example.com',
        username: 'testuser',
        password: 'testpass',
      };

      const options = {
        targetHost: 'database.local',
        targetPort: 5432,
      };

      await expect(tunnel.establish(config, options)).rejects.toThrow(
        'SSH tunnel is already established'
      );
    });

    it('should handle close when not connected', async () => {
      const tunnel = new SSHTunnel();
      
      // Should not throw when closing disconnected tunnel
      await expect(tunnel.close()).resolves.toBeUndefined();
    });
  });

  describe('Configuration Validation', () => {
    it('should validate authentication requirements', () => {
      // Test that config validation logic exists
      const validConfigWithPassword: SSHTunnelConfig = {
        host: 'ssh.example.com',
        username: 'testuser',
        password: 'testpass',
      };

      const validConfigWithKey: SSHTunnelConfig = {
        host: 'ssh.example.com',
        username: 'testuser',
        privateKey: '/path/to/key',
      };

      const validConfigWithKeyAndPassphrase: SSHTunnelConfig = {
        host: 'ssh.example.com',
        port: 2222,
        username: 'testuser',
        privateKey: '/path/to/key',
        passphrase: 'keypassphrase',
      };

      // These should be valid configurations
      expect(validConfigWithPassword.host).toBe('ssh.example.com');
      expect(validConfigWithPassword.username).toBe('testuser');
      expect(validConfigWithPassword.password).toBe('testpass');

      expect(validConfigWithKey.privateKey).toBe('/path/to/key');
      expect(validConfigWithKeyAndPassphrase.passphrase).toBe('keypassphrase');
      expect(validConfigWithKeyAndPassphrase.port).toBe(2222);
    });
  });
});
```

--------------------------------------------------------------------------------
/src/utils/dsn-obfuscate.ts:
--------------------------------------------------------------------------------

```typescript
import type { SSHTunnelConfig } from '../types/ssh.js';

/**
 * Obfuscates the password in a DSN string for logging purposes
 * @param dsn The original DSN string
 * @returns DSN string with password replaced by asterisks
 */
export function obfuscateDSNPassword(dsn: string): string {
  if (!dsn) {
    return dsn;
  }

  try {
    // Handle different DSN formats
    const protocolMatch = dsn.match(/^([^:]+):/);
    if (!protocolMatch) {
      return dsn; // Not a recognizable DSN format
    }

    const protocol = protocolMatch[1];

    // For SQLite file paths, don't obfuscate
    if (protocol === 'sqlite') {
      return dsn;
    }

    // For other databases, look for password pattern: ://user:password@host
    // We need to be careful with @ in passwords, so we'll find the last @ that separates password from host
    const protocolPart = dsn.split('://')[1];
    if (!protocolPart) {
      return dsn;
    }
    
    // Find the last @ to separate credentials from host
    const lastAtIndex = protocolPart.lastIndexOf('@');
    if (lastAtIndex === -1) {
      return dsn; // No @ found, no password to obfuscate
    }
    
    const credentialsPart = protocolPart.substring(0, lastAtIndex);
    const hostPart = protocolPart.substring(lastAtIndex + 1);
    
    // Check if there's a colon in credentials (user:password format)
    const colonIndex = credentialsPart.indexOf(':');
    if (colonIndex === -1) {
      return dsn; // No colon found, no password to obfuscate
    }
    
    const username = credentialsPart.substring(0, colonIndex);
    const password = credentialsPart.substring(colonIndex + 1);
    const obfuscatedPassword = '*'.repeat(Math.min(password.length, 8));
    
    return `${protocol}://${username}:${obfuscatedPassword}@${hostPart}`;
  } catch (error) {
    // If any error occurs during obfuscation, return the original DSN
    // This ensures we don't break functionality due to obfuscation issues
    return dsn;
  }
}

/**
 * Obfuscates sensitive information in SSH configuration for logging
 * @param config The SSH tunnel configuration
 * @returns SSH config with sensitive data replaced by asterisks
 */
export function obfuscateSSHConfig(config: SSHTunnelConfig): Partial<SSHTunnelConfig> {
  const obfuscated: Partial<SSHTunnelConfig> = {
    host: config.host,
    port: config.port,
    username: config.username,
  };
  
  if (config.password) {
    obfuscated.password = '*'.repeat(8);
  }
  
  if (config.privateKey) {
    obfuscated.privateKey = config.privateKey; // Keep path as-is
  }
  
  if (config.passphrase) {
    obfuscated.passphrase = '*'.repeat(8);
  }
  
  return obfuscated;
}
```
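
Illustrative behavior of the DSN obfuscator (values made up; note the cap of eight asterisks and the SQLite pass-through):

```typescript
import { obfuscateDSNPassword } from "./dsn-obfuscate.js";

obfuscateDSNPassword("postgres://user:secret@db.example.com:5432/app");
// => "postgres://user:******@db.example.com:5432/app"

obfuscateDSNPassword("sqlite:///path/to/db.sqlite");
// => "sqlite:///path/to/db.sqlite" (file paths are left untouched)
```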

--------------------------------------------------------------------------------
/src/resources/schema.ts:
--------------------------------------------------------------------------------

```typescript
import { ConnectorManager } from "../connectors/manager.js";
import { Variables } from "@modelcontextprotocol/sdk/shared/uriTemplate.js";
import {
  createResourceSuccessResponse,
  createResourceErrorResponse,
} from "../utils/response-formatter.js";

/**
 * Schema resource handler
 * Returns schema information for a specific table, optionally within a specific database schema
 */
export async function tableStructureResourceHandler(uri: URL, variables: Variables, _extra: any) {
  const connector = ConnectorManager.getCurrentConnector();

  // Handle tableName which could be a string or string array from URL template
  const tableName = Array.isArray(variables.tableName)
    ? variables.tableName[0]
    : (variables.tableName as string);

  // Extract schemaName if present
  const schemaName = variables.schemaName
    ? Array.isArray(variables.schemaName)
      ? variables.schemaName[0]
      : (variables.schemaName as string)
    : undefined;

  try {
    // If a schema name was provided, verify that it exists
    if (schemaName) {
      const availableSchemas = await connector.getSchemas();
      if (!availableSchemas.includes(schemaName)) {
        return createResourceErrorResponse(
          uri.href,
          `Schema '${schemaName}' does not exist or cannot be accessed`,
          "SCHEMA_NOT_FOUND"
        );
      }
    }

    // Check if the table exists in the schema before getting its structure
    const tableExists = await connector.tableExists(tableName, schemaName);
    if (!tableExists) {
      const schemaInfo = schemaName ? ` in schema '${schemaName}'` : "";
      return createResourceErrorResponse(
        uri.href,
        `Table '${tableName}'${schemaInfo} does not exist or cannot be accessed`,
        "TABLE_NOT_FOUND"
      );
    }

    // Get the table schema now that we know it exists
    const columns = await connector.getTableSchema(tableName, schemaName);

    // Create a more structured response
    const formattedColumns = columns.map((col) => ({
      name: col.column_name,
      type: col.data_type,
      nullable: col.is_nullable === "YES",
      default: col.column_default,
    }));

    // Prepare response data
    const responseData = {
      table: tableName,
      schema: schemaName,
      columns: formattedColumns,
      count: formattedColumns.length,
    };

    // Use the utility to create a standardized response
    return createResourceSuccessResponse(uri.href, responseData);
  } catch (error) {
    // Handle any other errors that might occur
    return createResourceErrorResponse(
      uri.href,
      `Error retrieving schema: ${(error as Error).message}`,
      "SCHEMA_RETRIEVAL_ERROR"
    );
  }
}

```

--------------------------------------------------------------------------------
/.github/workflows/docker-publish.yml:
--------------------------------------------------------------------------------

```yaml
# Workflow to build and publish Docker images for DBHub
#
# This workflow:
# 1. Always pushes to the 'latest' tag when changes are pushed to the main branch
# 2. If package.json version changes, also pushes a version-specific tag
# 3. Builds for both amd64 and arm64 architectures

name: Publish to docker hub

on:
  push:
    branches: [main]

env:
  IMAGE_NAME: bytebase/dbhub

jobs:
  build-and-push:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 2 # Fetch two commits to detect changes in package.json

      - name: Check for package.json version changes
        id: check-version
        run: |
          # Get current and previous package.json content
          git show HEAD:package.json > package.json.current
          git show HEAD~1:package.json > package.json.previous 2>/dev/null || cp package.json.current package.json.previous

          # Extract versions
          CURRENT_VERSION=$(jq -r '.version' package.json.current)
          PREVIOUS_VERSION=$(jq -r '.version' package.json.previous)

          echo "Current version: $CURRENT_VERSION"
          echo "Previous version: $PREVIOUS_VERSION"

          # Set output based on whether version changed
          if [ "$CURRENT_VERSION" != "$PREVIOUS_VERSION" ]; then
            echo "Version changed from $PREVIOUS_VERSION to $CURRENT_VERSION"
            echo "VERSION_CHANGED=true" >> $GITHUB_OUTPUT
            echo "VERSION=$CURRENT_VERSION" >> $GITHUB_OUTPUT
          else
            echo "Version unchanged: $CURRENT_VERSION"
            echo "VERSION_CHANGED=false" >> $GITHUB_OUTPUT
          fi

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Prepare Docker tags
        id: prep
        run: |
          # Always include latest tag
          TAGS="${{ env.IMAGE_NAME }}:latest"

          # Add version tag if version changed
          if [[ "${{ steps.check-version.outputs.VERSION_CHANGED }}" == "true" ]]; then
            VERSION="${{ steps.check-version.outputs.VERSION }}"
            TAGS="$TAGS,${{ env.IMAGE_NAME }}:$VERSION"
            echo "Publishing with tags: latest, $VERSION"
          else
            echo "Publishing with tag: latest only"
          fi

          echo "TAGS=$TAGS" >> $GITHUB_OUTPUT

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          push: true
          platforms: linux/amd64,linux/arm64
          tags: ${{ steps.prep.outputs.TAGS }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

```

--------------------------------------------------------------------------------
/src/utils/sql-row-limiter.ts:
--------------------------------------------------------------------------------

```typescript
/**
 * Shared utility for applying row limits to SELECT queries only, using database-native LIMIT (or SQL Server TOP) clauses
 */
export class SQLRowLimiter {
  /**
   * Check if a SQL statement is a SELECT query that can benefit from row limiting
   * Only handles SELECT queries
   */
  static isSelectQuery(sql: string): boolean {
    const trimmed = sql.trim().toLowerCase();
    return trimmed.startsWith('select');
  }

  /**
   * Check if a SQL statement already has a LIMIT clause
   */
  static hasLimitClause(sql: string): boolean {
    // Simple regex to detect LIMIT clause - handles most common cases
    const limitRegex = /\blimit\s+\d+/i;
    return limitRegex.test(sql);
  }

  /**
   * Check if a SQL statement already has a TOP clause (SQL Server)
   */
  static hasTopClause(sql: string): boolean {
    // Simple regex to detect TOP clause - handles most common cases
    const topRegex = /\bselect\s+top\s+\d+/i;
    return topRegex.test(sql);
  }

  /**
   * Extract existing LIMIT value from SQL if present
   */
  static extractLimitValue(sql: string): number | null {
    const limitMatch = sql.match(/\blimit\s+(\d+)/i);
    if (limitMatch) {
      return parseInt(limitMatch[1], 10);
    }
    return null;
  }

  /**
   * Extract existing TOP value from SQL if present (SQL Server)
   */
  static extractTopValue(sql: string): number | null {
    const topMatch = sql.match(/\bselect\s+top\s+(\d+)/i);
    if (topMatch) {
      return parseInt(topMatch[1], 10);
    }
    return null;
  }

  /**
   * Add or modify LIMIT clause in a SQL statement
   */
  static applyLimitToQuery(sql: string, maxRows: number): string {
    const existingLimit = this.extractLimitValue(sql);
    
    if (existingLimit !== null) {
      // Use the minimum of existing limit and maxRows
      const effectiveLimit = Math.min(existingLimit, maxRows);
      return sql.replace(/\blimit\s+\d+/i, `LIMIT ${effectiveLimit}`);
    } else {
      // Add LIMIT clause to the end of the query
      // Handle semicolon at the end
      const trimmed = sql.trim();
      const hasSemicolon = trimmed.endsWith(';');
      const sqlWithoutSemicolon = hasSemicolon ? trimmed.slice(0, -1) : trimmed;
      
      return `${sqlWithoutSemicolon} LIMIT ${maxRows}${hasSemicolon ? ';' : ''}`;
    }
  }

  /**
   * Add or modify TOP clause in a SQL statement (SQL Server)
   */
  static applyTopToQuery(sql: string, maxRows: number): string {
    const existingTop = this.extractTopValue(sql);
    
    if (existingTop !== null) {
      // Use the minimum of existing top and maxRows
      const effectiveTop = Math.min(existingTop, maxRows);
      return sql.replace(/\bselect\s+top\s+\d+/i, `SELECT TOP ${effectiveTop}`);
    } else {
      // Add TOP clause after SELECT
      return sql.replace(/\bselect\s+/i, `SELECT TOP ${maxRows} `);
    }
  }

  /**
   * Apply maxRows limit to a SELECT query only
   */
  static applyMaxRows(sql: string, maxRows: number | undefined): string {
    if (!maxRows || !this.isSelectQuery(sql)) {
      return sql;
    }
    return this.applyLimitToQuery(sql, maxRows);
  }

  /**
   * Apply maxRows limit to a SELECT query using SQL Server TOP syntax
   */
  static applyMaxRowsForSQLServer(sql: string, maxRows: number | undefined): string {
    if (!maxRows || !this.isSelectQuery(sql)) {
      return sql;
    }
    return this.applyTopToQuery(sql, maxRows);
  }
}
```
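
Illustrative inputs and outputs: LIMIT is appended when absent, tightened (never loosened) when present, and SQL Server uses TOP instead:

```typescript
import { SQLRowLimiter } from "./sql-row-limiter.js";

SQLRowLimiter.applyMaxRows("SELECT * FROM employee;", 100);
// => "SELECT * FROM employee LIMIT 100;"

SQLRowLimiter.applyMaxRows("SELECT * FROM employee LIMIT 500", 100);
// => "SELECT * FROM employee LIMIT 100"

SQLRowLimiter.applyMaxRowsForSQLServer("SELECT name FROM employee", 10);
// => "SELECT TOP 10 name FROM employee"
```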

--------------------------------------------------------------------------------
/src/utils/response-formatter.ts:
--------------------------------------------------------------------------------

```typescript
/**
 * Response formatter utility for consistent API responses
 * Provides formatting for resources, tools, and prompts
 */

/**
 * Custom JSON replacer function to handle BigInt serialization
 * Converts BigInt values to plain decimal strings (e.g. 123n becomes "123")
 */
export function bigIntReplacer(_key: string, value: any): any {
  if (typeof value === 'bigint') {
    return value.toString();
  }
  return value;
}

/**
 * Create a success response with the given data
 */
export function formatSuccessResponse<T>(
  data: T,
  meta: Record<string, any> = {}
): {
  success: true;
  data: T;
  meta?: Record<string, any>;
} {
  return {
    success: true,
    data,
    ...(Object.keys(meta).length > 0 ? { meta } : {}),
  };
}

/**
 * Create an error response with the given message and code
 */
export function formatErrorResponse(
  error: string,
  code: string = "ERROR",
  details?: any
): {
  success: false;
  error: string;
  code: string;
  details?: any;
} {
  return {
    success: false,
    error,
    code,
    ...(details ? { details } : {}),
  };
}

/**
 * Create a tool error response object
 */
export function createToolErrorResponse(error: string, code: string = "ERROR", details?: any) {
  return {
    content: [
      {
        type: "text" as const,
        text: JSON.stringify(formatErrorResponse(error, code, details), bigIntReplacer, 2),
        mimeType: "application/json",
      },
    ],
    isError: true,
  };
}

/**
 * Create a tool success response object
 */
export function createToolSuccessResponse<T>(data: T, meta: Record<string, any> = {}) {
  return {
    content: [
      {
        type: "text" as const,
        text: JSON.stringify(formatSuccessResponse(data, meta), bigIntReplacer, 2),
        mimeType: "application/json",
      },
    ],
  };
}

/**
 * Create a resource error response object
 */
export function createResourceErrorResponse(
  uri: string,
  error: string,
  code: string = "ERROR",
  details?: any
) {
  return {
    contents: [
      {
        uri,
        text: JSON.stringify(formatErrorResponse(error, code, details), bigIntReplacer, 2),
        mimeType: "application/json",
      },
    ],
  };
}

/**
 * Create a resource success response object
 */
export function createResourceSuccessResponse<T>(
  uri: string,
  data: T,
  meta: Record<string, any> = {}
) {
  return {
    contents: [
      {
        uri,
        text: JSON.stringify(formatSuccessResponse(data, meta), bigIntReplacer, 2),
        mimeType: "application/json",
      },
    ],
  };
}

/**
 * Format a successful prompt response in the MCP format
 */
export function formatPromptSuccessResponse(
  text: string,
  references: string[] = []
): {
  messages: Array<{
    role: "assistant";
    content: {
      type: "text";
      text: string;
    };
  }>;
  references?: string[];
  _meta?: Record<string, unknown>;
  [key: string]: unknown;
} {
  return {
    messages: [
      {
        role: "assistant",
        content: {
          type: "text",
          text,
        },
      },
    ],
    ...(references.length > 0 ? { references } : {}),
  };
}

/**
 * Format an error prompt response in the MCP format
 */
export function formatPromptErrorResponse(
  error: string,
  code: string = "ERROR"
): {
  messages: Array<{
    role: "assistant";
    content: {
      type: "text";
      text: string;
    };
  }>;
  error: string;
  code: string;
  _meta?: Record<string, unknown>;
  [key: string]: unknown;
} {
  return {
    messages: [
      {
        role: "assistant",
        content: {
          type: "text",
          text: `Error: ${error}`,
        },
      },
    ],
    error,
    code,
  };
}

```
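
A small illustration of the BigInt handling: without the replacer, `JSON.stringify` throws on BigInt values; with it, they serialize as decimal strings:

```typescript
import { bigIntReplacer, formatSuccessResponse } from "./response-formatter.js";

JSON.stringify(formatSuccessResponse({ total: 9007199254740993n }), bigIntReplacer);
// => '{"success":true,"data":{"total":"9007199254740993"}}'
```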

--------------------------------------------------------------------------------
/src/tools/execute-sql.ts:
--------------------------------------------------------------------------------

```typescript
import { z } from "zod";
import { ConnectorManager } from "../connectors/manager.js";
import { createToolSuccessResponse, createToolErrorResponse } from "../utils/response-formatter.js";
import { isReadOnlyMode } from "../config/env.js";
import { allowedKeywords } from "../utils/allowed-keywords.js";
import { ConnectorType } from "../connectors/interface.js";

// Schema for execute_sql tool
export const executeSqlSchema = {
  sql: z.string().describe("SQL query or multiple SQL statements to execute (separated by semicolons)"),
};

/**
 * Split a SQL string into individual statements by splitting on semicolons.
 * Note: semicolons inside string literals or comments are not accounted for.
 * @param sql The SQL string to split
 * @returns Array of individual SQL statements
 */
function splitSQLStatements(sql: string): string[] {
  // Split by semicolon and filter out empty statements
  return sql.split(';')
    .map(statement => statement.trim())
    .filter(statement => statement.length > 0);
}

/**
 * Remove SQL comments from a query
 * @param sql The SQL query to clean
 * @returns The SQL query without comments
 */
function stripSQLComments(sql: string): string {
  // Remove single-line comments (-- comment)
  let cleaned = sql.split('\n').map(line => {
    const commentIndex = line.indexOf('--');
    return commentIndex >= 0 ? line.substring(0, commentIndex) : line;
  }).join('\n');
  
  // Remove multi-line comments (/* comment */)
  cleaned = cleaned.replace(/\/\*[\s\S]*?\*\//g, ' ');
  
  return cleaned.trim();
}

/**
 * Check if a SQL query is read-only based on its first keyword
 * @param sql The SQL query to check
 * @param connectorType The database type to check against
 * @returns True if the query is read-only (starts with allowed keywords)
 */
function isReadOnlySQL(sql: string, connectorType: ConnectorType): boolean {
  // Strip comments before analyzing
  const cleanedSQL = stripSQLComments(sql).toLowerCase();
  
  // If the statement is empty after removing comments, consider it read-only
  if (!cleanedSQL) {
    return true;
  }
  
  const firstWord = cleanedSQL.split(/\s+/)[0];
  
  // Get the appropriate allowed keywords list for this database type
  const keywordList = allowedKeywords[connectorType] || allowedKeywords.default || [];
  
  return keywordList.includes(firstWord);
}

/**
 * Check if all SQL statements in a multi-statement query are read-only
 * @param sql The SQL string (possibly containing multiple statements)
 * @param connectorType The database type to check against
 * @returns True if all statements are read-only
 */
function areAllStatementsReadOnly(sql: string, connectorType: ConnectorType): boolean {
  const statements = splitSQLStatements(sql);
  return statements.every(statement => isReadOnlySQL(statement, connectorType));
}

/**
 * execute_sql tool handler
 * Executes a SQL query and returns the results
 */
export async function executeSqlToolHandler({ sql }: { sql: string }, _extra: any) {
  const connector = ConnectorManager.getCurrentConnector();
  const executeOptions = ConnectorManager.getCurrentExecuteOptions();

  try {
    // Check if SQL is allowed based on readonly mode
    if (isReadOnlyMode() && !areAllStatementsReadOnly(sql, connector.id)) {
      return createToolErrorResponse(
        `Read-only mode is enabled. Only the following SQL operations are allowed: ${allowedKeywords[connector.id]?.join(", ") || "none"}`,
        "READONLY_VIOLATION"
      );
    }
    
    // Execute the SQL (single or multiple statements) if validation passed
    const result = await connector.executeSQL(sql, executeOptions);

    // Build response data
    const responseData = {
      rows: result.rows,
      count: result.rows.length,
    };

    return createToolSuccessResponse(responseData);
  } catch (error) {
    return createToolErrorResponse((error as Error).message, "EXECUTION_ERROR");
  }
}

```
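
When read-only mode is enabled and a statement fails the keyword check, the tool answers with the standard error envelope. An illustrative payload (the allowed keyword list varies by connector and is defined in allowed-keywords.ts):

```json
{
  "success": false,
  "error": "Read-only mode is enabled. Only the following SQL operations are allowed: ...",
  "code": "READONLY_VIOLATION"
}
```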

--------------------------------------------------------------------------------
/src/resources/procedures.ts:
--------------------------------------------------------------------------------

```typescript
import { ConnectorManager } from "../connectors/manager.js";
import {
  createResourceSuccessResponse,
  createResourceErrorResponse,
} from "../utils/response-formatter.js";

/**
 * Stored procedures/functions resource handler
 * Returns a list of all stored procedures/functions in the database or within a specific schema
 */
export async function proceduresResourceHandler(uri: URL, variables: any, _extra: any) {
  const connector = ConnectorManager.getCurrentConnector();

  // Extract the schema name from URL variables if present
  const schemaName =
    variables && variables.schemaName
      ? Array.isArray(variables.schemaName)
        ? variables.schemaName[0]
        : variables.schemaName
      : undefined;

  try {
    // If a schema name was provided, verify that it exists
    if (schemaName) {
      const availableSchemas = await connector.getSchemas();
      if (!availableSchemas.includes(schemaName)) {
        return createResourceErrorResponse(
          uri.href,
          `Schema '${schemaName}' does not exist or cannot be accessed`,
          "SCHEMA_NOT_FOUND"
        );
      }
    }

    // Get stored procedures with optional schema filter
    const procedureNames = await connector.getStoredProcedures(schemaName);

    // Prepare response data
    const responseData = {
      procedures: procedureNames,
      count: procedureNames.length,
      schema: schemaName,
    };

    // Use the utility to create a standardized response
    return createResourceSuccessResponse(uri.href, responseData);
  } catch (error) {
    return createResourceErrorResponse(
      uri.href,
      `Error retrieving stored procedures: ${(error as Error).message}`,
      "PROCEDURES_RETRIEVAL_ERROR"
    );
  }
}

/**
 * Stored procedure/function details resource handler
 * Returns details for a specific stored procedure/function
 */
export async function procedureDetailResourceHandler(uri: URL, variables: any, _extra: any) {
  const connector = ConnectorManager.getCurrentConnector();

  // Extract parameters from URL variables
  const schemaName =
    variables && variables.schemaName
      ? Array.isArray(variables.schemaName)
        ? variables.schemaName[0]
        : variables.schemaName
      : undefined;

  const procedureName =
    variables && variables.procedureName
      ? Array.isArray(variables.procedureName)
        ? variables.procedureName[0]
        : variables.procedureName
      : undefined;

  // Validate required parameters
  if (!procedureName) {
    return createResourceErrorResponse(uri.href, "Procedure name is required", "MISSING_PARAMETER");
  }

  try {
    // If a schema name was provided, verify that it exists
    if (schemaName) {
      const availableSchemas = await connector.getSchemas();
      if (!availableSchemas.includes(schemaName)) {
        return createResourceErrorResponse(
          uri.href,
          `Schema '${schemaName}' does not exist or cannot be accessed`,
          "SCHEMA_NOT_FOUND"
        );
      }
    }

    // Get procedure details
    const procedureDetails = await connector.getStoredProcedureDetail(procedureName, schemaName);

    // Prepare response data
    const responseData = {
      procedureName: procedureDetails.procedure_name,
      procedureType: procedureDetails.procedure_type,
      language: procedureDetails.language,
      parameters: procedureDetails.parameter_list,
      returnType: procedureDetails.return_type,
      definition: procedureDetails.definition,
      schema: schemaName,
    };

    // Use the utility to create a standardized response
    return createResourceSuccessResponse(uri.href, responseData);
  } catch (error) {
    return createResourceErrorResponse(
      uri.href,
      `Error retrieving procedure details: ${(error as Error).message}`,
      "PROCEDURE_DETAILS_ERROR"
    );
  }
}

```

--------------------------------------------------------------------------------
/src/utils/__tests__/safe-url.test.ts:
--------------------------------------------------------------------------------

```typescript
import { describe, it, expect } from 'vitest';
import { SafeURL } from '../safe-url.js';

describe('SafeURL', () => {
  it('should parse a simple DSN correctly', () => {
    const url = new SafeURL('postgres://localhost:5432/dbname');
    
    expect(url.protocol).toBe('postgres:');
    expect(url.hostname).toBe('localhost');
    expect(url.port).toBe('5432');
    expect(url.pathname).toBe('/dbname');
    expect(url.username).toBe('');
    expect(url.password).toBe('');
    expect(url.searchParams.size).toBe(0);
  });

  it('should parse a DSN with authentication correctly', () => {
    const url = new SafeURL('postgres://user:password@localhost:5432/dbname');
    
    expect(url.protocol).toBe('postgres:');
    expect(url.hostname).toBe('localhost');
    expect(url.port).toBe('5432');
    expect(url.pathname).toBe('/dbname');
    expect(url.username).toBe('user');
    expect(url.password).toBe('password');
    expect(url.searchParams.size).toBe(0);
  });

  it('should handle special characters in password correctly', () => {
    const url = new SafeURL('postgres://user:pass%23word@localhost:5432/dbname');
    
    expect(url.protocol).toBe('postgres:');
    expect(url.hostname).toBe('localhost');
    expect(url.port).toBe('5432');
    expect(url.pathname).toBe('/dbname');
    expect(url.username).toBe('user');
    expect(url.password).toBe('pass#word');
    expect(url.searchParams.size).toBe(0);
  });

  it('should handle unencoded special characters in password correctly', () => {
    const url = new SafeURL('postgres://user:pass#word@localhost:5432/dbname');
    
    expect(url.protocol).toBe('postgres:');
    expect(url.hostname).toBe('localhost');
    expect(url.port).toBe('5432');
    expect(url.pathname).toBe('/dbname');
    expect(url.username).toBe('user');
    expect(url.password).toBe('pass#word');
    expect(url.searchParams.size).toBe(0);
  });

  it('should parse query parameters correctly', () => {
    const url = new SafeURL('postgres://localhost:5432/dbname?sslmode=require&timeout=30');
    
    expect(url.protocol).toBe('postgres:');
    expect(url.hostname).toBe('localhost');
    expect(url.port).toBe('5432');
    expect(url.pathname).toBe('/dbname');
    expect(url.searchParams.size).toBe(2);
    expect(url.getSearchParam('sslmode')).toBe('require');
    expect(url.getSearchParam('timeout')).toBe('30');
  });

  it('should handle special characters in query parameters', () => {
    const url = new SafeURL('postgres://localhost:5432/dbname?param=value%20with%20spaces');
    
    expect(url.getSearchParam('param')).toBe('value with spaces');
  });

  it('should handle a DSN without a pathname', () => {
    const url = new SafeURL('postgres://localhost:5432');
    
    expect(url.protocol).toBe('postgres:');
    expect(url.hostname).toBe('localhost');
    expect(url.port).toBe('5432');
    expect(url.pathname).toBe('');
  });

  it('should handle both username and password with special characters', () => {
    const url = new SafeURL('postgres://user%40domain:pass%26word@localhost:5432/dbname');
    
    expect(url.username).toBe('user@domain');
    expect(url.password).toBe('pass&word');
  });

  it('should support the forEachSearchParam method', () => {
    const url = new SafeURL('postgres://localhost:5432/dbname?param1=value1&param2=value2');
    const params: Record<string, string> = {};
    
    url.forEachSearchParam((value, key) => {
      params[key] = value;
    });
    
    expect(Object.keys(params).length).toBe(2);
    expect(params['param1']).toBe('value1');
    expect(params['param2']).toBe('value2');
  });

  it('should throw an error for empty URLs', () => {
    expect(() => new SafeURL('')).toThrow('URL string cannot be empty');
  });
  
  it('should throw an error for URLs without a protocol', () => {
    expect(() => new SafeURL('localhost:5432/dbname')).toThrow('Invalid URL format: missing protocol');
  });
});
```

--------------------------------------------------------------------------------
/resources/employee-sqlite/employee.sql:
--------------------------------------------------------------------------------

```sql
--  Sample employee database 
--  See changelog table for details
--  Copyright (C) 2007,2008, MySQL AB
--  
--  Original data created by Fusheng Wang and Carlo Zaniolo
--  http://www.cs.aau.dk/TimeCenter/software.htm
--  http://www.cs.aau.dk/TimeCenter/Data/employeeTemporalDataSet.zip
-- 
--  Current schema by Giuseppe Maxia 
--  Data conversion from XML to relational by Patrick Crews
--  SQLite adaptation by Claude Code
-- 
-- This work is licensed under the 
-- Creative Commons Attribution-Share Alike 3.0 Unported License. 
-- To view a copy of this license, visit 
-- http://creativecommons.org/licenses/by-sa/3.0/ or send a letter to 
-- Creative Commons, 171 Second Street, Suite 300, San Francisco, 
-- California, 94105, USA.
-- 
--  DISCLAIMER
--  To the best of our knowledge, this data is fabricated, and
--  it does not correspond to real people. 
--  Any similarity to existing people is purely coincidental.
-- 

PRAGMA foreign_keys = ON;

SELECT 'CREATING DATABASE STRUCTURE' as 'INFO';

DROP TABLE IF EXISTS dept_emp;
DROP TABLE IF EXISTS dept_manager;
DROP TABLE IF EXISTS title;
DROP TABLE IF EXISTS salary;
DROP TABLE IF EXISTS employee;
DROP TABLE IF EXISTS department;
DROP VIEW IF EXISTS dept_emp_latest_date;
DROP VIEW IF EXISTS current_dept_emp;

CREATE TABLE employee (
    emp_no      INTEGER         NOT NULL,
    birth_date  DATE            NOT NULL,
    first_name  TEXT            NOT NULL,
    last_name   TEXT            NOT NULL,
    gender      TEXT            NOT NULL CHECK (gender IN ('M','F')),
    hire_date   DATE            NOT NULL,
    PRIMARY KEY (emp_no)
);

CREATE TABLE department (
    dept_no     TEXT            NOT NULL,
    dept_name   TEXT            NOT NULL,
    PRIMARY KEY (dept_no),
    UNIQUE      (dept_name)
);

CREATE TABLE dept_manager (
   emp_no       INTEGER         NOT NULL,
   dept_no      TEXT            NOT NULL,
   from_date    DATE            NOT NULL,
   to_date      DATE            NOT NULL,
   FOREIGN KEY (emp_no)  REFERENCES employee (emp_no)    ON DELETE CASCADE,
   FOREIGN KEY (dept_no) REFERENCES department (dept_no) ON DELETE CASCADE,
   PRIMARY KEY (emp_no,dept_no)
); 

CREATE TABLE dept_emp (
    emp_no      INTEGER         NOT NULL,
    dept_no     TEXT            NOT NULL,
    from_date   DATE            NOT NULL,
    to_date     DATE            NOT NULL,
    FOREIGN KEY (emp_no)  REFERENCES employee (emp_no)   ON DELETE CASCADE,
    FOREIGN KEY (dept_no) REFERENCES department (dept_no) ON DELETE CASCADE,
    PRIMARY KEY (emp_no,dept_no)
);

CREATE TABLE title (
    emp_no      INTEGER         NOT NULL,
    title       TEXT            NOT NULL,
    from_date   DATE            NOT NULL,
    to_date     DATE,
    FOREIGN KEY (emp_no) REFERENCES employee (emp_no) ON DELETE CASCADE,
    PRIMARY KEY (emp_no,title,from_date)
);

CREATE TABLE salary (
    emp_no      INTEGER         NOT NULL,
    amount      INTEGER         NOT NULL,
    from_date   DATE            NOT NULL,
    to_date     DATE            NOT NULL,
    FOREIGN KEY (emp_no) REFERENCES employee (emp_no) ON DELETE CASCADE,
    PRIMARY KEY (emp_no,from_date)
);

CREATE VIEW dept_emp_latest_date AS
    SELECT emp_no, MAX(from_date) AS from_date, MAX(to_date) AS to_date
    FROM dept_emp
    GROUP BY emp_no;

-- shows only the current department for each employee
CREATE VIEW current_dept_emp AS
    SELECT l.emp_no, dept_no, l.from_date, l.to_date
    FROM dept_emp d
        INNER JOIN dept_emp_latest_date l
        ON d.emp_no=l.emp_no AND d.from_date=l.from_date AND l.to_date = d.to_date;

SELECT 'LOADING department' as 'INFO';
.read load_department.sql
SELECT 'LOADING employee' as 'INFO';
.read load_employee.sql
SELECT 'LOADING dept_emp' as 'INFO';
.read load_dept_emp.sql
SELECT 'LOADING dept_manager' as 'INFO';
.read load_dept_manager.sql
SELECT 'LOADING title' as 'INFO';
.read load_title.sql
SELECT 'LOADING salary' as 'INFO';
.read load_salary1.sql
```
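
Note that `.read` is a sqlite3 shell dot-command, so this script is meant for the CLI (the demo loader in src/config/demo-loader.ts inlines the directives instead). A quick illustrative smoke test after loading:

```sql
-- One row per department with its head count
SELECT d.dept_name, COUNT(*) AS headcount
FROM department d
JOIN dept_emp de ON de.dept_no = d.dept_no
GROUP BY d.dept_name
ORDER BY headcount DESC;
```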

--------------------------------------------------------------------------------
/resources/employee-sqlite/test_employee_md5.sql:
--------------------------------------------------------------------------------

```sql
--  Sample employee database 
--  See changelog table for details
--  Copyright (C) 2007,2008, MySQL AB
--  
--  Original data created by Fusheng Wang and Carlo Zaniolo
--  http://www.cs.aau.dk/TimeCenter/software.htm
--  http://www.cs.aau.dk/TimeCenter/Data/employeeTemporalDataSet.zip
-- 
--  Current schema by Giuseppe Maxia 
--  Data conversion from XML to relational by Patrick Crews
--  SQLite adaptation by Claude Code
-- 
-- This work is licensed under the 
-- Creative Commons Attribution-Share Alike 3.0 Unported License. 
-- To view a copy of this license, visit 
-- http://creativecommons.org/licenses/by-sa/3.0/ or send a letter to 
-- Creative Commons, 171 Second Street, Suite 300, San Francisco, 
-- California, 94105, USA.
-- 
--  DISCLAIMER
--  To the best of our knowledge, this data is fabricated, and
--  it does not correspond to real people. 
--  Any similarity to existing people is purely coincidental.
-- 

SELECT 'TESTING INSTALLATION' as 'INFO';

DROP TABLE IF EXISTS expected_value;
DROP TABLE IF EXISTS found_value;

CREATE TABLE expected_value (
    table_name TEXT NOT NULL PRIMARY KEY,
    recs INTEGER NOT NULL,
    crc_md5 TEXT NOT NULL
);

CREATE TABLE found_value (
    table_name TEXT NOT NULL PRIMARY KEY,
    recs INTEGER NOT NULL,
    crc_md5 TEXT NOT NULL
);

INSERT INTO expected_value VALUES 
('employee',    1000, '595460127fb609c2b110b1796083e242'),
('department',     9, 'd1af5e170d2d1591d776d5638d71fc5f'),
('dept_manager',  16, '8ff425d5ad6dc56975998d1893b8dca9'),
('dept_emp',    1103, 'e302aa5b56a69b49e40eb0d60674addc'),
('title',       1470, 'ba77dd331ce00f76c1643a7d73cdcee6'),
('salary',      9488, '61f22cfece4d34f5bb94c9f05a3da3ef');

SELECT table_name, recs AS expected_record, crc_md5 AS expected_crc FROM expected_value;

DROP TABLE IF EXISTS tchecksum;
CREATE TABLE tchecksum (chk TEXT);

-- For SQLite, we need to use a different approach for MD5 calculation
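-- NOTE: stock SQLite has no built-in md5() function; running these checks
-- assumes a build or loadable extension that provides one.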
-- Insert employee checksums
INSERT INTO found_value
SELECT 'employee', COUNT(*), 
       (SELECT hex(md5(group_concat(emp_no||birth_date||first_name||last_name||gender||hire_date, '#')))
        FROM (SELECT * FROM employee ORDER BY emp_no))
FROM employee;

-- Insert department checksums
INSERT INTO found_value
SELECT 'department', COUNT(*),
       (SELECT hex(md5(group_concat(dept_no||dept_name, '#')))
        FROM (SELECT * FROM department ORDER BY dept_no))
FROM department;

-- Insert dept_manager checksums
INSERT INTO found_value
SELECT 'dept_manager', COUNT(*),
       (SELECT hex(md5(group_concat(dept_no||emp_no||from_date||to_date, '#')))
        FROM (SELECT * FROM dept_manager ORDER BY dept_no, emp_no))
FROM dept_manager;

-- Insert dept_emp checksums
INSERT INTO found_value
SELECT 'dept_emp', COUNT(*),
       (SELECT hex(md5(group_concat(dept_no||emp_no||from_date||to_date, '#')))
        FROM (SELECT * FROM dept_emp ORDER BY dept_no, emp_no))
FROM dept_emp;

-- Insert title checksums
INSERT INTO found_value
SELECT 'title', COUNT(*),
       (SELECT hex(md5(group_concat(emp_no||title||from_date||IFNULL(to_date,''), '#')))
        FROM (SELECT * FROM title ORDER BY emp_no, title, from_date))
FROM title;

-- Insert salary checksums
INSERT INTO found_value
SELECT 'salary', COUNT(*),
       (SELECT hex(md5(group_concat(emp_no||amount||from_date||to_date, '#')))
        FROM (SELECT * FROM salary ORDER BY emp_no, from_date, to_date))
FROM salary;

SELECT table_name, recs as 'found_records', crc_md5 as found_crc FROM found_value;

-- Compare expected vs found
SELECT  
    e.table_name, 
    CASE WHEN e.recs=f.recs THEN 'OK' ELSE 'not ok' END AS records_match, 
    CASE WHEN e.crc_md5=f.crc_md5 THEN 'ok' ELSE 'not ok' END AS crc_match 
FROM 
    expected_value e 
    JOIN found_value f USING (table_name);

-- Check for failures
SELECT 
    'CRC' as summary, 
    CASE WHEN (SELECT COUNT(*) FROM expected_value e JOIN found_value f USING(table_name) WHERE f.crc_md5 != e.crc_md5) = 0 
         THEN 'OK' ELSE 'FAIL' END as 'result'
UNION ALL
SELECT 
    'count', 
    CASE WHEN (SELECT COUNT(*) FROM expected_value e JOIN found_value f USING(table_name) WHERE f.recs != e.recs) = 0 
         THEN 'OK' ELSE 'FAIL' END;
```

--------------------------------------------------------------------------------
/src/utils/ssh-config-parser.ts:
--------------------------------------------------------------------------------

```typescript
import { readFileSync, existsSync } from 'fs';
import { homedir } from 'os';
import { join } from 'path';
import SSHConfig from 'ssh-config';
import type { SSHTunnelConfig } from '../types/ssh.js';

/**
 * Default SSH key paths to check if no IdentityFile is specified
 */
const DEFAULT_SSH_KEYS = [
  '~/.ssh/id_rsa',
  '~/.ssh/id_ed25519',
  '~/.ssh/id_ecdsa',
  '~/.ssh/id_dsa'
];

/**
 * Expand tilde (~) in file paths to home directory
 */
function expandTilde(filePath: string): string {
  if (filePath.startsWith('~/')) {
    return join(homedir(), filePath.substring(2));
  }
  return filePath;
}

/**
 * Check if a file exists
 */
function fileExists(filePath: string): boolean {
  try {
    return existsSync(expandTilde(filePath));
  } catch {
    return false;
  }
}

/**
 * Find the first existing SSH key from default locations
 */
function findDefaultSSHKey(): string | undefined {
  for (const keyPath of DEFAULT_SSH_KEYS) {
    if (fileExists(keyPath)) {
      return expandTilde(keyPath);
    }
  }
  return undefined;
}

/**
 * Parse SSH config file and extract configuration for a specific host
 * @param hostAlias The host alias to look up in the SSH config
 * @param configPath Path to SSH config file
 * @returns SSH tunnel configuration or null if not found
 */
export function parseSSHConfig(
  hostAlias: string,
  configPath: string
): SSHTunnelConfig | null {
  const sshConfigPath = configPath;

  // Check if SSH config file exists
  if (!existsSync(sshConfigPath)) {
    return null;
  }

  try {
    // Read and parse SSH config file
    const configContent = readFileSync(sshConfigPath, 'utf8');
    const config = SSHConfig.parse(configContent);

    // Find configuration for the specified host
    const hostConfig = config.compute(hostAlias);
    
    // Check if we have a valid config (not just Include directives)
    if (!hostConfig || (!hostConfig.HostName && !hostConfig.User)) {
      return null;
    }

    // Extract SSH configuration parameters
    const sshConfig: Partial<SSHTunnelConfig> = {};

    // Host (required)
    if (hostConfig.HostName) {
      sshConfig.host = hostConfig.HostName;
    } else {
      // If no HostName specified, use the host alias itself
      sshConfig.host = hostAlias;
    }

    // Port (optional, default will be 22)
    if (hostConfig.Port) {
      sshConfig.port = parseInt(hostConfig.Port, 10);
    }

    // User (required)
    if (hostConfig.User) {
      sshConfig.username = hostConfig.User;
    }

    // IdentityFile (private key)
    if (hostConfig.IdentityFile) {
      // SSH config can have multiple IdentityFile entries, take the first one
      const identityFile = Array.isArray(hostConfig.IdentityFile) 
        ? hostConfig.IdentityFile[0] 
        : hostConfig.IdentityFile;
      
      const expandedPath = expandTilde(identityFile);
      if (fileExists(expandedPath)) {
        sshConfig.privateKey = expandedPath;
      }
    }

    // If no IdentityFile specified or found, try default SSH keys
    if (!sshConfig.privateKey) {
      const defaultKey = findDefaultSSHKey();
      if (defaultKey) {
        sshConfig.privateKey = defaultKey;
      }
    }

    // ProxyJump support could be added in the future if needed
    // Currently, we'll log a warning if ProxyJump is detected
    if (hostConfig.ProxyJump || hostConfig.ProxyCommand) {
      console.error('Warning: ProxyJump/ProxyCommand in SSH config is not yet supported by DBHub');
    }

    // Validate that we have minimum required fields
    if (!sshConfig.host || !sshConfig.username) {
      return null;
    }

    return sshConfig as SSHTunnelConfig;
  } catch (error) {
    console.error(`Error parsing SSH config: ${error instanceof Error ? error.message : String(error)}`);
    return null;
  }
}

/**
 * Check if a string looks like an SSH host alias (not an IP or domain)
 * This is a heuristic to determine if we should look up the host in SSH config
 */
export function looksLikeSSHAlias(host: string): boolean {
  // If it contains dots, it's likely a domain or IP
  if (host.includes('.')) {
    return false;
  }
  
  // If it's all numbers (with possible colons for IPv6), it's likely an IP
  if (/^[\d:]+$/.test(host)) {
    return false;
  }
  
  // Check for IPv6 addresses with hex characters
  if (/^[0-9a-fA-F:]+$/.test(host) && host.includes(':')) {
    return false;
  }
  
  // Otherwise, treat it as a potential SSH alias
  return true;
}
```
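
A minimal sketch of the intended call pattern (host alias and config path are illustrative):

```typescript
import { homedir } from "os";
import { join } from "path";
import { parseSSHConfig, looksLikeSSHAlias } from "./ssh-config-parser.js";

const host = "mybastion";
if (looksLikeSSHAlias(host)) {
  // Returns null when the alias is absent or lacks a usable HostName/User.
  const tunnelConfig = parseSSHConfig(host, join(homedir(), ".ssh", "config"));
  console.log(tunnelConfig);
}
```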

--------------------------------------------------------------------------------
/src/config/__tests__/ssh-config-integration.test.ts:
--------------------------------------------------------------------------------

```typescript
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
import { resolveSSHConfig } from '../env.js';
import { homedir } from 'os';
import { join } from 'path';
import * as sshConfigParser from '../../utils/ssh-config-parser.js';

// Mock the ssh-config-parser module
vi.mock('../../utils/ssh-config-parser.js', () => ({
  parseSSHConfig: vi.fn(),
  looksLikeSSHAlias: vi.fn()
}));

describe('SSH Config Integration', () => {
  let originalArgs: string[];
  
  beforeEach(() => {
    // Save original values
    originalArgs = process.argv;
    
    // Clear mocks
    vi.clearAllMocks();
  });
  
  afterEach(() => {
    // Restore original values
    process.argv = originalArgs;
    
    // Clear any environment variables
    delete process.env.SSH_HOST;
    delete process.env.SSH_USER;
    delete process.env.SSH_PORT;
    delete process.env.SSH_KEY;
    delete process.env.SSH_PASSWORD;
  });
  
  it('should resolve SSH config from host alias', () => {
    // Mock the SSH config parser
    vi.mocked(sshConfigParser.looksLikeSSHAlias).mockReturnValue(true);
    vi.mocked(sshConfigParser.parseSSHConfig).mockImplementation((hostAlias: string, configPath: string) => ({
      host: 'bastion.example.com',
      username: 'ubuntu',
      port: 2222,
      privateKey: '/home/user/.ssh/id_rsa'
    }));
    
    // Simulate command line args
    process.argv = ['node', 'index.js', '--ssh-host=mybastion'];
    
    const result = resolveSSHConfig();
    
    expect(result).not.toBeNull();
    expect(result?.config).toMatchObject({
      host: 'bastion.example.com',
      username: 'ubuntu',
      port: 2222,
      privateKey: '/home/user/.ssh/id_rsa'
    });
    expect(result?.source).toContain('SSH config for host \'mybastion\'');
  });
  
  it('should allow command line to override SSH config values', () => {
    // Mock the SSH config parser
    vi.mocked(sshConfigParser.looksLikeSSHAlias).mockReturnValue(true);
    vi.mocked(sshConfigParser.parseSSHConfig).mockImplementation((hostAlias: string, configPath: string) => ({
      host: 'bastion.example.com',
      username: 'ubuntu',
      port: 2222,
      privateKey: '/home/user/.ssh/id_rsa'
    }));
    
    // Simulate command line args with override
    process.argv = ['node', 'index.js', '--ssh-host=mybastion', '--ssh-user=override-user'];
    
    const result = resolveSSHConfig();
    
    expect(result).not.toBeNull();
    expect(result?.config).toMatchObject({
      host: 'bastion.example.com',
      username: 'override-user', // Command line overrides config
      port: 2222,
      privateKey: '/home/user/.ssh/id_rsa'
    });
  });
  
  it('should work with environment variables', () => {
    // Mock the SSH config parser
    vi.mocked(sshConfigParser.looksLikeSSHAlias).mockReturnValue(true);
    vi.mocked(sshConfigParser.parseSSHConfig).mockImplementation((hostAlias: string, configPath: string) => ({
      host: 'bastion.example.com',
      username: 'ubuntu',
      port: 2222,
      privateKey: '/home/user/.ssh/id_rsa'
    }));
    
    process.env.SSH_HOST = 'mybastion';
    
    const result = resolveSSHConfig();
    
    expect(result).not.toBeNull();
    expect(result?.config).toMatchObject({
      host: 'bastion.example.com',
      username: 'ubuntu',
      port: 2222,
      privateKey: '/home/user/.ssh/id_rsa'
    });
  });
  
  it('should not use SSH config for direct hostnames', () => {
    // Mock the SSH config parser
    vi.mocked(sshConfigParser.looksLikeSSHAlias).mockReturnValue(false);
    
    process.argv = ['node', 'index.js', '--ssh-host=direct.example.com', '--ssh-user=myuser', '--ssh-password=mypass'];
    
    const result = resolveSSHConfig();
    
    expect(result).not.toBeNull();
    expect(result?.config).toMatchObject({
      host: 'direct.example.com',
      username: 'myuser',
      password: 'mypass'
    });
    expect(result?.source).not.toContain('SSH config');
    expect(sshConfigParser.parseSSHConfig).not.toHaveBeenCalled();
  });
  
  it('should require SSH user when only host is provided', () => {
    // Mock the SSH config parser to return null (no config found)
    vi.mocked(sshConfigParser.looksLikeSSHAlias).mockReturnValue(true);
    vi.mocked(sshConfigParser.parseSSHConfig).mockImplementation((hostAlias: string, configPath: string) => null);
    
    process.argv = ['node', 'index.js', '--ssh-host=unknown-host'];
    
    expect(() => resolveSSHConfig()).toThrow('SSH tunnel configuration requires at least --ssh-host and --ssh-user');
  });
});
```

--------------------------------------------------------------------------------
/src/utils/safe-url.ts:
--------------------------------------------------------------------------------

```typescript
/**
 * SafeURL utility
 * 
 * Provides a safer alternative to URL constructor for database connections
 * that may contain special characters in passwords or other parts
 */

/**
 * Interface defining the structure of a URL parser
 * that can handle special characters in connection strings
 */
export interface ISafeURL {
  protocol: string;
  hostname: string;
  port: string;
  pathname: string;
  username: string;
  password: string;
  searchParams: Map<string, string>;
  
  getSearchParam(name: string): string | null;
  forEachSearchParam(callback: (value: string, key: string) => void): void;
}

/**
 * SafeURL class implements a parser for handling DSN strings
 * with special characters that might break the standard URL constructor
 */
export class SafeURL implements ISafeURL {
  protocol: string;
  hostname: string;
  port: string;
  pathname: string;
  username: string;
  password: string;
  searchParams: Map<string, string>;

  /**
   * Parse a URL and handle special characters in passwords
   * This is a safe alternative to the URL constructor
   * 
   * @param urlString - The DSN string to parse
   */
  constructor(urlString: string) {
    // Initialize with defaults
    this.protocol = '';
    this.hostname = '';
    this.port = '';
    this.pathname = '';
    this.username = '';
    this.password = '';
    this.searchParams = new Map<string, string>();

    // Validate URL string
    if (!urlString || urlString.trim() === '') {
      throw new Error('URL string cannot be empty');
    }

    try {
      // Extract protocol
      const protocolSeparator: number = urlString.indexOf('://');
      if (protocolSeparator !== -1) {
        this.protocol = urlString.substring(0, protocolSeparator + 1); // includes the colon
        urlString = urlString.substring(protocolSeparator + 3); // rest after ://
      } else {
        throw new Error('Invalid URL format: missing protocol (e.g., "mysql://")');
      }

      // Extract query params if any
      const questionMarkIndex: number = urlString.indexOf('?');
      let queryParams: string = '';
      if (questionMarkIndex !== -1) {
        queryParams = urlString.substring(questionMarkIndex + 1);
        urlString = urlString.substring(0, questionMarkIndex);

        // Parse query parameters, splitting on the first '=' so values
        // that themselves contain '=' (e.g. base64 tokens) are kept intact
        queryParams.split('&').forEach(pair => {
          const eqIndex: number = pair.indexOf('=');
          if (eqIndex > 0) {
            const key: string = pair.substring(0, eqIndex);
            const value: string = pair.substring(eqIndex + 1);
            this.searchParams.set(key, decodeURIComponent(value));
          }
        });
      }

      // Extract authentication. Use the last '@' before the path so that
      // passwords containing a literal '@' still parse correctly.
      const pathStart: number = urlString.indexOf('/');
      const authorityEnd: number = pathStart === -1 ? urlString.length : pathStart;
      const atIndex: number = urlString.lastIndexOf('@', authorityEnd - 1);
      if (atIndex !== -1) {
        const auth: string = urlString.substring(0, atIndex);
        urlString = urlString.substring(atIndex + 1);

        // Split into username and password
        const colonIndex: number = auth.indexOf(':');
        if (colonIndex !== -1) {
          this.username = auth.substring(0, colonIndex);
          this.password = auth.substring(colonIndex + 1);
          
          // Decode username and password
          this.username = decodeURIComponent(this.username);
          this.password = decodeURIComponent(this.password);
        } else {
          this.username = auth;
        }
      }

      // Extract pathname
      const pathSeparatorIndex: number = urlString.indexOf('/');
      if (pathSeparatorIndex !== -1) {
        this.pathname = urlString.substring(pathSeparatorIndex);
        urlString = urlString.substring(0, pathSeparatorIndex);
      }

      // Extract hostname and port
      const colonIndex: number = urlString.indexOf(':');
      if (colonIndex !== -1) {
        this.hostname = urlString.substring(0, colonIndex);
        this.port = urlString.substring(colonIndex + 1);
      } else {
        this.hostname = urlString;
      }
      
      // Additional validation
      if (this.protocol === '') {
        throw new Error('Invalid URL: protocol is required');
      }
    } catch (error) {
      throw new Error(`Failed to parse URL: ${error instanceof Error ? error.message : String(error)}`);
    }
  }

  /**
   * Helper method to safely get a parameter from query string
   * 
   * @param name - The parameter name to retrieve
   * @returns The parameter value or null if not found
   */
  getSearchParam(name: string): string | null {
    return this.searchParams.has(name) ? this.searchParams.get(name) as string : null;
  }

  /**
   * Helper method to iterate over all parameters
   * 
   * @param callback - Function to call for each parameter
   */
  forEachSearchParam(callback: (value: string, key: string) => void): void {
    this.searchParams.forEach((value, key) => callback(value, key));
  }
}

```
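
A short sketch of `SafeURL` on a DSN whose password would need care with the standard `URL` constructor; the credentials are made up:

```typescript
import { SafeURL } from './safe-url.js';

const url = new SafeURL('postgres://dbuser:p%23ss@db.internal:5432/app?sslmode=disable');

console.log(url.protocol);                  // "postgres:"
console.log(url.hostname, url.port);        // "db.internal" "5432"
console.log(url.password);                  // "p#ss" (percent-decoded)
console.log(url.pathname);                  // "/app"
console.log(url.getSearchParam('sslmode')); // "disable"
console.log(url.getSearchParam('missing')); // null
```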

--------------------------------------------------------------------------------
/src/utils/ssh-tunnel.ts:
--------------------------------------------------------------------------------

```typescript
import { Client, ConnectConfig } from 'ssh2';
import { readFileSync } from 'fs';
import { Server, createServer } from 'net';
import type { SSHTunnelConfig, SSHTunnelOptions, SSHTunnelInfo } from '../types/ssh.js';

/**
 * SSH Tunnel implementation for secure database connections
 */
export class SSHTunnel {
  private sshClient: Client | null = null;
  private localServer: Server | null = null;
  private tunnelInfo: SSHTunnelInfo | null = null;
  private isConnected: boolean = false;

  /**
   * Establish an SSH tunnel
   * @param config SSH connection configuration
   * @param options Tunnel options including target host and port
   * @returns Promise resolving to tunnel information including local port
   */
  async establish(
    config: SSHTunnelConfig, 
    options: SSHTunnelOptions
  ): Promise<SSHTunnelInfo> {
    if (this.isConnected) {
      throw new Error('SSH tunnel is already established');
    }

    return new Promise((resolve, reject) => {
      this.sshClient = new Client();

      // Build SSH connection config
      const sshConfig: ConnectConfig = {
        host: config.host,
        port: config.port || 22,
        username: config.username,
      };

      // Configure authentication
      if (config.password) {
        sshConfig.password = config.password;
      } else if (config.privateKey) {
        try {
          const privateKey = readFileSync(config.privateKey);
          sshConfig.privateKey = privateKey;
          if (config.passphrase) {
            sshConfig.passphrase = config.passphrase;
          }
        } catch (error) {
          reject(new Error(`Failed to read private key file: ${error instanceof Error ? error.message : String(error)}`));
          return;
        }
      } else {
        reject(new Error('Either password or privateKey must be provided for SSH authentication'));
        return;
      }

      // Handle SSH connection errors
      this.sshClient.on('error', (err) => {
        this.cleanup();
        reject(new Error(`SSH connection error: ${err.message}`));
      });

      // When SSH connection is ready, create the tunnel
      this.sshClient.on('ready', () => {
        console.error('SSH connection established');

        // Create local server for the tunnel
        this.localServer = createServer((localSocket) => {
          this.sshClient!.forwardOut(
            '127.0.0.1',
            0,
            options.targetHost,
            options.targetPort,
            (err, stream) => {
              if (err) {
                console.error('SSH forward error:', err);
                localSocket.end();
                return;
              }

              // Pipe data between local socket and SSH stream
              localSocket.pipe(stream).pipe(localSocket);

              // Handle stream errors
              stream.on('error', (err) => {
                console.error('SSH stream error:', err);
                localSocket.end();
              });

              localSocket.on('error', (err) => {
                console.error('Local socket error:', err);
                stream.end();
              });
            }
          );
        });

        // Listen on local port
        const localPort = options.localPort || 0;
        this.localServer.listen(localPort, '127.0.0.1', () => {
          const address = this.localServer!.address();
          if (!address || typeof address === 'string') {
            this.cleanup();
            reject(new Error('Failed to get local server address'));
            return;
          }

          this.tunnelInfo = {
            localPort: address.port,
            targetHost: options.targetHost,
            targetPort: options.targetPort,
          };

          this.isConnected = true;
          console.error(`SSH tunnel established: localhost:${address.port} -> ${options.targetHost}:${options.targetPort}`);
          resolve(this.tunnelInfo);
        });

        // Handle local server errors
        this.localServer.on('error', (err) => {
          this.cleanup();
          reject(new Error(`Local server error: ${err.message}`));
        });
      });

      // Connect to SSH server
      this.sshClient.connect(sshConfig);
    });
  }

  /**
   * Close the SSH tunnel and clean up resources
   */
  async close(): Promise<void> {
    if (!this.isConnected) {
      return;
    }

    return new Promise((resolve) => {
      this.cleanup();
      this.isConnected = false;
      console.error('SSH tunnel closed');
      resolve();
    });
  }

  /**
   * Clean up resources
   */
  private cleanup(): void {
    if (this.localServer) {
      this.localServer.close();
      this.localServer = null;
    }

    if (this.sshClient) {
      this.sshClient.end();
      this.sshClient = null;
    }

    this.tunnelInfo = null;
  }

  /**
   * Get current tunnel information
   */
  getTunnelInfo(): SSHTunnelInfo | null {
    return this.tunnelInfo;
  }

  /**
   * Check if tunnel is connected
   */
  getIsConnected(): boolean {
    return this.isConnected;
  }
}
```
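
A hedged end-to-end sketch of the tunnel lifecycle; the bastion host, key path, and database coordinates are placeholders:

```typescript
import { SSHTunnel } from './ssh-tunnel.js';

async function demo(): Promise<void> {
  const tunnel = new SSHTunnel();
  try {
    // localPort is omitted, so the OS assigns a free ephemeral port
    const info = await tunnel.establish(
      { host: 'bastion.example.com', username: 'ubuntu', privateKey: '/home/me/.ssh/id_rsa' },
      { targetHost: 'db.internal', targetPort: 5432 }
    );
    // A database client would now connect to 127.0.0.1:<info.localPort>
    console.error(`Tunnel ready on local port ${info.localPort}`);
  } finally {
    await tunnel.close();
  }
}
```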

--------------------------------------------------------------------------------
/src/utils/__tests__/ssh-config-parser.test.ts:
--------------------------------------------------------------------------------

```typescript
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { parseSSHConfig, looksLikeSSHAlias } from '../ssh-config-parser.js';
import { mkdtempSync, writeFileSync, rmSync } from 'fs';
import { tmpdir } from 'os';
import { join } from 'path';

describe('SSH Config Parser', () => {
  let tempDir: string;
  let configPath: string;

  beforeEach(() => {
    // Create a temporary directory for test config files
    tempDir = mkdtempSync(join(tmpdir(), 'dbhub-ssh-test-'));
    configPath = join(tempDir, 'config');
  });

  afterEach(() => {
    // Clean up temporary directory
    rmSync(tempDir, { recursive: true });
  });

  describe('parseSSHConfig', () => {
    it('should parse basic SSH config', () => {
      const configContent = `
Host myserver
  HostName 192.168.1.100
  User johndoe
  Port 2222
`;
      writeFileSync(configPath, configContent);

      const result = parseSSHConfig('myserver', configPath);
      expect(result).toEqual({
        host: '192.168.1.100',
        username: 'johndoe',
        port: 2222
      });
    });

    it('should handle identity file', () => {
      const identityPath = join(tempDir, 'id_rsa');
      writeFileSync(identityPath, 'fake-key-content');
      
      const configContent = `
Host dev-server
  HostName dev.example.com
  User developer
  IdentityFile ${identityPath}
`;
      writeFileSync(configPath, configContent);

      const result = parseSSHConfig('dev-server', configPath);
      expect(result).toEqual({
        host: 'dev.example.com',
        username: 'developer',
        privateKey: identityPath
      });
    });

    it('should handle multiple identity files and use the first one', () => {
      const identityPath1 = join(tempDir, 'id_rsa');
      const identityPath2 = join(tempDir, 'id_ed25519');
      writeFileSync(identityPath1, 'fake-key-1');
      writeFileSync(identityPath2, 'fake-key-2');
      
      const configContent = `
Host multi-key
  HostName multi.example.com
  User multiuser
  IdentityFile ${identityPath1}
  IdentityFile ${identityPath2}
`;
      writeFileSync(configPath, configContent);

      const result = parseSSHConfig('multi-key', configPath);
      expect(result?.privateKey).toBe(identityPath1);
    });

    it('should handle wildcard patterns', () => {
      const configContent = `
Host *.example.com
  User defaultuser
  Port 2222

Host prod.example.com
  HostName 10.0.0.100
`;
      writeFileSync(configPath, configContent);

      const result = parseSSHConfig('prod.example.com', configPath);
      expect(result).toEqual({
        host: '10.0.0.100',
        username: 'defaultuser',
        port: 2222
      });
    });

    it('should use host alias as hostname if HostName not specified', () => {
      const configContent = `
Host myalias
  User testuser
`;
      writeFileSync(configPath, configContent);

      const result = parseSSHConfig('myalias', configPath);
      expect(result).toEqual({
        host: 'myalias',
        username: 'testuser'
      });
    });

    it('should return null for non-existent host', () => {
      const configContent = `
Host myserver
  HostName 192.168.1.100
  User johndoe
`;
      writeFileSync(configPath, configContent);

      const result = parseSSHConfig('nonexistent', configPath);
      expect(result).toBeNull();
    });

    it('should return null if config file does not exist', () => {
      const result = parseSSHConfig('myserver', '/non/existent/path');
      expect(result).toBeNull();
    });

    it('should return null if required fields are missing', () => {
      const configContent = `
Host incomplete
  HostName 192.168.1.100
`;
      writeFileSync(configPath, configContent);

      const result = parseSSHConfig('incomplete', configPath);
      expect(result).toBeNull();
    });

    it('should handle tilde expansion in identity file', () => {
      // Mock a key file that would exist in the home directory
      const mockKeyPath = join(tempDir, 'mock_id_rsa');
      writeFileSync(mockKeyPath, 'fake-key');
      
      const configContent = `
Host tilde-test
  HostName tilde.example.com
  User tildeuser
  IdentityFile ${mockKeyPath}
`;
      writeFileSync(configPath, configContent);

      const result = parseSSHConfig('tilde-test', configPath);
      expect(result?.privateKey).toBe(mockKeyPath);
    });
  });

  describe('looksLikeSSHAlias', () => {
    it('should return true for simple hostnames', () => {
      expect(looksLikeSSHAlias('myserver')).toBe(true);
      expect(looksLikeSSHAlias('dev-box')).toBe(true);
      expect(looksLikeSSHAlias('prod_server')).toBe(true);
    });

    it('should return false for domains', () => {
      expect(looksLikeSSHAlias('example.com')).toBe(false);
      expect(looksLikeSSHAlias('sub.example.com')).toBe(false);
      expect(looksLikeSSHAlias('my.local.dev')).toBe(false);
    });

    it('should return false for IP addresses', () => {
      expect(looksLikeSSHAlias('192.168.1.1')).toBe(false);
      expect(looksLikeSSHAlias('10.0.0.1')).toBe(false);
      expect(looksLikeSSHAlias('::1')).toBe(false);
      expect(looksLikeSSHAlias('2001:db8::1')).toBe(false);
    });
  });
});
```

--------------------------------------------------------------------------------
/src/connectors/manager.ts:
--------------------------------------------------------------------------------

```typescript
import { Connector, ConnectorType, ConnectorRegistry, ExecuteOptions } from "./interface.js";
import { SSHTunnel } from "../utils/ssh-tunnel.js";
import { resolveSSHConfig, resolveMaxRows } from "../config/env.js";
import type { SSHTunnelConfig } from "../types/ssh.js";

// Singleton instance for global access
let managerInstance: ConnectorManager | null = null;

/**
 * Manages database connectors and provides a unified interface to work with them
 */
export class ConnectorManager {
  private activeConnector: Connector | null = null;
  private connected = false;
  private sshTunnel: SSHTunnel | null = null;
  private originalDSN: string | null = null;
  private maxRows: number | null = null;

  constructor() {
    if (!managerInstance) {
      managerInstance = this;
    }
    
    // Initialize the max rows limit from resolved configuration
    const maxRowsData = resolveMaxRows();
    if (maxRowsData) {
      this.maxRows = maxRowsData.maxRows;
      console.error(`Max rows limit: ${this.maxRows} (from ${maxRowsData.source})`);
    }
  }

  /**
   * Initialize and connect to the database using a DSN
   */
  async connectWithDSN(dsn: string, initScript?: string): Promise<void> {
    // Store original DSN for reference
    this.originalDSN = dsn;
    
    // Check if SSH tunnel is needed
    const sshConfig = resolveSSHConfig();
    let actualDSN = dsn;
    
    if (sshConfig) {
      console.error(`SSH tunnel configuration loaded from ${sshConfig.source}`);
      
      // Parse DSN to get database host and port
      const url = new URL(dsn);
      const targetHost = url.hostname;
      const targetPort = parseInt(url.port) || this.getDefaultPort(dsn);
      
      // Create and establish SSH tunnel
      this.sshTunnel = new SSHTunnel();
      const tunnelInfo = await this.sshTunnel.establish(sshConfig.config, {
        targetHost,
        targetPort,
      });
      
      // Update DSN to use local tunnel endpoint
      url.hostname = '127.0.0.1';
      url.port = tunnelInfo.localPort.toString();
      actualDSN = url.toString();
      
      console.error(`Database connection will use SSH tunnel through localhost:${tunnelInfo.localPort}`);
    }

    // First try to find a connector that can handle this DSN
    let connector = ConnectorRegistry.getConnectorForDSN(actualDSN);

    if (!connector) {
      throw new Error(`No connector found that can handle the DSN: ${actualDSN}`);
    }

    this.activeConnector = connector;

    // Connect to the database through tunnel if applicable
    await this.activeConnector.connect(actualDSN, initScript);
    this.connected = true;
  }

  /**
   * Initialize and connect to the database using a specific connector type
   */
  async connectWithType(connectorType: ConnectorType, dsn?: string): Promise<void> {
    // Get the connector from the registry
    const connector = ConnectorRegistry.getConnector(connectorType);

    if (!connector) {
      throw new Error(`Connector "${connectorType}" not found`);
    }

    this.activeConnector = connector;

    // Use provided DSN or get sample DSN
    const connectionString = dsn || connector.dsnParser.getSampleDSN();

    // Connect to the database
    await this.activeConnector.connect(connectionString);
    this.connected = true;
  }

  /**
   * Close the database connection
   */
  async disconnect(): Promise<void> {
    if (this.activeConnector && this.connected) {
      await this.activeConnector.disconnect();
      this.connected = false;
    }
    
    // Close SSH tunnel if it exists
    if (this.sshTunnel) {
      await this.sshTunnel.close();
      this.sshTunnel = null;
    }
    
    this.originalDSN = null;
  }

  /**
   * Get the active connector
   */
  getConnector(): Connector {
    if (!this.activeConnector) {
      throw new Error("No active connector. Call connectWithDSN() or connectWithType() first.");
    }
    return this.activeConnector;
  }

  /**
   * Check if there's an active connection
   */
  isConnected(): boolean {
    return this.connected;
  }

  /**
   * Get all available connector types
   */
  static getAvailableConnectors(): ConnectorType[] {
    return ConnectorRegistry.getAvailableConnectors();
  }

  /**
   * Get sample DSNs for all available connectors
   */
  static getAllSampleDSNs(): { [key in ConnectorType]?: string } {
    return ConnectorRegistry.getAllSampleDSNs();
  }

  /**
   * Get the current active connector instance
   * This is used by resource and tool handlers
   */
  static getCurrentConnector(): Connector {
    if (!managerInstance) {
      throw new Error("ConnectorManager not initialized");
    }
    return managerInstance.getConnector();
  }

  /**
   * Get execute options for SQL execution
   */
  getExecuteOptions(): ExecuteOptions {
    const options: ExecuteOptions = {};
    if (this.maxRows !== null) {
      options.maxRows = this.maxRows;
    }
    return options;
  }

  /**
   * Get the current execute options
   * This is used by tool handlers
   */
  static getCurrentExecuteOptions(): ExecuteOptions {
    if (!managerInstance) {
      throw new Error("ConnectorManager not initialized");
    }
    return managerInstance.getExecuteOptions();
  }
  
  /**
   * Get default port for a database based on DSN protocol
   */
  private getDefaultPort(dsn: string): number {
    if (dsn.startsWith('postgres://') || dsn.startsWith('postgresql://')) {
      return 5432;
    } else if (dsn.startsWith('mysql://') || dsn.startsWith('mariadb://')) {
      return 3306;
    } else if (dsn.startsWith('sqlserver://')) {
      return 1433;
    }
    // SQLite doesn't use ports
    return 0;
  }
}

```
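
A minimal sketch of the manager's lifecycle, assuming a connector for the DSN's scheme has already been registered (DBHub does this at startup); the DSN is illustrative:

```typescript
import { ConnectorManager } from './manager.js';

async function run(): Promise<void> {
  const manager = new ConnectorManager();
  await manager.connectWithDSN('sqlite:///:memory:');

  // Tool and resource handlers reach the same instance via the static accessors
  const connector = ConnectorManager.getCurrentConnector();
  const result = await connector.executeSQL('SELECT 1 AS one', manager.getExecuteOptions());
  console.error(result.rows);

  await manager.disconnect();
}
```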

--------------------------------------------------------------------------------
/.github/workflows/npm-publish.yml:
--------------------------------------------------------------------------------

```yaml
# Workflow for publishing the DBHub package to npm
# This workflow has two trigger modes:
#
# 1. Manual trigger (workflow_dispatch):
#    - Allows manually specifying version and tag
#    - Useful for deliberate releases
#
# 2. Automatic trigger (on push to main branch that modifies package.json):
#    - Detects if the version has changed
#    - Automatically determines the appropriate npm tag based on version format
#    - Skips publishing if the version already exists on npm

name: Publish to npm

on:
  # Manual trigger with customizable version and tag
  workflow_dispatch:
    inputs:
      version:
        description: "Version to publish (e.g., 0.1.0, 0.2.0-beta)"
        required: false
        default: ""
      tag:
        description: "NPM tag (e.g., latest, dev)"
        required: false
        default: "dev"
  
  # Automatic trigger when package.json changes in main branch
  push:
    branches:
      - main
    paths:
      - 'package.json'

jobs:
  build-and-publish:
    runs-on: ubuntu-latest
    steps:
      # Checkout the repository to get access to the code
      - name: Checkout repository
        uses: actions/checkout@v4

      # Set up Node.js with npm registry configuration
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: "22"
          registry-url: "https://registry.npmjs.org/" # Use the public npm registry
          scope: "@bytebase"                          # Set the npm scope for publishing

      # Install pnpm for faster and more reliable package management
      - name: Install pnpm
        uses: pnpm/action-setup@v3
        with:
          version: latest

      # Install project dependencies
      - name: Install dependencies
        run: pnpm install

      # Build the project (compile TypeScript to JavaScript)
      - name: Build
        run: pnpm run build

      # Determine if we need to publish and what version/tag to use
      - name: Check version and prepare for publishing
        run: |
          # Get current version from package.json
          CURRENT_VERSION=$(jq -r '.version' package.json)
          
          # CASE 1: Manual workflow trigger with specified version
          if [ -n "${{ inputs.version }}" ]; then
            VERSION="${{ inputs.version }}"
            TAG="${{ inputs.tag }}"
            SHOULD_PUBLISH="true"
            echo "Manual trigger: Using provided version ${VERSION} with tag ${TAG}"
          
          # CASE 2: Automatic trigger from package.json changes
          else
            VERSION="${CURRENT_VERSION}"
            
            # Check if this version already exists in npm registry to avoid duplicates
            if npm view @bytebase/dbhub@${VERSION} version &> /dev/null; then
              echo "Version ${VERSION} already exists in npm registry. Skipping publish."
              SHOULD_PUBLISH="false"
            else
              echo "Version ${VERSION} is new. Proceeding with publish."
              SHOULD_PUBLISH="true"
              
              # Determine appropriate npm tag based on version format:
              # - For prerelease versions like "0.1.0-beta", use "beta" as the tag
              # - For stable versions like "1.0.0", use "latest" as the tag
              if [[ "${VERSION}" == *"-"* ]]; then
                # Extract tag from version string (e.g., "beta" from "0.1.0-beta")
                TAG=$(echo "${VERSION}" | cut -d'-' -f2 | cut -d'.' -f1)
                echo "Prerelease version detected. Using '${TAG}' npm tag."
              else
                TAG="latest"
                echo "Stable version detected. Using 'latest' npm tag."
              fi
            fi
          fi
          
          # Store values as environment variables for use in later steps
          echo "PACKAGE_VERSION=${VERSION}" >> $GITHUB_ENV
          echo "NPM_TAG=${TAG}" >> $GITHUB_ENV
          echo "SHOULD_PUBLISH=${SHOULD_PUBLISH}" >> $GITHUB_ENV
          
          # Summary message
          if [ "${SHOULD_PUBLISH}" = "true" ]; then
            echo "Publishing version: ${VERSION} with tag: ${TAG}"
          fi

          # Only modify package.json if we're going to publish
          if [ "${SHOULD_PUBLISH}" = "true" ]; then
            # Step 1: Update package name and version
            echo "Preparing package.json for publishing..."
            jq --arg version "$VERSION" '.name = "@bytebase/dbhub" | .version = $version' package.json > package.json.tmp
            mv package.json.tmp package.json
            
            # Step 2: Configure which files to include in the published package
            echo "Setting files to include in the npm package..."
            jq '.files = ["dist/**/*", "LICENSE", "README.md"]' package.json > package.json.tmp
            mv package.json.tmp package.json

            # Step 3: Add binary entry for CLI usage (makes it executable with 'npx' or after global install)
            echo "Adding bin entry for CLI usage..."
            jq '.bin = {"dbhub": "dist/index.js"}' package.json > package.json.tmp 
            mv package.json.tmp package.json
            
            echo "Package.json prepared successfully for publishing"
          else
            echo "Skipping package.json modifications as we won't be publishing"
          fi

      # Publish the package to npm if conditions are met
      - name: Publish to npm
        if: env.SHOULD_PUBLISH == 'true'
        run: |
          echo "Publishing @bytebase/dbhub@${{ env.PACKAGE_VERSION }} with tag ${{ env.NPM_TAG }}..."
          pnpm publish --no-git-checks --access public --tag ${{ env.NPM_TAG }}
          echo "✅ Successfully published to npm!"
        env:
          # Uses NPM_TOKEN from repository secrets for authentication
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
          
      # Display a message when skipping publication
      - name: Skip publishing
        if: env.SHOULD_PUBLISH != 'true'
        run: |
          echo "⏭️ Skipping publish step because:"
          echo "   - Version has not changed, or"
          echo "   - Version already exists in the npm registry"
          echo "To force publication, use the manual workflow trigger with a custom version."

```
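
The tag-derivation rule above, restated as a small TypeScript sketch so the mapping is concrete (the function is ours, not part of the repo):

```typescript
// Mirrors: TAG=$(echo "${VERSION}" | cut -d'-' -f2 | cut -d'.' -f1)
function deriveNpmTag(version: string): string {
  if (!version.includes('-')) {
    return 'latest'; // stable release, e.g. "1.0.0"
  }
  // Text after the first '-' and before the next '.'
  return version.split('-')[1]!.split('.')[0]!;
}

deriveNpmTag('1.0.0');      // "latest"
deriveNpmTag('0.1.0-beta'); // "beta"
deriveNpmTag('0.2.0-rc.1'); // "rc"
```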

--------------------------------------------------------------------------------
/src/connectors/interface.ts:
--------------------------------------------------------------------------------

```typescript
/**
 * Type definition for supported database connector types
 */
export type ConnectorType = "postgres" | "mysql" | "mariadb" | "sqlite" | "sqlserver";

/**
 * Database Connector Interface
 * This defines the contract that all database connectors must implement.
 */
export interface SQLResult {
  rows: any[];
  [key: string]: any;
}

export interface TableColumn {
  column_name: string;
  data_type: string;
  is_nullable: string;
  column_default: string | null;
}

export interface TableIndex {
  index_name: string;
  column_names: string[];
  is_unique: boolean;
  is_primary: boolean;
}

export interface StoredProcedure {
  procedure_name: string;
  procedure_type: "procedure" | "function";
  language: string;
  parameter_list: string;
  return_type?: string;
  definition?: string;
}

/**
 * Options for SQL execution
 * This interface allows passing execution-specific options to connectors
 */
export interface ExecuteOptions {
  /** Maximum number of rows to return (applied via database-native LIMIT) */
  maxRows?: number;
}

/**
 * Connection string (DSN) parser interface
 * Each connector needs to implement its own DSN parser
 */
export interface DSNParser {
  /**
   * Parse a connection string into connector-specific configuration
   * Example DSN formats:
   * - PostgreSQL: "postgres://user:password@localhost:5432/dbname?sslmode=disable"
   * - MariaDB: "mariadb://user:password@localhost:3306/dbname"
   * - MySQL: "mysql://user:password@localhost:3306/dbname"
   * - SQLite: "sqlite:///path/to/database.db" or "sqlite:///:memory:"
   */
  parse(dsn: string): Promise<any>;

  /**
   * Generate a sample DSN string for this connector type
   */
  getSampleDSN(): string;

  /**
   * Check if a DSN is valid for this connector
   */
  isValidDSN(dsn: string): boolean;
}

export interface Connector {
  /** A unique identifier for the connector */
  id: ConnectorType;

  /** Human-readable name of the connector */
  name: string;

  /** DSN parser for this connector */
  dsnParser: DSNParser;

  /** Connect to the database using DSN, with optional init script */
  connect(dsn: string, initScript?: string): Promise<void>;

  /** Close the connection */
  disconnect(): Promise<void>;

  /**
   * Get all schemas in the database
   * @returns Promise with array of schema names
   */
  getSchemas(): Promise<string[]>;

  /**
   * Get all tables in the database or in a specific schema
   * @param schema Optional schema name. If not provided, implementation should use the default schema:
   *   - PostgreSQL: 'public' schema
   *   - SQL Server: 'dbo' schema
   *   - MySQL: Current active database from connection (DATABASE())
   *   - SQLite: Main database (schema concept doesn't exist in SQLite)
   * @returns Promise with array of table names
   */
  getTables(schema?: string): Promise<string[]>;

  /**
   * Get schema information for a specific table
   * @param tableName The name of the table to get schema information for
   * @param schema Optional schema name. If not provided, implementation should use the default schema
   *   as described in getTables method.
   * @returns Promise with array of column information
   */
  getTableSchema(tableName: string, schema?: string): Promise<TableColumn[]>;

  /**
   * Check if a table exists
   * @param tableName The name of the table to check
   * @param schema Optional schema name. If not provided, implementation should use the default schema
   *   as described in getTables method.
   * @returns Promise with boolean indicating if table exists
   */
  tableExists(tableName: string, schema?: string): Promise<boolean>;

  /**
   * Get indexes for a specific table
   * @param tableName The name of the table to get indexes for
   * @param schema Optional schema name. If not provided, implementation should use the default schema
   *   as described in getTables method.
   * @returns Promise with array of index information
   */
  getTableIndexes(tableName: string, schema?: string): Promise<TableIndex[]>;

  /**
   * Get stored procedures/functions in the database or in a specific schema
   * @param schema Optional schema name. If not provided, implementation should use the default schema
   * @returns Promise with array of stored procedure/function names
   */
  getStoredProcedures(schema?: string): Promise<string[]>;

  /**
   * Get details for a specific stored procedure/function
   * @param procedureName The name of the procedure/function to get details for
   * @param schema Optional schema name. If not provided, implementation should use the default schema
   * @returns Promise with stored procedure details
   */
  getStoredProcedureDetail(procedureName: string, schema?: string): Promise<StoredProcedure>;

  /** Execute a SQL query with execution options */
  executeSQL(sql: string, options: ExecuteOptions): Promise<SQLResult>;
}

/**
 * Registry for available database connectors
 */
export class ConnectorRegistry {
  private static connectors: Map<ConnectorType, Connector> = new Map();

  /**
   * Register a new connector
   */
  static register(connector: Connector): void {
    ConnectorRegistry.connectors.set(connector.id, connector);
  }

  /**
   * Get a connector by ID
   */
  static getConnector(id: ConnectorType): Connector | null {
    return ConnectorRegistry.connectors.get(id) || null;
  }

  /**
   * Get connector for a DSN string
   * Tries to find a connector that can handle the given DSN format
   */
  static getConnectorForDSN(dsn: string): Connector | null {
    for (const connector of ConnectorRegistry.connectors.values()) {
      if (connector.dsnParser.isValidDSN(dsn)) {
        return connector;
      }
    }
    return null;
  }

  /**
   * Get all available connector IDs
   */
  static getAvailableConnectors(): ConnectorType[] {
    return Array.from(ConnectorRegistry.connectors.keys());
  }

  /**
   * Get sample DSN for a specific connector
   */
  static getSampleDSN(connectorType: ConnectorType): string | null {
    const connector = ConnectorRegistry.getConnector(connectorType);
    if (!connector) return null;
    return connector.dsnParser.getSampleDSN();
  }

  /**
   * Get all available sample DSNs
   */
  static getAllSampleDSNs(): { [key in ConnectorType]?: string } {
    const samples: { [key in ConnectorType]?: string } = {};
    for (const [id, connector] of ConnectorRegistry.connectors.entries()) {
      samples[id] = connector.dsnParser.getSampleDSN();
    }
    return samples;
  }
}

```
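
How DSN routing uses this registry in practice, as a brief sketch (the DSN is illustrative, and a matching connector must have been registered first):

```typescript
import { ConnectorRegistry } from './interface.js';

const dsn = 'mysql://user:password@localhost:3306/appdb';

// Each registered connector's DSN parser decides whether it can handle the string
const connector = ConnectorRegistry.getConnectorForDSN(dsn);
if (!connector) {
  throw new Error(`No connector registered for DSN: ${dsn}`);
}
console.error(`Routed to connector: ${connector.id}`); // e.g. "mysql"
```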

--------------------------------------------------------------------------------
/resources/images/logo-text-only.svg:
--------------------------------------------------------------------------------

```
<svg width="4233" height="987" viewBox="0 0 4233 987" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M229.9 982.9L45.1 966.4L85.8 210.7C66.7333 215.1 49.1333 220.233 33 226.1L0 39.1C38.8667 31.0333 86.9 22.6 144.1 13.7999C201.3 4.99998 260.333 0.599988 321.2 0.599988C368.133 0.599988 415.8 5.73332 464.2 16C512.6 25.5333 558.8 41.6666 602.8 64.4C647.533 86.4 687.133 116.1 721.6 153.5C756.8 190.9 784.3 236.733 804.1 291C824.633 345.267 834.9 409.433 834.9 483.5C834.9 587.633 818.033 676.733 784.3 750.8C751.3 824.133 702.9 880.233 639.1 919.1C576.033 957.967 499.767 977.4 410.3 977.4C350.9 977.4 292.233 966.4 234.3 944.4C232.833 959.067 231.367 971.9 229.9 982.9ZM313.5 185.4C303.233 185.4 292.6 185.4 281.6 185.4C274.267 321.8 267.667 438.033 261.8 534.1C255.933 629.433 250.8 709 246.4 772.8C268.4 780.133 292.6 786.733 319 792.6C346.133 797.733 374.367 800.3 403.7 800.3C463.1 800.3 510.767 785.267 546.7 755.2C583.367 725.133 609.767 685.533 625.9 636.4C642.033 586.533 650.1 532.633 650.1 474.7C650.1 428.5 643.133 389.267 629.2 357C615.267 324.733 596.933 297.967 574.2 276.7C552.2 255.433 528 238.933 501.6 227.2C475.2 214.733 449.167 205.567 423.5 199.7C398.567 193.833 376.2 190.167 356.4 188.7C336.6 186.5 322.3 185.4 313.5 185.4ZM971.163 971.9C970.429 963.833 970.063 955.033 970.063 945.5C970.063 935.967 970.063 924.967 970.063 912.5C970.063 880.967 970.796 843.567 972.263 800.3C973.729 756.3 975.929 709.733 978.863 660.6C981.796 610.733 984.729 560.867 987.663 511C990.596 460.4 993.529 412 996.463 365.8C1000.13 319.6 1003.43 278.167 1006.36 241.5C990.229 246.633 975.929 251.4 963.463 255.8L934.863 70.9999C1017.73 47.5333 1094.73 29.9333 1165.86 18.2C1237.73 6.46665 1302.63 0.599988 1360.56 0.599988C1478.63 0.599988 1568.83 22.9667 1631.16 67.7C1693.5 112.433 1724.66 177.333 1724.66 262.4C1724.66 310.8 1714.03 351.5 1692.76 384.5C1671.5 416.767 1641.43 443.533 1602.56 464.8C1697.9 513.2 1745.56 592.033 1745.56 701.3C1745.56 749.7 1736.4 790.4 1718.06 823.4C1700.46 855.667 1677.73 881.7 1649.86 901.5C1622 921.3 1592.3 935.967 1560.76 945.5C1529.96 955.767 1500.63 962.367 1472.76 965.3C1445.63 968.967 1424.36 970.8 1408.96 970.8C1370.83 970.8 1330.13 967.133 1286.86 959.8C1243.6 952.467 1201.43 941.467 1160.36 926.8C1160.36 941.467 1160.36 955.4 1160.36 968.6L971.163 971.9ZM1364.96 184.3C1338.56 184.3 1310.7 185.767 1281.36 188.7C1252.76 190.9 1224.16 194.567 1195.56 199.7C1191.16 253.233 1186.76 315.567 1182.36 386.7C1191.9 387.433 1201.43 387.8 1210.96 387.8C1221.23 387.8 1231.5 387.8 1241.76 387.8C1435.36 387.8 1532.16 348.2 1532.16 269C1532.16 242.6 1517.13 222.067 1487.06 207.4C1457.73 192 1417.03 184.3 1364.96 184.3ZM1267.06 561.6C1251.66 561.6 1235.9 561.6 1219.76 561.6C1204.36 561.6 1188.6 561.967 1172.46 562.7C1171 594.967 1169.53 627.233 1168.06 659.5C1167.33 691.767 1166.23 723.667 1164.76 755.2C1205.83 767.667 1246.16 776.833 1285.76 782.7C1325.36 787.833 1359.46 790.4 1388.06 790.4C1448.93 790.4 1492.56 783.067 1518.96 768.4C1545.36 753 1558.56 728.067 1558.56 693.6C1558.56 651.067 1533.26 618.433 1482.66 595.7C1432.8 572.967 1360.93 561.6 1267.06 561.6ZM1889.67 975.2C1883.07 934.133 1878.3 883.9 1875.37 824.5C1872.44 765.1 1870.97 700.933 1870.97 632C1870.97 560.867 1872.07 487.9 1874.27 413.1C1877.2 338.3 1880.5 266.433 1884.17 197.5C1888.57 127.833 1893.34 66.2333 1898.47 12.7L2089.87 16C2081.8 67.3333 2075.2 124.533 2070.07 187.6C2065.67 249.933 2062 314.833 2059.07 382.3C2111.14 374.967 2167.6 369.467 2228.47 365.8C2289.34 362.133 2348 360.3 2404.47 360.3C2431.6 360.3 2457.64 361.033 2482.57 362.5C2481.84 307.5 2480.74 256.167 2479.27 208.5C2477.8 160.833 2475.97 
120.5 2473.77 87.5C2471.57 54.5 2469 31.7666 2466.07 19.2999L2655.27 7.19997C2661.87 68.8 2667 142.5 2670.67 228.3C2675.07 314.1 2677.27 405.033 2677.27 501.1C2677.27 578.833 2675.8 658.033 2672.87 738.7C2669.94 818.633 2664.8 895.633 2657.47 969.7H2473.77C2476.7 946.233 2478.9 914.333 2480.37 874C2481.84 832.933 2482.94 786.733 2483.67 735.4C2484.4 683.333 2484.77 629.067 2484.77 572.6C2484.77 562.333 2484.77 551.7 2484.77 540.7C2470.84 540.7 2456.54 540.7 2441.87 540.7C2427.94 539.967 2413.64 539.6 2398.97 539.6C2341.04 539.6 2282.37 541.8 2222.97 546.2C2164.3 549.867 2108.2 555.367 2054.67 562.7C2054.67 586.167 2054.67 609.633 2054.67 633.1C2054.67 697.633 2055.04 758.133 2055.77 814.6C2057.24 871.067 2059.44 922.4 2062.37 968.6L1889.67 975.2ZM3052.37 986.2C3005.44 986.2 2966.57 977.767 2935.77 960.9C2905.7 944.767 2881.5 922.4 2863.17 893.8C2845.57 865.2 2832.37 833.3 2823.57 798.1C2815.5 762.167 2810 725.133 2807.07 687C2804.87 648.133 2803.77 610.733 2803.77 574.8C2803.77 533.733 2804.87 491.933 2807.07 449.4C2810 406.867 2812.94 360.3 2815.87 309.7L3017.17 297.6C3014.97 313 3011.67 336.1 3007.27 366.9C3002.87 397.7 2998.84 432.9 2995.17 472.5C2991.5 512.1 2989.67 553.533 2989.67 596.8C2989.67 667.933 2995.9 720.733 3008.37 755.2C3020.84 788.933 3039.54 805.8 3064.47 805.8C3114.34 805.8 3151 769.5 3174.47 696.9C3197.94 624.3 3209.67 509.533 3209.67 352.6V302L3303.17 299.8L3416.47 297.6C3406.2 366.533 3398.5 429.967 3393.37 487.9C3388.24 545.833 3384.94 594.6 3383.47 634.2C3382 673.067 3381.27 699.1 3381.27 712.3C3381.27 746.033 3384.2 770.233 3390.07 784.9C3395.94 798.833 3406.2 805.8 3420.87 805.8C3428.2 805.8 3436.27 804.333 3445.07 801.4C3453.87 798.467 3464.14 793.333 3475.87 786L3468.17 975.2C3440.3 979.6 3416.1 981.8 3395.57 981.8C3360.37 981.8 3331.4 975.2 3308.67 962C3286.67 948.8 3269.07 931.2 3255.87 909.2C3233.14 933.4 3204.9 952.1 3171.17 965.3C3138.17 979.233 3098.57 986.2 3052.37 986.2ZM3576.47 975.2C3576.47 915.067 3577.57 850.533 3579.77 781.6C3581.97 712.667 3584.9 642.633 3588.57 571.5C3592.24 500.367 3595.9 430.7 3599.57 362.5C3603.97 294.3 3608.37 230.133 3612.77 170C3617.17 109.133 3620.84 55.2333 3623.77 8.29995L3807.47 10.4999C3803.07 52.3 3798.67 99.2333 3794.27 151.3C3790.6 203.367 3786.94 258.733 3783.27 317.4C3804.54 308.6 3827.27 302 3851.47 297.6C3876.4 292.467 3903.17 289.9 3931.77 289.9C3970.64 289.9 4007.67 296.5 4042.87 309.7C4078.8 322.167 4111.07 341.6 4139.67 368C4168.27 394.4 4190.64 428.5 4206.77 470.3C4223.64 512.1 4232.07 562.333 4232.07 621C4232.07 683.333 4223.27 737.233 4205.67 782.7C4188.07 828.167 4164.24 865.567 4134.17 894.9C4104.1 924.233 4070 945.867 4031.87 959.8C3994.47 974.467 3955.97 981.8 3916.37 981.8C3886.3 981.8 3858.07 977.767 3831.67 969.7C3806 961.633 3782.17 951 3760.17 937.8C3760.17 949.533 3760.17 961.267 3760.17 973L3576.47 975.2ZM3906.47 474.7C3851.47 474.7 3806.74 490.833 3772.27 523.1C3769.34 590.567 3766.77 656.567 3764.57 721.1C3800.5 768.767 3850.74 792.6 3915.27 792.6C3936.54 792.6 3957.8 787.833 3979.07 778.3C4001.07 768.033 4019.4 750.8 4034.07 726.6C4048.74 702.4 4056.07 668.667 4056.07 625.4C4056.07 576.267 4043.24 538.867 4017.57 513.2C3992.64 487.533 3955.6 474.7 3906.47 474.7Z" fill="black"/>
</svg>

```

--------------------------------------------------------------------------------
/src/server.ts:
--------------------------------------------------------------------------------

```typescript
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
import { StreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/streamableHttp.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import express from "express";
import path from "path";
import { readFileSync } from "fs";
import { fileURLToPath } from "url";

import { ConnectorManager } from "./connectors/manager.js";
import { ConnectorRegistry } from "./connectors/interface.js";
import { resolveDSN, resolveTransport, resolvePort, isDemoMode, redactDSN, isReadOnlyMode, resolveId } from "./config/env.js";
import { getSqliteInMemorySetupSql } from "./config/demo-loader.js";
import { registerResources } from "./resources/index.js";
import { registerTools } from "./tools/index.js";
import { registerPrompts } from "./prompts/index.js";

// Create __dirname equivalent for ES modules
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Load package.json to get version
const packageJsonPath = path.join(__dirname, "..", "package.json");
const packageJson = JSON.parse(readFileSync(packageJsonPath, "utf8"));

// Server info
export const SERVER_NAME = "DBHub MCP Server";
export const SERVER_VERSION = packageJson.version;

/**
 * Generate ASCII art banner with version information
 */
export function generateBanner(version: string, modes: string[] = []): string {
  // Create a mode string that includes all active modes
  const modeText = modes.length > 0 ? ` [${modes.join(' | ')}]` : '';

  return `
 _____  ____  _   _       _     
|  __ \\|  _ \\| | | |     | |    
| |  | | |_) | |_| |_   _| |__  
| |  | |  _ <|  _  | | | | '_ \\ 
| |__| | |_) | | | | |_| | |_) |
|_____/|____/|_| |_|\\__,_|_.__/ 
                                
v${version}${modeText} - Universal Database MCP Server
`;
}

/**
 * Initialize and start the DBHub server
 */
export async function main(): Promise<void> {
  try {
    // Resolve ID from command line args (for Cursor multi-instance support)
    const idData = resolveId();
    const id = idData?.id;

    // Resolve DSN from command line args, environment variables, or .env files
    const dsnData = resolveDSN();

    if (!dsnData) {
      const samples = ConnectorRegistry.getAllSampleDSNs();
      const sampleFormats = Object.entries(samples)
        .map(([id, dsn]) => `  - ${id}: ${dsn}`)
        .join("\n");

      console.error(`
ERROR: Database connection string (DSN) is required.
Please provide the DSN in one of these ways (in order of priority):

1. Use demo mode: --demo (uses in-memory SQLite with sample employee database)
2. Command line argument: --dsn="your-connection-string"
3. Environment variable: export DSN="your-connection-string"
4. .env file: DSN=your-connection-string

Example formats:
${sampleFormats}

See documentation for more details on configuring database connections.
`);
      process.exit(1);
    }

    // Create MCP server factory function (used by both STDIO and HTTP transports)
    const createServer = () => {
      const server = new McpServer({
        name: SERVER_NAME,
        version: SERVER_VERSION,
      });

      // Register resources, tools, and prompts
      registerResources(server);
      registerTools(server, id);
      registerPrompts(server);

      return server;
    };

    // Create connector manager and connect to database
    const connectorManager = new ConnectorManager();
    console.error(`Connecting with DSN: ${redactDSN(dsnData.dsn)}`);
    console.error(`DSN source: ${dsnData.source}`);
    if (idData) {
      console.error(`ID: ${idData.id} (from ${idData.source})`);
    }

    // If in demo mode, load the employee database
    if (dsnData.isDemo) {
      const initScript = getSqliteInMemorySetupSql();
      await connectorManager.connectWithDSN(dsnData.dsn, initScript);
    } else {
      await connectorManager.connectWithDSN(dsnData.dsn);
    }

    // Resolve transport type
    const transportData = resolveTransport();
    console.error(`Using transport: ${transportData.type}`);
    console.error(`Transport source: ${transportData.source}`);

    // Print ASCII art banner with version and slogan
    const readonly = isReadOnlyMode();
    
    // Collect active modes
    const activeModes: string[] = [];
    const modeDescriptions: string[] = [];
    
    if (dsnData.isDemo) {
      activeModes.push("DEMO");
      modeDescriptions.push("using sample employee database");
    }
    
    if (readonly) {
      activeModes.push("READ-ONLY");
      modeDescriptions.push("only read only queries allowed");
    }
    
    // Output mode information
    if (activeModes.length > 0) {
      console.error(`Running in ${activeModes.join(' and ')} mode - ${modeDescriptions.join(', ')}`);
    }
    
    console.error(generateBanner(SERVER_VERSION, activeModes));

    // Set up transport based on type
    if (transportData.type === "http") {
      // Set up Express server for Streamable HTTP transport
      const app = express();

      // Enable JSON parsing
      app.use(express.json());

      // Handle CORS and security headers
      app.use((req, res, next) => {
        // Validate Origin header to prevent DNS rebinding attacks
        const origin = req.headers.origin;
        if (origin && !origin.startsWith('http://localhost') && !origin.startsWith('https://localhost')) {
          return res.status(403).json({ error: 'Forbidden origin' });
        }
        
        res.header('Access-Control-Allow-Origin', origin || 'http://localhost');
        res.header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS');
        res.header('Access-Control-Allow-Headers', 'Content-Type, Mcp-Session-Id');
        res.header('Access-Control-Allow-Credentials', 'true');
        
        if (req.method === 'OPTIONS') {
          return res.sendStatus(200);
        }
        next();
      });

      // Health check endpoint
      app.get("/healthz", (req, res) => {
        res.status(200).send("OK");
      });

      // Main endpoint for streamable HTTP transport
      app.post("/message", async (req, res) => {
        try {
          // In stateless mode, create a new instance of transport and server for each request
          // to ensure complete isolation. A single instance would cause request ID collisions
          // when multiple clients connect concurrently.
          const transport = new StreamableHTTPServerTransport({
            sessionIdGenerator: undefined, // Disable session management for stateless mode
            enableJsonResponse: false // Use SSE streaming
          });
          const server = createServer();

          await server.connect(transport);
          await transport.handleRequest(req, res, req.body);
        } catch (error) {
          console.error("Error handling request:", error);
          if (!res.headersSent) {
            res.status(500).json({ error: 'Internal server error' });
          }
        }
      });


      // Start the HTTP server
      const portData = resolvePort();
      const port = portData.port;
      console.error(`Port source: ${portData.source}`);
      app.listen(port, '0.0.0.0', () => {
        console.error(`DBHub server listening at http://0.0.0.0:${port}`);
        console.error(`Connect to MCP server at http://0.0.0.0:${port}/message`);
      });
    } else {
      // Set up STDIO transport
      const server = createServer();
      const transport = new StdioServerTransport();
      console.error("Starting with STDIO transport");
      await server.connect(transport);

      // Listen for SIGINT to gracefully shut down
      process.on("SIGINT", async () => {
        console.error("Shutting down...");
        await transport.close();
        process.exit(0);
      });
    }
  } catch (err) {
    console.error("Fatal error:", err);
    process.exit(1);
  }
}

```
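
With the HTTP transport, clients speak JSON-RPC to the `/message` endpoint. A hedged smoke-test sketch using `fetch`; the port is illustrative, and the exact `initialize` fields depend on the MCP SDK version in use:

```typescript
// Assumes DBHub was started with --transport=http and is listening on port 8080
const response = await fetch('http://localhost:8080/message', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    // Responses stream as SSE, so advertise both accepted content types
    'Accept': 'application/json, text/event-stream',
  },
  body: JSON.stringify({
    jsonrpc: '2.0',
    id: 1,
    method: 'initialize',
    params: {
      protocolVersion: '2024-11-05',
      capabilities: {},
      clientInfo: { name: 'smoke-test', version: '0.0.1' },
    },
  }),
});
console.error(response.status, response.headers.get('content-type'));
```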

--------------------------------------------------------------------------------
/src/prompts/sql-generator.ts:
--------------------------------------------------------------------------------

```typescript
import { z } from "zod";
import { ConnectorManager } from "../connectors/manager.js";
import {
  formatPromptSuccessResponse,
  formatPromptErrorResponse,
} from "../utils/response-formatter.js";
import { SQLDialect } from "../types/sql.js";

// Schema for SQL generator prompt
export const sqlGeneratorSchema = {
  description: z.string().describe("Natural language description of the SQL query to generate"),
  schema: z.string().optional().describe("Optional database schema to use"),
};

/**
 * SQL Generator Prompt Handler
 * Generates SQL queries from natural language descriptions
 */
export async function sqlGeneratorPromptHandler(
  {
    description,
    schema,
  }: {
    description: string;
    schema?: string;
  },
  _extra: any
) {
  try {
    // Get current connector to determine dialect
    const connector = ConnectorManager.getCurrentConnector();

    // Determine SQL dialect from connector automatically
    let sqlDialect: SQLDialect;
    switch (connector.id) {
      case "postgres":
        sqlDialect = "postgres";
        break;
      case "sqlite":
        sqlDialect = "sqlite";
        break;
      case "mysql":
        sqlDialect = "mysql";
        break;
      case "sqlserver":
        sqlDialect = "mssql";
        break;
      default:
        sqlDialect = "ansi"; // Default to standard SQL if connector type is unknown
    }

    // Verify schema exists if provided
    if (schema) {
      const availableSchemas = await connector.getSchemas();
      if (!availableSchemas.includes(schema)) {
        return formatPromptErrorResponse(
          `Schema '${schema}' does not exist or cannot be accessed. Available schemas: ${availableSchemas.join(", ")}`,
          "SCHEMA_NOT_FOUND"
        );
      }
    }

    // Get schema information to help with table/column references
    try {
      const tables = await connector.getTables(schema);

      if (tables.length === 0) {
        const schemaInfo = schema ? `in schema '${schema}'` : "in the database";
        return formatPromptErrorResponse(
          `No tables found ${schemaInfo}. Please check your database connection or schema name.`,
          "NO_TABLES_FOUND"
        );
      }

      const tableSchemas = await Promise.all(
        tables.map(async (table) => {
          try {
            const columns = await connector.getTableSchema(table, schema);
            return {
              table,
              columns: columns.map((col) => ({
                name: col.column_name,
                type: col.data_type,
              })),
            };
          } catch (error) {
            // Skip tables we can't access
            return null;
          }
        })
      );

      // Filter out null entries (tables we couldn't access)
      const accessibleSchemas = tableSchemas.filter((schema) => schema !== null);

      if (accessibleSchemas.length === 0) {
        return formatPromptErrorResponse(
          `No accessible tables found. You may not have sufficient permissions to access table schemas.`,
          "NO_ACCESSIBLE_TABLES"
        );
      }

      // Generate a schema description for context
      const schemaContext =
        accessibleSchemas.length > 0
          ? `Available tables and their columns:\n${accessibleSchemas
              .map(
                (schema) =>
                  `- ${schema!.table}: ${schema!.columns
                    .map((col) => `${col.name} (${col.type})`)
                    .join(", ")}`
              )
              .join("\n")}`
          : "No schema information available.";

      // Example queries for the given dialect to use as reference
      const dialectExamples: Record<SQLDialect, string[]> = {
        postgres: [
          "SELECT * FROM users WHERE created_at > NOW() - INTERVAL '1 day'",
          "SELECT u.name, COUNT(o.id) FROM users u JOIN orders o ON u.id = o.user_id GROUP BY u.name HAVING COUNT(o.id) > 5",
          "SELECT product_name, price FROM products WHERE price > (SELECT AVG(price) FROM products)",
        ],
        sqlite: [
          "SELECT * FROM users WHERE created_at > datetime('now', '-1 day')",
          "SELECT u.name, COUNT(o.id) FROM users u JOIN orders o ON u.id = o.user_id GROUP BY u.name HAVING COUNT(o.id) > 5",
          "SELECT product_name, price FROM products WHERE price > (SELECT AVG(price) FROM products)",
        ],
        mysql: [
          "SELECT * FROM users WHERE created_at > NOW() - INTERVAL 1 DAY",
          "SELECT u.name, COUNT(o.id) FROM users u JOIN orders o ON u.id = o.user_id GROUP BY u.name HAVING COUNT(o.id) > 5",
          "SELECT product_name, price FROM products WHERE price > (SELECT AVG(price) FROM products)",
        ],
        mssql: [
          "SELECT * FROM users WHERE created_at > DATEADD(day, -1, GETDATE())",
          "SELECT u.name, COUNT(o.id) FROM users u JOIN orders o ON u.id = o.user_id GROUP BY u.name HAVING COUNT(o.id) > 5",
          "SELECT product_name, price FROM products WHERE price > (SELECT AVG(price) FROM products)",
        ],
        ansi: [
          "SELECT * FROM users WHERE created_at > CURRENT_TIMESTAMP - INTERVAL '1' DAY",
          "SELECT u.name, COUNT(o.id) FROM users u JOIN orders o ON u.id = o.user_id GROUP BY u.name HAVING COUNT(o.id) > 5",
          "SELECT product_name, price FROM products WHERE price > (SELECT AVG(price) FROM products)",
        ],
      };

      // Build a prompt that could be sent to a language model to generate the SQL.
      // In a real implementation this prompt would be passed to an AI model; in this
      // demo it is constructed for illustration and intentionally left unused.
      const schemaInfo = schema ? `in schema '${schema}'` : "across all schemas";
      const prompt = `
Generate a ${sqlDialect} SQL query based on this description: "${description}"

${schemaContext}
Working ${schemaInfo}

The query should:
1. Be written for ${sqlDialect} dialect
2. Use only the available tables and columns
3. Prioritize readability
4. Include appropriate comments
5. Be compatible with ${sqlDialect} syntax
`;

      // In a real implementation, this would be the result from an AI model call
      // For this demo, we'll generate a simple SQL query based on the description
      let generatedSQL: string;

      // Very simple pattern matching for demo purposes
      // In a real implementation, this would use a language model
      if (description.toLowerCase().includes("count")) {
        const schemaPrefix = schema ? `-- Schema: ${schema}\n` : "";
        generatedSQL = `${schemaPrefix}-- Count query generated from: "${description}"
SELECT COUNT(*) AS count 
FROM ${accessibleSchemas.length > 0 ? accessibleSchemas[0]!.table : "table_name"};`;
      } else if (
        description.toLowerCase().includes("average") ||
        description.toLowerCase().includes("avg")
      ) {
        const table = accessibleSchemas.length > 0 ? accessibleSchemas[0]!.table : "table_name";
        const numericColumn =
          accessibleSchemas.length > 0
            ? accessibleSchemas[0]!.columns.find((col) =>
                ["int", "numeric", "decimal", "float", "real", "double"].some((t) =>
                  col.type.includes(t)
                )
              )?.name || "numeric_column"
            : "numeric_column";

        const schemaPrefix = schema ? `-- Schema: ${schema}\n` : "";
        generatedSQL = `${schemaPrefix}-- Average query generated from: "${description}"
SELECT AVG(${numericColumn}) AS average
FROM ${table};`;
      } else if (description.toLowerCase().includes("join")) {
        const schemaPrefix = schema ? `-- Schema: ${schema}\n` : "";
        generatedSQL = `${schemaPrefix}-- Join query generated from: "${description}"
SELECT t1.*, t2.*
FROM ${accessibleSchemas.length > 0 ? accessibleSchemas[0]?.table : "table1"} t1
JOIN ${accessibleSchemas.length > 1 ? accessibleSchemas[1]?.table : "table2"} t2
  ON t1.id = t2.${accessibleSchemas.length > 0 ? accessibleSchemas[0]?.table : "table1"}_id;`;
      } else {
        // Default to a simple SELECT
        const table = accessibleSchemas.length > 0 ? accessibleSchemas[0]!.table : "table_name";
        const schemaPrefix = schema ? `-- Schema: ${schema}\n` : "";
        generatedSQL = `${schemaPrefix}-- Query generated from: "${description}"
SELECT * 
FROM ${table}
LIMIT 10;`;
      }

      // Return the generated SQL with explanations
      return formatPromptSuccessResponse(
        generatedSQL,
        // Add references to example queries that could help
        dialectExamples[sqlDialect]
      );
    } catch (error) {
      return formatPromptErrorResponse(
        `Error retrieving schema information for SQL generation: ${(error as Error).message}`,
        "SCHEMA_RETRIEVAL_ERROR"
      );
    }
  } catch (error) {
    return formatPromptErrorResponse(
      `Failed to generate SQL: ${(error as Error).message}`,
      "SQL_GENERATION_ERROR"
    );
  }
}

```
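
A handler like this is typically registered with the MCP server together with its argument schema (the `description`/`schema` zod shape at the top of the file). Below is a minimal sketch of that wiring, assuming the `McpServer.prompt()` registration API from `@modelcontextprotocol/sdk`; the names `sqlGeneratorArgs` and `registerPrompts` are illustrative, not copied from `src/prompts/index.ts`:

```typescript
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
import { z } from "zod";
import { sqlGeneratorPromptHandler } from "./sql-generator.js";

// Illustrative re-declaration of the argument shape shown above
const sqlGeneratorArgs = {
  description: z.string().describe("Natural language description of the SQL query to generate"),
  schema: z.string().optional().describe("Optional database schema to use"),
};

export function registerPrompts(server: McpServer): void {
  // prompt(name, argsSchema, handler) exposes the prompt to MCP clients,
  // which invoke it with the declared arguments
  server.prompt("sql-generator", sqlGeneratorArgs, sqlGeneratorPromptHandler);
}
```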

--------------------------------------------------------------------------------
/src/connectors/__tests__/shared/integration-test-base.ts:
--------------------------------------------------------------------------------

```typescript
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import type { Connector } from '../../interface.js';

export interface DatabaseTestConfig {
  expectedSchemas: string[];
  expectedTables: string[];
  expectedTestSchemaTable?: string;
  testSchema?: string;
  supportsStoredProcedures?: boolean;
  expectedStoredProcedures?: string[];
}

export interface TestContainer {
  getConnectionUri(): string;
  stop(): Promise<void>;
}

/**
 * Base class for database integration tests that provides common test patterns
 */
export abstract class IntegrationTestBase<TContainer extends TestContainer> {
  protected container!: TContainer;
  public connector!: Connector;
  protected connectionString!: string;
  public config: DatabaseTestConfig;

  constructor(config: DatabaseTestConfig) {
    this.config = config;
  }

  /**
   * Abstract methods that must be implemented by specific database test classes
   */
  abstract createContainer(): Promise<TContainer>;
  abstract createConnector(): Connector;
  abstract setupTestData(connector: Connector): Promise<void>;

  /**
   * Setup method to be called in beforeAll
   */
  async setup(): Promise<void> {
    console.log('Starting database container...');
    
    this.container = await this.createContainer();
    console.log('Container started, getting connection details...');
    
    this.connectionString = this.container.getConnectionUri();
    console.log('Connection URI:', this.connectionString);
    
    this.connector = this.createConnector();
    await this.connector.connect(this.connectionString);
    console.log('Connected to database');
    
    await this.setupTestData(this.connector);
    console.log('Test data setup complete');
  }

  /**
   * Cleanup method to be called in afterAll
   */
  async cleanup(): Promise<void> {
    if (this.connector) {
      await this.connector.disconnect();
    }
    if (this.container) {
      await this.container.stop();
    }
  }

  /**
   * Common test suite that can be reused across different database types
   */
  createTestSuite(suiteName: string): void {
    describe(suiteName, () => {
      beforeAll(async () => {
        await this.setup();
      }, 120000);

      afterAll(async () => {
        await this.cleanup();
      });

      this.createConnectionTests();
      this.createSchemaTests();
      this.createTableTests();
      this.createSQLExecutionTests();
      
      if (this.config.supportsStoredProcedures) {
        this.createStoredProcedureTests();
      }
      
      this.createErrorHandlingTests();
    });
  }

  createConnectionTests(): void {
    describe('Connection', () => {
      it('should connect successfully to database container', async () => {
        expect(this.connector).toBeDefined();
      });

      it('should parse DSN correctly', async () => {
        const sampleDSN = this.connector.dsnParser.getSampleDSN();
        expect(sampleDSN).toContain('://');
        expect(this.connector.dsnParser.isValidDSN(sampleDSN)).toBe(true);
      });

      it('should validate DSN format', () => {
        const sampleDSN = this.connector.dsnParser.getSampleDSN();
        expect(this.connector.dsnParser.isValidDSN(sampleDSN)).toBe(true);
        expect(this.connector.dsnParser.isValidDSN('invalid-dsn')).toBe(false);
      });
    });
  }

  createSchemaTests(): void {
    describe('Schema Operations', () => {
      it('should list schemas', async () => {
        const schemas = await this.connector.getSchemas();
        this.config.expectedSchemas.forEach(expectedSchema => {
          expect(schemas).toContain(expectedSchema);
        });
      });

      it('should list tables in default schema', async () => {
        const tables = await this.connector.getTables();
        this.config.expectedTables.forEach(expectedTable => {
          expect(tables).toContain(expectedTable);
        });
      });

      if (this.config.testSchema && this.config.expectedTestSchemaTable) {
        it('should list tables in specific schema', async () => {
          const tables = await this.connector.getTables(this.config.testSchema);
          expect(tables).toContain(this.config.expectedTestSchemaTable);
        });
      }

      it('should check if table exists', async () => {
        const firstTable = this.config.expectedTables[0];
        expect(await this.connector.tableExists(firstTable)).toBe(true);
        expect(await this.connector.tableExists('nonexistent_table')).toBe(false);
        
        if (this.config.testSchema && this.config.expectedTestSchemaTable) {
          expect(await this.connector.tableExists(this.config.expectedTestSchemaTable, this.config.testSchema)).toBe(true);
          expect(await this.connector.tableExists(this.config.expectedTestSchemaTable, 'public')).toBe(false);
        }
      });
    });
  }

  createTableTests(): void {
    describe('Table Schema Operations', () => {
      it('should get table schema for users table', async () => {
        const schema = await this.connector.getTableSchema('users');
        expect(schema.length).toBeGreaterThan(0);
        
        const idColumn = schema.find(col => col.column_name === 'id');
        expect(idColumn).toBeDefined();
        expect(idColumn?.is_nullable).toBe('NO');
        
        const nameColumn = schema.find(col => col.column_name === 'name');
        expect(nameColumn).toBeDefined();
      });

      it('should get table indexes', async () => {
        const indexes = await this.connector.getTableIndexes('users');
        expect(indexes.length).toBeGreaterThan(0);
        
        const primaryIndex = indexes.find(idx => idx.is_primary);
        expect(primaryIndex).toBeDefined();
        expect(primaryIndex?.column_names).toContain('id');
        
        // Some databases automatically create unique indexes, others handle unique constraints differently
        // We'll just verify we got at least the primary key index
        expect(indexes.length).toBeGreaterThanOrEqual(1);
      });
    });
  }

  createSQLExecutionTests(): void {
    describe('SQL Execution', () => {
      it('should execute simple SELECT query', async () => {
        const result = await this.connector.executeSQL('SELECT COUNT(*) as count FROM users', {});
        expect(result.rows).toHaveLength(1);
        expect(Number(result.rows[0].count)).toBeGreaterThanOrEqual(3);
      });

      it('should execute INSERT and SELECT', async () => {
        const insertResult = await this.connector.executeSQL(
          "INSERT INTO users (name, email, age) VALUES ('Test User', '[email protected]', 25)", {}
        );
        expect(insertResult).toBeDefined();
        
        const selectResult = await this.connector.executeSQL(
          "SELECT * FROM users WHERE email = '[email protected]'", {}
        );
        expect(selectResult.rows).toHaveLength(1);
        expect(selectResult.rows[0].name).toBe('Test User');
        expect(Number(selectResult.rows[0].age)).toBe(25);
      });

      it('should handle complex queries with joins', async () => {
        const result = await this.connector.executeSQL(`
          SELECT u.name, COUNT(o.id) as order_count
          FROM users u
          LEFT JOIN orders o ON u.id = o.user_id
          GROUP BY u.id, u.name
          HAVING COUNT(o.id) > 0
          ORDER BY order_count DESC
        `, {});
        
        expect(result.rows.length).toBeGreaterThan(0);
        expect(result.rows[0]).toHaveProperty('name');
        expect(result.rows[0]).toHaveProperty('order_count');
      });
    });
  }

  createStoredProcedureTests(): void {
    describe('Stored Procedures', () => {
      it('should list stored procedures', async () => {
        const procedures = await this.connector.getStoredProcedures();
        if (this.config.expectedStoredProcedures) {
          this.config.expectedStoredProcedures.forEach(expectedProc => {
            expect(procedures).toContain(expectedProc);
          });
        }
      });

      if (this.config.expectedStoredProcedures?.length) {
        it('should get stored procedure details', async () => {
          const procedureName = this.config.expectedStoredProcedures[0];
          const procedure = await this.connector.getStoredProcedureDetail(procedureName);
          expect(procedure.procedure_name).toBe(procedureName);
          expect(procedure.procedure_type).toMatch(/function|procedure/);
        });
      }
    });
  }

  createErrorHandlingTests(): void {
    describe('Error Handling', () => {
      it('should handle invalid SQL gracefully', async () => {
        await expect(
          this.connector.executeSQL('SELECT * FROM nonexistent_table', {})
        ).rejects.toThrow();
      });

      it('should handle connection errors', async () => {
        const newConnector = this.createConnector();
        await expect(
          newConnector.executeSQL('SELECT 1', {})
        ).rejects.toThrow(/Not connected to.*database/);
      });

      it('should handle invalid table schema requests', async () => {
        const result = await this.connector.getTableSchema('nonexistent_table');
        expect(Array.isArray(result)).toBe(true);
        expect(result.length).toBe(0);
      });
    });
  }
}
```
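
To show how this base class is consumed, here is a minimal sketch of a concrete suite, loosely modeled on the PostgreSQL container setup in the SSH test below. The class name, config values, and seed SQL are illustrative assumptions chosen to satisfy the shared suite's expectations (a `users` table with `id`/`name`/`email`/`age`, an `orders` table referencing it, and at least three users), not copied from the repository's actual test files:

```typescript
import { PostgreSqlContainer, StartedPostgreSqlContainer } from '@testcontainers/postgresql';
import { PostgresConnector } from '../postgres/index.js';
import type { Connector } from '../interface.js';
import { IntegrationTestBase } from './shared/integration-test-base.js';

class PostgresExampleTests extends IntegrationTestBase<StartedPostgreSqlContainer> {
  constructor() {
    super({
      expectedSchemas: ['public'],
      expectedTables: ['users', 'orders'],
    });
  }

  async createContainer(): Promise<StartedPostgreSqlContainer> {
    // StartedPostgreSqlContainer satisfies TestContainer (getConnectionUri/stop)
    return new PostgreSqlContainer('postgres:15-alpine').start();
  }

  createConnector(): Connector {
    return new PostgresConnector();
  }

  async setupTestData(connector: Connector): Promise<void> {
    // Seed the minimum data the shared assertions rely on
    await connector.executeSQL(
      `CREATE TABLE users (
         id SERIAL PRIMARY KEY,
         name TEXT NOT NULL,
         email TEXT UNIQUE,
         age INT
       );
       CREATE TABLE orders (
         id SERIAL PRIMARY KEY,
         user_id INT REFERENCES users(id)
       );
       INSERT INTO users (name, email, age) VALUES
         ('Alice', 'alice@example.com', 30),
         ('Bob', 'bob@example.com', 40),
         ('Carol', 'carol@example.com', 50);
       INSERT INTO orders (user_id) VALUES (1), (1), (2);`,
      {}
    );
  }
}

// One call wires up the whole shared describe/beforeAll/afterAll suite
new PostgresExampleTests().createTestSuite('PostgreSQL Integration Tests (example)');
```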

--------------------------------------------------------------------------------
/src/connectors/__tests__/postgres-ssh.integration.test.ts:
--------------------------------------------------------------------------------

```typescript
import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';
import { PostgreSqlContainer, StartedPostgreSqlContainer } from '@testcontainers/postgresql';
import { PostgresConnector } from '../postgres/index.js';
import { ConnectorManager } from '../manager.js';
import { ConnectorRegistry } from '../interface.js';
import { SSHTunnel } from '../../utils/ssh-tunnel.js';
import type { SSHTunnelConfig } from '../../types/ssh.js';
import * as sshConfigParser from '../../utils/ssh-config-parser.js';

describe('PostgreSQL SSH Tunnel Simple Integration Tests', () => {
  let postgresContainer: StartedPostgreSqlContainer;

  beforeAll(async () => {
    // Register PostgreSQL connector
    ConnectorRegistry.register(new PostgresConnector());
    
    // Start PostgreSQL container
    postgresContainer = await new PostgreSqlContainer('postgres:15-alpine')
      .withDatabase('testdb')
      .withUsername('testuser')
      .withPassword('testpass')
      .start();
  }, 60000); // 1 minute timeout for container startup

  afterAll(async () => {
    await postgresContainer?.stop();
  });

  describe('SSH Tunnel Basic Functionality', () => {
    it('should report disconnected state before a tunnel is established', async () => {
      // Instantiate a tunnel without establishing it. This exercises the tunnel's
      // state reporting without needing a real SSH server.
      const tunnel = new SSHTunnel();

      // A freshly created tunnel should report itself as disconnected
      expect(tunnel.getIsConnected()).toBe(false);
      expect(tunnel.getTunnelInfo()).toBeNull();
    });

    it('should derive default database ports from DSN protocols', async () => {
      const manager = new ConnectorManager();
      
      // Test DSN parsing with getDefaultPort
      const testCases = [
        { dsn: 'postgres://user:pass@host:5432/db', expectedPort: 5432 },
        { dsn: 'mysql://user:pass@host:3306/db', expectedPort: 3306 },
        { dsn: 'mariadb://user:pass@host:3306/db', expectedPort: 3306 },
        { dsn: 'sqlserver://user:pass@host:1433/db', expectedPort: 1433 },
      ];
      
      for (const testCase of testCases) {
        // Access private method through reflection for testing
        const port = (manager as any).getDefaultPort(testCase.dsn);
        expect(port).toBe(testCase.expectedPort);
      }
    });

    it('should handle connection without SSH tunnel', async () => {
      const manager = new ConnectorManager();
      
      // Make sure no SSH config is set
      delete process.env.SSH_HOST;
      
      const dsn = postgresContainer.getConnectionUri();
      
      await manager.connectWithDSN(dsn);
      
      // Test that connection works
      const connector = manager.getConnector();
      const result = await connector.executeSQL('SELECT 1 as test', {});
      expect(result.rows).toHaveLength(1);
      expect(result.rows[0].test).toBe(1);
      
      await manager.disconnect();
    });

    it('should fail gracefully when SSH config is invalid', async () => {
      const manager = new ConnectorManager();
      
      // Set invalid SSH config (missing required fields)
      process.env.SSH_HOST = 'example.com';
      // Missing SSH_USER
      
      try {
        const dsn = postgresContainer.getConnectionUri();
        await expect(manager.connectWithDSN(dsn)).rejects.toThrow(/SSH tunnel configuration requires/);
      } finally {
        delete process.env.SSH_HOST;
      }
    });

    it('should validate SSH authentication method', async () => {
      const manager = new ConnectorManager();
      
      // Set SSH config without authentication method
      process.env.SSH_HOST = 'example.com';
      process.env.SSH_USER = 'testuser';
      // Missing both SSH_PASSWORD and SSH_KEY
      
      try {
        const dsn = postgresContainer.getConnectionUri();
        await expect(manager.connectWithDSN(dsn)).rejects.toThrow(/SSH tunnel configuration requires either/);
      } finally {
        delete process.env.SSH_HOST;
        delete process.env.SSH_USER;
      }
    });

    it('should handle SSH config file resolution', async () => {
      const manager = new ConnectorManager();
      
      // Mock the SSH config parser functions
      const mockParseSSHConfig = vi.spyOn(sshConfigParser, 'parseSSHConfig');
      const mockLooksLikeSSHAlias = vi.spyOn(sshConfigParser, 'looksLikeSSHAlias');
      
      // Spy on the SSH tunnel establish method to verify the config values
      const mockSSHTunnelEstablish = vi.spyOn(SSHTunnel.prototype, 'establish');
      
      try {
        // Configure mocks to simulate SSH config file lookup with specific values
        mockLooksLikeSSHAlias.mockReturnValue(true);
        mockParseSSHConfig.mockReturnValue({
          host: 'bastion.example.com',
          username: 'sshuser',
          port: 2222,
          privateKey: '/home/user/.ssh/id_rsa'
        });
        
        // Mock SSH tunnel establish to capture the config and prevent actual connection
        mockSSHTunnelEstablish.mockRejectedValue(new Error('SSH connection failed (expected in test)'));
        
        // Set SSH host alias (would normally come from command line)
        process.env.SSH_HOST = 'mybastion';
        
        const dsn = postgresContainer.getConnectionUri();
        
        // This should fail during SSH connection (expected), but we can verify the config parsing
        await expect(manager.connectWithDSN(dsn)).rejects.toThrow();
        
        // Verify that SSH config parsing functions were called correctly
        expect(mockLooksLikeSSHAlias).toHaveBeenCalledWith('mybastion');
        expect(mockParseSSHConfig).toHaveBeenCalledWith('mybastion', expect.stringContaining('.ssh/config'));
        
        // Verify that SSH tunnel was attempted with the correct config values from SSH config
        expect(mockSSHTunnelEstablish).toHaveBeenCalledTimes(1);
        const sshTunnelCall = mockSSHTunnelEstablish.mock.calls[0];
        const [sshConfig, tunnelOptions] = sshTunnelCall;
        
        // sshConfig should contain the values from our mocked SSH config file;
        // tunnelOptions should contain database connection details from the container DSN
        
        // Verify SSH config values were properly resolved from the SSH config file
        expect(sshConfig).toMatchObject({
          host: 'bastion.example.com',    // Should use HostName from SSH config
          username: 'sshuser',           // Should use User from SSH config  
          port: 2222,                    // Should use Port from SSH config
          privateKey: '/home/user/.ssh/id_rsa' // Should use IdentityFile from SSH config
        });
        
        // Verify tunnel options are correctly set up for the database connection
        expect(tunnelOptions).toMatchObject({
          targetHost: expect.any(String), // Database host from DSN
          targetPort: expect.any(Number)  // Database port from DSN
        });
        
        // The localPort might be undefined for dynamic allocation, so check separately if it exists
        if (tunnelOptions.localPort !== undefined) {
          expect(typeof tunnelOptions.localPort).toBe('number');
        }
        
        // Verify that the target database details from the DSN are preserved
        const originalDsnUrl = new URL(dsn);
        expect(tunnelOptions.targetHost).toBe(originalDsnUrl.hostname);
        expect(tunnelOptions.targetPort).toBe(parseInt(originalDsnUrl.port));
        
      } finally {
        // Clean up
        delete process.env.SSH_HOST;
        mockParseSSHConfig.mockRestore();
        mockLooksLikeSSHAlias.mockRestore();
        mockSSHTunnelEstablish.mockRestore();
      }
    });

    it('should skip SSH config lookup for direct hostnames', async () => {
      const manager = new ConnectorManager();
      
      // Mock the SSH config parser functions
      const mockParseSSHConfig = vi.spyOn(sshConfigParser, 'parseSSHConfig');
      const mockLooksLikeSSHAlias = vi.spyOn(sshConfigParser, 'looksLikeSSHAlias');
      
      try {
        // Configure mocks - direct hostname should not trigger SSH config lookup
        mockLooksLikeSSHAlias.mockReturnValue(false);
        
        // Set a direct hostname with required SSH credentials
        process.env.SSH_HOST = 'ssh.example.com';
        process.env.SSH_USER = 'sshuser';
        process.env.SSH_PASSWORD = 'sshpass';
        
        const dsn = postgresContainer.getConnectionUri();
        
        // This should fail during actual SSH connection, but we can verify the parsing behavior
        await expect(manager.connectWithDSN(dsn)).rejects.toThrow();
        
        // Verify that SSH config parsing was checked but not executed
        expect(mockLooksLikeSSHAlias).toHaveBeenCalledWith('ssh.example.com');
        expect(mockParseSSHConfig).not.toHaveBeenCalled();
        
      } finally {
        // Clean up
        delete process.env.SSH_HOST;
        delete process.env.SSH_USER;
        delete process.env.SSH_PASSWORD;
        mockParseSSHConfig.mockRestore();
        mockLooksLikeSSHAlias.mockRestore();
      }
    });
  });
});
```
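
The mocks above pin down a resolution order in `ConnectorManager`: `SSH_HOST` is first tested with `looksLikeSSHAlias()`, aliases are resolved through `~/.ssh/config` via `parseSSHConfig(alias, configPath)`, and direct hostnames fall through to the raw environment variables, which must include `SSH_USER` plus either `SSH_PASSWORD` or `SSH_KEY`. A minimal sketch of that flow under those assumptions; `resolveSSHConfigFromEnv` is a hypothetical helper illustrating the behavior the tests assert, not the manager's verbatim code:

```typescript
import os from 'node:os';
import path from 'node:path';
import { looksLikeSSHAlias, parseSSHConfig } from './ssh-config-parser.js';
import type { SSHTunnelConfig } from '../types/ssh.js';

// Hypothetical helper mirroring the resolution order the tests verify
export function resolveSSHConfigFromEnv(): SSHTunnelConfig | null {
  const host = process.env.SSH_HOST;
  if (!host) return null; // no tunnel requested

  if (looksLikeSSHAlias(host)) {
    // Aliases like "mybastion" are resolved via ~/.ssh/config
    // (HostName, User, Port, IdentityFile), as the mocked test checks
    const configPath = path.join(os.homedir(), '.ssh', 'config');
    return parseSSHConfig(host, configPath) as SSHTunnelConfig; // assumed shape
  }

  // Direct hostnames skip the config file and must carry explicit credentials
  const username = process.env.SSH_USER;
  if (!username) {
    throw new Error('SSH tunnel configuration requires SSH_USER');
  }
  if (!process.env.SSH_PASSWORD && !process.env.SSH_KEY) {
    throw new Error('SSH tunnel configuration requires either SSH_PASSWORD or SSH_KEY');
  }
  return {
    host,
    username,
    password: process.env.SSH_PASSWORD,
    privateKey: process.env.SSH_KEY,
  } as SSHTunnelConfig; // field names are assumptions based on the mocked config
}
```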

--------------------------------------------------------------------------------
/src/tools/__tests__/execute-sql.test.ts:
--------------------------------------------------------------------------------

```typescript
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { executeSqlToolHandler } from '../execute-sql.js';
import { ConnectorManager } from '../../connectors/manager.js';
import { isReadOnlyMode } from '../../config/env.js';
import type { Connector, ConnectorType, SQLResult } from '../../connectors/interface.js';

// Mock dependencies
vi.mock('../../connectors/manager.js');
vi.mock('../../config/env.js');

// Mock connector for testing
const createMockConnector = (id: ConnectorType = 'sqlite'): Connector => ({
  id,
  name: 'Mock Connector',
  dsnParser: {} as any,
  connect: vi.fn(),
  disconnect: vi.fn(),
  getSchemas: vi.fn(),
  getTables: vi.fn(),
  tableExists: vi.fn(),
  getTableSchema: vi.fn(),
  getTableIndexes: vi.fn(),
  getStoredProcedures: vi.fn(),
  getStoredProcedureDetail: vi.fn(),
  executeSQL: vi.fn(),
});

// Helper function to parse tool response
const parseToolResponse = (response: any) => {
  return JSON.parse(response.content[0].text);
};

describe('execute-sql tool', () => {
  let mockConnector: Connector;
  const mockGetCurrentConnector = vi.mocked(ConnectorManager.getCurrentConnector);
  const mockGetCurrentExecuteOptions = vi.mocked(ConnectorManager.getCurrentExecuteOptions);
  const mockIsReadOnlyMode = vi.mocked(isReadOnlyMode);

  beforeEach(() => {
    mockConnector = createMockConnector('sqlite');
    mockGetCurrentConnector.mockReturnValue(mockConnector);
    mockGetCurrentExecuteOptions.mockReturnValue({});
    mockIsReadOnlyMode.mockReturnValue(false);
  });

  afterEach(() => {
    vi.clearAllMocks();
  });

  describe('single statement execution', () => {
    it('should execute a single SELECT statement successfully', async () => {
      const mockResult: SQLResult = { rows: [{ id: 1, name: 'test' }] };
      vi.mocked(mockConnector.executeSQL).mockResolvedValue(mockResult);

      const result = await executeSqlToolHandler({ sql: 'SELECT * FROM users' }, null);
      const parsedResult = parseToolResponse(result);

      expect(parsedResult.success).toBe(true);
      expect(parsedResult.data.rows).toEqual([{ id: 1, name: 'test' }]);
      expect(parsedResult.data.count).toBe(1);
      expect(mockConnector.executeSQL).toHaveBeenCalledWith('SELECT * FROM users', {});
    });

    it('should handle execution errors', async () => {
      vi.mocked(mockConnector.executeSQL).mockRejectedValue(new Error('Database error'));

      const result = await executeSqlToolHandler({ sql: 'SELECT * FROM invalid_table' }, null);

      expect(result.isError).toBe(true);
      const parsedResult = parseToolResponse(result);
      expect(parsedResult.success).toBe(false);
      expect(parsedResult.error).toBe('Database error');
      expect(parsedResult.code).toBe('EXECUTION_ERROR');
    });
  });

  describe('multi-statement execution', () => {
    it('should pass multi-statement SQL directly to connector', async () => {
      const mockResult: SQLResult = { rows: [{ id: 1 }] };
      vi.mocked(mockConnector.executeSQL).mockResolvedValue(mockResult);

      const sql = 'SELECT * FROM users; SELECT * FROM roles;';
      const result = await executeSqlToolHandler({ sql }, null);
      const parsedResult = parseToolResponse(result);

      expect(parsedResult.success).toBe(true);
      expect(mockConnector.executeSQL).toHaveBeenCalledWith(sql, {});
    });
  });

  describe('read-only mode validation', () => {
    beforeEach(() => {
      mockIsReadOnlyMode.mockReturnValue(true);
    });

    it('should allow single SELECT statement in read-only mode', async () => {
      const mockResult: SQLResult = { rows: [{ id: 1 }] };
      vi.mocked(mockConnector.executeSQL).mockResolvedValue(mockResult);

      const result = await executeSqlToolHandler({ sql: 'SELECT * FROM users' }, null);
      const parsedResult = parseToolResponse(result);

      expect(parsedResult.success).toBe(true);
      expect(mockConnector.executeSQL).toHaveBeenCalled();
    });

    it('should allow multiple read-only statements in read-only mode', async () => {
      const mockResult: SQLResult = { rows: [] };
      vi.mocked(mockConnector.executeSQL).mockResolvedValue(mockResult);

      const sql = 'SELECT * FROM users; SELECT * FROM roles;';
      const result = await executeSqlToolHandler({ sql }, null);
      const parsedResult = parseToolResponse(result);

      expect(parsedResult.success).toBe(true);
      expect(mockConnector.executeSQL).toHaveBeenCalledWith(sql, {});
    });

    it('should reject single INSERT statement in read-only mode', async () => {
      const result = await executeSqlToolHandler({ sql: "INSERT INTO users (name) VALUES ('test')" }, null);

      expect(result.isError).toBe(true);
      const parsedResult = parseToolResponse(result);
      expect(parsedResult.success).toBe(false);
      expect(parsedResult.error).toContain('Read-only mode is enabled');
      expect(parsedResult.code).toBe('READONLY_VIOLATION');
      expect(mockConnector.executeSQL).not.toHaveBeenCalled();
    });

    it('should reject multi-statement with any write operation in read-only mode', async () => {
      const sql = "SELECT * FROM users; INSERT INTO users (name) VALUES ('test'); SELECT COUNT(*) FROM users;";
      const result = await executeSqlToolHandler({ sql }, null);

      expect(result.isError).toBe(true);
      const parsedResult = parseToolResponse(result);
      expect(parsedResult.success).toBe(false);
      expect(parsedResult.error).toContain('Read-only mode is enabled');
      expect(parsedResult.code).toBe('READONLY_VIOLATION');
      expect(mockConnector.executeSQL).not.toHaveBeenCalled();
    });

  });

  describe('SQL comments handling', () => {
    it('should allow SELECT with single-line comment in read-only mode', async () => {
      mockIsReadOnlyMode.mockReturnValue(true);
      const mockResult: SQLResult = { rows: [{ id: 1 }] };
      vi.mocked(mockConnector.executeSQL).mockResolvedValue(mockResult);

      const sql = '-- Fetch active users\nSELECT * FROM users WHERE active = TRUE';
      const result = await executeSqlToolHandler({ sql }, null);
      const parsedResult = parseToolResponse(result);

      expect(parsedResult.success).toBe(true);
      expect(mockConnector.executeSQL).toHaveBeenCalledWith(sql, {});
    });

    it('should allow SELECT with multi-line comment in read-only mode', async () => {
      mockIsReadOnlyMode.mockReturnValue(true);
      const mockResult: SQLResult = { rows: [] };
      vi.mocked(mockConnector.executeSQL).mockResolvedValue(mockResult);

      const sql = '/* This query fetches\n   all products */\nSELECT * FROM products';
      const result = await executeSqlToolHandler({ sql }, null);
      const parsedResult = parseToolResponse(result);

      expect(parsedResult.success).toBe(true);
      expect(mockConnector.executeSQL).toHaveBeenCalledWith(sql, {});
    });

    it('should handle multiple statements with comments in read-only mode', async () => {
      mockIsReadOnlyMode.mockReturnValue(true);
      const mockResult: SQLResult = { rows: [] };
      vi.mocked(mockConnector.executeSQL).mockResolvedValue(mockResult);

      const sql = '-- First query\nSELECT * FROM users;\n/* Second query */\nSELECT * FROM roles;';
      const result = await executeSqlToolHandler({ sql }, null);
      const parsedResult = parseToolResponse(result);

      expect(parsedResult.success).toBe(true);
      expect(mockConnector.executeSQL).toHaveBeenCalledWith(sql, {});
    });

    it('should reject INSERT with comment in read-only mode', async () => {
      mockIsReadOnlyMode.mockReturnValue(true);

      const sql = '-- Insert new user\nINSERT INTO users (name) VALUES (\'test\')';
      const result = await executeSqlToolHandler({ sql }, null);

      expect(result.isError).toBe(true);
      const parsedResult = parseToolResponse(result);
      expect(parsedResult.success).toBe(false);
      expect(parsedResult.code).toBe('READONLY_VIOLATION');
      expect(mockConnector.executeSQL).not.toHaveBeenCalled();
    });

    it('should handle query that is only comments as read-only', async () => {
      mockIsReadOnlyMode.mockReturnValue(true);
      const mockResult: SQLResult = { rows: [] };
      vi.mocked(mockConnector.executeSQL).mockResolvedValue(mockResult);

      const sql = '-- Just a comment\n/* Another comment */';
      const result = await executeSqlToolHandler({ sql }, null);
      const parsedResult = parseToolResponse(result);

      expect(parsedResult.success).toBe(true);
      expect(mockConnector.executeSQL).toHaveBeenCalledWith(sql, {});
    });

    it('should handle inline comments correctly', async () => {
      mockIsReadOnlyMode.mockReturnValue(true);
      const mockResult: SQLResult = { rows: [] };
      vi.mocked(mockConnector.executeSQL).mockResolvedValue(mockResult);

      const sql = 'SELECT id, -- user id\n       name -- user name\nFROM users';
      const result = await executeSqlToolHandler({ sql }, null);
      const parsedResult = parseToolResponse(result);

      expect(parsedResult.success).toBe(true);
      expect(mockConnector.executeSQL).toHaveBeenCalledWith(sql, {});
    });
  });

  describe('edge cases', () => {
    it('should handle empty SQL string', async () => {
      const mockResult: SQLResult = { rows: [] };
      vi.mocked(mockConnector.executeSQL).mockResolvedValue(mockResult);

      const result = await executeSqlToolHandler({ sql: '' }, null);
      const parsedResult = parseToolResponse(result);

      expect(parsedResult.success).toBe(true);
      expect(mockConnector.executeSQL).toHaveBeenCalledWith('', {});
    });

    it('should handle SQL with only semicolons and whitespace', async () => {
      const mockResult: SQLResult = { rows: [] };
      vi.mocked(mockConnector.executeSQL).mockResolvedValue(mockResult);

      const result = await executeSqlToolHandler({ sql: '   ;  ;  ; ' }, null);
      const parsedResult = parseToolResponse(result);

      expect(parsedResult.success).toBe(true);
      expect(mockConnector.executeSQL).toHaveBeenCalledWith('   ;  ;  ; ', {});
    });
  });
});
```
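
The read-only cases above pin down two properties: comments are stripped before classification, and every statement in a multi-statement string must begin with a read-only keyword (a comments-only string counts as read-only). A minimal sketch of such a check; the keyword list and helper names are illustrative, while the real logic lives in `src/tools/execute-sql.ts` and `src/utils/allowed-keywords.ts`:

```typescript
// Hypothetical allowlist; the real one lives in src/utils/allowed-keywords.ts
const READ_ONLY_KEYWORDS = new Set(['select', 'show', 'describe', 'explain', 'with']);

// Strip line comments (starting with --) and block comments before classifying
function stripComments(sql: string): string {
  return sql.replace(/\/\*[\s\S]*?\*\//g, '').replace(/--[^\n]*/g, '');
}

// True if every non-empty statement starts with a read-only keyword;
// empty input and comment-only input are treated as read-only
export function isReadOnlySQL(sql: string): boolean {
  // Note: a naive split on ';' ignores semicolons inside string literals --
  // good enough for a sketch, not for production parsing
  return stripComments(sql)
    .split(';')
    .map((statement) => statement.trim())
    .filter((statement) => statement.length > 0)
    .every((statement) => {
      const firstWord = statement.split(/\s+/)[0]?.toLowerCase() ?? '';
      return READ_ONLY_KEYWORDS.has(firstWord);
    });
}
```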