This is page 4 of 4. Use http://codebase.md/modelcontextprotocol/servers?page={x} to view the full context.
# Directory Structure
```
├── .gitattributes
├── .github
│   ├── pull_request_template.md
│   └── workflows
│       ├── claude.yml
│       ├── python.yml
│       ├── release.yml
│       └── typescript.yml
├── .gitignore
├── .mcp.json
├── .npmrc
├── .vscode
│   └── settings.json
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── package-lock.json
├── package.json
├── README.md
├── scripts
│   └── release.py
├── SECURITY.md
├── src
│   ├── everything
│   │   ├── CLAUDE.md
│   │   ├── Dockerfile
│   │   ├── everything.ts
│   │   ├── index.ts
│   │   ├── instructions.md
│   │   ├── package.json
│   │   ├── README.md
│   │   ├── sse.ts
│   │   ├── stdio.ts
│   │   ├── streamableHttp.ts
│   │   └── tsconfig.json
│   ├── fetch
│   │   ├── .python-version
│   │   ├── Dockerfile
│   │   ├── LICENSE
│   │   ├── pyproject.toml
│   │   ├── README.md
│   │   ├── src
│   │   │   └── mcp_server_fetch
│   │   │       ├── __init__.py
│   │   │       ├── __main__.py
│   │   │       └── server.py
│   │   └── uv.lock
│   ├── filesystem
│   │   ├── __tests__
│   │   │   ├── directory-tree.test.ts
│   │   │   ├── lib.test.ts
│   │   │   ├── path-utils.test.ts
│   │   │   ├── path-validation.test.ts
│   │   │   └── roots-utils.test.ts
│   │   ├── Dockerfile
│   │   ├── index.ts
│   │   ├── lib.ts
│   │   ├── package.json
│   │   ├── path-utils.ts
│   │   ├── path-validation.ts
│   │   ├── README.md
│   │   ├── roots-utils.ts
│   │   ├── tsconfig.json
│   │   └── vitest.config.ts
│   ├── git
│   │   ├── .gitignore
│   │   ├── .python-version
│   │   ├── Dockerfile
│   │   ├── LICENSE
│   │   ├── pyproject.toml
│   │   ├── README.md
│   │   ├── src
│   │   │   └── mcp_server_git
│   │   │       ├── __init__.py
│   │   │       ├── __main__.py
│   │   │       ├── py.typed
│   │   │       └── server.py
│   │   ├── tests
│   │   │   └── test_server.py
│   │   └── uv.lock
│   ├── memory
│   │   ├── __tests__
│   │   │   ├── file-path.test.ts
│   │   │   └── knowledge-graph.test.ts
│   │   ├── Dockerfile
│   │   ├── index.ts
│   │   ├── package.json
│   │   ├── README.md
│   │   ├── tsconfig.json
│   │   └── vitest.config.ts
│   ├── sequentialthinking
│   │   ├── __tests__
│   │   │   └── lib.test.ts
│   │   ├── Dockerfile
│   │   ├── index.ts
│   │   ├── lib.ts
│   │   ├── package.json
│   │   ├── README.md
│   │   ├── tsconfig.json
│   │   └── vitest.config.ts
│   └── time
│       ├── .python-version
│       ├── Dockerfile
│       ├── pyproject.toml
│       ├── README.md
│       ├── src
│       │   └── mcp_server_time
│       │       ├── __init__.py
│       │       ├── __main__.py
│       │       └── server.py
│       ├── test
│       │   └── time_server_test.py
│       └── uv.lock
└── tsconfig.json
```
# Files
--------------------------------------------------------------------------------
/src/time/test/time_server_test.py:
--------------------------------------------------------------------------------
```python
from freezegun import freeze_time
from mcp.shared.exceptions import McpError
import pytest
from unittest.mock import patch
from zoneinfo import ZoneInfo

from mcp_server_time.server import TimeServer, get_local_tz


@pytest.mark.parametrize(
    "test_time,timezone,expected",
    [
        # UTC+1 non-DST
        (
            "2024-01-01 12:00:00+00:00",
            "Europe/Warsaw",
            {
                "timezone": "Europe/Warsaw",
                "datetime": "2024-01-01T13:00:00+01:00",
                "is_dst": False,
            },
        ),
        # UTC non-DST
        (
            "2024-01-01 12:00:00+00:00",
            "Europe/London",
            {
                "timezone": "Europe/London",
                "datetime": "2024-01-01T12:00:00+00:00",
                "is_dst": False,
            },
        ),
        # UTC-5 non-DST
        (
            "2024-01-01 12:00:00-00:00",
            "America/New_York",
            {
                "timezone": "America/New_York",
                "datetime": "2024-01-01T07:00:00-05:00",
                "is_dst": False,
            },
        ),
        # UTC+1 DST
        (
            "2024-03-31 12:00:00+00:00",
            "Europe/Warsaw",
            {
                "timezone": "Europe/Warsaw",
                "datetime": "2024-03-31T14:00:00+02:00",
                "is_dst": True,
            },
        ),
        # UTC DST
        (
            "2024-03-31 12:00:00+00:00",
            "Europe/London",
            {
                "timezone": "Europe/London",
                "datetime": "2024-03-31T13:00:00+01:00",
                "is_dst": True,
            },
        ),
        # UTC-5 DST
        (
            "2024-03-31 12:00:00-00:00",
            "America/New_York",
            {
                "timezone": "America/New_York",
                "datetime": "2024-03-31T08:00:00-04:00",
                "is_dst": True,
            },
        ),
    ],
)
def test_get_current_time(test_time, timezone, expected):
    with freeze_time(test_time):
        time_server = TimeServer()
        result = time_server.get_current_time(timezone)
        assert result.timezone == expected["timezone"]
        assert result.datetime == expected["datetime"]
        assert result.is_dst == expected["is_dst"]


def test_get_current_time_with_invalid_timezone():
    time_server = TimeServer()
    with pytest.raises(
        McpError,
        match=r"Invalid timezone: 'No time zone found with key Invalid/Timezone'",
    ):
        time_server.get_current_time("Invalid/Timezone")


@pytest.mark.parametrize(
    "source_tz,time_str,target_tz,expected_error",
    [
        (
            "invalid_tz",
            "12:00",
            "Europe/London",
            "Invalid timezone: 'No time zone found with key invalid_tz'",
        ),
        (
            "Europe/Warsaw",
            "12:00",
            "invalid_tz",
            "Invalid timezone: 'No time zone found with key invalid_tz'",
        ),
        (
            "Europe/Warsaw",
            "25:00",
            "Europe/London",
            "Invalid time format. Expected HH:MM [24-hour format]",
        ),
    ],
)
def test_convert_time_errors(source_tz, time_str, target_tz, expected_error):
    time_server = TimeServer()
    with pytest.raises((McpError, ValueError), match=expected_error):
        time_server.convert_time(source_tz, time_str, target_tz)


@pytest.mark.parametrize(
    "test_time,source_tz,time_str,target_tz,expected",
    [
        # Basic case: Standard time conversion between Warsaw and London (1 hour difference)
        # Warsaw is UTC+1, London is UTC+0
        (
            "2024-01-01 00:00:00+00:00",
            "Europe/Warsaw",
            "12:00",
            "Europe/London",
            {
                "source": {
                    "timezone": "Europe/Warsaw",
                    "datetime": "2024-01-01T12:00:00+01:00",
                    "is_dst": False,
                },
                "target": {
                    "timezone": "Europe/London",
                    "datetime": "2024-01-01T11:00:00+00:00",
                    "is_dst": False,
                },
                "time_difference": "-1.0h",
            },
        ),
        # Reverse case of above: London to Warsaw conversion
        # Shows how time difference is positive when going east
        (
            "2024-01-01 00:00:00+00:00",
            "Europe/London",
            "12:00",
            "Europe/Warsaw",
            {
                "source": {
                    "timezone": "Europe/London",
                    "datetime": "2024-01-01T12:00:00+00:00",
                    "is_dst": False,
                },
                "target": {
                    "timezone": "Europe/Warsaw",
                    "datetime": "2024-01-01T13:00:00+01:00",
                    "is_dst": False,
                },
                "time_difference": "+1.0h",
            },
        ),
        # Edge case: Different DST periods between Europe and USA
        # Europe ends DST on Oct 27, while USA waits until Nov 3
        # This creates a one-week period where Europe is in standard time but USA still observes DST
        (
            "2024-10-28 00:00:00+00:00",
            "Europe/Warsaw",
            "12:00",
            "America/New_York",
            {
                "source": {
                    "timezone": "Europe/Warsaw",
                    "datetime": "2024-10-28T12:00:00+01:00",
                    "is_dst": False,
                },
                "target": {
                    "timezone": "America/New_York",
                    "datetime": "2024-10-28T07:00:00-04:00",
                    "is_dst": True,
                },
                "time_difference": "-5.0h",
            },
        ),
        # Follow-up to previous case: After both regions end DST
        # Shows how time difference increases by 1 hour when USA also ends DST
        (
            "2024-11-04 00:00:00+00:00",
            "Europe/Warsaw",
            "12:00",
            "America/New_York",
            {
                "source": {
                    "timezone": "Europe/Warsaw",
                    "datetime": "2024-11-04T12:00:00+01:00",
                    "is_dst": False,
                },
                "target": {
                    "timezone": "America/New_York",
                    "datetime": "2024-11-04T06:00:00-05:00",
                    "is_dst": False,
                },
                "time_difference": "-6.0h",
            },
        ),
        # Edge case: Nepal's unusual UTC+5:45 offset
        # One of the few time zones using 45-minute offset
        (
            "2024-01-01 00:00:00+00:00",
            "Europe/Warsaw",
            "12:00",
            "Asia/Kathmandu",
            {
                "source": {
                    "timezone": "Europe/Warsaw",
                    "datetime": "2024-01-01T12:00:00+01:00",
                    "is_dst": False,
                },
                "target": {
                    "timezone": "Asia/Kathmandu",
                    "datetime": "2024-01-01T16:45:00+05:45",
                    "is_dst": False,
                },
                "time_difference": "+4.75h",
            },
        ),
        # Reverse case for Nepal
        # Demonstrates how 45-minute offset works in opposite direction
        (
            "2024-01-01 00:00:00+00:00",
            "Asia/Kathmandu",
            "12:00",
            "Europe/Warsaw",
            {
                "source": {
                    "timezone": "Asia/Kathmandu",
                    "datetime": "2024-01-01T12:00:00+05:45",
                    "is_dst": False,
                },
                "target": {
                    "timezone": "Europe/Warsaw",
                    "datetime": "2024-01-01T07:15:00+01:00",
                    "is_dst": False,
                },
                "time_difference": "-4.75h",
            },
        ),
        # Edge case: Lord Howe Island's unique DST rules
        # One of the few places using 30-minute DST shift
        # During summer (DST), they use UTC+11
        (
            "2024-01-01 00:00:00+00:00",
            "Europe/Warsaw",
            "12:00",
            "Australia/Lord_Howe",
            {
                "source": {
                    "timezone": "Europe/Warsaw",
                    "datetime": "2024-01-01T12:00:00+01:00",
                    "is_dst": False,
                },
                "target": {
                    "timezone": "Australia/Lord_Howe",
                    "datetime": "2024-01-01T22:00:00+11:00",
                    "is_dst": True,
                },
                "time_difference": "+10.0h",
            },
        ),
        # Second Lord Howe Island case: During their standard time
        # Shows transition to UTC+10:30 after DST ends
        (
            "2024-04-07 00:00:00+00:00",
            "Europe/Warsaw",
            "12:00",
            "Australia/Lord_Howe",
            {
                "source": {
                    "timezone": "Europe/Warsaw",
                    "datetime": "2024-04-07T12:00:00+02:00",
                    "is_dst": True,
                },
                "target": {
                    "timezone": "Australia/Lord_Howe",
                    "datetime": "2024-04-07T20:30:00+10:30",
                    "is_dst": False,
                },
                "time_difference": "+8.5h",
            },
        ),
        # Edge case: Date line crossing with Samoa
        # Demonstrates how a single time conversion can result in a date change
        # Samoa is UTC+13, creating almost a full day difference with Warsaw
        (
            "2024-01-01 00:00:00+00:00",
            "Europe/Warsaw",
            "23:00",
            "Pacific/Apia",
            {
                "source": {
                    "timezone": "Europe/Warsaw",
                    "datetime": "2024-01-01T23:00:00+01:00",
                    "is_dst": False,
                },
                "target": {
                    "timezone": "Pacific/Apia",
                    "datetime": "2024-01-02T11:00:00+13:00",
                    "is_dst": False,
                },
                "time_difference": "+12.0h",
            },
        ),
        # Edge case: Iran's unusual half-hour offset
        # Demonstrates conversion with Iran's UTC+3:30 timezone
        (
            "2024-03-21 00:00:00+00:00",
            "Europe/Warsaw",
            "12:00",
            "Asia/Tehran",
            {
                "source": {
                    "timezone": "Europe/Warsaw",
                    "datetime": "2024-03-21T12:00:00+01:00",
                    "is_dst": False,
                },
                "target": {
                    "timezone": "Asia/Tehran",
                    "datetime": "2024-03-21T14:30:00+03:30",
                    "is_dst": False,
                },
                "time_difference": "+2.5h",
            },
        ),
        # Edge case: Venezuela's unusual -4:30 offset (historical)
        # In 2016, Venezuela moved from -4:30 to -4:00
        # Useful for testing historical dates
        (
            "2016-04-30 00:00:00+00:00",  # Just before the change
            "Europe/Warsaw",
            "12:00",
            "America/Caracas",
            {
                "source": {
                    "timezone": "Europe/Warsaw",
                    "datetime": "2016-04-30T12:00:00+02:00",
                    "is_dst": True,
                },
                "target": {
                    "timezone": "America/Caracas",
                    "datetime": "2016-04-30T05:30:00-04:30",
                    "is_dst": False,
                },
                "time_difference": "-6.5h",
            },
        ),
        # Edge case: Israel's variable DST
        # Israel's DST changes don't follow a fixed pattern
        # They often change dates year-to-year based on Hebrew calendar
        (
            "2024-10-27 00:00:00+00:00",
            "Europe/Warsaw",
            "12:00",
            "Asia/Jerusalem",
            {
                "source": {
                    "timezone": "Europe/Warsaw",
                    "datetime": "2024-10-27T12:00:00+01:00",
                    "is_dst": False,
                },
                "target": {
                    "timezone": "Asia/Jerusalem",
                    "datetime": "2024-10-27T13:00:00+02:00",
                    "is_dst": False,
                },
                "time_difference": "+1.0h",
            },
        ),
        # Edge case: Antarctica/Troll station
        # Only timezone that uses UTC+0 in winter and UTC+2 in summer
        # One of the few zones with exactly 2 hours DST difference
        (
            "2024-03-31 00:00:00+00:00",
            "Europe/Warsaw",
            "12:00",
            "Antarctica/Troll",
            {
                "source": {
                    "timezone": "Europe/Warsaw",
                    "datetime": "2024-03-31T12:00:00+02:00",
                    "is_dst": True,
                },
                "target": {
                    "timezone": "Antarctica/Troll",
                    "datetime": "2024-03-31T12:00:00+02:00",
                    "is_dst": True,
                },
                "time_difference": "+0.0h",
            },
        ),
        # Edge case: Kiribati date line anomaly
        # After skipping Dec 31, 1994, eastern Kiribati is UTC+14
        # The furthest forward timezone in the world
        (
            "2024-01-01 00:00:00+00:00",
            "Europe/Warsaw",
            "23:00",
            "Pacific/Kiritimati",
            {
                "source": {
                    "timezone": "Europe/Warsaw",
                    "datetime": "2024-01-01T23:00:00+01:00",
                    "is_dst": False,
                },
                "target": {
                    "timezone": "Pacific/Kiritimati",
                    "datetime": "2024-01-02T12:00:00+14:00",
                    "is_dst": False,
                },
                "time_difference": "+13.0h",
            },
        ),
        # Edge case: Chatham Islands, New Zealand
        # Uses unusual 45-minute offset AND observes DST
        # UTC+12:45 in standard time, UTC+13:45 in DST
        (
            "2024-01-01 00:00:00+00:00",
            "Europe/Warsaw",
            "12:00",
            "Pacific/Chatham",
            {
                "source": {
                    "timezone": "Europe/Warsaw",
                    "datetime": "2024-01-01T12:00:00+01:00",
                    "is_dst": False,
                },
                "target": {
                    "timezone": "Pacific/Chatham",
                    "datetime": "2024-01-02T00:45:00+13:45",
                    "is_dst": True,
                },
                "time_difference": "+12.75h",
            },
        ),
    ],
)
def test_convert_time(test_time, source_tz, time_str, target_tz, expected):
    with freeze_time(test_time):
        time_server = TimeServer()
        result = time_server.convert_time(source_tz, time_str, target_tz)
        assert result.source.timezone == expected["source"]["timezone"]
        assert result.target.timezone == expected["target"]["timezone"]
        assert result.source.datetime == expected["source"]["datetime"]
        assert result.target.datetime == expected["target"]["datetime"]
        assert result.source.is_dst == expected["source"]["is_dst"]
        assert result.target.is_dst == expected["target"]["is_dst"]
        assert result.time_difference == expected["time_difference"]


def test_get_local_tz_with_override():
    """Test that timezone override works correctly."""
    result = get_local_tz("America/New_York")
    assert str(result) == "America/New_York"
    assert isinstance(result, ZoneInfo)


def test_get_local_tz_with_invalid_override():
    """Test that invalid timezone override raises an error."""
    with pytest.raises(Exception):  # ZoneInfo will raise an exception
        get_local_tz("Invalid/Timezone")


@patch('mcp_server_time.server.get_localzone_name')
def test_get_local_tz_with_valid_iana_name(mock_get_localzone):
    """Test that valid IANA timezone names from tzlocal work correctly."""
    mock_get_localzone.return_value = "Europe/London"
    result = get_local_tz()
    assert str(result) == "Europe/London"
    assert isinstance(result, ZoneInfo)


@patch('mcp_server_time.server.get_localzone_name')
def test_get_local_tz_when_none_returned(mock_get_localzone):
    """Test default to UTC when tzlocal returns None."""
    mock_get_localzone.return_value = None
    result = get_local_tz()
    assert str(result) == "UTC"


@patch('mcp_server_time.server.get_localzone_name')
def test_get_local_tz_handles_windows_timezones(mock_get_localzone):
    """Test that tzlocal properly handles Windows timezone names.
    Note: tzlocal should convert Windows names like 'Pacific Standard Time'
    to proper IANA names like 'America/Los_Angeles'.
    """
    # tzlocal should return IANA names even on Windows
    mock_get_localzone.return_value = "America/Los_Angeles"
    result = get_local_tz()
    assert str(result) == "America/Los_Angeles"
    assert isinstance(result, ZoneInfo)


@pytest.mark.parametrize(
    "timezone_name",
    [
        "America/New_York",
        "Europe/Paris",
        "Asia/Tokyo",
        "Australia/Sydney",
        "Africa/Cairo",
        "America/Sao_Paulo",
        "Pacific/Auckland",
        "UTC",
    ],
)
@patch('mcp_server_time.server.get_localzone_name')
def test_get_local_tz_various_timezones(mock_get_localzone, timezone_name):
    """Test various timezone names that tzlocal might return."""
    mock_get_localzone.return_value = timezone_name
    result = get_local_tz()
    assert str(result) == timezone_name
    assert isinstance(result, ZoneInfo)
```
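For reference, a minimal TypeScript sketch of the offset arithmetic these cases assert (illustrative only, not a repository file, and not the server's Python implementation; `zoneOffsetMinutes` and `hourDifference` are hypothetical helpers):
```typescript
// Compute a zone's UTC offset at a given instant using only the built-in Intl API.
// "longOffset" yields names like "GMT+05:45", or plain "GMT" for UTC.
function zoneOffsetMinutes(timeZone: string, at: Date): number {
  const name =
    new Intl.DateTimeFormat("en-US", { timeZone, timeZoneName: "longOffset" })
      .formatToParts(at)
      .find((part) => part.type === "timeZoneName")?.value ?? "GMT";
  const match = name.match(/GMT([+-])(\d{2}):(\d{2})/);
  if (!match) return 0; // plain "GMT" means UTC
  const sign = match[1] === "-" ? -1 : 1;
  return sign * (Number(match[2]) * 60 + Number(match[3]));
}

// Hour difference between source and target zones at a given instant; the server
// renders this as strings such as "+1.0h" or "+4.75h" in its convert_time result.
function hourDifference(sourceTz: string, targetTz: string, at: Date): number {
  return (zoneOffsetMinutes(targetTz, at) - zoneOffsetMinutes(sourceTz, at)) / 60;
}

// At 2024-01-01T00:00:00Z this prints 4.75: Warsaw is UTC+1, Kathmandu UTC+5:45.
console.log(hourDifference("Europe/Warsaw", "Asia/Kathmandu", new Date("2024-01-01T00:00:00Z")));
```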
--------------------------------------------------------------------------------
/src/filesystem/__tests__/lib.test.ts:
--------------------------------------------------------------------------------
```typescript
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
import fs from 'fs/promises';
import path from 'path';
import os from 'os';
import {
// Pure utility functions
formatSize,
normalizeLineEndings,
createUnifiedDiff,
// Security & validation functions
validatePath,
setAllowedDirectories,
// File operations
getFileStats,
readFileContent,
writeFileContent,
// Search & filtering functions
searchFilesWithValidation,
// File editing functions
applyFileEdits,
tailFile,
headFile
} from '../lib.js';
// Mock fs module
vi.mock('fs/promises');
const mockFs = fs as any;
describe('Lib Functions', () => {
beforeEach(() => {
vi.clearAllMocks();
// Set up allowed directories for tests
const allowedDirs = process.platform === 'win32' ? ['C:\\Users\\test', 'C:\\temp', 'C:\\allowed'] : ['/home/user', '/tmp', '/allowed'];
setAllowedDirectories(allowedDirs);
});
afterEach(() => {
vi.restoreAllMocks();
// Clear allowed directories after tests
setAllowedDirectories([]);
});
describe('Pure Utility Functions', () => {
describe('formatSize', () => {
it('formats bytes correctly', () => {
expect(formatSize(0)).toBe('0 B');
expect(formatSize(512)).toBe('512 B');
expect(formatSize(1024)).toBe('1.00 KB');
expect(formatSize(1536)).toBe('1.50 KB');
expect(formatSize(1048576)).toBe('1.00 MB');
expect(formatSize(1073741824)).toBe('1.00 GB');
expect(formatSize(1099511627776)).toBe('1.00 TB');
});
it('handles edge cases', () => {
expect(formatSize(1023)).toBe('1023 B');
expect(formatSize(1025)).toBe('1.00 KB');
expect(formatSize(1048575)).toBe('1024.00 KB');
});
it('handles very large numbers beyond TB', () => {
// The function only supports up to TB, so very large numbers will show as TB
expect(formatSize(1024 * 1024 * 1024 * 1024 * 1024)).toBe('1024.00 TB');
expect(formatSize(Number.MAX_SAFE_INTEGER)).toContain('TB');
});
it('handles negative numbers', () => {
// Negative numbers will result in NaN for the log calculation
expect(formatSize(-1024)).toContain('NaN');
expect(formatSize(-0)).toBe('0 B');
});
it('handles decimal numbers', () => {
expect(formatSize(1536.5)).toBe('1.50 KB');
expect(formatSize(1023.9)).toBe('1023.9 B');
});
it('handles very small positive numbers', () => {
expect(formatSize(1)).toBe('1 B');
expect(formatSize(0.5)).toBe('0.5 B');
expect(formatSize(0.1)).toBe('0.1 B');
});
});
describe('normalizeLineEndings', () => {
it('converts CRLF to LF', () => {
expect(normalizeLineEndings('line1\r\nline2\r\nline3')).toBe('line1\nline2\nline3');
});
it('leaves LF unchanged', () => {
expect(normalizeLineEndings('line1\nline2\nline3')).toBe('line1\nline2\nline3');
});
it('handles mixed line endings', () => {
expect(normalizeLineEndings('line1\r\nline2\nline3\r\n')).toBe('line1\nline2\nline3\n');
});
it('handles empty string', () => {
expect(normalizeLineEndings('')).toBe('');
});
});
describe('createUnifiedDiff', () => {
it('creates diff for simple changes', () => {
const original = 'line1\nline2\nline3';
const modified = 'line1\nmodified line2\nline3';
const diff = createUnifiedDiff(original, modified, 'test.txt');
expect(diff).toContain('--- test.txt');
expect(diff).toContain('+++ test.txt');
expect(diff).toContain('-line2');
expect(diff).toContain('+modified line2');
});
it('handles CRLF normalization', () => {
const original = 'line1\r\nline2\r\n';
const modified = 'line1\nmodified line2\n';
const diff = createUnifiedDiff(original, modified);
expect(diff).toContain('-line2');
expect(diff).toContain('+modified line2');
});
it('handles identical content', () => {
const content = 'line1\nline2\nline3';
const diff = createUnifiedDiff(content, content);
// Should not contain any +/- lines for identical content (excluding header lines)
expect(diff.split('\n').filter((line: string) => line.startsWith('+++') || line.startsWith('---'))).toHaveLength(2);
expect(diff.split('\n').filter((line: string) => line.startsWith('+') && !line.startsWith('+++'))).toHaveLength(0);
expect(diff.split('\n').filter((line: string) => line.startsWith('-') && !line.startsWith('---'))).toHaveLength(0);
});
it('handles empty content', () => {
const diff = createUnifiedDiff('', '');
expect(diff).toContain('--- file');
expect(diff).toContain('+++ file');
});
it('handles default filename parameter', () => {
const diff = createUnifiedDiff('old', 'new');
expect(diff).toContain('--- file');
expect(diff).toContain('+++ file');
});
it('handles custom filename', () => {
const diff = createUnifiedDiff('old', 'new', 'custom.txt');
expect(diff).toContain('--- custom.txt');
expect(diff).toContain('+++ custom.txt');
});
});
});
describe('Security & Validation Functions', () => {
describe('validatePath', () => {
// Use Windows-compatible paths for testing
const allowedDirs = process.platform === 'win32' ? ['C:\\Users\\test', 'C:\\temp'] : ['/home/user', '/tmp'];
beforeEach(() => {
mockFs.realpath.mockImplementation(async (path: any) => path.toString());
});
it('validates allowed paths', async () => {
const testPath = process.platform === 'win32' ? 'C:\\Users\\test\\file.txt' : '/home/user/file.txt';
const result = await validatePath(testPath);
expect(result).toBe(testPath);
});
it('rejects disallowed paths', async () => {
const testPath = process.platform === 'win32' ? 'C:\\Windows\\System32\\file.txt' : '/etc/passwd';
await expect(validatePath(testPath))
.rejects.toThrow('Access denied - path outside allowed directories');
});
it('handles non-existent files by checking parent directory', async () => {
const newFilePath = process.platform === 'win32' ? 'C:\\Users\\test\\newfile.txt' : '/home/user/newfile.txt';
const parentPath = process.platform === 'win32' ? 'C:\\Users\\test' : '/home/user';
// Create an error with the ENOENT code that the implementation checks for
const enoentError = new Error('ENOENT') as NodeJS.ErrnoException;
enoentError.code = 'ENOENT';
mockFs.realpath
.mockRejectedValueOnce(enoentError)
.mockResolvedValueOnce(parentPath);
const result = await validatePath(newFilePath);
expect(result).toBe(path.resolve(newFilePath));
});
it('rejects when parent directory does not exist', async () => {
const newFilePath = process.platform === 'win32' ? 'C:\\Users\\test\\nonexistent\\newfile.txt' : '/home/user/nonexistent/newfile.txt';
// Create errors with the ENOENT code
const enoentError1 = new Error('ENOENT') as NodeJS.ErrnoException;
enoentError1.code = 'ENOENT';
const enoentError2 = new Error('ENOENT') as NodeJS.ErrnoException;
enoentError2.code = 'ENOENT';
mockFs.realpath
.mockRejectedValueOnce(enoentError1)
.mockRejectedValueOnce(enoentError2);
await expect(validatePath(newFilePath))
.rejects.toThrow('Parent directory does not exist');
});
});
});
describe('File Operations', () => {
describe('getFileStats', () => {
it('returns file statistics', async () => {
const mockStats = {
size: 1024,
birthtime: new Date('2023-01-01'),
mtime: new Date('2023-01-02'),
atime: new Date('2023-01-03'),
isDirectory: () => false,
isFile: () => true,
mode: 0o644
};
mockFs.stat.mockResolvedValueOnce(mockStats as any);
const result = await getFileStats('/test/file.txt');
expect(result).toEqual({
size: 1024,
created: new Date('2023-01-01'),
modified: new Date('2023-01-02'),
accessed: new Date('2023-01-03'),
isDirectory: false,
isFile: true,
permissions: '644'
});
});
it('handles directory statistics', async () => {
const mockStats = {
size: 4096,
birthtime: new Date('2023-01-01'),
mtime: new Date('2023-01-02'),
atime: new Date('2023-01-03'),
isDirectory: () => true,
isFile: () => false,
mode: 0o755
};
mockFs.stat.mockResolvedValueOnce(mockStats as any);
const result = await getFileStats('/test/dir');
expect(result.isDirectory).toBe(true);
expect(result.isFile).toBe(false);
expect(result.permissions).toBe('755');
});
});
describe('readFileContent', () => {
it('reads file with default encoding', async () => {
mockFs.readFile.mockResolvedValueOnce('file content');
const result = await readFileContent('/test/file.txt');
expect(result).toBe('file content');
expect(mockFs.readFile).toHaveBeenCalledWith('/test/file.txt', 'utf-8');
});
it('reads file with custom encoding', async () => {
mockFs.readFile.mockResolvedValueOnce('file content');
const result = await readFileContent('/test/file.txt', 'ascii');
expect(result).toBe('file content');
expect(mockFs.readFile).toHaveBeenCalledWith('/test/file.txt', 'ascii');
});
});
describe('writeFileContent', () => {
it('writes file content', async () => {
mockFs.writeFile.mockResolvedValueOnce(undefined);
await writeFileContent('/test/file.txt', 'new content');
expect(mockFs.writeFile).toHaveBeenCalledWith('/test/file.txt', 'new content', { encoding: "utf-8", flag: 'wx' });
});
});
});
describe('Search & Filtering Functions', () => {
describe('searchFilesWithValidation', () => {
beforeEach(() => {
mockFs.realpath.mockImplementation(async (path: any) => path.toString());
});
it('excludes files matching exclude patterns', async () => {
const mockEntries = [
{ name: 'test.txt', isDirectory: () => false },
{ name: 'test.log', isDirectory: () => false },
{ name: 'node_modules', isDirectory: () => true }
];
mockFs.readdir.mockResolvedValueOnce(mockEntries as any);
const testDir = process.platform === 'win32' ? 'C:\\allowed\\dir' : '/allowed/dir';
const allowedDirs = process.platform === 'win32' ? ['C:\\allowed'] : ['/allowed'];
// Mock realpath to return the same path for validation to pass
mockFs.realpath.mockImplementation(async (inputPath: any) => {
const pathStr = inputPath.toString();
// Return the path as-is for validation
return pathStr;
});
const result = await searchFilesWithValidation(
testDir,
'*test*',
allowedDirs,
{ excludePatterns: ['*.log', 'node_modules'] }
);
const expectedResult = process.platform === 'win32' ? 'C:\\allowed\\dir\\test.txt' : '/allowed/dir/test.txt';
expect(result).toEqual([expectedResult]);
});
it('handles validation errors during search', async () => {
const mockEntries = [
{ name: 'test.txt', isDirectory: () => false },
{ name: 'invalid_file.txt', isDirectory: () => false }
];
mockFs.readdir.mockResolvedValueOnce(mockEntries as any);
// Mock validatePath to throw error for invalid_file.txt
mockFs.realpath.mockImplementation(async (path: any) => {
if (path.toString().includes('invalid_file.txt')) {
throw new Error('Access denied');
}
return path.toString();
});
const testDir = process.platform === 'win32' ? 'C:\\allowed\\dir' : '/allowed/dir';
const allowedDirs = process.platform === 'win32' ? ['C:\\allowed'] : ['/allowed'];
const result = await searchFilesWithValidation(
testDir,
'*test*',
allowedDirs,
{}
);
// Should only return the valid file, skipping the invalid one
const expectedResult = process.platform === 'win32' ? 'C:\\allowed\\dir\\test.txt' : '/allowed/dir/test.txt';
expect(result).toEqual([expectedResult]);
});
it('handles complex exclude patterns with wildcards', async () => {
const mockEntries = [
{ name: 'test.txt', isDirectory: () => false },
{ name: 'test.backup', isDirectory: () => false },
{ name: 'important_test.js', isDirectory: () => false }
];
mockFs.readdir.mockResolvedValueOnce(mockEntries as any);
const testDir = process.platform === 'win32' ? 'C:\\allowed\\dir' : '/allowed/dir';
const allowedDirs = process.platform === 'win32' ? ['C:\\allowed'] : ['/allowed'];
const result = await searchFilesWithValidation(
testDir,
'*test*',
allowedDirs,
{ excludePatterns: ['*.backup'] }
);
const expectedResults = process.platform === 'win32' ? [
'C:\\allowed\\dir\\test.txt',
'C:\\allowed\\dir\\important_test.js'
] : [
'/allowed/dir/test.txt',
'/allowed/dir/important_test.js'
];
expect(result).toEqual(expectedResults);
});
});
});
describe('File Editing Functions', () => {
describe('applyFileEdits', () => {
beforeEach(() => {
mockFs.readFile.mockResolvedValue('line1\nline2\nline3\n');
mockFs.writeFile.mockResolvedValue(undefined);
});
it('applies simple text replacement', async () => {
const edits = [
{ oldText: 'line2', newText: 'modified line2' }
];
mockFs.rename.mockResolvedValueOnce(undefined);
const result = await applyFileEdits('/test/file.txt', edits, false);
expect(result).toContain('modified line2');
// Should write to temporary file then rename
expect(mockFs.writeFile).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
'line1\nmodified line2\nline3\n',
'utf-8'
);
expect(mockFs.rename).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
'/test/file.txt'
);
});
it('handles dry run mode', async () => {
const edits = [
{ oldText: 'line2', newText: 'modified line2' }
];
const result = await applyFileEdits('/test/file.txt', edits, true);
expect(result).toContain('modified line2');
expect(mockFs.writeFile).not.toHaveBeenCalled();
});
it('applies multiple edits sequentially', async () => {
const edits = [
{ oldText: 'line1', newText: 'first line' },
{ oldText: 'line3', newText: 'third line' }
];
mockFs.rename.mockResolvedValueOnce(undefined);
await applyFileEdits('/test/file.txt', edits, false);
expect(mockFs.writeFile).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
'first line\nline2\nthird line\n',
'utf-8'
);
expect(mockFs.rename).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
'/test/file.txt'
);
});
it('handles whitespace-flexible matching', async () => {
mockFs.readFile.mockResolvedValue(' line1\n line2\n line3\n');
const edits = [
{ oldText: 'line2', newText: 'modified line2' }
];
mockFs.rename.mockResolvedValueOnce(undefined);
await applyFileEdits('/test/file.txt', edits, false);
expect(mockFs.writeFile).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
' line1\n modified line2\n line3\n',
'utf-8'
);
expect(mockFs.rename).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
'/test/file.txt'
);
});
it('throws error for non-matching edits', async () => {
const edits = [
{ oldText: 'nonexistent line', newText: 'replacement' }
];
await expect(applyFileEdits('/test/file.txt', edits, false))
.rejects.toThrow('Could not find exact match for edit');
});
it('handles complex multi-line edits with indentation', async () => {
mockFs.readFile.mockResolvedValue('function test() {\n console.log("hello");\n return true;\n}');
const edits = [
{
oldText: ' console.log("hello");\n return true;',
newText: ' console.log("world");\n console.log("test");\n return false;'
}
];
mockFs.rename.mockResolvedValueOnce(undefined);
await applyFileEdits('/test/file.js', edits, false);
expect(mockFs.writeFile).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/),
'function test() {\n console.log("world");\n console.log("test");\n return false;\n}',
'utf-8'
);
expect(mockFs.rename).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/),
'/test/file.js'
);
});
it('handles edits with different indentation patterns', async () => {
mockFs.readFile.mockResolvedValue(' if (condition) {\n doSomething();\n }');
const edits = [
{
oldText: 'doSomething();',
newText: 'doSomethingElse();\n doAnotherThing();'
}
];
mockFs.rename.mockResolvedValueOnce(undefined);
await applyFileEdits('/test/file.js', edits, false);
expect(mockFs.writeFile).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/),
' if (condition) {\n doSomethingElse();\n doAnotherThing();\n }',
'utf-8'
);
expect(mockFs.rename).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/),
'/test/file.js'
);
});
it('handles CRLF line endings in file content', async () => {
mockFs.readFile.mockResolvedValue('line1\r\nline2\r\nline3\r\n');
const edits = [
{ oldText: 'line2', newText: 'modified line2' }
];
mockFs.rename.mockResolvedValueOnce(undefined);
await applyFileEdits('/test/file.txt', edits, false);
expect(mockFs.writeFile).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
'line1\nmodified line2\nline3\n',
'utf-8'
);
expect(mockFs.rename).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
'/test/file.txt'
);
});
});
describe('tailFile', () => {
it('handles empty files', async () => {
mockFs.stat.mockResolvedValue({ size: 0 } as any);
const result = await tailFile('/test/empty.txt', 5);
expect(result).toBe('');
expect(mockFs.open).not.toHaveBeenCalled();
});
it('calls stat to check file size', async () => {
mockFs.stat.mockResolvedValue({ size: 100 } as any);
// Mock file handle with proper typing
const mockFileHandle = {
read: vi.fn(),
close: vi.fn()
} as any;
mockFileHandle.read.mockResolvedValue({ bytesRead: 0 });
mockFileHandle.close.mockResolvedValue(undefined);
mockFs.open.mockResolvedValue(mockFileHandle);
await tailFile('/test/file.txt', 2);
expect(mockFs.stat).toHaveBeenCalledWith('/test/file.txt');
expect(mockFs.open).toHaveBeenCalledWith('/test/file.txt', 'r');
});
it('handles files with content and returns last lines', async () => {
mockFs.stat.mockResolvedValue({ size: 50 } as any);
const mockFileHandle = {
read: vi.fn(),
close: vi.fn()
} as any;
// Simulate reading file content in chunks
mockFileHandle.read
.mockResolvedValueOnce({ bytesRead: 20, buffer: Buffer.from('line3\nline4\nline5\n') })
.mockResolvedValueOnce({ bytesRead: 0 });
mockFileHandle.close.mockResolvedValue(undefined);
mockFs.open.mockResolvedValue(mockFileHandle);
const result = await tailFile('/test/file.txt', 2);
expect(mockFileHandle.close).toHaveBeenCalled();
});
it('handles read errors gracefully', async () => {
mockFs.stat.mockResolvedValue({ size: 100 } as any);
const mockFileHandle = {
read: vi.fn(),
close: vi.fn()
} as any;
mockFileHandle.read.mockResolvedValue({ bytesRead: 0 });
mockFileHandle.close.mockResolvedValue(undefined);
mockFs.open.mockResolvedValue(mockFileHandle);
await tailFile('/test/file.txt', 5);
expect(mockFileHandle.close).toHaveBeenCalled();
});
});
describe('headFile', () => {
it('opens file for reading', async () => {
// Mock file handle with proper typing
const mockFileHandle = {
read: vi.fn(),
close: vi.fn()
} as any;
mockFileHandle.read.mockResolvedValue({ bytesRead: 0 });
mockFileHandle.close.mockResolvedValue(undefined);
mockFs.open.mockResolvedValue(mockFileHandle);
await headFile('/test/file.txt', 2);
expect(mockFs.open).toHaveBeenCalledWith('/test/file.txt', 'r');
});
it('handles files with content and returns first lines', async () => {
const mockFileHandle = {
read: vi.fn(),
close: vi.fn()
} as any;
// Simulate reading file content with newlines
mockFileHandle.read
.mockResolvedValueOnce({ bytesRead: 20, buffer: Buffer.from('line1\nline2\nline3\n') })
.mockResolvedValueOnce({ bytesRead: 0 });
mockFileHandle.close.mockResolvedValue(undefined);
mockFs.open.mockResolvedValue(mockFileHandle);
const result = await headFile('/test/file.txt', 2);
expect(mockFileHandle.close).toHaveBeenCalled();
});
it('handles files with leftover content', async () => {
const mockFileHandle = {
read: vi.fn(),
close: vi.fn()
} as any;
// Simulate reading file content without final newline
mockFileHandle.read
.mockResolvedValueOnce({ bytesRead: 15, buffer: Buffer.from('line1\nline2\nend') })
.mockResolvedValueOnce({ bytesRead: 0 });
mockFileHandle.close.mockResolvedValue(undefined);
mockFs.open.mockResolvedValue(mockFileHandle);
const result = await headFile('/test/file.txt', 5);
expect(mockFileHandle.close).toHaveBeenCalled();
});
it('handles reaching requested line count', async () => {
const mockFileHandle = {
read: vi.fn(),
close: vi.fn()
} as any;
// Simulate reading exactly the requested number of lines
mockFileHandle.read
.mockResolvedValueOnce({ bytesRead: 12, buffer: Buffer.from('line1\nline2\n') })
.mockResolvedValueOnce({ bytesRead: 0 });
mockFileHandle.close.mockResolvedValue(undefined);
mockFs.open.mockResolvedValue(mockFileHandle);
const result = await headFile('/test/file.txt', 2);
expect(mockFileHandle.close).toHaveBeenCalled();
});
});
});
});
```
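The `formatSize` assertions above pin down the expected rounding and unit boundaries. As a reference point, here is one minimal TypeScript implementation consistent with those assertions (a sketch only; the actual implementation lives in `src/filesystem/lib.ts` and may differ):
```typescript
// Minimal sketch matching the formatSize expectations asserted above.
function formatSize(bytes: number): string {
  const units = ["B", "KB", "MB", "GB", "TB"];
  if (bytes === 0) return "0 B";
  // Clamp the unit index to B..TB; NaN from negative input falls through and
  // produces the "NaN" output the edge-case tests tolerate.
  const index = Math.max(0, Math.min(Math.floor(Math.log(bytes) / Math.log(1024)), units.length - 1));
  if (index === 0) return `${bytes} ${units[0]}`;
  return `${(bytes / Math.pow(1024, index)).toFixed(2)} ${units[index]}`;
}

console.log(formatSize(1536));      // "1.50 KB"
console.log(formatSize(1048575));   // "1024.00 KB"
console.log(formatSize(1024 ** 5)); // "1024.00 TB"
```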
--------------------------------------------------------------------------------
/src/filesystem/index.ts:
--------------------------------------------------------------------------------
```typescript
#!/usr/bin/env node
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import {
CallToolResult,
RootsListChangedNotificationSchema,
type Root,
} from "@modelcontextprotocol/sdk/types.js";
import fs from "fs/promises";
import { createReadStream } from "fs";
import path from "path";
import { z } from "zod";
import { minimatch } from "minimatch";
import { normalizePath, expandHome } from './path-utils.js';
import { getValidRootDirectories } from './roots-utils.js';
import {
// Function imports
formatSize,
validatePath,
getFileStats,
readFileContent,
writeFileContent,
searchFilesWithValidation,
applyFileEdits,
tailFile,
headFile,
setAllowedDirectories,
} from './lib.js';
// Command line argument parsing
const args = process.argv.slice(2);
if (args.length === 0) {
console.error("Usage: mcp-server-filesystem [allowed-directory] [additional-directories...]");
console.error("Note: Allowed directories can be provided via:");
console.error(" 1. Command-line arguments (shown above)");
console.error(" 2. MCP roots protocol (if client supports it)");
console.error("At least one directory must be provided by EITHER method for the server to operate.");
}
// Store allowed directories in normalized and resolved form
let allowedDirectories = await Promise.all(
args.map(async (dir) => {
const expanded = expandHome(dir);
const absolute = path.resolve(expanded);
try {
// Security: Resolve symlinks in allowed directories during startup
// This ensures we know the real paths and can validate against them later
const resolved = await fs.realpath(absolute);
return normalizePath(resolved);
} catch (error) {
// If we can't resolve (doesn't exist), use the normalized absolute path
// This allows configuring allowed dirs that will be created later
return normalizePath(absolute);
}
})
);
// Validate that all directories exist and are accessible
await Promise.all(allowedDirectories.map(async (dir) => {
try {
const stats = await fs.stat(dir);
if (!stats.isDirectory()) {
console.error(`Error: ${dir} is not a directory`);
process.exit(1);
}
} catch (error) {
console.error(`Error accessing directory ${dir}:`, error);
process.exit(1);
}
}));
// Initialize the global allowedDirectories in lib.ts
setAllowedDirectories(allowedDirectories);
// Schema definitions
const ReadTextFileArgsSchema = z.object({
path: z.string(),
tail: z.number().optional().describe('If provided, returns only the last N lines of the file'),
head: z.number().optional().describe('If provided, returns only the first N lines of the file')
});
const ReadMediaFileArgsSchema = z.object({
path: z.string()
});
const ReadMultipleFilesArgsSchema = z.object({
paths: z
.array(z.string())
.min(1, "At least one file path must be provided")
.describe("Array of file paths to read. Each path must be a string pointing to a valid file within allowed directories."),
});
const WriteFileArgsSchema = z.object({
path: z.string(),
content: z.string(),
});
const EditOperation = z.object({
oldText: z.string().describe('Text to search for - must match exactly'),
newText: z.string().describe('Text to replace with')
});
const EditFileArgsSchema = z.object({
path: z.string(),
edits: z.array(EditOperation),
dryRun: z.boolean().default(false).describe('Preview changes using git-style diff format')
});
const CreateDirectoryArgsSchema = z.object({
path: z.string(),
});
const ListDirectoryArgsSchema = z.object({
path: z.string(),
});
const ListDirectoryWithSizesArgsSchema = z.object({
path: z.string(),
sortBy: z.enum(['name', 'size']).optional().default('name').describe('Sort entries by name or size'),
});
const DirectoryTreeArgsSchema = z.object({
path: z.string(),
excludePatterns: z.array(z.string()).optional().default([])
});
const MoveFileArgsSchema = z.object({
source: z.string(),
destination: z.string(),
});
const SearchFilesArgsSchema = z.object({
path: z.string(),
pattern: z.string(),
excludePatterns: z.array(z.string()).optional().default([])
});
const GetFileInfoArgsSchema = z.object({
path: z.string(),
});
// Server setup
const server = new McpServer(
{
name: "secure-filesystem-server",
version: "0.2.0",
}
);
// Reads a file as a stream of buffers, concatenates them, and then encodes
// the result to a Base64 string. This is a memory-efficient way to handle
// binary data from a stream before the final encoding.
async function readFileAsBase64Stream(filePath: string): Promise<string> {
return new Promise((resolve, reject) => {
const stream = createReadStream(filePath);
const chunks: Buffer[] = [];
stream.on('data', (chunk) => {
chunks.push(chunk as Buffer);
});
stream.on('end', () => {
const finalBuffer = Buffer.concat(chunks);
resolve(finalBuffer.toString('base64'));
});
stream.on('error', (err) => reject(err));
});
}
// Tool registrations
// read_file (deprecated) and read_text_file
const readTextFileHandler = async (args: z.infer<typeof ReadTextFileArgsSchema>) => {
const validPath = await validatePath(args.path);
if (args.head && args.tail) {
throw new Error("Cannot specify both head and tail parameters simultaneously");
}
let content: string;
if (args.tail) {
content = await tailFile(validPath, args.tail);
} else if (args.head) {
content = await headFile(validPath, args.head);
} else {
content = await readFileContent(validPath);
}
return {
content: [{ type: "text" as const, text: content }],
structuredContent: { content }
};
};
server.registerTool(
"read_file",
{
title: "Read File (Deprecated)",
description: "Read the complete contents of a file as text. DEPRECATED: Use read_text_file instead.",
inputSchema: ReadTextFileArgsSchema.shape,
outputSchema: { content: z.string() },
annotations: { readOnlyHint: true }
},
readTextFileHandler
);
server.registerTool(
"read_text_file",
{
title: "Read Text File",
description:
"Read the complete contents of a file from the file system as text. " +
"Handles various text encodings and provides detailed error messages " +
"if the file cannot be read. Use this tool when you need to examine " +
"the contents of a single file. Use the 'head' parameter to read only " +
"the first N lines of a file, or the 'tail' parameter to read only " +
"the last N lines of a file. Operates on the file as text regardless of extension. " +
"Only works within allowed directories.",
inputSchema: {
path: z.string(),
tail: z.number().optional().describe("If provided, returns only the last N lines of the file"),
head: z.number().optional().describe("If provided, returns only the first N lines of the file")
},
outputSchema: { content: z.string() },
annotations: { readOnlyHint: true }
},
readTextFileHandler
);
server.registerTool(
"read_media_file",
{
title: "Read Media File",
description:
"Read an image or audio file. Returns the base64 encoded data and MIME type. " +
"Only works within allowed directories.",
inputSchema: {
path: z.string()
},
outputSchema: {
content: z.array(z.object({
type: z.enum(["image", "audio", "blob"]),
data: z.string(),
mimeType: z.string()
}))
},
annotations: { readOnlyHint: true }
},
async (args: z.infer<typeof ReadMediaFileArgsSchema>) => {
const validPath = await validatePath(args.path);
const extension = path.extname(validPath).toLowerCase();
const mimeTypes: Record<string, string> = {
".png": "image/png",
".jpg": "image/jpeg",
".jpeg": "image/jpeg",
".gif": "image/gif",
".webp": "image/webp",
".bmp": "image/bmp",
".svg": "image/svg+xml",
".mp3": "audio/mpeg",
".wav": "audio/wav",
".ogg": "audio/ogg",
".flac": "audio/flac",
};
const mimeType = mimeTypes[extension] || "application/octet-stream";
const data = await readFileAsBase64Stream(validPath);
const type = mimeType.startsWith("image/")
? "image"
: mimeType.startsWith("audio/")
? "audio"
// Fallback for other binary types; not officially supported by the spec, but it has been in use for some time
: "blob";
const contentItem = { type: type as 'image' | 'audio' | 'blob', data, mimeType };
return {
content: [contentItem],
structuredContent: { content: [contentItem] }
} as unknown as CallToolResult;
}
);
server.registerTool(
"read_multiple_files",
{
title: "Read Multiple Files",
description:
"Read the contents of multiple files simultaneously. This is more " +
"efficient than reading files one by one when you need to analyze " +
"or compare multiple files. Each file's content is returned with its " +
"path as a reference. Failed reads for individual files won't stop " +
"the entire operation. Only works within allowed directories.",
inputSchema: {
paths: z.array(z.string())
.min(1)
.describe("Array of file paths to read. Each path must be a string pointing to a valid file within allowed directories.")
},
outputSchema: { content: z.string() },
annotations: { readOnlyHint: true }
},
async (args: z.infer<typeof ReadMultipleFilesArgsSchema>) => {
const results = await Promise.all(
args.paths.map(async (filePath: string) => {
try {
const validPath = await validatePath(filePath);
const content = await readFileContent(validPath);
return `${filePath}:\n${content}\n`;
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
return `${filePath}: Error - ${errorMessage}`;
}
}),
);
const text = results.join("\n---\n");
return {
content: [{ type: "text" as const, text }],
structuredContent: { content: text }
};
}
);
server.registerTool(
"write_file",
{
title: "Write File",
description:
"Create a new file or completely overwrite an existing file with new content. " +
"Use with caution as it will overwrite existing files without warning. " +
"Handles text content with proper encoding. Only works within allowed directories.",
inputSchema: {
path: z.string(),
content: z.string()
},
outputSchema: { content: z.string() },
annotations: { readOnlyHint: false, idempotentHint: true, destructiveHint: true }
},
async (args: z.infer<typeof WriteFileArgsSchema>) => {
const validPath = await validatePath(args.path);
await writeFileContent(validPath, args.content);
const text = `Successfully wrote to ${args.path}`;
return {
content: [{ type: "text" as const, text }],
structuredContent: { content: text }
};
}
);
server.registerTool(
"edit_file",
{
title: "Edit File",
description:
"Make line-based edits to a text file. Each edit replaces exact line sequences " +
"with new content. Returns a git-style diff showing the changes made. " +
"Only works within allowed directories.",
inputSchema: {
path: z.string(),
edits: z.array(z.object({
oldText: z.string().describe("Text to search for - must match exactly"),
newText: z.string().describe("Text to replace with")
})),
dryRun: z.boolean().default(false).describe("Preview changes using git-style diff format")
},
outputSchema: { content: z.string() },
annotations: { readOnlyHint: false, idempotentHint: false, destructiveHint: true }
},
async (args: z.infer<typeof EditFileArgsSchema>) => {
const validPath = await validatePath(args.path);
const result = await applyFileEdits(validPath, args.edits, args.dryRun);
return {
content: [{ type: "text" as const, text: result }],
structuredContent: { content: result }
};
}
);
server.registerTool(
"create_directory",
{
title: "Create Directory",
description:
"Create a new directory or ensure a directory exists. Can create multiple " +
"nested directories in one operation. If the directory already exists, " +
"this operation will succeed silently. Perfect for setting up directory " +
"structures for projects or ensuring required paths exist. Only works within allowed directories.",
inputSchema: {
path: z.string()
},
outputSchema: { content: z.string() },
annotations: { readOnlyHint: false, idempotentHint: true, destructiveHint: false }
},
async (args: z.infer<typeof CreateDirectoryArgsSchema>) => {
const validPath = await validatePath(args.path);
await fs.mkdir(validPath, { recursive: true });
const text = `Successfully created directory ${args.path}`;
return {
content: [{ type: "text" as const, text }],
structuredContent: { content: text }
};
}
);
server.registerTool(
"list_directory",
{
title: "List Directory",
description:
"Get a detailed listing of all files and directories in a specified path. " +
"Results clearly distinguish between files and directories with [FILE] and [DIR] " +
"prefixes. This tool is essential for understanding directory structure and " +
"finding specific files within a directory. Only works within allowed directories.",
inputSchema: {
path: z.string()
},
outputSchema: { content: z.string() },
annotations: { readOnlyHint: true }
},
async (args: z.infer<typeof ListDirectoryArgsSchema>) => {
const validPath = await validatePath(args.path);
const entries = await fs.readdir(validPath, { withFileTypes: true });
const formatted = entries
.map((entry) => `${entry.isDirectory() ? "[DIR]" : "[FILE]"} ${entry.name}`)
.join("\n");
return {
content: [{ type: "text" as const, text: formatted }],
structuredContent: { content: formatted }
};
}
);
server.registerTool(
"list_directory_with_sizes",
{
title: "List Directory with Sizes",
description:
"Get a detailed listing of all files and directories in a specified path, including sizes. " +
"Results clearly distinguish between files and directories with [FILE] and [DIR] " +
"prefixes. This tool is useful for understanding directory structure and " +
"finding specific files within a directory. Only works within allowed directories.",
inputSchema: {
path: z.string(),
sortBy: z.enum(["name", "size"]).optional().default("name").describe("Sort entries by name or size")
},
outputSchema: { content: z.string() },
annotations: { readOnlyHint: true }
},
async (args: z.infer<typeof ListDirectoryWithSizesArgsSchema>) => {
const validPath = await validatePath(args.path);
const entries = await fs.readdir(validPath, { withFileTypes: true });
// Get detailed information for each entry
const detailedEntries = await Promise.all(
entries.map(async (entry) => {
const entryPath = path.join(validPath, entry.name);
try {
const stats = await fs.stat(entryPath);
return {
name: entry.name,
isDirectory: entry.isDirectory(),
size: stats.size,
mtime: stats.mtime
};
} catch (error) {
return {
name: entry.name,
isDirectory: entry.isDirectory(),
size: 0,
mtime: new Date(0)
};
}
})
);
// Sort entries based on sortBy parameter
const sortedEntries = [...detailedEntries].sort((a, b) => {
if (args.sortBy === 'size') {
return b.size - a.size; // Descending by size
}
// Default sort by name
return a.name.localeCompare(b.name);
});
// Format the output
const formattedEntries = sortedEntries.map(entry =>
`${entry.isDirectory ? "[DIR]" : "[FILE]"} ${entry.name.padEnd(30)} ${
entry.isDirectory ? "" : formatSize(entry.size).padStart(10)
}`
);
// Add summary
const totalFiles = detailedEntries.filter(e => !e.isDirectory).length;
const totalDirs = detailedEntries.filter(e => e.isDirectory).length;
const totalSize = detailedEntries.reduce((sum, entry) => sum + (entry.isDirectory ? 0 : entry.size), 0);
const summary = [
"",
`Total: ${totalFiles} files, ${totalDirs} directories`,
`Combined size: ${formatSize(totalSize)}`
];
const text = [...formattedEntries, ...summary].join("\n");
const contentBlock = { type: "text" as const, text };
return {
content: [contentBlock],
structuredContent: { content: [contentBlock] }
};
}
);
server.registerTool(
"directory_tree",
{
title: "Directory Tree",
description:
"Get a recursive tree view of files and directories as a JSON structure. " +
"Each entry includes 'name', 'type' (file/directory), and 'children' for directories. " +
"Files have no children array, while directories always have a children array (which may be empty). " +
"The output is formatted with 2-space indentation for readability. Only works within allowed directories.",
inputSchema: {
path: z.string(),
excludePatterns: z.array(z.string()).optional().default([])
},
outputSchema: { content: z.string() },
annotations: { readOnlyHint: true }
},
async (args: z.infer<typeof DirectoryTreeArgsSchema>) => {
interface TreeEntry {
name: string;
type: 'file' | 'directory';
children?: TreeEntry[];
}
const rootPath = args.path;
async function buildTree(currentPath: string, excludePatterns: string[] = []): Promise<TreeEntry[]> {
const validPath = await validatePath(currentPath);
const entries = await fs.readdir(validPath, { withFileTypes: true });
const result: TreeEntry[] = [];
for (const entry of entries) {
const relativePath = path.relative(rootPath, path.join(currentPath, entry.name));
const shouldExclude = excludePatterns.some(pattern => {
if (pattern.includes('*')) {
return minimatch(relativePath, pattern, { dot: true });
}
// For files: match exact name or as part of path
// For directories: match as directory path
return minimatch(relativePath, pattern, { dot: true }) ||
minimatch(relativePath, `**/${pattern}`, { dot: true }) ||
minimatch(relativePath, `**/${pattern}/**`, { dot: true });
});
if (shouldExclude)
continue;
const entryData: TreeEntry = {
name: entry.name,
type: entry.isDirectory() ? 'directory' : 'file'
};
if (entry.isDirectory()) {
const subPath = path.join(currentPath, entry.name);
entryData.children = await buildTree(subPath, excludePatterns);
}
result.push(entryData);
}
return result;
}
const treeData = await buildTree(rootPath, args.excludePatterns);
const text = JSON.stringify(treeData, null, 2);
const contentBlock = { type: "text" as const, text };
return {
content: [contentBlock],
structuredContent: { content: [contentBlock] }
};
}
);
server.registerTool(
"move_file",
{
title: "Move File",
description:
"Move or rename files and directories. Can move files between directories " +
"and rename them in a single operation. If the destination exists, the " +
"operation will fail. Works across different directories and can be used " +
"for simple renaming within the same directory. Both source and destination must be within allowed directories.",
inputSchema: {
source: z.string(),
destination: z.string()
},
outputSchema: { content: z.string() },
annotations: { readOnlyHint: false, idempotentHint: false, destructiveHint: false }
},
async (args: z.infer<typeof MoveFileArgsSchema>) => {
const validSourcePath = await validatePath(args.source);
const validDestPath = await validatePath(args.destination);
await fs.rename(validSourcePath, validDestPath);
const text = `Successfully moved ${args.source} to ${args.destination}`;
const contentBlock = { type: "text" as const, text };
return {
content: [contentBlock],
structuredContent: { content: [contentBlock] }
};
}
);
server.registerTool(
"search_files",
{
title: "Search Files",
description:
"Recursively search for files and directories matching a pattern. " +
"The patterns should be glob-style patterns that match paths relative to the working directory. " +
"Use pattern like '*.ext' to match files in current directory, and '**/*.ext' to match files in all subdirectories. " +
"Returns full paths to all matching items. Great for finding files when you don't know their exact location. " +
"Only searches within allowed directories.",
inputSchema: {
path: z.string(),
pattern: z.string(),
excludePatterns: z.array(z.string()).optional().default([])
},
outputSchema: { content: z.string() },
annotations: { readOnlyHint: true }
},
async (args: z.infer<typeof SearchFilesArgsSchema>) => {
const validPath = await validatePath(args.path);
const results = await searchFilesWithValidation(validPath, args.pattern, allowedDirectories, { excludePatterns: args.excludePatterns });
const text = results.length > 0 ? results.join("\n") : "No matches found";
return {
content: [{ type: "text" as const, text }],
structuredContent: { content: text }
};
}
);
server.registerTool(
"get_file_info",
{
title: "Get File Info",
description:
"Retrieve detailed metadata about a file or directory. Returns comprehensive " +
"information including size, creation time, last modified time, permissions, " +
"and type. This tool is perfect for understanding file characteristics " +
"without reading the actual content. Only works within allowed directories.",
inputSchema: {
path: z.string()
},
outputSchema: { content: z.string() },
annotations: { readOnlyHint: true }
},
async (args: z.infer<typeof GetFileInfoArgsSchema>) => {
const validPath = await validatePath(args.path);
const info = await getFileStats(validPath);
const text = Object.entries(info)
.map(([key, value]) => `${key}: ${value}`)
.join("\n");
return {
content: [{ type: "text" as const, text }],
structuredContent: { content: text }
};
}
);
server.registerTool(
"list_allowed_directories",
{
title: "List Allowed Directories",
description:
"Returns the list of directories that this server is allowed to access. " +
"Subdirectories within these allowed directories are also accessible. " +
"Use this to understand which directories and their nested paths are available " +
"before trying to access files.",
inputSchema: {},
outputSchema: { content: z.string() },
annotations: { readOnlyHint: true }
},
async () => {
const text = `Allowed directories:\n${allowedDirectories.join('\n')}`;
return {
content: [{ type: "text" as const, text }],
structuredContent: { content: text }
};
}
);
// Updates allowed directories based on MCP client roots
async function updateAllowedDirectoriesFromRoots(requestedRoots: Root[]) {
const validatedRootDirs = await getValidRootDirectories(requestedRoots);
if (validatedRootDirs.length > 0) {
allowedDirectories = [...validatedRootDirs];
setAllowedDirectories(allowedDirectories); // Update the global state in lib.ts
console.error(`Updated allowed directories from MCP roots: ${validatedRootDirs.length} valid directories`);
} else {
console.error("No valid root directories provided by client");
}
}
// Handles dynamic roots updates during runtime: when the client sends a "roots/list_changed" notification, the server fetches the updated roots and replaces all allowed directories with the new roots.
server.server.setNotificationHandler(RootsListChangedNotificationSchema, async () => {
try {
// Request the updated roots list from the client
const response = await server.server.listRoots();
if (response && 'roots' in response) {
await updateAllowedDirectoriesFromRoots(response.roots);
}
} catch (error) {
console.error("Failed to request roots from client:", error instanceof Error ? error.message : String(error));
}
});
// Handles post-initialization setup, specifically checking for and fetching MCP roots.
server.server.oninitialized = async () => {
const clientCapabilities = server.server.getClientCapabilities();
if (clientCapabilities?.roots) {
try {
const response = await server.server.listRoots();
if (response && 'roots' in response) {
await updateAllowedDirectoriesFromRoots(response.roots);
} else {
console.error("Client returned no roots set, keeping current settings");
}
} catch (error) {
console.error("Failed to request initial roots from client:", error instanceof Error ? error.message : String(error));
}
} else {
if (allowedDirectories.length > 0) {
console.error("Client does not support MCP Roots, using allowed directories set from server args:", allowedDirectories);
} else {
throw new Error(`Server cannot operate: No allowed directories available. Server was started without command-line directories and client either does not support MCP roots protocol or provided empty roots. Please either: 1) Start server with directory arguments, or 2) Use a client that supports MCP roots protocol and provides valid root directories.`);
}
}
};
// Start server
async function runServer() {
const transport = new StdioServerTransport();
await server.connect(transport);
console.error("Secure MCP Filesystem Server running on stdio");
if (allowedDirectories.length === 0) {
console.error("Started without allowed directories - waiting for client to provide roots via MCP protocol");
}
}
runServer().catch((error) => {
console.error("Fatal error running server:", error);
process.exit(1);
});
```
--------------------------------------------------------------------------------
/src/everything/everything.ts:
--------------------------------------------------------------------------------
```typescript
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import type { RequestHandlerExtra } from "@modelcontextprotocol/sdk/shared/protocol.js";
import {
CallToolRequestSchema,
ClientCapabilities,
CompleteRequestSchema,
CreateMessageRequest,
CreateMessageResultSchema,
ElicitResultSchema,
GetPromptRequestSchema,
ListPromptsRequestSchema,
ListResourcesRequestSchema,
ListResourceTemplatesRequestSchema,
ListToolsRequestSchema,
LoggingLevel,
ReadResourceRequestSchema,
Resource,
RootsListChangedNotificationSchema,
ServerNotification,
ServerRequest,
SubscribeRequestSchema,
Tool,
UnsubscribeRequestSchema,
type Root
} from "@modelcontextprotocol/sdk/types.js";
import { z } from "zod";
import { zodToJsonSchema } from "zod-to-json-schema";
import { readFileSync } from "fs";
import { fileURLToPath } from "url";
import { dirname, join } from "path";
import JSZip from "jszip";
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const instructions = readFileSync(join(__dirname, "instructions.md"), "utf-8");
type ToolInput = Tool["inputSchema"];
type ToolOutput = Tool["outputSchema"];
type SendRequest = RequestHandlerExtra<ServerRequest, ServerNotification>["sendRequest"];
/* Input schemas for tools implemented in this server */
const EchoSchema = z.object({
message: z.string().describe("Message to echo"),
});
const AddSchema = z.object({
a: z.number().describe("First number"),
b: z.number().describe("Second number"),
});
const LongRunningOperationSchema = z.object({
duration: z
.number()
.default(10)
.describe("Duration of the operation in seconds"),
steps: z
.number()
.default(5)
.describe("Number of steps in the operation"),
});
const PrintEnvSchema = z.object({});
const SampleLLMSchema = z.object({
prompt: z.string().describe("The prompt to send to the LLM"),
maxTokens: z
.number()
.default(100)
.describe("Maximum number of tokens to generate"),
});
const GetTinyImageSchema = z.object({});
const AnnotatedMessageSchema = z.object({
messageType: z
.enum(["error", "success", "debug"])
.describe("Type of message to demonstrate different annotation patterns"),
includeImage: z
.boolean()
.default(false)
.describe("Whether to include an example image"),
});
const GetResourceReferenceSchema = z.object({
resourceId: z
.number()
.min(1)
.max(100)
.describe("ID of the resource to reference (1-100)"),
});
const ElicitationSchema = z.object({});
const GetResourceLinksSchema = z.object({
count: z
.number()
.min(1)
.max(10)
.default(3)
.describe("Number of resource links to return (1-10)"),
});
const ListRootsSchema = z.object({});
const StructuredContentSchema = {
input: z.object({
location: z
.string()
.trim()
.min(1)
.describe("City name or zip code"),
}),
output: z.object({
temperature: z
.number()
.describe("Temperature in celsius"),
conditions: z
.string()
.describe("Weather conditions description"),
humidity: z
.number()
.describe("Humidity percentage"),
})
};
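// Input/output schema pair for the structuredContent tool; the output schema is advertised so clients can validate the returned structuredContent.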
const ZipResourcesInputSchema = z.object({
files: z.record(z.string().url().describe("URL of the file to include in the zip")).describe("Mapping of file names to URLs to include in the zip"),
});
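// Hypothetical example argument for the zip tool (file names mapped to URLs, which may be data URIs):
//   { "files": { "hello.txt": "data:text/plain;base64,aGVsbG8=" } }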
enum ToolName {
ECHO = "echo",
ADD = "add",
LONG_RUNNING_OPERATION = "longRunningOperation",
PRINT_ENV = "printEnv",
SAMPLE_LLM = "sampleLLM",
GET_TINY_IMAGE = "getTinyImage",
ANNOTATED_MESSAGE = "annotatedMessage",
GET_RESOURCE_REFERENCE = "getResourceReference",
ELICITATION = "startElicitation",
GET_RESOURCE_LINKS = "getResourceLinks",
STRUCTURED_CONTENT = "structuredContent",
ZIP_RESOURCES = "zip",
LIST_ROOTS = "listRoots"
}
enum PromptName {
SIMPLE = "simple_prompt",
COMPLEX = "complex_prompt",
RESOURCE = "resource_prompt",
}
// Example completion values
const EXAMPLE_COMPLETIONS = {
style: ["casual", "formal", "technical", "friendly"],
temperature: ["0", "0.5", "0.7", "1.0"],
resourceId: ["1", "2", "3", "4", "5"],
};
export const createServer = () => {
const server = new Server(
{
name: "example-servers/everything",
title: "Everything Example Server",
version: "1.0.0",
},
{
capabilities: {
prompts: {},
resources: { subscribe: true },
tools: {},
logging: {},
completions: {}
},
instructions
}
);
let subscriptions: Set<string> = new Set();
let subsUpdateInterval: NodeJS.Timeout | undefined;
let stdErrUpdateInterval: NodeJS.Timeout | undefined;
let logsUpdateInterval: NodeJS.Timeout | undefined;
// Store client capabilities
let clientCapabilities: ClientCapabilities | undefined;
// Roots state management
let currentRoots: Root[] = [];
let clientSupportsRoots = false;
let sessionId: string | undefined;
// Function to start notification intervals when a client connects
const startNotificationIntervals = (sid?: string) => {
sessionId = sid;
if (!subsUpdateInterval) {
subsUpdateInterval = setInterval(() => {
for (const uri of subscriptions) {
server.notification({
method: "notifications/resources/updated",
params: { uri },
});
}
}, 10000);
}
const maybeAppendSessionId = sessionId ? ` - SessionId ${sessionId}` : "";
const messages: { level: LoggingLevel; data: string }[] = [
{ level: "debug", data: `Debug-level message${maybeAppendSessionId}` },
{ level: "info", data: `Info-level message${maybeAppendSessionId}` },
{ level: "notice", data: `Notice-level message${maybeAppendSessionId}` },
{ level: "warning", data: `Warning-level message${maybeAppendSessionId}` },
{ level: "error", data: `Error-level message${maybeAppendSessionId}` },
{ level: "critical", data: `Critical-level message${maybeAppendSessionId}` },
{ level: "alert", data: `Alert level-message${maybeAppendSessionId}` },
{ level: "emergency", data: `Emergency-level message${maybeAppendSessionId}` },
];
if (!logsUpdateInterval) {
console.error("Starting logs update interval");
logsUpdateInterval = setInterval(async () => {
await server.sendLoggingMessage(messages[Math.floor(Math.random() * messages.length)], sessionId);
}, 15000);
}
};
// Helper method to request sampling from client
const requestSampling = async (
context: string,
uri: string,
maxTokens: number = 100,
sendRequest: SendRequest
) => {
const request: CreateMessageRequest = {
method: "sampling/createMessage",
params: {
messages: [
{
role: "user",
content: {
type: "text",
text: `Resource ${uri} context: ${context}`,
},
},
],
systemPrompt: "You are a helpful test server.",
maxTokens,
temperature: 0.7,
includeContext: "thisServer",
},
};
return await sendRequest(request, CreateMessageResultSchema);
};
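// 100 static test resources: odd-numbered resources are plaintext, even-numbered ones are base64-encoded binary blobs.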
const ALL_RESOURCES: Resource[] = Array.from({ length: 100 }, (_, i) => {
const uri = `test://static/resource/${i + 1}`;
if (i % 2 === 0) {
return {
uri,
name: `Resource ${i + 1}`,
mimeType: "text/plain",
text: `Resource ${i + 1}: This is a plaintext resource`,
};
} else {
const buffer = Buffer.from(`Resource ${i + 1}: This is a base64 blob`);
return {
uri,
name: `Resource ${i + 1}`,
mimeType: "application/octet-stream",
blob: buffer.toString("base64"),
};
}
});
const PAGE_SIZE = 10;
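// Resources are served PAGE_SIZE at a time; the pagination cursor is the base64-encoded index of the next page's first resource.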
server.setRequestHandler(ListResourcesRequestSchema, async (request) => {
const cursor = request.params?.cursor;
let startIndex = 0;
if (cursor) {
const decodedCursor = parseInt(atob(cursor), 10);
if (!isNaN(decodedCursor)) {
startIndex = decodedCursor;
}
}
const endIndex = Math.min(startIndex + PAGE_SIZE, ALL_RESOURCES.length);
const resources = ALL_RESOURCES.slice(startIndex, endIndex);
let nextCursor: string | undefined;
if (endIndex < ALL_RESOURCES.length) {
nextCursor = btoa(endIndex.toString());
}
return {
resources,
nextCursor,
};
});
server.setRequestHandler(ListResourceTemplatesRequestSchema, async () => {
return {
resourceTemplates: [
{
uriTemplate: "test://static/resource/{id}",
name: "Static Resource",
description: "A static resource with a numeric ID",
},
],
};
});
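// Resolves test://static/resource/{id} URIs back to entries in ALL_RESOURCES.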
server.setRequestHandler(ReadResourceRequestSchema, async (request) => {
const uri = request.params.uri;
if (uri.startsWith("test://static/resource/")) {
const index = parseInt(uri.split("/").pop() ?? "", 10) - 1;
if (index >= 0 && index < ALL_RESOURCES.length) {
const resource = ALL_RESOURCES[index];
return {
contents: [resource],
};
}
}
throw new Error(`Unknown resource: ${uri}`);
});
server.setRequestHandler(SubscribeRequestSchema, async (request, extra) => {
const { uri } = request.params;
subscriptions.add(uri);
return {};
});
server.setRequestHandler(UnsubscribeRequestSchema, async (request) => {
subscriptions.delete(request.params.uri);
return {};
});
server.setRequestHandler(ListPromptsRequestSchema, async () => {
return {
prompts: [
{
name: PromptName.SIMPLE,
description: "A prompt without arguments",
},
{
name: PromptName.COMPLEX,
description: "A prompt with arguments",
arguments: [
{
name: "temperature",
description: "Temperature setting",
required: true,
},
{
name: "style",
description: "Output style",
required: false,
},
],
},
{
name: PromptName.RESOURCE,
description: "A prompt that includes an embedded resource reference",
arguments: [
{
name: "resourceId",
description: "Resource ID to include (1-100)",
required: true,
},
],
},
],
};
});
server.setRequestHandler(GetPromptRequestSchema, async (request) => {
const { name, arguments: args } = request.params;
if (name === PromptName.SIMPLE) {
return {
messages: [
{
role: "user",
content: {
type: "text",
text: "This is a simple prompt without arguments.",
},
},
],
};
}
if (name === PromptName.COMPLEX) {
return {
messages: [
{
role: "user",
content: {
type: "text",
text: `This is a complex prompt with arguments: temperature=${args?.temperature}, style=${args?.style}`,
},
},
{
role: "assistant",
content: {
type: "text",
text: "I understand. You've provided a complex prompt with temperature and style arguments. How would you like me to proceed?",
},
},
{
role: "user",
content: {
type: "image",
data: MCP_TINY_IMAGE,
mimeType: "image/png",
},
},
],
};
}
if (name === PromptName.RESOURCE) {
const resourceId = parseInt(args?.resourceId as string, 10);
if (isNaN(resourceId) || resourceId < 1 || resourceId > 100) {
throw new Error(
`Invalid resourceId: ${args?.resourceId}. Must be a number between 1 and 100.`
);
}
const resourceIndex = resourceId - 1;
const resource = ALL_RESOURCES[resourceIndex];
return {
messages: [
{
role: "user",
content: {
type: "text",
text: `This prompt includes Resource ${resourceId}. Please analyze the following resource:`,
},
},
{
role: "user",
content: {
type: "resource",
resource: resource,
},
},
],
};
}
throw new Error(`Unknown prompt: ${name}`);
});
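// Advertises the available tools; the roots and elicitation demo tools are only listed when the connected client declares the corresponding capabilities.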
server.setRequestHandler(ListToolsRequestSchema, async () => {
const tools: Tool[] = [
{
name: ToolName.ECHO,
description: "Echoes back the input",
inputSchema: zodToJsonSchema(EchoSchema) as ToolInput,
},
{
name: ToolName.ADD,
description: "Adds two numbers",
inputSchema: zodToJsonSchema(AddSchema) as ToolInput,
},
{
name: ToolName.LONG_RUNNING_OPERATION,
description:
"Demonstrates a long running operation with progress updates",
inputSchema: zodToJsonSchema(LongRunningOperationSchema) as ToolInput,
},
{
name: ToolName.PRINT_ENV,
description:
"Prints all environment variables, helpful for debugging MCP server configuration",
inputSchema: zodToJsonSchema(PrintEnvSchema) as ToolInput,
},
{
name: ToolName.SAMPLE_LLM,
description: "Samples from an LLM using MCP's sampling feature",
inputSchema: zodToJsonSchema(SampleLLMSchema) as ToolInput,
},
{
name: ToolName.GET_TINY_IMAGE,
description: "Returns the MCP_TINY_IMAGE",
inputSchema: zodToJsonSchema(GetTinyImageSchema) as ToolInput,
},
{
name: ToolName.ANNOTATED_MESSAGE,
description:
"Demonstrates how annotations can be used to provide metadata about content",
inputSchema: zodToJsonSchema(AnnotatedMessageSchema) as ToolInput,
},
{
name: ToolName.GET_RESOURCE_REFERENCE,
description:
"Returns a resource reference that can be used by MCP clients",
inputSchema: zodToJsonSchema(GetResourceReferenceSchema) as ToolInput,
},
{
name: ToolName.GET_RESOURCE_LINKS,
description:
"Returns multiple resource links that reference different types of resources",
inputSchema: zodToJsonSchema(GetResourceLinksSchema) as ToolInput,
},
{
name: ToolName.STRUCTURED_CONTENT,
description:
"Returns structured content along with an output schema for client data validation",
inputSchema: zodToJsonSchema(StructuredContentSchema.input) as ToolInput,
outputSchema: zodToJsonSchema(StructuredContentSchema.output) as ToolOutput,
},
{
name: ToolName.ZIP_RESOURCES,
description: "Compresses the provided resource files (mapping of name to URI, which can be a data URI) to a zip file, which it returns as a data URI resource link.",
inputSchema: zodToJsonSchema(ZipResourcesInputSchema) as ToolInput,
}
];
if (clientCapabilities!.roots) tools.push({
name: ToolName.LIST_ROOTS,
description:
"Lists the current MCP roots provided by the client. Demonstrates the roots protocol capability even though this server doesn't access files.",
inputSchema: zodToJsonSchema(ListRootsSchema) as ToolInput,
});
if (clientCapabilities!.elicitation) tools.push({
name: ToolName.ELICITATION,
description: "Elicitation test tool that demonstrates how to request user input with various field types (string, boolean, email, uri, date, integer, number, enum)",
inputSchema: zodToJsonSchema(ElicitationSchema) as ToolInput,
});
return { tools };
});
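// Dispatches tool calls by name; each branch validates its arguments against the matching zod schema before doing any work.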
server.setRequestHandler(CallToolRequestSchema, async (request, extra) => {
const { name, arguments: args } = request.params;
if (name === ToolName.ECHO) {
const validatedArgs = EchoSchema.parse(args);
return {
content: [{ type: "text", text: `Echo: ${validatedArgs.message}` }],
};
}
if (name === ToolName.ADD) {
const validatedArgs = AddSchema.parse(args);
const sum = validatedArgs.a + validatedArgs.b;
return {
content: [
{
type: "text",
text: `The sum of ${validatedArgs.a} and ${validatedArgs.b} is ${sum}.`,
},
],
};
}
if (name === ToolName.LONG_RUNNING_OPERATION) {
const validatedArgs = LongRunningOperationSchema.parse(args);
const { duration, steps } = validatedArgs;
const stepDuration = duration / steps;
const progressToken = request.params._meta?.progressToken;
for (let i = 1; i < steps + 1; i++) {
await new Promise((resolve) =>
setTimeout(resolve, stepDuration * 1000)
);
if (progressToken !== undefined) {
await server.notification({
method: "notifications/progress",
params: {
progress: i,
total: steps,
progressToken,
},
},{relatedRequestId: extra.requestId});
}
}
return {
content: [
{
type: "text",
text: `Long running operation completed. Duration: ${duration} seconds, Steps: ${steps}.`,
},
],
};
}
if (name === ToolName.PRINT_ENV) {
return {
content: [
{
type: "text",
text: JSON.stringify(process.env, null, 2),
},
],
};
}
if (name === ToolName.SAMPLE_LLM) {
const validatedArgs = SampleLLMSchema.parse(args);
const { prompt, maxTokens } = validatedArgs;
const result = await requestSampling(
prompt,
ToolName.SAMPLE_LLM,
maxTokens,
extra.sendRequest
);
return {
content: [
{ type: "text", text: `LLM sampling result: ${Array.isArray(result.content) ? result.content.map(c => c.type === "text" ? c.text : JSON.stringify(c)).join("") : (result.content.type === "text" ? result.content.text : JSON.stringify(result.content))}` },
],
};
}
if (name === ToolName.GET_TINY_IMAGE) {
GetTinyImageSchema.parse(args);
return {
content: [
{
type: "text",
text: "This is a tiny image:",
},
{
type: "image",
data: MCP_TINY_IMAGE,
mimeType: "image/png",
},
{
type: "text",
text: "The image above is the MCP tiny image.",
},
],
};
}
if (name === ToolName.ANNOTATED_MESSAGE) {
const { messageType, includeImage } = AnnotatedMessageSchema.parse(args);
const content = [];
// Main message with different priorities/audiences based on type
if (messageType === "error") {
content.push({
type: "text",
text: "Error: Operation failed",
annotations: {
priority: 1.0, // Errors are highest priority
audience: ["user", "assistant"], // Both need to know about errors
},
});
} else if (messageType === "success") {
content.push({
type: "text",
text: "Operation completed successfully",
annotations: {
priority: 0.7, // Success messages are important but not critical
audience: ["user"], // Success mainly for user consumption
},
});
} else if (messageType === "debug") {
content.push({
type: "text",
text: "Debug: Cache hit ratio 0.95, latency 150ms",
annotations: {
priority: 0.3, // Debug info is low priority
audience: ["assistant"], // Technical details for assistant
},
});
}
// Optional image with its own annotations
if (includeImage) {
content.push({
type: "image",
data: MCP_TINY_IMAGE,
mimeType: "image/png",
annotations: {
priority: 0.5,
audience: ["user"], // Images primarily for user visualization
},
});
}
return { content };
}
if (name === ToolName.GET_RESOURCE_REFERENCE) {
const validatedArgs = GetResourceReferenceSchema.parse(args);
const resourceId = validatedArgs.resourceId;
const resourceIndex = resourceId - 1;
if (resourceIndex < 0 || resourceIndex >= ALL_RESOURCES.length) {
throw new Error(`Resource with ID ${resourceId} does not exist`);
}
const resource = ALL_RESOURCES[resourceIndex];
return {
content: [
{
type: "text",
text: `Returning resource reference for Resource ${resourceId}:`,
},
{
type: "resource",
resource: resource,
},
{
type: "text",
text: `You can access this resource using the URI: ${resource.uri}`,
},
],
};
}
if (name === ToolName.ELICITATION) {
ElicitationSchema.parse(args);
const elicitationResult = await extra.sendRequest({
method: 'elicitation/create',
params: {
message: 'Please provide inputs for the following fields:',
requestedSchema: {
type: 'object',
properties: {
name: {
title: 'Full Name',
type: 'string',
description: 'Your full, legal name',
},
check: {
title: 'Agree to terms',
type: 'boolean',
description: 'A boolean check',
},
color: {
title: 'Favorite Color',
type: 'string',
description: 'Favorite color (open text)',
default: 'blue',
},
email: {
title: 'Email Address',
type: 'string',
format: 'email',
description: 'Your email address (will be verified, and never shared with anyone else)',
},
homepage: {
type: 'string',
format: 'uri',
description: 'Homepage / personal site',
},
birthdate: {
title: 'Birthdate',
type: 'string',
format: 'date',
description: 'Your date of birth (will never be shared with anyone else)',
},
integer: {
title: 'Favorite Integer',
type: 'integer',
description: 'Your favorite integer (do not give us your phone number, pin, or other sensitive info)',
minimum: 1,
maximum: 100,
default: 42,
},
number: {
title: 'Favorite Number',
type: 'number',
description: 'Favorite number (there are no wrong answers)',
minimum: 0,
maximum: 1000,
default: 3.14,
},
petType: {
title: 'Pet type',
type: 'string',
enum: ['cats', 'dogs', 'birds', 'fish', 'reptiles'],
enumNames: ['Cats', 'Dogs', 'Birds', 'Fish', 'Reptiles'],
default: 'dogs',
description: 'Your favorite pet type',
},
},
required: ['name'],
},
},
}, ElicitResultSchema, { timeout: 10 * 60 * 1000 /* 10 minutes */ });
// Handle different response actions
const content = [];
if (elicitationResult.action === 'accept' && elicitationResult.content) {
content.push({
type: "text",
text: `✅ User provided the requested information!`,
});
// Only access elicitationResult.content when action is accept
const userData = elicitationResult.content;
const lines = [];
if (userData.name) lines.push(`- Name: ${userData.name}`);
if (userData.check !== undefined) lines.push(`- Agreed to terms: ${userData.check}`);
if (userData.color) lines.push(`- Favorite Color: ${userData.color}`);
if (userData.email) lines.push(`- Email: ${userData.email}`);
if (userData.homepage) lines.push(`- Homepage: ${userData.homepage}`);
if (userData.birthdate) lines.push(`- Birthdate: ${userData.birthdate}`);
if (userData.integer !== undefined) lines.push(`- Favorite Integer: ${userData.integer}`);
if (userData.number !== undefined) lines.push(`- Favorite Number: ${userData.number}`);
if (userData.petType) lines.push(`- Pet Type: ${userData.petType}`);
content.push({
type: "text",
text: `User inputs:\n${lines.join('\n')}`,
});
} else if (elicitationResult.action === 'decline') {
content.push({
type: "text",
text: `❌ User declined to provide the requested information.`,
});
} else if (elicitationResult.action === 'cancel') {
content.push({
type: "text",
text: `⚠️ User cancelled the elicitation dialog.`,
});
}
// Include raw result for debugging
content.push({
type: "text",
text: `\nRaw result: ${JSON.stringify(elicitationResult, null, 2)}`,
});
return { content };
}
if (name === ToolName.GET_RESOURCE_LINKS) {
const { count } = GetResourceLinksSchema.parse(args);
const content = [];
// Add intro text
content.push({
type: "text",
text: `Here are ${count} resource links to resources available in this server (see full output in tool response if your client does not support resource_link yet):`,
});
// Return resource links to actual resources from ALL_RESOURCES
const actualCount = Math.min(count, ALL_RESOURCES.length);
for (let i = 0; i < actualCount; i++) {
const resource = ALL_RESOURCES[i];
content.push({
type: "resource_link",
uri: resource.uri,
name: resource.name,
description: `Resource ${i + 1}: ${resource.mimeType === "text/plain"
? "plaintext resource"
: "binary blob resource"
}`,
mimeType: resource.mimeType,
});
}
return { content };
}
if (name === ToolName.STRUCTURED_CONTENT) {
// The same response is returned for every input.
const validatedArgs = StructuredContentSchema.input.parse(args);
const weather = {
temperature: 22.5,
conditions: "Partly cloudy",
humidity: 65
};
const backwardCompatibleContent = {
type: "text",
text: JSON.stringify(weather)
};
return {
content: [backwardCompatibleContent],
structuredContent: weather
};
}
if (name === ToolName.ZIP_RESOURCES) {
const { files } = ZipResourcesInputSchema.parse(args);
const zip = new JSZip();
for (const [fileName, fileUrl] of Object.entries(files)) {
try {
const response = await fetch(fileUrl);
if (!response.ok) {
throw new Error(`Failed to fetch ${fileUrl}: ${response.statusText}`);
}
const arrayBuffer = await response.arrayBuffer();
zip.file(fileName, arrayBuffer);
} catch (error) {
throw new Error(`Error fetching file ${fileUrl}: ${error instanceof Error ? error.message : String(error)}`);
}
}
const uri = `data:application/zip;base64,${await zip.generateAsync({ type: "base64" })}`;
return {
content: [
{
type: "resource_link",
mimeType: "application/zip",
uri,
},
],
};
}
if (name === ToolName.LIST_ROOTS) {
ListRootsSchema.parse(args);
if (!clientSupportsRoots) {
return {
content: [
{
type: "text",
text: "The MCP client does not support the roots protocol.\n\n" +
"This means the server cannot access information about the client's workspace directories or file system roots."
}
]
};
}
if (currentRoots.length === 0) {
return {
content: [
{
type: "text",
text: "The client supports roots but no roots are currently configured.\n\n" +
"This could mean:\n" +
"1. The client hasn't provided any roots yet\n" +
"2. The client provided an empty roots list\n" +
"3. The roots configuration is still being loaded"
}
]
};
}
const rootsList = currentRoots.map((root, index) => {
return `${index + 1}. ${root.name || 'Unnamed Root'}\n URI: ${root.uri}`;
}).join('\n\n');
return {
content: [
{
type: "text",
text: `Current MCP Roots (${currentRoots.length} total):\n\n${rootsList}\n\n` +
"Note: This server demonstrates the roots protocol capability but doesn't actually access files. " +
"The roots are provided by the MCP client and can be used by servers that need file system access."
}
]
};
}
throw new Error(`Unknown tool: ${name}`);
});
server.setRequestHandler(CompleteRequestSchema, async (request) => {
const { ref, argument } = request.params;
if (ref.type === "ref/resource") {
const resourceId = ref.uri.split("/").pop();
if (!resourceId) return { completion: { values: [] } };
// Filter resource IDs that start with the input value
const values = EXAMPLE_COMPLETIONS.resourceId.filter((id) =>
id.startsWith(argument.value)
);
return { completion: { values, hasMore: false, total: values.length } };
}
if (ref.type === "ref/prompt") {
// Handle completion for prompt arguments
const completions =
EXAMPLE_COMPLETIONS[argument.name as keyof typeof EXAMPLE_COMPLETIONS];
if (!completions) return { completion: { values: [] } };
const values = completions.filter((value) =>
value.startsWith(argument.value)
);
return { completion: { values, hasMore: false, total: values.length } };
}
throw new Error(`Unknown reference type`);
});
// Roots protocol handlers
server.setNotificationHandler(RootsListChangedNotificationSchema, async () => {
try {
// Request the updated roots list from the client
const response = await server.listRoots();
if (response && 'roots' in response) {
currentRoots = response.roots;
// Log the roots update for demonstration
await server.sendLoggingMessage({
level: "info",
logger: "everything-server",
data: `Roots updated: ${currentRoots.length} root(s) received from client`,
}, sessionId);
}
} catch (error) {
await server.sendLoggingMessage({
level: "error",
logger: "everything-server",
data: `Failed to request roots from client: ${error instanceof Error ? error.message : String(error)}`,
}, sessionId);
}
});
// Handle post-initialization setup for roots
server.oninitialized = async () => {
clientCapabilities = server.getClientCapabilities();
if (clientCapabilities?.roots) {
clientSupportsRoots = true;
try {
const response = await server.listRoots();
if (response && 'roots' in response) {
currentRoots = response.roots;
await server.sendLoggingMessage({
level: "info",
logger: "everything-server",
data: `Initial roots received: ${currentRoots.length} root(s) from client`,
}, sessionId);
} else {
await server.sendLoggingMessage({
level: "warning",
logger: "everything-server",
data: "Client returned no roots set",
}, sessionId);
}
} catch (error) {
await server.sendLoggingMessage({
level: "error",
logger: "everything-server",
data: `Failed to request initial roots from client: ${error instanceof Error ? error.message : String(error)}`,
}, sessionId);
}
} else {
await server.sendLoggingMessage({
level: "info",
logger: "everything-server",
data: "Client does not support MCP roots protocol",
}, sessionId);
}
};
const cleanup = async () => {
if (subsUpdateInterval) clearInterval(subsUpdateInterval);
if (logsUpdateInterval) clearInterval(logsUpdateInterval);
if (stdErrUpdateInterval) clearInterval(stdErrUpdateInterval);
};
return { server, cleanup, startNotificationIntervals };
};
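// Small PNG test image embedded as a base64 string, returned by the getTinyImage and annotatedMessage tools and the complex prompt.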
const MCP_TINY_IMAGE =
"iVBORw0KGgoAAAANSUhEUgAAABQAAAAUCAYAAACNiR0NAAAKsGlDQ1BJQ0MgUHJvZmlsZQAASImVlwdUU+kSgOfe9JDQEiIgJfQmSCeAlBBaAAXpYCMkAUKJMRBU7MriClZURLCs6KqIgo0idizYFsWC3QVZBNR1sWDDlXeBQ9jdd9575805c+a7c+efmf+e/z9nLgCdKZDJMlF1gCxpjjwyyI8dn5DIJvUABRiY0kBdIMyWcSMiwgCTUft3+dgGyJC9YzuU69/f/1fREImzhQBIBMbJomxhFsbHMe0TyuQ5ALg9mN9kbo5siK9gzJRjDWL8ZIhTR7hviJOHGY8fjomO5GGsDUCmCQTyVACaKeZn5wpTsTw0f4ztpSKJFGPsGbyzsmaLMMbqgiUWI8N4KD8n+S95Uv+WM1mZUyBIVfLIXoaF7C/JlmUK5v+fn+N/S1amYrSGOaa0NHlwJGaxvpAHGbNDlSxNnhI+yhLRcPwwpymCY0ZZmM1LHGWRwD9UuTZzStgop0gC+co8OfzoURZnB0SNsnx2pLJWipzHHWWBfKyuIiNG6U8T85X589Ki40Y5VxI7ZZSzM6JCx2J4Sr9cEansXywN8hurG6jce1b2X/Yr4SvX5qRFByv3LhjrXyzljuXMjlf2JhL7B4zFxCjjZTl+ylqyzAhlvDgzSOnPzo1Srs3BDuTY2gjlN0wXhESMMoRBELAhBjIhB+QggECQgBTEOeJ5Q2cUeLNl8+WS1LQcNhe7ZWI2Xyq0m8B2tHd0Bhi6syNH4j1r+C4irGtjvhWVAF4nBgcHT475Qm4BHEkCoNaO+SxnAKh3A1w5JVTIc0d8Q9cJCEAFNWCCDhiACViCLTiCK3iCLwRACIRDNCTATBBCGmRhnc+FhbAMCqAI1sNmKIOdsBv2wyE4CvVwCs7DZbgOt+AePIZ26IJX0AcfYQBBEBJCRxiIDmKImCE2iCPCQbyRACQMiUQSkCQkFZEiCmQhsgIpQoqRMmQXUokcQU4g55GrSCvyEOlAepF3yFcUh9JQJqqPmqMTUQ7KRUPRaHQGmorOQfPQfHQtWopWoAfROvQ8eh29h7ajr9B+HOBUcCycEc4Wx8HxcOG4RFwKTo5bjCvEleAqcNW4Rlwz7g6uHfca9wVPxDPwbLwt3hMfjI/BC/Fz8Ivxq/Fl+P34OvxF/B18B74P/51AJ+gRbAgeBD4hnpBKmEsoIJQQ9hJqCZcI9whdhI9EIpFFtCC6EYOJCcR04gLiauJ2Yg3xHLGV2EnsJ5FIOiQbkhcpnCQg5ZAKSFtJB0lnSbdJXaTPZBWyIdmRHEhOJEvJy8kl5APkM+Tb5G7yAEWdYkbxoIRTRJT5lHWUPZRGyk1KF2WAqkG1oHpRo6np1GXUUmo19RL1CfW9ioqKsYq7ylQVicpSlVKVwypXVDpUvtA0adY0Hm06TUFbS9tHO0d7SHtPp9PN6b70RHoOfS29kn6B/oz+WZWhaqfKVxWpLlEtV61Tva36Ro2iZqbGVZuplqdWonZM7abaa3WKurk6T12gvli9XP2E+n31fg2GhoNGuEaWxmqNAxpXNXo0SZrmmgGaIs18zd2aFzQ7GTiGCYPHEDJWMPYwLjG6mESmBZPPTGcWMQ8xW5h9WppazlqxWvO0yrVOa7WzcCxzFp+VyVrHOspqY30dpz+OO048btW46nG3x33SHq/tqy3WLtSu0b6n/VWHrROgk6GzQade56kuXtdad6ruXN0dupd0X49njvccLxxfOP7o+Ed6qJ61XqTeAr3dejf0+vUN9IP0Zfpb9S/ovzZgGfgapBtsMjhj0GvIMPQ2lBhuMjxr+JKtxeayM9ml7IvsPiM9o2AjhdEuoxajAWML4xjj5cY1xk9NqCYckxSTTSZNJn2mhqaTTReaVpk+MqOYcczSzLaYNZt9MrcwjzNfaV5v3mOhbcG3yLOosnhiSbf0sZxjWWF514poxbHKsNpudcsatXaxTrMut75pg9q42khsttu0TiBMcJ8gnVAx4b4tzZZrm2tbZdthx7ILs1tuV2/3ZqLpxMSJGyY2T/xu72Kfab/H/rGDpkOIw3KHRod3jtaOQsdyx7tOdKdApyVODU5vnW2cxc47nB+4MFwmu6x0aXL509XNVe5a7drrZuqW5LbN7T6HyYngrOZccSe4+7kvcT/l/sXD1SPH46jHH562nhmeBzx7JllMEk/aM6nTy9hL4LXLq92b7Z3k/ZN3u4+Rj8Cnwue5r4mvyHevbzfXipvOPch942fvJ/er9fvE8+At4p3zx/kH+Rf6twRoBsQElAU8CzQOTA2sCuwLcglaEHQumBAcGrwh+D5fny/kV/L7QtxCFoVcDKWFRoWWhT4Psw6ThzVORieHTN44+ckUsynSKfXhEM4P3xj+NMIiYk7EyanEqRFTy6e+iHSIXBjZHMWImhV1IOpjtF/0uujHMZYxipimWLXY6bGVsZ/i/OOK49rjJ8Yvir+eoJsgSWhIJCXGJu5N7J8WMG3ztK7pLtMLprfNsJgxb8bVmbozM2eenqU2SzDrWBIhKS7pQNI3QbigQtCfzE/eltwn5Am3CF+JfEWbRL1iL3GxuDvFK6U4pSfVK3Vjam+aT1pJ2msJT1ImeZsenL4z/VNGeMa+jMHMuMyaLHJWUtYJqaY0Q3pxtsHsebNbZTayAln7HI85m+f0yUPle7OR7BnZDTlMbDi6obBU/KDoyPXOLc/9PDd27rF5GvOk827Mt56/an53XmDezwvwC4QLmhYaLVy2sGMRd9Guxcji5MVNS0yW5C/pWhq0dP8y6rKMZb8st19evPzDirgVjfn6+UvzO38I+qGqQLVAXnB/pefKnT/if5T82LLKadXWVd8LRYXXiuyLSoq+rRauvrbGYU3pmsG1KWtb1rmu27GeuF66vm2Dz4b9xRrFecWdGydvrNvE3lS46cPmWZuvljiX7NxC3aLY0l4aVtqw1XTr+q3fytLK7pX7ldds09u2atun7aLtt3f47qjeqb+zaOfXnyQ/PdgVtKuuwryiZDdxd+7uF3ti9zT/zPm5cq/u3qK9f+6T7mvfH7n/YqVbZeUBvQPrqtAqRVXvwekHbx3yP9RQbVu9q4ZVU3QYDisOvzySdKTtaOjRpmOcY9XHzY5vq2XUFtYhdfPr+urT6tsbEhpaT4ScaGr0bKw9aXdy3ymjU+WntU6vO0M9k39m8Gze2f5zsnOvz6ee72ya1fT4QvyFuxenXmy5FHrpyuXAyxeauc1nr3hdOXXV4+qJa5xr9dddr9fdcLlR+4vLL7Utri11N91uNtzyv9XYOqn1zG2f2+fv+N+5fJd/9/q9Kfda22LaHtyffr/9gehBz8PMh28f5T4aeLz0CeFJ4VP1pyXP9J5V/Gr1a027a/vpDv+OG8+jnj/uFHa++i37t29d+S/oL0q6Dbsrexx7TvUG9t56Oe1l1yvZq4HXBb9r/L7tjeWb43/4/nGjL76v66387eC71e913u/74PyhqT+i/9nHrI8Dnwo/63ze/4Xzpflr3NfugbnfSN9K/7T6s/F76Pcng1
mDgzKBXDA8CuAwRVNSAN7tA6AnADCwGYI6bWSmHhZk5D9gmOA/8cjcPSyuANWYGRqNeOcADmNqvhRAzRdgaCyK9gXUyUmpo/Pv8Kw+JAbYv8K0HECi2x6tebQU/iEjc/xf+v6nBWXWv9l/AV0EC6JTIblRAAAAeGVYSWZNTQAqAAAACAAFARIAAwAAAAEAAQAAARoABQAAAAEAAABKARsABQAAAAEAAABSASgAAwAAAAEAAgAAh2kABAAAAAEAAABaAAAAAAAAAJAAAAABAAAAkAAAAAEAAqACAAQAAAABAAAAFKADAAQAAAABAAAAFAAAAAAXNii1AAAACXBIWXMAABYlAAAWJQFJUiTwAAAB82lUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNi4wLjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOllSZXNvbHV0aW9uPjE0NDwvdGlmZjpZUmVzb2x1dGlvbj4KICAgICAgICAgPHRpZmY6T3JpZW50YXRpb24+MTwvdGlmZjpPcmllbnRhdGlvbj4KICAgICAgICAgPHRpZmY6WFJlc29sdXRpb24+MTQ0PC90aWZmOlhSZXNvbHV0aW9uPgogICAgICAgICA8dGlmZjpSZXNvbHV0aW9uVW5pdD4yPC90aWZmOlJlc29sdXRpb25Vbml0PgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAgPC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4KReh49gAAAjRJREFUOBGFlD2vMUEUx2clvoNCcW8hCqFAo1dKhEQpvsF9KrWEBh/ALbQ0KkInBI3SWyGPCCJEQliXgsTLefaca/bBWjvJzs6cOf/fnDkzOQJIjWm06/XKBEGgD8c6nU5VIWgBtQDPZPWtJE8O63a7LBgMMo/Hw0ql0jPjcY4RvmqXy4XMjUYDUwLtdhtmsxnYbDbI5/O0djqdFFKmsEiGZ9jP9gem0yn0ej2Yz+fg9XpfycimAD7DttstQTDKfr8Po9GIIg6Hw1Cr1RTgB+A72GAwgMPhQLBMJgNSXsFqtUI2myUo18pA6QJogefsPrLBX4QdCVatViklw+EQRFGEj88P2O12pEUGATmsXq+TaLPZ0AXgMRF2vMEqlQoJTSYTpNNpApvNZliv1/+BHDaZTAi2Wq1A3Ig0xmMej7+RcZjdbodUKkWAaDQK+GHjHPnImB88JrZIJAKFQgH2+z2BOczhcMiwRCIBgUAA+NN5BP6mj2DYff35gk6nA61WCzBn2JxO5wPM7/fLz4vD0E+OECfn8xl/0Gw2KbLxeAyLxQIsFgt8p75pDSO7h/HbpUWpewCike9WLpfB7XaDy+WCYrFI/slk8i0MnRRAUt46hPMI4vE4+Hw+ec7t9/44VgWigEeby+UgFArJWjUYOqhWG6x50rpcSfR6PVUfNOgEVRlTX0HhrZBKz4MZjUYWi8VoA+lc9H/VaRZYjBKrtXR8tlwumcFgeMWRbZpA9ORQWfVm8A/FsrLaxebd5wAAAABJRU5ErkJggg==";
```
--------------------------------------------------------------------------------
/src/filesystem/__tests__/path-validation.test.ts:
--------------------------------------------------------------------------------
```typescript
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import * as path from 'path';
import * as fs from 'fs/promises';
import * as os from 'os';
import { isPathWithinAllowedDirectories } from '../path-validation.js';
/**
* Check if the current environment supports symlink creation
*/
async function checkSymlinkSupport(): Promise<boolean> {
const testDir = await fs.mkdtemp(path.join(os.tmpdir(), 'symlink-test-'));
try {
const targetFile = path.join(testDir, 'target.txt');
const linkFile = path.join(testDir, 'link.txt');
await fs.writeFile(targetFile, 'test');
await fs.symlink(targetFile, linkFile);
// If we get here, symlinks are supported
return true;
} catch (error) {
// EPERM indicates no symlink permissions
if ((error as NodeJS.ErrnoException).code === 'EPERM') {
return false;
}
// Other errors might indicate a real problem
throw error;
} finally {
await fs.rm(testDir, { recursive: true, force: true });
}
}
// Global variable to store symlink support status
let symlinkSupported: boolean | null = null;
/**
* Get cached symlink support status, checking once per test run
*/
async function getSymlinkSupport(): Promise<boolean> {
if (symlinkSupported === null) {
symlinkSupported = await checkSymlinkSupport();
if (!symlinkSupported) {
console.log('\n⚠️ Symlink tests will be skipped - symlink creation not supported in this environment');
console.log(' On Windows, enable Developer Mode or run as Administrator to enable symlink tests');
}
}
return symlinkSupported;
}
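// The suites below exercise isPathWithinAllowedDirectories directly; real filesystem state is only needed for the symlink and race-condition suites further down.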
describe('Path Validation', () => {
it('allows exact directory match', () => {
const allowed = ['/home/user/project'];
expect(isPathWithinAllowedDirectories('/home/user/project', allowed)).toBe(true);
});
it('allows subdirectories', () => {
const allowed = ['/home/user/project'];
expect(isPathWithinAllowedDirectories('/home/user/project/src', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/project/src/index.js', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/project/deeply/nested/file.txt', allowed)).toBe(true);
});
it('blocks similar directory names (prefix vulnerability)', () => {
const allowed = ['/home/user/project'];
expect(isPathWithinAllowedDirectories('/home/user/project2', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/home/user/project_backup', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/home/user/project-old', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/home/user/projectile', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/home/user/project.bak', allowed)).toBe(false);
});
it('blocks paths outside allowed directories', () => {
const allowed = ['/home/user/project'];
expect(isPathWithinAllowedDirectories('/home/user/other', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/etc/passwd', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/home/user', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/', allowed)).toBe(false);
});
it('handles multiple allowed directories', () => {
const allowed = ['/home/user/project1', '/home/user/project2'];
expect(isPathWithinAllowedDirectories('/home/user/project1/src', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/project2/src', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/project3', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/home/user/project1_backup', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/home/user/project2-old', allowed)).toBe(false);
});
it('blocks parent and sibling directories', () => {
const allowed = ['/test/allowed'];
// Parent directory
expect(isPathWithinAllowedDirectories('/test', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/', allowed)).toBe(false);
// Sibling with common prefix
expect(isPathWithinAllowedDirectories('/test/allowed_sibling', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/test/allowed2', allowed)).toBe(false);
});
it('handles paths with special characters', () => {
const allowed = ['/home/user/my-project (v2)'];
expect(isPathWithinAllowedDirectories('/home/user/my-project (v2)', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/my-project (v2)/src', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/my-project (v2)_backup', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/home/user/my-project', allowed)).toBe(false);
});
describe('Input validation', () => {
it('rejects empty inputs', () => {
const allowed = ['/home/user/project'];
expect(isPathWithinAllowedDirectories('', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/home/user/project', [])).toBe(false);
});
it('handles trailing separators correctly', () => {
const allowed = ['/home/user/project'];
// Path with trailing separator should still match
expect(isPathWithinAllowedDirectories('/home/user/project/', allowed)).toBe(true);
// Allowed directory with trailing separator
const allowedWithSep = ['/home/user/project/'];
expect(isPathWithinAllowedDirectories('/home/user/project', allowedWithSep)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/project/', allowedWithSep)).toBe(true);
// Should still block similar names with or without trailing separators
expect(isPathWithinAllowedDirectories('/home/user/project2', allowedWithSep)).toBe(false);
expect(isPathWithinAllowedDirectories('/home/user/project2', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/home/user/project2/', allowed)).toBe(false);
});
it('skips empty directory entries in allowed list', () => {
const allowed = ['', '/home/user/project', ''];
expect(isPathWithinAllowedDirectories('/home/user/project', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/project/src', allowed)).toBe(true);
// Should still validate properly with empty entries
expect(isPathWithinAllowedDirectories('/home/user/other', allowed)).toBe(false);
});
it('handles Windows paths with trailing separators', () => {
if (path.sep === '\\') {
const allowed = ['C:\\Users\\project'];
// Path with trailing separator
expect(isPathWithinAllowedDirectories('C:\\Users\\project\\', allowed)).toBe(true);
// Allowed with trailing separator
const allowedWithSep = ['C:\\Users\\project\\'];
expect(isPathWithinAllowedDirectories('C:\\Users\\project', allowedWithSep)).toBe(true);
expect(isPathWithinAllowedDirectories('C:\\Users\\project\\', allowedWithSep)).toBe(true);
// Should still block similar names
expect(isPathWithinAllowedDirectories('C:\\Users\\project2\\', allowed)).toBe(false);
}
});
});
describe('Error handling', () => {
it('normalizes relative paths to absolute', () => {
const allowed = [process.cwd()];
// Relative paths get normalized to absolute paths based on cwd
expect(isPathWithinAllowedDirectories('relative/path', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('./file', allowed)).toBe(true);
// Parent directory references that escape allowed directory
const parentAllowed = ['/home/user/project'];
expect(isPathWithinAllowedDirectories('../parent', parentAllowed)).toBe(false);
});
it('handles relative paths in the allowed directories list', () => {
const badAllowed = ['relative/path', '/some/other/absolute/path'];
// Relative paths in allowed dirs are normalized to absolute based on cwd
// The normalized 'relative/path' won't match our test path
expect(isPathWithinAllowedDirectories('/some/other/absolute/path/file', badAllowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/absolute/path/file', badAllowed)).toBe(false);
});
it('handles null and undefined inputs gracefully', () => {
const allowed = ['/home/user/project'];
// Should return false, not crash
expect(isPathWithinAllowedDirectories(null as any, allowed)).toBe(false);
expect(isPathWithinAllowedDirectories(undefined as any, allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/path', null as any)).toBe(false);
expect(isPathWithinAllowedDirectories('/path', undefined as any)).toBe(false);
});
});
describe('Unicode and special characters', () => {
it('handles unicode characters in paths', () => {
const allowed = ['/home/user/café'];
expect(isPathWithinAllowedDirectories('/home/user/café', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/café/file', allowed)).toBe(true);
// Different unicode representation won't match (not normalized)
const decomposed = '/home/user/cafe\u0301'; // e + combining accent
expect(isPathWithinAllowedDirectories(decomposed, allowed)).toBe(false);
});
it('handles paths with spaces correctly', () => {
const allowed = ['/home/user/my project'];
expect(isPathWithinAllowedDirectories('/home/user/my project', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/my project/file', allowed)).toBe(true);
// Partial matches should fail
expect(isPathWithinAllowedDirectories('/home/user/my', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/home/user/my proj', allowed)).toBe(false);
});
});
describe('Overlapping allowed directories', () => {
it('handles nested allowed directories correctly', () => {
const allowed = ['/home', '/home/user', '/home/user/project'];
// All paths under /home are allowed
expect(isPathWithinAllowedDirectories('/home/anything', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/anything', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/project/anything', allowed)).toBe(true);
// First match wins (most permissive)
expect(isPathWithinAllowedDirectories('/home/other/deep/path', allowed)).toBe(true);
});
it('handles root directory as allowed', () => {
const allowed = ['/'];
// Everything is allowed under root (dangerous configuration)
expect(isPathWithinAllowedDirectories('/', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/any/path', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/etc/passwd', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/secret', allowed)).toBe(true);
// But only on the same filesystem root
if (path.sep === '\\') {
expect(isPathWithinAllowedDirectories('D:\\other', ['/'])).toBe(false);
}
});
});
describe('Cross-platform behavior', () => {
it('handles Windows-style paths on Windows', () => {
if (path.sep === '\\') {
const allowed = ['C:\\Users\\project'];
expect(isPathWithinAllowedDirectories('C:\\Users\\project', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('C:\\Users\\project\\src', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('C:\\Users\\project2', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('C:\\Users\\project_backup', allowed)).toBe(false);
}
});
it('handles Unix-style paths on Unix', () => {
if (path.sep === '/') {
const allowed = ['/home/user/project'];
expect(isPathWithinAllowedDirectories('/home/user/project', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/project/src', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/project2', allowed)).toBe(false);
}
});
});
describe('Validation Tests - Path Traversal', () => {
it('blocks path traversal attempts', () => {
const allowed = ['/home/user/project'];
// Basic traversal attempts
expect(isPathWithinAllowedDirectories('/home/user/project/../../../etc/passwd', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/home/user/project/../../other', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/home/user/project/../project2', allowed)).toBe(false);
// Mixed traversal with valid segments
expect(isPathWithinAllowedDirectories('/home/user/project/src/../../project2', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/home/user/project/./../../other', allowed)).toBe(false);
// Multiple traversal sequences
expect(isPathWithinAllowedDirectories('/home/user/project/../project/../../../etc', allowed)).toBe(false);
});
it('blocks traversal in allowed directories', () => {
const allowed = ['/home/user/project/../safe'];
// The allowed directory itself should be normalized and safe
expect(isPathWithinAllowedDirectories('/home/user/safe/file', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/project/file', allowed)).toBe(false);
});
it('handles complex traversal patterns', () => {
const allowed = ['/home/user/project'];
// Double dots in filenames (not traversal) - these normalize to paths within allowed dir
expect(isPathWithinAllowedDirectories('/home/user/project/..test', allowed)).toBe(true); // Not traversal
expect(isPathWithinAllowedDirectories('/home/user/project/test..', allowed)).toBe(true); // Not traversal
expect(isPathWithinAllowedDirectories('/home/user/project/te..st', allowed)).toBe(true); // Not traversal
// Actual traversal
expect(isPathWithinAllowedDirectories('/home/user/project/../test', allowed)).toBe(false); // Is traversal - goes to /home/user/test
// Edge case: /home/user/project/.. normalizes to /home/user (parent dir)
expect(isPathWithinAllowedDirectories('/home/user/project/..', allowed)).toBe(false); // Goes to parent
});
});
describe('Validation Tests - Null Bytes', () => {
it('rejects paths with null bytes', () => {
const allowed = ['/home/user/project'];
expect(isPathWithinAllowedDirectories('/home/user/project\x00/etc/passwd', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/home/user/project/test\x00.txt', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('\x00/home/user/project', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/home/user/project/\x00', allowed)).toBe(false);
});
it('rejects allowed directories with null bytes', () => {
const allowed = ['/home/user/project\x00'];
expect(isPathWithinAllowedDirectories('/home/user/project', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('/home/user/project/file', allowed)).toBe(false);
});
});
describe('Validation Tests - Special Characters', () => {
it('allows percent signs in filenames', () => {
const allowed = ['/home/user/project'];
// Percent is a valid filename character
expect(isPathWithinAllowedDirectories('/home/user/project/report_50%.pdf', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/project/Q1_25%_growth', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/project/%41', allowed)).toBe(true); // File named %41
// URL encoding is NOT decoded by path.normalize, so these are just odd filenames
expect(isPathWithinAllowedDirectories('/home/user/project/%2e%2e', allowed)).toBe(true); // File named "%2e%2e"
expect(isPathWithinAllowedDirectories('/home/user/project/file%20name', allowed)).toBe(true); // File with %20 in name
});
it('handles percent signs in allowed directories', () => {
const allowed = ['/home/user/project%20files'];
// This is a directory literally named "project%20files"
expect(isPathWithinAllowedDirectories('/home/user/project%20files/test', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/project files/test', allowed)).toBe(false); // Different dir
});
});
describe('Path Normalization', () => {
it('normalizes paths before comparison', () => {
const allowed = ['/home/user/project'];
// Trailing slashes
expect(isPathWithinAllowedDirectories('/home/user/project/', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/project//', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/project///', allowed)).toBe(true);
// Current directory references
expect(isPathWithinAllowedDirectories('/home/user/project/./src', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/./project/src', allowed)).toBe(true);
// Multiple slashes
expect(isPathWithinAllowedDirectories('/home/user/project//src//file', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home//user//project//src', allowed)).toBe(true);
// Should still block outside paths
expect(isPathWithinAllowedDirectories('/home/user//project2', allowed)).toBe(false);
});
it('handles mixed separators correctly', () => {
if (path.sep === '\\') {
const allowed = ['C:\\Users\\project'];
// Mixed separators should be normalized
expect(isPathWithinAllowedDirectories('C:/Users/project', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('C:\\Users/project\\src', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('C:/Users\\project/src', allowed)).toBe(true);
}
});
});
describe('Edge Cases', () => {
it('rejects non-string inputs safely', () => {
const allowed = ['/home/user/project'];
expect(isPathWithinAllowedDirectories(123 as any, allowed)).toBe(false);
expect(isPathWithinAllowedDirectories({} as any, allowed)).toBe(false);
expect(isPathWithinAllowedDirectories([] as any, allowed)).toBe(false);
expect(isPathWithinAllowedDirectories(null as any, allowed)).toBe(false);
expect(isPathWithinAllowedDirectories(undefined as any, allowed)).toBe(false);
// Non-string in allowed directories
expect(isPathWithinAllowedDirectories('/home/user/project', [123 as any])).toBe(false);
expect(isPathWithinAllowedDirectories('/home/user/project', [{} as any])).toBe(false);
});
it('handles very long paths', () => {
const allowed = ['/home/user/project'];
// Create a very long path that's still valid
const longSubPath = 'a/'.repeat(1000) + 'file.txt';
expect(isPathWithinAllowedDirectories(`/home/user/project/${longSubPath}`, allowed)).toBe(true);
// Very long path that escapes
const escapePath = 'a/'.repeat(1000) + '../'.repeat(1001) + 'etc/passwd';
expect(isPathWithinAllowedDirectories(`/home/user/project/${escapePath}`, allowed)).toBe(false);
});
});
describe('Additional Coverage', () => {
it('handles allowed directories with traversal that normalizes safely', () => {
// These allowed dirs contain traversal but normalize to valid paths
const allowed = ['/home/user/../user/project'];
// Should normalize to /home/user/project and work correctly
expect(isPathWithinAllowedDirectories('/home/user/project/file', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/other', allowed)).toBe(false);
});
it('handles symbolic dots in filenames', () => {
const allowed = ['/home/user/project'];
// Single and double dots as actual filenames (not traversal)
expect(isPathWithinAllowedDirectories('/home/user/project/.', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('/home/user/project/..', allowed)).toBe(false); // This normalizes to parent
expect(isPathWithinAllowedDirectories('/home/user/project/...', allowed)).toBe(true); // Three dots is a valid filename
expect(isPathWithinAllowedDirectories('/home/user/project/....', allowed)).toBe(true); // Four dots is a valid filename
});
it('handles UNC paths on Windows', () => {
if (path.sep === '\\') {
const allowed = ['\\\\server\\share\\project'];
expect(isPathWithinAllowedDirectories('\\\\server\\share\\project', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('\\\\server\\share\\project\\file', allowed)).toBe(true);
expect(isPathWithinAllowedDirectories('\\\\server\\share\\other', allowed)).toBe(false);
expect(isPathWithinAllowedDirectories('\\\\other\\share\\project', allowed)).toBe(false);
}
});
});
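// The remaining suites create real files and symlinks in temporary directories; symlink-dependent cases are skipped where symlink creation is not permitted.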
describe('Symlink Tests', () => {
let testDir: string;
let allowedDir: string;
let forbiddenDir: string;
beforeEach(async () => {
testDir = await fs.mkdtemp(path.join(os.tmpdir(), 'fs-error-test-'));
allowedDir = path.join(testDir, 'allowed');
forbiddenDir = path.join(testDir, 'forbidden');
await fs.mkdir(allowedDir, { recursive: true });
await fs.mkdir(forbiddenDir, { recursive: true });
});
afterEach(async () => {
await fs.rm(testDir, { recursive: true, force: true });
});
it('validates symlink handling', async () => {
// Test with symlinks
try {
const linkPath = path.join(allowedDir, 'bad-link');
const targetPath = path.join(forbiddenDir, 'target.txt');
await fs.writeFile(targetPath, 'content');
await fs.symlink(targetPath, linkPath);
// In real implementation, this would throw with the resolved path
const realPath = await fs.realpath(linkPath);
const allowed = [allowedDir];
// Symlink target should be outside allowed directory
expect(isPathWithinAllowedDirectories(realPath, allowed)).toBe(false);
} catch (error) {
// Skip if no symlink permissions on the system; rethrow anything else (including assertion failures)
if ((error as NodeJS.ErrnoException).code !== 'EPERM') {
throw error;
}
}
});
it('handles non-existent paths correctly', async () => {
const newFilePath = path.join(allowedDir, 'subdir', 'newfile.txt');
// Parent directory doesn't exist
try {
await fs.access(newFilePath);
} catch (error) {
expect((error as NodeJS.ErrnoException).code).toBe('ENOENT');
}
// After creating parent, validation should work
await fs.mkdir(path.dirname(newFilePath), { recursive: true });
const allowed = [allowedDir];
expect(isPathWithinAllowedDirectories(newFilePath, allowed)).toBe(true);
});
// Test path resolution consistency for symlinked files
it('validates symlinked files consistently between path and resolved forms', async () => {
try {
// Setup: Create target file in forbidden area
const targetFile = path.join(forbiddenDir, 'target.txt');
await fs.writeFile(targetFile, 'TARGET_CONTENT');
// Create symlink inside allowed directory pointing to forbidden file
const symlinkPath = path.join(allowedDir, 'link-to-target.txt');
await fs.symlink(targetFile, symlinkPath);
// The symlink path itself passes validation (looks like it's in allowed dir)
expect(isPathWithinAllowedDirectories(symlinkPath, [allowedDir])).toBe(true);
// But the resolved path should fail validation
const resolvedPath = await fs.realpath(symlinkPath);
expect(isPathWithinAllowedDirectories(resolvedPath, [allowedDir])).toBe(false);
// Verify the resolved path goes to the forbidden location (normalize both paths for macOS temp dirs)
expect(await fs.realpath(resolvedPath)).toBe(await fs.realpath(targetFile));
} catch (error) {
// Skip if no symlink permissions on the system
if ((error as NodeJS.ErrnoException).code !== 'EPERM') {
throw error;
}
}
});
// Test allowed directory resolution behavior
it('validates paths correctly when allowed directory is resolved from symlink', async () => {
try {
// Setup: Create the actual target directory with content
const actualTargetDir = path.join(testDir, 'actual-target');
await fs.mkdir(actualTargetDir, { recursive: true });
const targetFile = path.join(actualTargetDir, 'file.txt');
await fs.writeFile(targetFile, 'FILE_CONTENT');
// Setup: Create symlink directory that points to target
const symlinkDir = path.join(testDir, 'symlink-dir');
await fs.symlink(actualTargetDir, symlinkDir);
// Simulate resolved allowed directory (what the server startup should do)
const resolvedAllowedDir = await fs.realpath(symlinkDir);
const resolvedTargetDir = await fs.realpath(actualTargetDir);
expect(resolvedAllowedDir).toBe(resolvedTargetDir);
// Test 1: File access through original symlink path should pass validation with resolved allowed dir
const fileViaSymlink = path.join(symlinkDir, 'file.txt');
const resolvedFile = await fs.realpath(fileViaSymlink);
expect(isPathWithinAllowedDirectories(resolvedFile, [resolvedAllowedDir])).toBe(true);
// Test 2: File access through resolved path should also pass validation
const fileViaResolved = path.join(resolvedTargetDir, 'file.txt');
expect(isPathWithinAllowedDirectories(fileViaResolved, [resolvedAllowedDir])).toBe(true);
// Test 3: Demonstrate inconsistent behavior with unresolved allowed directories
// If allowed dirs were not resolved (storing symlink paths instead):
const unresolvedAllowedDirs = [symlinkDir];
// This validation would incorrectly fail for the same content:
expect(isPathWithinAllowedDirectories(resolvedFile, unresolvedAllowedDirs)).toBe(false);
} catch (error) {
// Skip if no symlink permissions on the system
if ((error as NodeJS.ErrnoException).code !== 'EPERM') {
throw error;
}
}
});
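// A minimal sketch of the startup-time step the test above assumes: every
// configured allowed directory is passed through fs.realpath once, so all
// later containment checks compare against symlink-free paths. The helper
// name is illustrative and not part of the server's actual API; it relies on
// the fs/promises import already used in this file.
async function resolveAllowedDirectories(configuredDirs: string[]): Promise<string[]> {
// realpath both follows symlinks and normalizes the path form
return Promise.all(configuredDirs.map((dir) => fs.realpath(dir)));
}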
it('resolves nested symlink chains completely', async () => {
try {
// Setup: Create target file in forbidden area
const actualTarget = path.join(forbiddenDir, 'target-file.txt');
await fs.writeFile(actualTarget, 'FINAL_CONTENT');
// Create chain of symlinks: allowedFile -> link2 -> link1 -> actualTarget
const link1 = path.join(testDir, 'intermediate-link1');
const link2 = path.join(testDir, 'intermediate-link2');
const allowedFile = path.join(allowedDir, 'seemingly-safe-file');
await fs.symlink(actualTarget, link1);
await fs.symlink(link1, link2);
await fs.symlink(link2, allowedFile);
// The allowed file path passes basic validation
expect(isPathWithinAllowedDirectories(allowedFile, [allowedDir])).toBe(true);
// But complete resolution reveals the forbidden target
const fullyResolvedPath = await fs.realpath(allowedFile);
expect(isPathWithinAllowedDirectories(fullyResolvedPath, [allowedDir])).toBe(false);
expect(await fs.realpath(fullyResolvedPath)).toBe(await fs.realpath(actualTarget));
} catch (error) {
// Skip if no symlink permissions on the system
if ((error as NodeJS.ErrnoException).code !== 'EPERM') {
throw error;
}
}
});
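// Hedged sketch of the validation order the chained-symlink test implies for
// paths that already exist: resolve with fs.realpath first (which follows the
// entire link chain), then check containment against the allowed list.
// `validateExistingPath` is a hypothetical helper, not the server's validatePath.
async function validateExistingPath(requested: string, allowed: string[]): Promise<string> {
const resolved = await fs.realpath(requested);
if (!isPathWithinAllowedDirectories(resolved, allowed)) {
throw new Error(`Access denied: ${requested} resolves outside allowed directories`);
}
return resolved;
}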
});
describe('Path Validation Race Condition Tests', () => {
let testDir: string;
let allowedDir: string;
let forbiddenDir: string;
let targetFile: string;
let testPath: string;
beforeEach(async () => {
testDir = await fs.mkdtemp(path.join(os.tmpdir(), 'race-test-'));
allowedDir = path.join(testDir, 'allowed');
forbiddenDir = path.join(testDir, 'outside');
targetFile = path.join(forbiddenDir, 'target.txt');
testPath = path.join(allowedDir, 'test.txt');
await fs.mkdir(allowedDir, { recursive: true });
await fs.mkdir(forbiddenDir, { recursive: true });
await fs.writeFile(targetFile, 'ORIGINAL CONTENT', 'utf-8');
});
afterEach(async () => {
await fs.rm(testDir, { recursive: true, force: true });
});
it('validates non-existent file paths based on parent directory', async () => {
const allowed = [allowedDir];
expect(isPathWithinAllowedDirectories(testPath, allowed)).toBe(true);
await expect(fs.access(testPath)).rejects.toThrow();
const parentDir = path.dirname(testPath);
expect(isPathWithinAllowedDirectories(parentDir, allowed)).toBe(true);
});
it('demonstrates symlink race condition allows writing outside allowed directories', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping symlink race condition test - symlinks not supported');
return;
}
const allowed = [allowedDir];
await expect(fs.access(testPath)).rejects.toThrow();
expect(isPathWithinAllowedDirectories(testPath, allowed)).toBe(true);
await fs.symlink(targetFile, testPath);
await fs.writeFile(testPath, 'MODIFIED CONTENT', 'utf-8');
const targetContent = await fs.readFile(targetFile, 'utf-8');
expect(targetContent).toBe('MODIFIED CONTENT');
const resolvedPath = await fs.realpath(testPath);
expect(isPathWithinAllowedDirectories(resolvedPath, allowed)).toBe(false);
});
it('shows timing differences between validation approaches', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping timing validation test - symlinks not supported');
return;
}
const allowed = [allowedDir];
const validation1 = isPathWithinAllowedDirectories(testPath, allowed);
expect(validation1).toBe(true);
await fs.symlink(targetFile, testPath);
const resolvedPath = await fs.realpath(testPath);
const validation2 = isPathWithinAllowedDirectories(resolvedPath, allowed);
expect(validation2).toBe(false);
expect(validation1).not.toBe(validation2);
});
it('validates directory creation timing', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping directory creation timing test - symlinks not supported');
return;
}
const allowed = [allowedDir];
const testDir = path.join(allowedDir, 'newdir');
expect(isPathWithinAllowedDirectories(testDir, allowed)).toBe(true);
await fs.symlink(forbiddenDir, testDir);
expect(isPathWithinAllowedDirectories(testDir, allowed)).toBe(true);
const resolved = await fs.realpath(testDir);
expect(isPathWithinAllowedDirectories(resolved, allowed)).toBe(false);
});
it('demonstrates exclusive file creation behavior', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping exclusive file creation test - symlinks not supported');
return;
}
const allowed = [allowedDir];
await fs.symlink(targetFile, testPath);
await expect(fs.open(testPath, 'wx')).rejects.toThrow(/EEXIST/);
await fs.writeFile(testPath, 'NEW CONTENT', 'utf-8');
const targetContent = await fs.readFile(targetFile, 'utf-8');
expect(targetContent).toBe('NEW CONTENT');
});
it('should use resolved parent paths for non-existent files', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping resolved parent paths test - symlinks not supported');
return;
}
const allowed = [allowedDir];
const symlinkDir = path.join(allowedDir, 'link');
await fs.symlink(forbiddenDir, symlinkDir);
const fileThroughSymlink = path.join(symlinkDir, 'newfile.txt');
expect(fileThroughSymlink.startsWith(allowedDir)).toBe(true);
const parentDir = path.dirname(fileThroughSymlink);
const resolvedParent = await fs.realpath(parentDir);
expect(isPathWithinAllowedDirectories(resolvedParent, allowed)).toBe(false);
const expectedSafePath = path.join(resolvedParent, path.basename(fileThroughSymlink));
expect(isPathWithinAllowedDirectories(expectedSafePath, allowed)).toBe(false);
});
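// Sketch of the parent-resolution approach this test argues for when the
// target does not exist yet: realpath the parent directory and re-join the
// basename before checking containment, so a symlinked parent cannot smuggle
// the operation outside the sandbox. Hypothetical helper, assuming this
// file's existing fs/path imports.
async function validateNewPath(requested: string, allowed: string[]): Promise<string> {
const resolvedParent = await fs.realpath(path.dirname(requested));
const candidate = path.join(resolvedParent, path.basename(requested));
if (!isPathWithinAllowedDirectories(candidate, allowed)) {
throw new Error(`Access denied: parent of ${requested} resolves outside allowed directories`);
}
return candidate;
}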
it('demonstrates parent directory symlink traversal', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping parent directory symlink traversal test - symlinks not supported');
return;
}
const allowed = [allowedDir];
const deepPath = path.join(allowedDir, 'sub1', 'sub2', 'file.txt');
expect(isPathWithinAllowedDirectories(deepPath, allowed)).toBe(true);
const sub1Path = path.join(allowedDir, 'sub1');
await fs.symlink(forbiddenDir, sub1Path);
await fs.mkdir(path.join(sub1Path, 'sub2'), { recursive: true });
await fs.writeFile(deepPath, 'CONTENT', 'utf-8');
const realPath = await fs.realpath(deepPath);
const realAllowedDir = await fs.realpath(allowedDir);
const realForbiddenDir = await fs.realpath(forbiddenDir);
expect(realPath.startsWith(realAllowedDir)).toBe(false);
expect(realPath.startsWith(realForbiddenDir)).toBe(true);
});
it('should prevent race condition between validatePath and file operation', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping race condition prevention test - symlinks not supported');
return;
}
const allowed = [allowedDir];
const racePath = path.join(allowedDir, 'race-file.txt');
const targetFile = path.join(forbiddenDir, 'target.txt');
await fs.writeFile(targetFile, 'ORIGINAL CONTENT', 'utf-8');
// Path validation would pass (file doesn't exist, parent is in allowed dir)
expect(await fs.access(racePath).then(() => false).catch(() => true)).toBe(true);
expect(isPathWithinAllowedDirectories(racePath, allowed)).toBe(true);
// Race condition: symlink created after validation but before write
await fs.symlink(targetFile, racePath);
// With exclusive write flag, write should fail on symlink
await expect(
fs.writeFile(racePath, 'NEW CONTENT', { encoding: 'utf-8', flag: 'wx' })
).rejects.toThrow(/EEXIST/);
// Verify content unchanged
const targetContent = await fs.readFile(targetFile, 'utf-8');
expect(targetContent).toBe('ORIGINAL CONTENT');
// The symlink exists but write was blocked
const actualWritePath = await fs.realpath(racePath);
expect(actualWritePath).toBe(await fs.realpath(targetFile));
expect(isPathWithinAllowedDirectories(actualWritePath, allowed)).toBe(false);
});
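// Minimal sketch of closing the TOCTOU window demonstrated above for newly
// created files: the exclusive 'wx' flag makes the write fail with EEXIST if
// anything (including an attacker-planted symlink) already occupies the path,
// instead of following the link. The helper name is illustrative.
async function writeNewFileExclusive(filePath: string, data: string): Promise<void> {
await fs.writeFile(filePath, data, { encoding: 'utf-8', flag: 'wx' });
}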
it('should allow overwrites to legitimate files within allowed directories', async () => {
const allowed = [allowedDir];
const legitFile = path.join(allowedDir, 'legit-file.txt');
// Create a legitimate file
await fs.writeFile(legitFile, 'ORIGINAL', 'utf-8');
// Opening with w should work for legitimate files
const fd = await fs.open(legitFile, 'w');
try {
await fd.write('UPDATED', 0, 'utf-8');
} finally {
await fd.close();
}
const content = await fs.readFile(legitFile, 'utf-8');
expect(content).toBe('UPDATED');
});
it('should handle symlinks that point within allowed directories', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping symlinks within allowed directories test - symlinks not supported');
return;
}
const allowed = [allowedDir];
const targetFile = path.join(allowedDir, 'target.txt');
const symlinkPath = path.join(allowedDir, 'symlink.txt');
// Create target file within allowed directory
await fs.writeFile(targetFile, 'TARGET CONTENT', 'utf-8');
// Create symlink pointing to allowed file
await fs.symlink(targetFile, symlinkPath);
// Opening symlink with w follows it to the target
const fd = await fs.open(symlinkPath, 'w');
try {
await fd.write('UPDATED VIA SYMLINK', 0, 'utf-8');
} finally {
await fd.close();
}
// Both symlink and target should show updated content
const symlinkContent = await fs.readFile(symlinkPath, 'utf-8');
const targetContent = await fs.readFile(targetFile, 'utf-8');
expect(symlinkContent).toBe('UPDATED VIA SYMLINK');
expect(targetContent).toBe('UPDATED VIA SYMLINK');
});
it('should prevent overwriting files through symlinks pointing outside allowed directories', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping symlink overwrite prevention test - symlinks not supported');
return;
}
const allowed = [allowedDir];
const legitFile = path.join(allowedDir, 'existing.txt');
const targetFile = path.join(forbiddenDir, 'target.txt');
// Create a legitimate file first
await fs.writeFile(legitFile, 'LEGIT CONTENT', 'utf-8');
// Create target file in forbidden directory
await fs.writeFile(targetFile, 'FORBIDDEN CONTENT', 'utf-8');
// Now replace the legitimate file with a symlink to forbidden location
await fs.unlink(legitFile);
await fs.symlink(targetFile, legitFile);
// Simulate the server's validation logic
const stats = await fs.lstat(legitFile);
expect(stats.isSymbolicLink()).toBe(true);
const realPath = await fs.realpath(legitFile);
expect(isPathWithinAllowedDirectories(realPath, allowed)).toBe(false);
// With atomic rename, symlinks are replaced rather than followed,
// so this test demonstrates the protection rather than the vulnerability
// Verify content remains unchanged
const targetContent = await fs.readFile(targetFile, 'utf-8');
expect(targetContent).toBe('FORBIDDEN CONTENT');
});
it('demonstrates race condition in read operations', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping race condition in read operations test - symlinks not supported');
return;
}
const allowed = [allowedDir];
const legitFile = path.join(allowedDir, 'readable.txt');
const secretFile = path.join(forbiddenDir, 'secret.txt');
// Create legitimate file
await fs.writeFile(legitFile, 'PUBLIC CONTENT', 'utf-8');
// Create secret file in forbidden directory
await fs.writeFile(secretFile, 'SECRET CONTENT', 'utf-8');
// Step 1: validatePath would pass for legitimate file
expect(isPathWithinAllowedDirectories(legitFile, allowed)).toBe(true);
// Step 2: Race condition - replace file with symlink after validation
await fs.unlink(legitFile);
await fs.symlink(secretFile, legitFile);
// Step 3: Read operation follows symlink to forbidden location
const content = await fs.readFile(legitFile, 'utf-8');
// This shows the vulnerability - we read forbidden content
expect(content).toBe('SECRET CONTENT');
expect(isPathWithinAllowedDirectories(await fs.realpath(legitFile), allowed)).toBe(false);
});
it('verifies rename does not follow symlinks', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping rename symlink test - symlinks not supported');
return;
}
const allowed = [allowedDir];
const tempFile = path.join(allowedDir, 'temp.txt');
const targetSymlink = path.join(allowedDir, 'target-symlink.txt');
const forbiddenTarget = path.join(forbiddenDir, 'forbidden-target.txt');
// Create forbidden target
await fs.writeFile(forbiddenTarget, 'ORIGINAL CONTENT', 'utf-8');
// Create symlink pointing to forbidden location
await fs.symlink(forbiddenTarget, targetSymlink);
// Write temp file
await fs.writeFile(tempFile, 'NEW CONTENT', 'utf-8');
// Rename temp file to symlink path
await fs.rename(tempFile, targetSymlink);
// Check what happened
const symlinkExists = await fs.lstat(targetSymlink).then(() => true).catch(() => false);
const isSymlink = symlinkExists && (await fs.lstat(targetSymlink)).isSymbolicLink();
const targetContent = await fs.readFile(targetSymlink, 'utf-8');
const forbiddenContent = await fs.readFile(forbiddenTarget, 'utf-8');
// Rename should replace the symlink with a regular file
expect(isSymlink).toBe(false);
expect(targetContent).toBe('NEW CONTENT');
expect(forbiddenContent).toBe('ORIGINAL CONTENT'); // Unchanged
});
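// Sketch of the write-then-rename pattern the rename test relies on for
// overwrites: write to a fresh temporary name with 'wx', then rename over the
// destination. rename replaces a symlink at the destination rather than
// following it, so a link to a forbidden target is never written through.
// The temp-name scheme and helper name are illustrative assumptions.
async function atomicOverwriteFile(destination: string, data: string): Promise<void> {
const tempPath = `${destination}.${Date.now()}.tmp`; // hypothetical temp-file naming
await fs.writeFile(tempPath, data, { encoding: 'utf-8', flag: 'wx' });
try {
await fs.rename(tempPath, destination);
} catch (error) {
await fs.unlink(tempPath).catch(() => {}); // best-effort cleanup if rename fails
throw error;
}
}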
});
});
```