This is page 4 of 5. Use http://codebase.md/modelcontextprotocol/servers?lines=true&page={x} to view the full context.

# Directory Structure

```
├── .gitattributes
├── .github
│   ├── pull_request_template.md
│   └── workflows
│       ├── claude.yml
│       ├── python.yml
│       ├── release.yml
│       └── typescript.yml
├── .gitignore
├── .mcp.json
├── .npmrc
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── package-lock.json
├── package.json
├── README.md
├── scripts
│   └── release.py
├── SECURITY.md
├── src
│   ├── everything
│   │   ├── CLAUDE.md
│   │   ├── Dockerfile
│   │   ├── everything.ts
│   │   ├── index.ts
│   │   ├── instructions.md
│   │   ├── package.json
│   │   ├── README.md
│   │   ├── sse.ts
│   │   ├── stdio.ts
│   │   ├── streamableHttp.ts
│   │   └── tsconfig.json
│   ├── fetch
│   │   ├── .python-version
│   │   ├── Dockerfile
│   │   ├── LICENSE
│   │   ├── pyproject.toml
│   │   ├── README.md
│   │   ├── src
│   │   │   └── mcp_server_fetch
│   │   │       ├── __init__.py
│   │   │       ├── __main__.py
│   │   │       └── server.py
│   │   └── uv.lock
│   ├── filesystem
│   │   ├── __tests__
│   │   │   ├── directory-tree.test.ts
│   │   │   ├── lib.test.ts
│   │   │   ├── path-utils.test.ts
│   │   │   ├── path-validation.test.ts
│   │   │   └── roots-utils.test.ts
│   │   ├── Dockerfile
│   │   ├── index.ts
│   │   ├── lib.ts
│   │   ├── package.json
│   │   ├── path-utils.ts
│   │   ├── path-validation.ts
│   │   ├── README.md
│   │   ├── roots-utils.ts
│   │   ├── tsconfig.json
│   │   └── vitest.config.ts
│   ├── git
│   │   ├── .gitignore
│   │   ├── .python-version
│   │   ├── Dockerfile
│   │   ├── LICENSE
│   │   ├── pyproject.toml
│   │   ├── README.md
│   │   ├── src
│   │   │   └── mcp_server_git
│   │   │       ├── __init__.py
│   │   │       ├── __main__.py
│   │   │       ├── py.typed
│   │   │       └── server.py
│   │   ├── tests
│   │   │   └── test_server.py
│   │   └── uv.lock
│   ├── memory
│   │   ├── __tests__
│   │   │   ├── file-path.test.ts
│   │   │   └── knowledge-graph.test.ts
│   │   ├── Dockerfile
│   │   ├── index.ts
│   │   ├── package.json
│   │   ├── README.md
│   │   ├── tsconfig.json
│   │   └── vitest.config.ts
│   ├── sequentialthinking
│   │   ├── __tests__
│   │   │   └── lib.test.ts
│   │   ├── Dockerfile
│   │   ├── index.ts
│   │   ├── lib.ts
│   │   ├── package.json
│   │   ├── README.md
│   │   ├── tsconfig.json
│   │   └── vitest.config.ts
│   └── time
│       ├── .python-version
│       ├── Dockerfile
│       ├── pyproject.toml
│       ├── README.md
│       ├── src
│       │   └── mcp_server_time
│       │       ├── __init__.py
│       │       ├── __main__.py
│       │       └── server.py
│       ├── test
│       │   └── time_server_test.py
│       └── uv.lock
└── tsconfig.json
```

# Files

--------------------------------------------------------------------------------
/src/filesystem/__tests__/path-utils.test.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import { describe, it, expect, afterEach } from 'vitest';
  2 | import { normalizePath, expandHome, convertToWindowsPath } from '../path-utils.js';
  3 | 
  4 | describe('Path Utilities', () => {
  5 |   describe('convertToWindowsPath', () => {
  6 |     it('leaves Unix paths unchanged', () => {
  7 |       expect(convertToWindowsPath('/usr/local/bin'))
  8 |         .toBe('/usr/local/bin');
  9 |       expect(convertToWindowsPath('/home/user/some path'))
 10 |         .toBe('/home/user/some path');
 11 |     });
 12 | 
 13 |     it('never converts WSL paths (they work correctly in WSL with Node.js fs)', () => {
 14 |       // WSL paths should NEVER be converted, regardless of platform
 15 |       // They are valid Linux paths that work with Node.js fs operations inside WSL
 16 |       expect(convertToWindowsPath('/mnt/c/NS/MyKindleContent'))
 17 |         .toBe('/mnt/c/NS/MyKindleContent');
 18 |       expect(convertToWindowsPath('/mnt/d/Documents'))
 19 |         .toBe('/mnt/d/Documents');
 20 |     });
 21 | 
 22 |     it('converts Unix-style Windows paths only on Windows platform', () => {
 23 |       // On Windows, /c/ style paths should be converted
 24 |       if (process.platform === 'win32') {
 25 |         expect(convertToWindowsPath('/c/NS/MyKindleContent'))
 26 |           .toBe('C:\\NS\\MyKindleContent');
 27 |       } else {
 28 |         // On Linux, leave them unchanged
 29 |         expect(convertToWindowsPath('/c/NS/MyKindleContent'))
 30 |           .toBe('/c/NS/MyKindleContent');
 31 |       }
 32 |     });
 33 | 
 34 |     it('leaves Windows paths unchanged but ensures backslashes', () => {
 35 |       expect(convertToWindowsPath('C:\\NS\\MyKindleContent'))
 36 |         .toBe('C:\\NS\\MyKindleContent');
 37 |       expect(convertToWindowsPath('C:/NS/MyKindleContent'))
 38 |         .toBe('C:\\NS\\MyKindleContent');
 39 |     });
 40 | 
 41 |     it('handles Windows paths with spaces', () => {
 42 |       expect(convertToWindowsPath('C:\\Program Files\\Some App'))
 43 |         .toBe('C:\\Program Files\\Some App');
 44 |       expect(convertToWindowsPath('C:/Program Files/Some App'))
 45 |         .toBe('C:\\Program Files\\Some App');
 46 |     });
 47 | 
 48 |     it('handles drive letter paths based on platform', () => {
 49 |       // WSL paths should never be converted
 50 |       expect(convertToWindowsPath('/mnt/d/some/path'))
 51 |         .toBe('/mnt/d/some/path');
 52 | 
 53 |       if (process.platform === 'win32') {
 54 |         // On Windows, Unix-style paths like /d/ should be converted
 55 |         expect(convertToWindowsPath('/d/some/path'))
 56 |           .toBe('D:\\some\\path');
 57 |       } else {
 58 |         // On Linux, /d/ is just a regular Unix path
 59 |         expect(convertToWindowsPath('/d/some/path'))
 60 |           .toBe('/d/some/path');
 61 |       }
 62 |     });
 63 |   });
 64 | 
 65 |   describe('normalizePath', () => {
 66 |     it('preserves Unix paths', () => {
 67 |       expect(normalizePath('/usr/local/bin'))
 68 |         .toBe('/usr/local/bin');
 69 |       expect(normalizePath('/home/user/some path'))
 70 |         .toBe('/home/user/some path');
 71 |       expect(normalizePath('"/usr/local/some app/"'))
 72 |         .toBe('/usr/local/some app');
 73 |     });
 74 | 
 75 |     it('removes surrounding quotes', () => {
 76 |       expect(normalizePath('"C:\\NS\\My Kindle Content"'))
 77 |         .toBe('C:\\NS\\My Kindle Content');
 78 |     });
 79 | 
 80 |     it('normalizes backslashes', () => {
 81 |       expect(normalizePath('C:\\\\NS\\\\MyKindleContent'))
 82 |         .toBe('C:\\NS\\MyKindleContent');
 83 |     });
 84 | 
 85 |     it('converts forward slashes to backslashes on Windows', () => {
 86 |       expect(normalizePath('C:/NS/MyKindleContent'))
 87 |         .toBe('C:\\NS\\MyKindleContent');
 88 |     });
 89 | 
 90 |     it('always preserves WSL paths (they work correctly in WSL)', () => {
 91 |       // WSL paths should ALWAYS be preserved, regardless of platform
 92 |       // This is the fix for issue #2795
 93 |       expect(normalizePath('/mnt/c/NS/MyKindleContent'))
 94 |         .toBe('/mnt/c/NS/MyKindleContent');
 95 |       expect(normalizePath('/mnt/d/Documents'))
 96 |         .toBe('/mnt/d/Documents');
 97 |     });
 98 | 
 99 |     it('handles Unix-style Windows paths', () => {
100 |       // On Windows, /c/ paths should be converted
101 |       if (process.platform === 'win32') {
102 |         expect(normalizePath('/c/NS/MyKindleContent'))
103 |           .toBe('C:\\NS\\MyKindleContent');
104 |       } else if (process.platform === 'linux') {
105 |         // On Linux, /c/ is just a regular Unix path
106 |         expect(normalizePath('/c/NS/MyKindleContent'))
107 |           .toBe('/c/NS/MyKindleContent');
108 |       }
109 |     });
110 | 
111 |     it('handles paths with spaces and mixed slashes', () => {
112 |       expect(normalizePath('C:/NS/My Kindle Content'))
113 |         .toBe('C:\\NS\\My Kindle Content');
114 |       // WSL paths should always be preserved
115 |       expect(normalizePath('/mnt/c/NS/My Kindle Content'))
116 |         .toBe('/mnt/c/NS/My Kindle Content');
117 |       expect(normalizePath('C:\\Program Files (x86)\\App Name'))
118 |         .toBe('C:\\Program Files (x86)\\App Name');
119 |       expect(normalizePath('"C:\\Program Files\\App Name"'))
120 |         .toBe('C:\\Program Files\\App Name');
121 |       expect(normalizePath('  C:\\Program Files\\App Name  '))
122 |         .toBe('C:\\Program Files\\App Name');
123 |     });
124 | 
125 |     it('preserves spaces in all path formats', () => {
126 |       // WSL paths should always be preserved
127 |       expect(normalizePath('/mnt/c/Program Files/App Name'))
128 |         .toBe('/mnt/c/Program Files/App Name');
129 | 
130 |       if (process.platform === 'win32') {
131 |         // On Windows, Unix-style paths like /c/ should be converted
132 |         expect(normalizePath('/c/Program Files/App Name'))
133 |           .toBe('C:\\Program Files\\App Name');
134 |       } else {
135 |         // On Linux, /c/ is just a regular Unix path
136 |         expect(normalizePath('/c/Program Files/App Name'))
137 |           .toBe('/c/Program Files/App Name');
138 |       }
139 |       expect(normalizePath('C:/Program Files/App Name'))
140 |         .toBe('C:\\Program Files\\App Name');
141 |     });
142 | 
143 |     it('handles special characters in paths', () => {
144 |       // Test ampersand in path
145 |       expect(normalizePath('C:\\NS\\Sub&Folder'))
146 |         .toBe('C:\\NS\\Sub&Folder');
147 |       expect(normalizePath('C:/NS/Sub&Folder'))
148 |         .toBe('C:\\NS\\Sub&Folder');
149 |       // WSL paths should always be preserved
150 |       expect(normalizePath('/mnt/c/NS/Sub&Folder'))
151 |         .toBe('/mnt/c/NS/Sub&Folder');
152 | 
153 |       // Test tilde in path (short names in Windows)
154 |       expect(normalizePath('C:\\NS\\MYKIND~1'))
155 |         .toBe('C:\\NS\\MYKIND~1');
156 |       expect(normalizePath('/Users/NEMANS~1/FOLDER~2/SUBFO~1/Public/P12PST~1'))
157 |         .toBe('/Users/NEMANS~1/FOLDER~2/SUBFO~1/Public/P12PST~1');
158 | 
159 |       // Test other special characters
160 |       expect(normalizePath('C:\\Path with #hash'))
161 |         .toBe('C:\\Path with #hash');
162 |       expect(normalizePath('C:\\Path with (parentheses)'))
163 |         .toBe('C:\\Path with (parentheses)');
164 |       expect(normalizePath('C:\\Path with [brackets]'))
165 |         .toBe('C:\\Path with [brackets]');
166 |       expect(normalizePath('C:\\Path with @at+plus$dollar%percent'))
167 |         .toBe('C:\\Path with @at+plus$dollar%percent');
168 |     });
169 | 
170 |     it('capitalizes lowercase drive letters for Windows paths', () => {
171 |       expect(normalizePath('c:/windows/system32'))
172 |         .toBe('C:\\windows\\system32');
173 |       // WSL paths should always be preserved
174 |       expect(normalizePath('/mnt/d/my/folder'))
175 |         .toBe('/mnt/d/my/folder');
176 | 
177 |       if (process.platform === 'win32') {
178 |         // On Windows, Unix-style paths should be converted and capitalized
179 |         expect(normalizePath('/e/another/folder'))
180 |           .toBe('E:\\another\\folder');
181 |       } else {
182 |         // On Linux, /e/ is just a regular Unix path
183 |         expect(normalizePath('/e/another/folder'))
184 |           .toBe('/e/another/folder');
185 |       }
186 |     });
187 | 
188 |     it('handles UNC paths correctly', () => {
189 |       // UNC paths should preserve the leading double backslash
190 |       const uncPath = '\\\\SERVER\\share\\folder';
191 |       expect(normalizePath(uncPath)).toBe('\\\\SERVER\\share\\folder');
192 |       
193 |       // Test UNC path with double backslashes that need normalization
194 |       const uncPathWithDoubles = '\\\\\\\\SERVER\\\\share\\\\folder';
195 |       expect(normalizePath(uncPathWithDoubles)).toBe('\\\\SERVER\\share\\folder');
196 |     });
197 | 
198 |     it('returns normalized non-Windows/WSL/Unix-style Windows paths as is after basic normalization', () => {
199 |       // A path that looks somewhat absolute but isn't a drive or recognized Unix root for Windows conversion
200 |       // These paths should be preserved as-is (not converted to Windows C:\ format or WSL format)
201 |       const otherAbsolutePath = '\\someserver\\share\\file';
202 |       expect(normalizePath(otherAbsolutePath)).toBe(otherAbsolutePath);
203 |     });
204 |   });
205 | 
206 |   describe('expandHome', () => {
207 |     it('expands ~ to home directory', () => {
208 |       const result = expandHome('~/test');
209 |       expect(result).toContain('test');
210 |       expect(result).not.toContain('~');
211 |     });
212 | 
213 |     it('expands bare ~ to home directory', () => {
214 |       const result = expandHome('~');
215 |       expect(result).not.toContain('~');
216 |       expect(result.length).toBeGreaterThan(0);
217 |     });
218 | 
219 |     it('leaves other paths unchanged', () => {
220 |       expect(expandHome('C:/test')).toBe('C:/test');
221 |     });
222 |   });
223 | 
224 |   describe('WSL path handling (issue #2795 fix)', () => {
225 |     // Save original platform
226 |     const originalPlatform = process.platform;
227 | 
228 |     afterEach(() => {
229 |       // Restore platform after each test
230 |       Object.defineProperty(process, 'platform', {
231 |         value: originalPlatform,
232 |         writable: true,
233 |         configurable: true
234 |       });
235 |     });
236 | 
237 |     it('should NEVER convert WSL paths - they work correctly in WSL with Node.js fs', () => {
238 |       // The key insight: When running `wsl npx ...`, Node.js runs INSIDE WSL (process.platform === 'linux')
239 |       // and /mnt/c/ paths work correctly with Node.js fs operations in that environment.
240 |       // Converting them to C:\ format breaks fs operations because Windows paths don't work inside WSL.
241 | 
242 |       // Mock Linux platform (inside WSL)
243 |       Object.defineProperty(process, 'platform', {
244 |         value: 'linux',
245 |         writable: true,
246 |         configurable: true
247 |       });
248 | 
249 |       // WSL paths should NOT be converted, even inside WSL
250 |       expect(normalizePath('/mnt/c/Users/username/folder'))
251 |         .toBe('/mnt/c/Users/username/folder');
252 | 
253 |       expect(normalizePath('/mnt/d/Documents/project'))
254 |         .toBe('/mnt/d/Documents/project');
255 |     });
256 | 
257 |     it('should also preserve WSL paths when running on Windows', () => {
258 |       // Mock Windows platform
259 |       Object.defineProperty(process, 'platform', {
260 |         value: 'win32',
261 |         writable: true,
262 |         configurable: true
263 |       });
264 | 
265 |       // WSL paths should still be preserved (though they wouldn't be accessible from Windows Node.js)
266 |       expect(normalizePath('/mnt/c/Users/username/folder'))
267 |         .toBe('/mnt/c/Users/username/folder');
268 | 
269 |       expect(normalizePath('/mnt/d/Documents/project'))
270 |         .toBe('/mnt/d/Documents/project');
271 |     });
272 | 
273 |     it('should convert Unix-style Windows paths (/c/) only when running on Windows (win32)', () => {
274 |       // Mock process.platform to be 'win32' (Windows)
275 |       Object.defineProperty(process, 'platform', {
276 |         value: 'win32',
277 |         writable: true,
278 |         configurable: true
279 |       });
280 | 
281 |       // Unix-style Windows paths like /c/ should be converted on Windows
282 |       expect(normalizePath('/c/Users/username/folder'))
283 |         .toBe('C:\\Users\\username\\folder');
284 | 
285 |       expect(normalizePath('/d/Documents/project'))
286 |         .toBe('D:\\Documents\\project');
287 |     });
288 | 
289 |     it('should NOT convert Unix-style paths (/c/) when running inside WSL (linux)', () => {
290 |       // Mock process.platform to be 'linux' (WSL/Linux)
291 |       Object.defineProperty(process, 'platform', {
292 |         value: 'linux',
293 |         writable: true,
294 |         configurable: true
295 |       });
296 | 
297 |       // When on Linux, /c/ is just a regular Unix directory, not a drive letter
298 |       expect(normalizePath('/c/some/path'))
299 |         .toBe('/c/some/path');
300 | 
301 |       expect(normalizePath('/d/another/path'))
302 |         .toBe('/d/another/path');
303 |     });
304 | 
305 |     it('should preserve regular Unix paths on all platforms', () => {
306 |       // Test on Linux
307 |       Object.defineProperty(process, 'platform', {
308 |         value: 'linux',
309 |         writable: true,
310 |         configurable: true
311 |       });
312 | 
313 |       expect(normalizePath('/home/user/documents'))
314 |         .toBe('/home/user/documents');
315 | 
316 |       expect(normalizePath('/var/log/app'))
317 |         .toBe('/var/log/app');
318 | 
319 |       // Test on Windows (though these paths wouldn't work on Windows)
320 |       Object.defineProperty(process, 'platform', {
321 |         value: 'win32',
322 |         writable: true,
323 |         configurable: true
324 |       });
325 | 
326 |       expect(normalizePath('/home/user/documents'))
327 |         .toBe('/home/user/documents');
328 | 
329 |       expect(normalizePath('/var/log/app'))
330 |         .toBe('/var/log/app');
331 |     });
332 | 
333 |     it('reproduces exact scenario from issue #2795', () => {
334 |       // Simulate running inside WSL: wsl npx @modelcontextprotocol/server-filesystem /mnt/c/Users/username/folder
335 |       Object.defineProperty(process, 'platform', {
336 |         value: 'linux',
337 |         writable: true,
338 |         configurable: true
339 |       });
340 | 
341 |       // This is the exact path from the issue
342 |       const inputPath = '/mnt/c/Users/username/folder';
343 |       const result = normalizePath(inputPath);
344 | 
345 |       // Should NOT convert to C:\Users\username\folder
346 |       expect(result).toBe('/mnt/c/Users/username/folder');
347 |       expect(result).not.toContain('C:');
348 |       expect(result).not.toContain('\\');
349 |     });
350 | 
351 |     it('should handle relative path slash conversion based on platform', () => {
352 |       // This test verifies platform-specific behavior naturally without mocking
353 |       // On Windows: forward slashes converted to backslashes
354 |       // On Linux/Unix: forward slashes preserved
355 |       const relativePath = 'some/relative/path';
356 |       const result = normalizePath(relativePath);
357 | 
358 |       if (originalPlatform === 'win32') {
359 |         expect(result).toBe('some\\relative\\path');
360 |       } else {
361 |         expect(result).toBe('some/relative/path');
362 |       }
363 |     });
364 |   });
365 | });
366 | 
```
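
Note: the module under test, `src/filesystem/path-utils.ts`, is not included on this page. For orientation, here is a minimal sketch of implementations consistent with the expectations above; the exported names mirror the real module, but the bodies are reconstructed from the tests and are illustrative only. The behaviour worth calling out is the WSL rule these tests pin down (issue #2795): `/mnt/<drive>/` paths are returned untouched on every platform.

```typescript
// Hypothetical sketch reconstructed from the tests above — the real
// path-utils.ts may differ in structure and in edge-case handling.
import os from 'os';
import path from 'path';

export function convertToWindowsPath(p: string): string {
  // WSL mount paths (/mnt/c/...) are valid Linux paths; never rewrite them.
  if (/^\/mnt\/[a-z]\//i.test(p)) return p;
  // Unix-style drive paths (/c/...) only mean "C:\..." when running on Windows.
  if (process.platform === 'win32') {
    const m = p.match(/^\/([a-zA-Z])\/(.*)$/);
    if (m) return `${m[1].toUpperCase()}:\\${m[2].replace(/\//g, '\\')}`;
  }
  // Drive-letter paths: capitalize the drive and use backslashes throughout.
  if (/^[a-zA-Z]:[\\/]/.test(p)) {
    return p[0].toUpperCase() + p.slice(1).replace(/\//g, '\\');
  }
  return p;
}

export function normalizePath(p: string): string {
  // Trim whitespace, strip surrounding quotes, then apply drive-path conversion.
  let out = convertToWindowsPath(p.trim().replace(/^"(.*)"$/, '$1').trim());
  if (/^[a-zA-Z]:\\/.test(out)) {
    // Collapse doubled backslashes after the drive prefix.
    out = out.slice(0, 2) + out.slice(2).replace(/\\+/g, '\\');
  } else if (out.startsWith('\\\\')) {
    // UNC paths: collapse runs of backslashes but keep the leading pair.
    out = '\\' + out.replace(/\\+/g, '\\');
  } else if (process.platform === 'win32' && !out.startsWith('/') && !out.startsWith('\\')) {
    // Relative paths get platform separators on Windows only.
    out = out.replace(/\//g, '\\');
  }
  // Drop a trailing forward slash ('/usr/local/some app/' -> '/usr/local/some app').
  return out.length > 1 && out.endsWith('/') ? out.slice(0, -1) : out;
}

export function expandHome(p: string): string {
  // '~' or '~/...' expands to the home directory; everything else is untouched.
  return p === '~' || p.startsWith('~/') ? path.join(os.homedir(), p.slice(1)) : p;
}
```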

--------------------------------------------------------------------------------
/src/memory/__tests__/knowledge-graph.test.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import { describe, it, expect, beforeEach, afterEach } from 'vitest';
  2 | import { promises as fs } from 'fs';
  3 | import path from 'path';
  4 | import { fileURLToPath } from 'url';
  5 | import { KnowledgeGraphManager, Entity, Relation, KnowledgeGraph } from '../index.js';
  6 | 
  7 | describe('KnowledgeGraphManager', () => {
  8 |   let manager: KnowledgeGraphManager;
  9 |   let testFilePath: string;
 10 | 
 11 |   beforeEach(async () => {
 12 |     // Create a temporary test file path
 13 |     testFilePath = path.join(
 14 |       path.dirname(fileURLToPath(import.meta.url)),
 15 |       `test-memory-${Date.now()}.jsonl`
 16 |     );
 17 |     manager = new KnowledgeGraphManager(testFilePath);
 18 |   });
 19 | 
 20 |   afterEach(async () => {
 21 |     // Clean up test file
 22 |     try {
 23 |       await fs.unlink(testFilePath);
 24 |     } catch (error) {
 25 |       // Ignore errors if file doesn't exist
 26 |     }
 27 |   });
 28 | 
 29 |   describe('createEntities', () => {
 30 |     it('should create new entities', async () => {
 31 |       const entities: Entity[] = [
 32 |         { name: 'Alice', entityType: 'person', observations: ['works at Acme Corp'] },
 33 |         { name: 'Bob', entityType: 'person', observations: ['likes programming'] },
 34 |       ];
 35 | 
 36 |       const newEntities = await manager.createEntities(entities);
 37 |       expect(newEntities).toHaveLength(2);
 38 |       expect(newEntities).toEqual(entities);
 39 | 
 40 |       const graph = await manager.readGraph();
 41 |       expect(graph.entities).toHaveLength(2);
 42 |     });
 43 | 
 44 |     it('should not create duplicate entities', async () => {
 45 |       const entities: Entity[] = [
 46 |         { name: 'Alice', entityType: 'person', observations: ['works at Acme Corp'] },
 47 |       ];
 48 | 
 49 |       await manager.createEntities(entities);
 50 |       const newEntities = await manager.createEntities(entities);
 51 | 
 52 |       expect(newEntities).toHaveLength(0);
 53 | 
 54 |       const graph = await manager.readGraph();
 55 |       expect(graph.entities).toHaveLength(1);
 56 |     });
 57 | 
 58 |     it('should handle empty entity arrays', async () => {
 59 |       const newEntities = await manager.createEntities([]);
 60 |       expect(newEntities).toHaveLength(0);
 61 |     });
 62 |   });
 63 | 
 64 |   describe('createRelations', () => {
 65 |     it('should create new relations', async () => {
 66 |       await manager.createEntities([
 67 |         { name: 'Alice', entityType: 'person', observations: [] },
 68 |         { name: 'Bob', entityType: 'person', observations: [] },
 69 |       ]);
 70 | 
 71 |       const relations: Relation[] = [
 72 |         { from: 'Alice', to: 'Bob', relationType: 'knows' },
 73 |       ];
 74 | 
 75 |       const newRelations = await manager.createRelations(relations);
 76 |       expect(newRelations).toHaveLength(1);
 77 |       expect(newRelations).toEqual(relations);
 78 | 
 79 |       const graph = await manager.readGraph();
 80 |       expect(graph.relations).toHaveLength(1);
 81 |     });
 82 | 
 83 |     it('should not create duplicate relations', async () => {
 84 |       await manager.createEntities([
 85 |         { name: 'Alice', entityType: 'person', observations: [] },
 86 |         { name: 'Bob', entityType: 'person', observations: [] },
 87 |       ]);
 88 | 
 89 |       const relations: Relation[] = [
 90 |         { from: 'Alice', to: 'Bob', relationType: 'knows' },
 91 |       ];
 92 | 
 93 |       await manager.createRelations(relations);
 94 |       const newRelations = await manager.createRelations(relations);
 95 | 
 96 |       expect(newRelations).toHaveLength(0);
 97 | 
 98 |       const graph = await manager.readGraph();
 99 |       expect(graph.relations).toHaveLength(1);
100 |     });
101 | 
102 |     it('should handle empty relation arrays', async () => {
103 |       const newRelations = await manager.createRelations([]);
104 |       expect(newRelations).toHaveLength(0);
105 |     });
106 |   });
107 | 
108 |   describe('addObservations', () => {
109 |     it('should add observations to existing entities', async () => {
110 |       await manager.createEntities([
111 |         { name: 'Alice', entityType: 'person', observations: ['works at Acme Corp'] },
112 |       ]);
113 | 
114 |       const results = await manager.addObservations([
115 |         { entityName: 'Alice', contents: ['likes coffee', 'has a dog'] },
116 |       ]);
117 | 
118 |       expect(results).toHaveLength(1);
119 |       expect(results[0].entityName).toBe('Alice');
120 |       expect(results[0].addedObservations).toHaveLength(2);
121 | 
122 |       const graph = await manager.readGraph();
123 |       const alice = graph.entities.find(e => e.name === 'Alice');
124 |       expect(alice?.observations).toHaveLength(3);
125 |     });
126 | 
127 |     it('should not add duplicate observations', async () => {
128 |       await manager.createEntities([
129 |         { name: 'Alice', entityType: 'person', observations: ['works at Acme Corp'] },
130 |       ]);
131 | 
132 |       await manager.addObservations([
133 |         { entityName: 'Alice', contents: ['likes coffee'] },
134 |       ]);
135 | 
136 |       const results = await manager.addObservations([
137 |         { entityName: 'Alice', contents: ['likes coffee', 'has a dog'] },
138 |       ]);
139 | 
140 |       expect(results[0].addedObservations).toHaveLength(1);
141 |       expect(results[0].addedObservations).toContain('has a dog');
142 | 
143 |       const graph = await manager.readGraph();
144 |       const alice = graph.entities.find(e => e.name === 'Alice');
145 |       expect(alice?.observations).toHaveLength(3);
146 |     });
147 | 
148 |     it('should throw error for non-existent entity', async () => {
149 |       await expect(
150 |         manager.addObservations([
151 |           { entityName: 'NonExistent', contents: ['some observation'] },
152 |         ])
153 |       ).rejects.toThrow('Entity with name NonExistent not found');
154 |     });
155 |   });
156 | 
157 |   describe('deleteEntities', () => {
158 |     it('should delete entities', async () => {
159 |       await manager.createEntities([
160 |         { name: 'Alice', entityType: 'person', observations: [] },
161 |         { name: 'Bob', entityType: 'person', observations: [] },
162 |       ]);
163 | 
164 |       await manager.deleteEntities(['Alice']);
165 | 
166 |       const graph = await manager.readGraph();
167 |       expect(graph.entities).toHaveLength(1);
168 |       expect(graph.entities[0].name).toBe('Bob');
169 |     });
170 | 
171 |     it('should cascade delete relations when deleting entities', async () => {
172 |       await manager.createEntities([
173 |         { name: 'Alice', entityType: 'person', observations: [] },
174 |         { name: 'Bob', entityType: 'person', observations: [] },
175 |         { name: 'Charlie', entityType: 'person', observations: [] },
176 |       ]);
177 | 
178 |       await manager.createRelations([
179 |         { from: 'Alice', to: 'Bob', relationType: 'knows' },
180 |         { from: 'Bob', to: 'Charlie', relationType: 'knows' },
181 |       ]);
182 | 
183 |       await manager.deleteEntities(['Bob']);
184 | 
185 |       const graph = await manager.readGraph();
186 |       expect(graph.entities).toHaveLength(2);
187 |       expect(graph.relations).toHaveLength(0);
188 |     });
189 | 
190 |     it('should handle deleting non-existent entities', async () => {
191 |       await manager.deleteEntities(['NonExistent']);
192 |       const graph = await manager.readGraph();
193 |       expect(graph.entities).toHaveLength(0);
194 |     });
195 |   });
196 | 
197 |   describe('deleteObservations', () => {
198 |     it('should delete observations from entities', async () => {
199 |       await manager.createEntities([
200 |         { name: 'Alice', entityType: 'person', observations: ['works at Acme Corp', 'likes coffee'] },
201 |       ]);
202 | 
203 |       await manager.deleteObservations([
204 |         { entityName: 'Alice', observations: ['likes coffee'] },
205 |       ]);
206 | 
207 |       const graph = await manager.readGraph();
208 |       const alice = graph.entities.find(e => e.name === 'Alice');
209 |       expect(alice?.observations).toHaveLength(1);
210 |       expect(alice?.observations).toContain('works at Acme Corp');
211 |     });
212 | 
213 |     it('should handle deleting from non-existent entities', async () => {
214 |       await manager.deleteObservations([
215 |         { entityName: 'NonExistent', observations: ['some observation'] },
216 |       ]);
217 |       // Should not throw error
218 |       const graph = await manager.readGraph();
219 |       expect(graph.entities).toHaveLength(0);
220 |     });
221 |   });
222 | 
223 |   describe('deleteRelations', () => {
224 |     it('should delete specific relations', async () => {
225 |       await manager.createEntities([
226 |         { name: 'Alice', entityType: 'person', observations: [] },
227 |         { name: 'Bob', entityType: 'person', observations: [] },
228 |       ]);
229 | 
230 |       await manager.createRelations([
231 |         { from: 'Alice', to: 'Bob', relationType: 'knows' },
232 |         { from: 'Alice', to: 'Bob', relationType: 'works_with' },
233 |       ]);
234 | 
235 |       await manager.deleteRelations([
236 |         { from: 'Alice', to: 'Bob', relationType: 'knows' },
237 |       ]);
238 | 
239 |       const graph = await manager.readGraph();
240 |       expect(graph.relations).toHaveLength(1);
241 |       expect(graph.relations[0].relationType).toBe('works_with');
242 |     });
243 |   });
244 | 
245 |   describe('readGraph', () => {
246 |     it('should return empty graph when file does not exist', async () => {
247 |       const graph = await manager.readGraph();
248 |       expect(graph.entities).toHaveLength(0);
249 |       expect(graph.relations).toHaveLength(0);
250 |     });
251 | 
252 |     it('should return complete graph with entities and relations', async () => {
253 |       await manager.createEntities([
254 |         { name: 'Alice', entityType: 'person', observations: ['works at Acme Corp'] },
255 |       ]);
256 | 
257 |       await manager.createRelations([
258 |         { from: 'Alice', to: 'Alice', relationType: 'self' },
259 |       ]);
260 | 
261 |       const graph = await manager.readGraph();
262 |       expect(graph.entities).toHaveLength(1);
263 |       expect(graph.relations).toHaveLength(1);
264 |     });
265 |   });
266 | 
267 |   describe('searchNodes', () => {
268 |     beforeEach(async () => {
269 |       await manager.createEntities([
270 |         { name: 'Alice', entityType: 'person', observations: ['works at Acme Corp', 'likes programming'] },
271 |         { name: 'Bob', entityType: 'person', observations: ['works at TechCo'] },
272 |         { name: 'Acme Corp', entityType: 'company', observations: ['tech company'] },
273 |       ]);
274 | 
275 |       await manager.createRelations([
276 |         { from: 'Alice', to: 'Acme Corp', relationType: 'works_at' },
277 |         { from: 'Bob', to: 'Acme Corp', relationType: 'competitor' },
278 |       ]);
279 |     });
280 | 
281 |     it('should search by entity name', async () => {
282 |       const result = await manager.searchNodes('Alice');
283 |       expect(result.entities).toHaveLength(1);
284 |       expect(result.entities[0].name).toBe('Alice');
285 |     });
286 | 
287 |     it('should search by entity type', async () => {
288 |       const result = await manager.searchNodes('company');
289 |       expect(result.entities).toHaveLength(1);
290 |       expect(result.entities[0].name).toBe('Acme Corp');
291 |     });
292 | 
293 |     it('should search by observation content', async () => {
294 |       const result = await manager.searchNodes('programming');
295 |       expect(result.entities).toHaveLength(1);
296 |       expect(result.entities[0].name).toBe('Alice');
297 |     });
298 | 
299 |     it('should be case insensitive', async () => {
300 |       const result = await manager.searchNodes('ALICE');
301 |       expect(result.entities).toHaveLength(1);
302 |       expect(result.entities[0].name).toBe('Alice');
303 |     });
304 | 
305 |     it('should include relations between matched entities', async () => {
306 |       const result = await manager.searchNodes('Acme');
307 |       expect(result.entities).toHaveLength(2); // Alice and Acme Corp
308 |       expect(result.relations).toHaveLength(1); // Only Alice -> Acme Corp relation
309 |     });
310 | 
311 |     it('should return empty graph for no matches', async () => {
312 |       const result = await manager.searchNodes('NonExistent');
313 |       expect(result.entities).toHaveLength(0);
314 |       expect(result.relations).toHaveLength(0);
315 |     });
316 |   });
317 | 
318 |   describe('openNodes', () => {
319 |     beforeEach(async () => {
320 |       await manager.createEntities([
321 |         { name: 'Alice', entityType: 'person', observations: [] },
322 |         { name: 'Bob', entityType: 'person', observations: [] },
323 |         { name: 'Charlie', entityType: 'person', observations: [] },
324 |       ]);
325 | 
326 |       await manager.createRelations([
327 |         { from: 'Alice', to: 'Bob', relationType: 'knows' },
328 |         { from: 'Bob', to: 'Charlie', relationType: 'knows' },
329 |       ]);
330 |     });
331 | 
332 |     it('should open specific nodes by name', async () => {
333 |       const result = await manager.openNodes(['Alice', 'Bob']);
334 |       expect(result.entities).toHaveLength(2);
335 |       expect(result.entities.map(e => e.name)).toContain('Alice');
336 |       expect(result.entities.map(e => e.name)).toContain('Bob');
337 |     });
338 | 
339 |     it('should include relations between opened nodes', async () => {
340 |       const result = await manager.openNodes(['Alice', 'Bob']);
341 |       expect(result.relations).toHaveLength(1);
342 |       expect(result.relations[0].from).toBe('Alice');
343 |       expect(result.relations[0].to).toBe('Bob');
344 |     });
345 | 
346 |     it('should exclude relations to unopened nodes', async () => {
347 |       const result = await manager.openNodes(['Bob']);
348 |       expect(result.relations).toHaveLength(0);
349 |     });
350 | 
351 |     it('should handle opening non-existent nodes', async () => {
352 |       const result = await manager.openNodes(['NonExistent']);
353 |       expect(result.entities).toHaveLength(0);
354 |     });
355 | 
356 |     it('should handle empty node list', async () => {
357 |       const result = await manager.openNodes([]);
358 |       expect(result.entities).toHaveLength(0);
359 |       expect(result.relations).toHaveLength(0);
360 |     });
361 |   });
362 | 
363 |   describe('file persistence', () => {
364 |     it('should persist data across manager instances', async () => {
365 |       await manager.createEntities([
366 |         { name: 'Alice', entityType: 'person', observations: ['persistent data'] },
367 |       ]);
368 | 
369 |       // Create new manager instance with same file path
370 |       const manager2 = new KnowledgeGraphManager(testFilePath);
371 |       const graph = await manager2.readGraph();
372 | 
373 |       expect(graph.entities).toHaveLength(1);
374 |       expect(graph.entities[0].name).toBe('Alice');
375 |     });
376 | 
377 |     it('should handle JSONL format correctly', async () => {
378 |       await manager.createEntities([
379 |         { name: 'Alice', entityType: 'person', observations: [] },
380 |       ]);
381 |       await manager.createRelations([
382 |         { from: 'Alice', to: 'Alice', relationType: 'self' },
383 |       ]);
384 | 
385 |       // Read file directly
386 |       const fileContent = await fs.readFile(testFilePath, 'utf-8');
387 |       const lines = fileContent.split('\n').filter(line => line.trim());
388 | 
389 |       expect(lines).toHaveLength(2);
390 |       expect(JSON.parse(lines[0])).toHaveProperty('type', 'entity');
391 |       expect(JSON.parse(lines[1])).toHaveProperty('type', 'relation');
392 |     });
393 |   });
394 | });
395 | 
```
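
Both suites above drive the `KnowledgeGraphManager` exported from `src/memory/index.ts` (reproduced later on this page). For orientation, a minimal round-trip matching the persistence tests could look like the following; the file path and import specifier are hypothetical:

```typescript
// Hypothetical usage sketch; adjust the import to wherever index.ts is built.
import { KnowledgeGraphManager } from './index.js';

const manager = new KnowledgeGraphManager('/tmp/example-memory.jsonl');
await manager.createEntities([{ name: 'Alice', entityType: 'person', observations: [] }]);
await manager.createRelations([{ from: 'Alice', to: 'Alice', relationType: 'self' }]);

// The backing file now holds one JSON object per line, tagged by "type" —
// exactly what the 'should handle JSONL format correctly' test asserts:
//   {"type":"entity","name":"Alice","entityType":"person","observations":[]}
//   {"type":"relation","from":"Alice","to":"Alice","relationType":"self"}
const graph = await manager.readGraph();
console.log(graph.entities.length, graph.relations.length); // 1 1
```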

--------------------------------------------------------------------------------
/src/git/src/mcp_server_git/server.py:
--------------------------------------------------------------------------------

```python
  1 | import logging
  2 | from pathlib import Path
  3 | from typing import Sequence, Optional
  4 | from mcp.server import Server
  5 | from mcp.server.session import ServerSession
  6 | from mcp.server.stdio import stdio_server
  7 | from mcp.types import (
  8 |     ClientCapabilities,
  9 |     TextContent,
 10 |     Tool,
 11 |     ListRootsResult,
 12 |     RootsCapability,
 13 | )
 14 | from enum import Enum
 15 | import git
 16 | from pydantic import BaseModel, Field
 17 | 
 18 | # Default number of context lines to show in diff output
 19 | DEFAULT_CONTEXT_LINES = 3
 20 | 
 21 | class GitStatus(BaseModel):
 22 |     repo_path: str
 23 | 
 24 | class GitDiffUnstaged(BaseModel):
 25 |     repo_path: str
 26 |     context_lines: int = DEFAULT_CONTEXT_LINES
 27 | 
 28 | class GitDiffStaged(BaseModel):
 29 |     repo_path: str
 30 |     context_lines: int = DEFAULT_CONTEXT_LINES
 31 | 
 32 | class GitDiff(BaseModel):
 33 |     repo_path: str
 34 |     target: str
 35 |     context_lines: int = DEFAULT_CONTEXT_LINES
 36 | 
 37 | class GitCommit(BaseModel):
 38 |     repo_path: str
 39 |     message: str
 40 | 
 41 | class GitAdd(BaseModel):
 42 |     repo_path: str
 43 |     files: list[str]
 44 | 
 45 | class GitReset(BaseModel):
 46 |     repo_path: str
 47 | 
 48 | class GitLog(BaseModel):
 49 |     repo_path: str
 50 |     max_count: int = 10
 51 |     start_timestamp: Optional[str] = Field(
 52 |         None,
 53 |         description="Start timestamp for filtering commits. Accepts: ISO 8601 format (e.g., '2024-01-15T14:30:25'), relative dates (e.g., '2 weeks ago', 'yesterday'), or absolute dates (e.g., '2024-01-15', 'Jan 15 2024')"
 54 |     )
 55 |     end_timestamp: Optional[str] = Field(
 56 |         None,
 57 |         description="End timestamp for filtering commits. Accepts: ISO 8601 format (e.g., '2024-01-15T14:30:25'), relative dates (e.g., '2 weeks ago', 'yesterday'), or absolute dates (e.g., '2024-01-15', 'Jan 15 2024')"
 58 |     )
 59 | 
 60 | class GitCreateBranch(BaseModel):
 61 |     repo_path: str
 62 |     branch_name: str
 63 |     base_branch: str | None = None
 64 | 
 65 | class GitCheckout(BaseModel):
 66 |     repo_path: str
 67 |     branch_name: str
 68 | 
 69 | class GitShow(BaseModel):
 70 |     repo_path: str
 71 |     revision: str
 72 | 
 73 | 
 74 | 
 75 | class GitBranch(BaseModel):
 76 |     repo_path: str = Field(
 77 |         ...,
 78 |         description="The path to the Git repository.",
 79 |     )
 80 |     branch_type: str = Field(
 81 |         ...,
 82 |         description="Whether to list local branches ('local'), remote branches ('remote'), or all branches ('all').",
 83 |     )
 84 |     contains: Optional[str] = Field(
 85 |         None,
 86 |         description="The commit sha that the branch should contain. Do not pass anything to this param if no commit sha is specified",
 87 |     )
 88 |     not_contains: Optional[str] = Field(
 89 |         None,
 90 |         description="The commit sha that the branch should NOT contain. Do not pass anything to this param if no commit sha is specified",
 91 |     )
 92 | 
 93 | 
 94 | class GitTools(str, Enum):
 95 |     STATUS = "git_status"
 96 |     DIFF_UNSTAGED = "git_diff_unstaged"
 97 |     DIFF_STAGED = "git_diff_staged"
 98 |     DIFF = "git_diff"
 99 |     COMMIT = "git_commit"
100 |     ADD = "git_add"
101 |     RESET = "git_reset"
102 |     LOG = "git_log"
103 |     CREATE_BRANCH = "git_create_branch"
104 |     CHECKOUT = "git_checkout"
105 |     SHOW = "git_show"
106 | 
107 |     BRANCH = "git_branch"
108 | 
109 | def git_status(repo: git.Repo) -> str:
110 |     return repo.git.status()
111 | 
112 | def git_diff_unstaged(repo: git.Repo, context_lines: int = DEFAULT_CONTEXT_LINES) -> str:
113 |     return repo.git.diff(f"--unified={context_lines}")
114 | 
115 | def git_diff_staged(repo: git.Repo, context_lines: int = DEFAULT_CONTEXT_LINES) -> str:
116 |     return repo.git.diff(f"--unified={context_lines}", "--cached")
117 | 
118 | def git_diff(repo: git.Repo, target: str, context_lines: int = DEFAULT_CONTEXT_LINES) -> str:
119 |     return repo.git.diff(f"--unified={context_lines}", target)
120 | 
121 | def git_commit(repo: git.Repo, message: str) -> str:
122 |     commit = repo.index.commit(message)
123 |     return f"Changes committed successfully with hash {commit.hexsha}"
124 | 
125 | def git_add(repo: git.Repo, files: list[str]) -> str:
126 |     if files == ["."]:
127 |         repo.git.add(".")
128 |     else:
129 |         repo.index.add(files)
130 |     return "Files staged successfully"
131 | 
132 | def git_reset(repo: git.Repo) -> str:
133 |     repo.index.reset()
134 |     return "All staged changes reset"
135 | 
136 | def git_log(repo: git.Repo, max_count: int = 10, start_timestamp: Optional[str] = None, end_timestamp: Optional[str] = None) -> list[str]:
137 |     if start_timestamp or end_timestamp:
138 |         # Use git log command with date filtering
139 |         args = []
140 |         if start_timestamp:
141 |             args.extend(['--since', start_timestamp])
142 |         if end_timestamp:
143 |             args.extend(['--until', end_timestamp])
144 |         args.extend(['--format=%H%n%an%n%ad%n%s%n'])
145 | 
146 |         log_output = repo.git.log(*args).split('\n')
147 | 
148 |         log = []
149 |         # Each commit prints five lines: hash, author, date, subject, and a blank separator (from the trailing %n)
150 |         for i in range(0, len(log_output), 5):
151 |             if i + 3 < len(log_output) and len(log) < max_count:
152 |                 log.append(
153 |                     f"Commit: {log_output[i]}\n"
154 |                     f"Author: {log_output[i+1]}\n"
155 |                     f"Date: {log_output[i+2]}\n"
156 |                     f"Message: {log_output[i+3]}\n"
157 |                 )
158 |         return log
159 |     else:
160 |         # Use existing logic for simple log without date filtering
161 |         commits = list(repo.iter_commits(max_count=max_count))
162 |         log = []
163 |         for commit in commits:
164 |             log.append(
165 |                 f"Commit: {commit.hexsha!r}\n"
166 |                 f"Author: {commit.author!r}\n"
167 |                 f"Date: {commit.authored_datetime}\n"
168 |                 f"Message: {commit.message!r}\n"
169 |             )
170 |         return log
171 | 
172 | def git_create_branch(repo: git.Repo, branch_name: str, base_branch: str | None = None) -> str:
173 |     if base_branch:
174 |         base = repo.references[base_branch]
175 |     else:
176 |         base = repo.active_branch
177 | 
178 |     repo.create_head(branch_name, base)
179 |     return f"Created branch '{branch_name}' from '{base.name}'"
180 | 
181 | def git_checkout(repo: git.Repo, branch_name: str) -> str:
182 |     repo.git.checkout(branch_name)
183 |     return f"Switched to branch '{branch_name}'"
184 | 
185 | 
186 | 
187 | def git_show(repo: git.Repo, revision: str) -> str:
188 |     commit = repo.commit(revision)
189 |     output = [
190 |         f"Commit: {commit.hexsha!r}\n"
191 |         f"Author: {commit.author!r}\n"
192 |         f"Date: {commit.authored_datetime!r}\n"
193 |         f"Message: {commit.message!r}\n"
194 |     ]
195 |     if commit.parents:
196 |         parent = commit.parents[0]
197 |         diff = parent.diff(commit, create_patch=True)
198 |     else:
199 |         diff = commit.diff(git.NULL_TREE, create_patch=True)
200 |     for d in diff:
201 |         output.append(f"\n--- {d.a_path}\n+++ {d.b_path}\n")
202 |         if d.diff is None:
203 |             continue
204 |         if isinstance(d.diff, bytes):
205 |             output.append(d.diff.decode('utf-8'))
206 |         else:
207 |             output.append(d.diff)
208 |     return "".join(output)
209 | 
210 | def git_branch(repo: git.Repo, branch_type: str, contains: str | None = None, not_contains: str | None = None) -> str:
211 |     match contains:
212 |         case None:
213 |             contains_sha = (None,)
214 |         case _:
215 |             contains_sha = ("--contains", contains)
216 | 
217 |     match not_contains:
218 |         case None:
219 |             not_contains_sha = (None,)
220 |         case _:
221 |             not_contains_sha = ("--no-contains", not_contains)
222 | 
223 |     match branch_type:
224 |         case 'local':
225 |             b_type = None
226 |         case 'remote':
227 |             b_type = "-r"
228 |         case 'all':
229 |             b_type = "-a"
230 |         case _:
231 |             return f"Invalid branch type: {branch_type}"
232 | 
233 |     # None value will be auto deleted by GitPython
234 |     branch_info = repo.git.branch(b_type, *contains_sha, *not_contains_sha)
235 | 
236 |     return branch_info
237 | 
238 | 
239 | async def serve(repository: Path | None) -> None:
240 |     logger = logging.getLogger(__name__)
241 | 
242 |     if repository is not None:
243 |         try:
244 |             git.Repo(repository)
245 |             logger.info(f"Using repository at {repository}")
246 |         except git.InvalidGitRepositoryError:
247 |             logger.error(f"{repository} is not a valid Git repository")
248 |             return
249 | 
250 |     server = Server("mcp-git")
251 | 
252 |     @server.list_tools()
253 |     async def list_tools() -> list[Tool]:
254 |         return [
255 |             Tool(
256 |                 name=GitTools.STATUS,
257 |                 description="Shows the working tree status",
258 |                 inputSchema=GitStatus.model_json_schema(),
259 |             ),
260 |             Tool(
261 |                 name=GitTools.DIFF_UNSTAGED,
262 |                 description="Shows changes in the working directory that are not yet staged",
263 |                 inputSchema=GitDiffUnstaged.model_json_schema(),
264 |             ),
265 |             Tool(
266 |                 name=GitTools.DIFF_STAGED,
267 |                 description="Shows changes that are staged for commit",
268 |                 inputSchema=GitDiffStaged.model_json_schema(),
269 |             ),
270 |             Tool(
271 |                 name=GitTools.DIFF,
272 |                 description="Shows differences between branches or commits",
273 |                 inputSchema=GitDiff.model_json_schema(),
274 |             ),
275 |             Tool(
276 |                 name=GitTools.COMMIT,
277 |                 description="Records changes to the repository",
278 |                 inputSchema=GitCommit.model_json_schema(),
279 |             ),
280 |             Tool(
281 |                 name=GitTools.ADD,
282 |                 description="Adds file contents to the staging area",
283 |                 inputSchema=GitAdd.model_json_schema(),
284 |             ),
285 |             Tool(
286 |                 name=GitTools.RESET,
287 |                 description="Unstages all staged changes",
288 |                 inputSchema=GitReset.model_json_schema(),
289 |             ),
290 |             Tool(
291 |                 name=GitTools.LOG,
292 |                 description="Shows the commit logs",
293 |                 inputSchema=GitLog.model_json_schema(),
294 |             ),
295 |             Tool(
296 |                 name=GitTools.CREATE_BRANCH,
297 |                 description="Creates a new branch from an optional base branch",
298 |                 inputSchema=GitCreateBranch.model_json_schema(),
299 |             ),
300 |             Tool(
301 |                 name=GitTools.CHECKOUT,
302 |                 description="Switches branches",
303 |                 inputSchema=GitCheckout.model_json_schema(),
304 |             ),
305 |             Tool(
306 |                 name=GitTools.SHOW,
307 |                 description="Shows the contents of a commit",
308 |                 inputSchema=GitShow.model_json_schema(),
309 |             ),
310 | 
311 |             Tool(
312 |                 name=GitTools.BRANCH,
313 |                 description="List Git branches",
314 |                 inputSchema=GitBranch.model_json_schema(),
315 | 
316 |             )
317 |         ]
318 | 
319 |     async def list_repos() -> Sequence[str]:
320 |         async def by_roots() -> Sequence[str]:
321 |             if not isinstance(server.request_context.session, ServerSession):
322 |                 raise TypeError("server.request_context.session must be a ServerSession")
323 | 
324 |             if not server.request_context.session.check_client_capability(
325 |                 ClientCapabilities(roots=RootsCapability())
326 |             ):
327 |                 return []
328 | 
329 |             roots_result: ListRootsResult = await server.request_context.session.list_roots()
330 |             logger.debug(f"Roots result: {roots_result}")
331 |             repo_paths = []
332 |             for root in roots_result.roots:
333 |                 path = root.uri.path
334 |                 try:
335 |                     git.Repo(path)
336 |                     repo_paths.append(str(path))
337 |                 except git.InvalidGitRepositoryError:
338 |                     pass
339 |             return repo_paths
340 | 
341 |         def by_commandline() -> Sequence[str]:
342 |             return [str(repository)] if repository is not None else []
343 | 
344 |         cmd_repos = by_commandline()
345 |         root_repos = await by_roots()
346 |         return [*root_repos, *cmd_repos]
347 | 
348 |     @server.call_tool()
349 |     async def call_tool(name: str, arguments: dict) -> list[TextContent]:
350 |         repo_path = Path(arguments["repo_path"])
351 | 
352 |         # For all commands, we need an existing repo
353 |         repo = git.Repo(repo_path)
354 | 
355 |         match name:
356 |             case GitTools.STATUS:
357 |                 status = git_status(repo)
358 |                 return [TextContent(
359 |                     type="text",
360 |                     text=f"Repository status:\n{status}"
361 |                 )]
362 | 
363 |             case GitTools.DIFF_UNSTAGED:
364 |                 diff = git_diff_unstaged(repo, arguments.get("context_lines", DEFAULT_CONTEXT_LINES))
365 |                 return [TextContent(
366 |                     type="text",
367 |                     text=f"Unstaged changes:\n{diff}"
368 |                 )]
369 | 
370 |             case GitTools.DIFF_STAGED:
371 |                 diff = git_diff_staged(repo, arguments.get("context_lines", DEFAULT_CONTEXT_LINES))
372 |                 return [TextContent(
373 |                     type="text",
374 |                     text=f"Staged changes:\n{diff}"
375 |                 )]
376 | 
377 |             case GitTools.DIFF:
378 |                 diff = git_diff(repo, arguments["target"], arguments.get("context_lines", DEFAULT_CONTEXT_LINES))
379 |                 return [TextContent(
380 |                     type="text",
381 |                     text=f"Diff with {arguments['target']}:\n{diff}"
382 |                 )]
383 | 
384 |             case GitTools.COMMIT:
385 |                 result = git_commit(repo, arguments["message"])
386 |                 return [TextContent(
387 |                     type="text",
388 |                     text=result
389 |                 )]
390 | 
391 |             case GitTools.ADD:
392 |                 result = git_add(repo, arguments["files"])
393 |                 return [TextContent(
394 |                     type="text",
395 |                     text=result
396 |                 )]
397 | 
398 |             case GitTools.RESET:
399 |                 result = git_reset(repo)
400 |                 return [TextContent(
401 |                     type="text",
402 |                     text=result
403 |                 )]
404 | 
405 |             # Commit log, with optional --since/--until filtering
406 |             case GitTools.LOG:
407 |                 log = git_log(
408 |                     repo,
409 |                     arguments.get("max_count", 10),
410 |                     arguments.get("start_timestamp"),
411 |                     arguments.get("end_timestamp")
412 |                 )
413 |                 return [TextContent(
414 |                     type="text",
415 |                     text="Commit history:\n" + "\n".join(log)
416 |                 )]
417 | 
418 |             case GitTools.CREATE_BRANCH:
419 |                 result = git_create_branch(
420 |                     repo,
421 |                     arguments["branch_name"],
422 |                     arguments.get("base_branch")
423 |                 )
424 |                 return [TextContent(
425 |                     type="text",
426 |                     text=result
427 |                 )]
428 | 
429 |             case GitTools.CHECKOUT:
430 |                 result = git_checkout(repo, arguments["branch_name"])
431 |                 return [TextContent(
432 |                     type="text",
433 |                     text=result
434 |                 )]
435 | 
436 |             case GitTools.SHOW:
437 |                 result = git_show(repo, arguments["revision"])
438 |                 return [TextContent(
439 |                     type="text",
440 |                     text=result
441 |                 )]
442 | 
443 |             case GitTools.BRANCH:
444 |                 result = git_branch(
445 |                     repo,
446 |                     arguments.get("branch_type", 'local'),
447 |                     arguments.get("contains", None),
448 |                     arguments.get("not_contains", None),
449 |                 )
450 |                 return [TextContent(
451 |                     type="text",
452 |                     text=result
453 |                 )]
454 | 
455 |             case _:
456 |                 raise ValueError(f"Unknown tool: {name}")
457 | 
458 |     options = server.create_initialization_options()
459 |     async with stdio_server() as (read_stream, write_stream):
460 |         await server.run(read_stream, write_stream, options, raise_exceptions=True)
461 | 
```
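
Note that `git_log` forwards the optional timestamps directly to `git log --since/--until`, so any date expression git accepts ('2 weeks ago', 'yesterday', '2024-01-15') works unchanged. A hypothetical argument payload for a `git_log` tool call, shaped per the `GitLog` model above:

```typescript
// Hypothetical git_log invocation; the keys follow the GitLog model above.
const gitLogArguments = {
  repo_path: '/path/to/repo',      // required by every tool handled in call_tool
  max_count: 5,
  start_timestamp: '2 weeks ago',  // forwarded as: git log --since '2 weeks ago'
  end_timestamp: 'yesterday',      // forwarded as: git log --until yesterday
};
// With an MCP TypeScript client this would be sent roughly as:
//   await client.callTool({ name: 'git_log', arguments: gitLogArguments });
```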

--------------------------------------------------------------------------------
/src/memory/index.ts:
--------------------------------------------------------------------------------

```typescript
  1 | #!/usr/bin/env node
  2 | 
  3 | import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
  4 | import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
  5 | import { z } from "zod";
  6 | import { promises as fs } from 'fs';
  7 | import path from 'path';
  8 | import { fileURLToPath } from 'url';
  9 | 
 10 | // Define memory file path using environment variable with fallback
 11 | export const defaultMemoryPath = path.join(path.dirname(fileURLToPath(import.meta.url)), 'memory.jsonl');
 12 | 
 13 | // Handle backward compatibility: migrate memory.json to memory.jsonl if needed
 14 | export async function ensureMemoryFilePath(): Promise<string> {
 15 |   if (process.env.MEMORY_FILE_PATH) {
 16 |     // Custom path provided: use absolute paths as-is, resolve relative ones against the module directory
 17 |     return path.isAbsolute(process.env.MEMORY_FILE_PATH)
 18 |       ? process.env.MEMORY_FILE_PATH
 19 |       : path.join(path.dirname(fileURLToPath(import.meta.url)), process.env.MEMORY_FILE_PATH);
 20 |   }
 21 |   
 22 |   // No custom path set, check for backward compatibility migration
 23 |   const oldMemoryPath = path.join(path.dirname(fileURLToPath(import.meta.url)), 'memory.json');
 24 |   const newMemoryPath = defaultMemoryPath;
 25 |   
 26 |   try {
 27 |     // Check if old file exists and new file doesn't
 28 |     await fs.access(oldMemoryPath);
 29 |     try {
 30 |       await fs.access(newMemoryPath);
 31 |       // Both files exist, use new one (no migration needed)
 32 |       return newMemoryPath;
 33 |     } catch {
 34 |       // Old file exists, new file doesn't - migrate
 35 |       console.error('DETECTED: Found legacy memory.json file, migrating to memory.jsonl for JSONL format compatibility');
 36 |       await fs.rename(oldMemoryPath, newMemoryPath);
 37 |       console.error('COMPLETED: Successfully migrated memory.json to memory.jsonl');
 38 |       return newMemoryPath;
 39 |     }
 40 |   } catch {
 41 |     // Old file doesn't exist, use new path
 42 |     return newMemoryPath;
 43 |   }
 44 | }
 45 | 
 46 | // Initialize memory file path (will be set during startup)
 47 | let MEMORY_FILE_PATH: string;
 48 | 
 49 | // We are storing our memory using entities, relations, and observations in a graph structure
 50 | export interface Entity {
 51 |   name: string;
 52 |   entityType: string;
 53 |   observations: string[];
 54 | }
 55 | 
 56 | export interface Relation {
 57 |   from: string;
 58 |   to: string;
 59 |   relationType: string;
 60 | }
 61 | 
 62 | export interface KnowledgeGraph {
 63 |   entities: Entity[];
 64 |   relations: Relation[];
 65 | }
 66 | 
 67 | // The KnowledgeGraphManager class contains all operations to interact with the knowledge graph
 68 | export class KnowledgeGraphManager {
 69 |   constructor(private memoryFilePath: string) {}
 70 | 
 71 |   private async loadGraph(): Promise<KnowledgeGraph> {
 72 |     try {
 73 |       const data = await fs.readFile(this.memoryFilePath, "utf-8");
 74 |       const lines = data.split("\n").filter(line => line.trim() !== "");
 75 |       return lines.reduce((graph: KnowledgeGraph, line) => {
 76 |         const item = JSON.parse(line);
 77 |         if (item.type === "entity") graph.entities.push(item as Entity);
 78 |         if (item.type === "relation") graph.relations.push(item as Relation);
 79 |         return graph;
 80 |       }, { entities: [], relations: [] });
 81 |     } catch (error) {
 82 |       if (error instanceof Error && 'code' in error && (error as any).code === "ENOENT") {
 83 |         return { entities: [], relations: [] };
 84 |       }
 85 |       throw error;
 86 |     }
 87 |   }
 88 | 
 89 |   private async saveGraph(graph: KnowledgeGraph): Promise<void> {
 90 |     const lines = [
 91 |       ...graph.entities.map(e => JSON.stringify({
 92 |         type: "entity",
 93 |         name: e.name,
 94 |         entityType: e.entityType,
 95 |         observations: e.observations
 96 |       })),
 97 |       ...graph.relations.map(r => JSON.stringify({
 98 |         type: "relation",
 99 |         from: r.from,
100 |         to: r.to,
101 |         relationType: r.relationType
102 |       })),
103 |     ];
104 |     await fs.writeFile(this.memoryFilePath, lines.join("\n"));
105 |   }
106 | 
107 |   async createEntities(entities: Entity[]): Promise<Entity[]> {
108 |     const graph = await this.loadGraph();
109 |     const newEntities = entities.filter(e => !graph.entities.some(existingEntity => existingEntity.name === e.name));
110 |     graph.entities.push(...newEntities);
111 |     await this.saveGraph(graph);
112 |     return newEntities;
113 |   }
114 | 
115 |   async createRelations(relations: Relation[]): Promise<Relation[]> {
116 |     const graph = await this.loadGraph();
117 |     const newRelations = relations.filter(r => !graph.relations.some(existingRelation => 
118 |       existingRelation.from === r.from && 
119 |       existingRelation.to === r.to && 
120 |       existingRelation.relationType === r.relationType
121 |     ));
122 |     graph.relations.push(...newRelations);
123 |     await this.saveGraph(graph);
124 |     return newRelations;
125 |   }
126 | 
127 |   async addObservations(observations: { entityName: string; contents: string[] }[]): Promise<{ entityName: string; addedObservations: string[] }[]> {
128 |     const graph = await this.loadGraph();
129 |     const results = observations.map(o => {
130 |       const entity = graph.entities.find(e => e.name === o.entityName);
131 |       if (!entity) {
132 |         throw new Error(`Entity with name ${o.entityName} not found`);
133 |       }
134 |       const newObservations = o.contents.filter(content => !entity.observations.includes(content));
135 |       entity.observations.push(...newObservations);
136 |       return { entityName: o.entityName, addedObservations: newObservations };
137 |     });
138 |     await this.saveGraph(graph);
139 |     return results;
140 |   }
141 | 
142 |   async deleteEntities(entityNames: string[]): Promise<void> {
143 |     const graph = await this.loadGraph();
144 |     graph.entities = graph.entities.filter(e => !entityNames.includes(e.name));
145 |     graph.relations = graph.relations.filter(r => !entityNames.includes(r.from) && !entityNames.includes(r.to));
146 |     await this.saveGraph(graph);
147 |   }
148 | 
149 |   async deleteObservations(deletions: { entityName: string; observations: string[] }[]): Promise<void> {
150 |     const graph = await this.loadGraph();
151 |     deletions.forEach(d => {
152 |       const entity = graph.entities.find(e => e.name === d.entityName);
153 |       if (entity) {
154 |         entity.observations = entity.observations.filter(o => !d.observations.includes(o));
155 |       }
156 |     });
157 |     await this.saveGraph(graph);
158 |   }
159 | 
160 |   async deleteRelations(relations: Relation[]): Promise<void> {
161 |     const graph = await this.loadGraph();
162 |     graph.relations = graph.relations.filter(r => !relations.some(delRelation => 
163 |       r.from === delRelation.from && 
164 |       r.to === delRelation.to && 
165 |       r.relationType === delRelation.relationType
166 |     ));
167 |     await this.saveGraph(graph);
168 |   }
169 | 
170 |   async readGraph(): Promise<KnowledgeGraph> {
171 |     return this.loadGraph();
172 |   }
173 | 
174 |   // Basic case-insensitive substring search over entity names, types, and observations
175 |   async searchNodes(query: string): Promise<KnowledgeGraph> {
176 |     const graph = await this.loadGraph();
177 |     
178 |     // Filter entities
179 |     const filteredEntities = graph.entities.filter(e => 
180 |       e.name.toLowerCase().includes(query.toLowerCase()) ||
181 |       e.entityType.toLowerCase().includes(query.toLowerCase()) ||
182 |       e.observations.some(o => o.toLowerCase().includes(query.toLowerCase()))
183 |     );
184 |   
185 |     // Create a Set of filtered entity names for quick lookup
186 |     const filteredEntityNames = new Set(filteredEntities.map(e => e.name));
187 |   
188 |     // Filter relations to only include those between filtered entities
189 |     const filteredRelations = graph.relations.filter(r => 
190 |       filteredEntityNames.has(r.from) && filteredEntityNames.has(r.to)
191 |     );
192 |   
193 |     const filteredGraph: KnowledgeGraph = {
194 |       entities: filteredEntities,
195 |       relations: filteredRelations,
196 |     };
197 |   
198 |     return filteredGraph;
199 |   }
200 | 
201 |   async openNodes(names: string[]): Promise<KnowledgeGraph> {
202 |     const graph = await this.loadGraph();
203 |     
204 |     // Filter entities
205 |     const filteredEntities = graph.entities.filter(e => names.includes(e.name));
206 |   
207 |     // Create a Set of filtered entity names for quick lookup
208 |     const filteredEntityNames = new Set(filteredEntities.map(e => e.name));
209 |   
210 |     // Filter relations to only include those between filtered entities
211 |     const filteredRelations = graph.relations.filter(r => 
212 |       filteredEntityNames.has(r.from) && filteredEntityNames.has(r.to)
213 |     );
214 |   
215 |     const filteredGraph: KnowledgeGraph = {
216 |       entities: filteredEntities,
217 |       relations: filteredRelations,
218 |     };
219 |   
220 |     return filteredGraph;
221 |   }
222 | }
223 | 
224 | let knowledgeGraphManager: KnowledgeGraphManager;
225 | 
226 | // Zod schemas for entities and relations
227 | const EntitySchema = z.object({
228 |   name: z.string().describe("The name of the entity"),
229 |   entityType: z.string().describe("The type of the entity"),
230 |   observations: z.array(z.string()).describe("An array of observation contents associated with the entity")
231 | });
232 | 
233 | const RelationSchema = z.object({
234 |   from: z.string().describe("The name of the entity where the relation starts"),
235 |   to: z.string().describe("The name of the entity where the relation ends"),
236 |   relationType: z.string().describe("The type of the relation")
237 | });
238 | 
239 | // The server instance and tools exposed to Claude
240 | const server = new McpServer({
241 |   name: "memory-server",
242 |   version: "0.6.3",
243 | });
244 | 
245 | // Register create_entities tool
246 | server.registerTool(
247 |   "create_entities",
248 |   {
249 |     title: "Create Entities",
250 |     description: "Create multiple new entities in the knowledge graph",
251 |     inputSchema: {
252 |       entities: z.array(EntitySchema)
253 |     },
254 |     outputSchema: {
255 |       entities: z.array(EntitySchema)
256 |     }
257 |   },
258 |   async ({ entities }) => {
259 |     const result = await knowledgeGraphManager.createEntities(entities);
260 |     return {
261 |       content: [{ type: "text" as const, text: JSON.stringify(result, null, 2) }],
262 |       structuredContent: { entities: result }
263 |     };
264 |   }
265 | );
266 | 
267 | // Register create_relations tool
268 | server.registerTool(
269 |   "create_relations",
270 |   {
271 |     title: "Create Relations",
272 |     description: "Create multiple new relations between entities in the knowledge graph. Relations should be in active voice",
273 |     inputSchema: {
274 |       relations: z.array(RelationSchema)
275 |     },
276 |     outputSchema: {
277 |       relations: z.array(RelationSchema)
278 |     }
279 |   },
280 |   async ({ relations }) => {
281 |     const result = await knowledgeGraphManager.createRelations(relations);
282 |     return {
283 |       content: [{ type: "text" as const, text: JSON.stringify(result, null, 2) }],
284 |       structuredContent: { relations: result }
285 |     };
286 |   }
287 | );
288 | 
289 | // Register add_observations tool
290 | server.registerTool(
291 |   "add_observations",
292 |   {
293 |     title: "Add Observations",
294 |     description: "Add new observations to existing entities in the knowledge graph",
295 |     inputSchema: {
296 |       observations: z.array(z.object({
297 |         entityName: z.string().describe("The name of the entity to add the observations to"),
298 |         contents: z.array(z.string()).describe("An array of observation contents to add")
299 |       }))
300 |     },
301 |     outputSchema: {
302 |       results: z.array(z.object({
303 |         entityName: z.string(),
304 |         addedObservations: z.array(z.string())
305 |       }))
306 |     }
307 |   },
308 |   async ({ observations }) => {
309 |     const result = await knowledgeGraphManager.addObservations(observations);
310 |     return {
311 |       content: [{ type: "text" as const, text: JSON.stringify(result, null, 2) }],
312 |       structuredContent: { results: result }
313 |     };
314 |   }
315 | );
316 | 
317 | // Register delete_entities tool
318 | server.registerTool(
319 |   "delete_entities",
320 |   {
321 |     title: "Delete Entities",
322 |     description: "Delete multiple entities and their associated relations from the knowledge graph",
323 |     inputSchema: {
324 |       entityNames: z.array(z.string()).describe("An array of entity names to delete")
325 |     },
326 |     outputSchema: {
327 |       success: z.boolean(),
328 |       message: z.string()
329 |     }
330 |   },
331 |   async ({ entityNames }) => {
332 |     await knowledgeGraphManager.deleteEntities(entityNames);
333 |     return {
334 |       content: [{ type: "text" as const, text: "Entities deleted successfully" }],
335 |       structuredContent: { success: true, message: "Entities deleted successfully" }
336 |     };
337 |   }
338 | );
339 | 
340 | // Register delete_observations tool
341 | server.registerTool(
342 |   "delete_observations",
343 |   {
344 |     title: "Delete Observations",
345 |     description: "Delete specific observations from entities in the knowledge graph",
346 |     inputSchema: {
347 |       deletions: z.array(z.object({
348 |         entityName: z.string().describe("The name of the entity containing the observations"),
349 |         observations: z.array(z.string()).describe("An array of observations to delete")
350 |       }))
351 |     },
352 |     outputSchema: {
353 |       success: z.boolean(),
354 |       message: z.string()
355 |     }
356 |   },
357 |   async ({ deletions }) => {
358 |     await knowledgeGraphManager.deleteObservations(deletions);
359 |     return {
360 |       content: [{ type: "text" as const, text: "Observations deleted successfully" }],
361 |       structuredContent: { success: true, message: "Observations deleted successfully" }
362 |     };
363 |   }
364 | );
365 | 
366 | // Register delete_relations tool
367 | server.registerTool(
368 |   "delete_relations",
369 |   {
370 |     title: "Delete Relations",
371 |     description: "Delete multiple relations from the knowledge graph",
372 |     inputSchema: {
373 |       relations: z.array(RelationSchema).describe("An array of relations to delete")
374 |     },
375 |     outputSchema: {
376 |       success: z.boolean(),
377 |       message: z.string()
378 |     }
379 |   },
380 |   async ({ relations }) => {
381 |     await knowledgeGraphManager.deleteRelations(relations);
382 |     return {
383 |       content: [{ type: "text" as const, text: "Relations deleted successfully" }],
384 |       structuredContent: { success: true, message: "Relations deleted successfully" }
385 |     };
386 |   }
387 | );
388 | 
389 | // Register read_graph tool
390 | server.registerTool(
391 |   "read_graph",
392 |   {
393 |     title: "Read Graph",
394 |     description: "Read the entire knowledge graph",
395 |     inputSchema: {},
396 |     outputSchema: {
397 |       entities: z.array(EntitySchema),
398 |       relations: z.array(RelationSchema)
399 |     }
400 |   },
401 |   async () => {
402 |     const graph = await knowledgeGraphManager.readGraph();
403 |     return {
404 |       content: [{ type: "text" as const, text: JSON.stringify(graph, null, 2) }],
405 |       structuredContent: { ...graph }
406 |     };
407 |   }
408 | );
409 | 
410 | // Register search_nodes tool
411 | server.registerTool(
412 |   "search_nodes",
413 |   {
414 |     title: "Search Nodes",
415 |     description: "Search for nodes in the knowledge graph based on a query",
416 |     inputSchema: {
417 |       query: z.string().describe("The search query to match against entity names, types, and observation content")
418 |     },
419 |     outputSchema: {
420 |       entities: z.array(EntitySchema),
421 |       relations: z.array(RelationSchema)
422 |     }
423 |   },
424 |   async ({ query }) => {
425 |     const graph = await knowledgeGraphManager.searchNodes(query);
426 |     return {
427 |       content: [{ type: "text" as const, text: JSON.stringify(graph, null, 2) }],
428 |       structuredContent: { ...graph }
429 |     };
430 |   }
431 | );
432 | 
433 | // Register open_nodes tool
434 | server.registerTool(
435 |   "open_nodes",
436 |   {
437 |     title: "Open Nodes",
438 |     description: "Open specific nodes in the knowledge graph by their names",
439 |     inputSchema: {
440 |       names: z.array(z.string()).describe("An array of entity names to retrieve")
441 |     },
442 |     outputSchema: {
443 |       entities: z.array(EntitySchema),
444 |       relations: z.array(RelationSchema)
445 |     }
446 |   },
447 |   async ({ names }) => {
448 |     const graph = await knowledgeGraphManager.openNodes(names);
449 |     return {
450 |       content: [{ type: "text" as const, text: JSON.stringify(graph, null, 2) }],
451 |       structuredContent: { ...graph }
452 |     };
453 |   }
454 | );
455 | 
456 | async function main() {
457 |   // Initialize memory file path with backward compatibility
458 |   MEMORY_FILE_PATH = await ensureMemoryFilePath();
459 | 
460 |   // Initialize knowledge graph manager with the memory file path
461 |   knowledgeGraphManager = new KnowledgeGraphManager(MEMORY_FILE_PATH);
462 | 
463 |   const transport = new StdioServerTransport();
464 |   await server.connect(transport);
465 |   console.error("Knowledge Graph MCP Server running on stdio");
466 | }
467 | 
468 | main().catch((error) => {
469 |   console.error("Fatal error in main():", error);
470 |   process.exit(1);
471 | });
472 | 
```
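
Since `saveGraph()` writes one self-describing JSON object per line, the resulting `memory.jsonl` file can be inspected with any JSONL-aware tool. A minimal sketch of a standalone reader, assuming a local `memory.jsonl` path (the file name matches the server's default, but the path here is illustrative):

```python
# Illustrative reader for the memory.jsonl format produced by saveGraph() above.
import json

entities, relations = [], []
with open("memory.jsonl", encoding="utf-8") as f:
    for line in f:
        if not line.strip():
            continue  # loadGraph() likewise skips blank lines
        item = json.loads(line)
        if item["type"] == "entity":
            entities.append(item)
        elif item["type"] == "relation":
            relations.append(item)

print(f"{len(entities)} entities, {len(relations)} relations")
```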

--------------------------------------------------------------------------------
/src/time/test/time_server_test.py:
--------------------------------------------------------------------------------

```python
  1 | 
  2 | from freezegun import freeze_time
  3 | from mcp.shared.exceptions import McpError
  4 | import pytest
  5 | from unittest.mock import patch
  6 | from zoneinfo import ZoneInfo
  7 | 
  8 | from mcp_server_time.server import TimeServer, get_local_tz
  9 | 
 10 | 
 11 | @pytest.mark.parametrize(
 12 |     "test_time,timezone,expected",
 13 |     [
 14 |         # UTC+1 non-DST
 15 |         (
 16 |             "2024-01-01 12:00:00+00:00",
 17 |             "Europe/Warsaw",
 18 |             {
 19 |                 "timezone": "Europe/Warsaw",
 20 |                 "datetime": "2024-01-01T13:00:00+01:00",
 21 |                 "is_dst": False,
 22 |             },
 23 |         ),
 24 |         # UTC non-DST
 25 |         (
 26 |             "2024-01-01 12:00:00+00:00",
 27 |             "Europe/London",
 28 |             {
 29 |                 "timezone": "Europe/London",
 30 |                 "datetime": "2024-01-01T12:00:00+00:00",
 31 |                 "is_dst": False,
 32 |             },
 33 |         ),
 34 |         # UTC-5 non-DST
 35 |         (
 36 |             "2024-01-01 12:00:00-00:00",
 37 |             "America/New_York",
 38 |             {
 39 |                 "timezone": "America/New_York",
 40 |                 "datetime": "2024-01-01T07:00:00-05:00",
 41 |                 "is_dst": False,
 42 |             },
 43 |         ),
 44 |         # UTC+1 DST
 45 |         (
 46 |             "2024-03-31 12:00:00+00:00",
 47 |             "Europe/Warsaw",
 48 |             {
 49 |                 "timezone": "Europe/Warsaw",
 50 |                 "datetime": "2024-03-31T14:00:00+02:00",
 51 |                 "is_dst": True,
 52 |             },
 53 |         ),
 54 |         # UTC DST
 55 |         (
 56 |             "2024-03-31 12:00:00+00:00",
 57 |             "Europe/London",
 58 |             {
 59 |                 "timezone": "Europe/London",
 60 |                 "datetime": "2024-03-31T13:00:00+01:00",
 61 |                 "is_dst": True,
 62 |             },
 63 |         ),
 64 |         # UTC-5 DST
 65 |         (
 66 |             "2024-03-31 12:00:00-00:00",
 67 |             "America/New_York",
 68 |             {
 69 |                 "timezone": "America/New_York",
 70 |                 "datetime": "2024-03-31T08:00:00-04:00",
 71 |                 "is_dst": True,
 72 |             },
 73 |         ),
 74 |     ],
 75 | )
 76 | def test_get_current_time(test_time, timezone, expected):
 77 |     with freeze_time(test_time):
 78 |         time_server = TimeServer()
 79 |         result = time_server.get_current_time(timezone)
 80 |         assert result.timezone == expected["timezone"]
 81 |         assert result.datetime == expected["datetime"]
 82 |         assert result.is_dst == expected["is_dst"]
 83 | 
 84 | 
 85 | def test_get_current_time_with_invalid_timezone():
 86 |     time_server = TimeServer()
 87 |     with pytest.raises(
 88 |         McpError,
 89 |         match=r"Invalid timezone: 'No time zone found with key Invalid/Timezone'",
 90 |     ):
 91 |         time_server.get_current_time("Invalid/Timezone")
 92 | 
 93 | 
 94 | @pytest.mark.parametrize(
 95 |     "source_tz,time_str,target_tz,expected_error",
 96 |     [
 97 |         (
 98 |             "invalid_tz",
 99 |             "12:00",
100 |             "Europe/London",
101 |             "Invalid timezone: 'No time zone found with key invalid_tz'",
102 |         ),
103 |         (
104 |             "Europe/Warsaw",
105 |             "12:00",
106 |             "invalid_tz",
107 |             "Invalid timezone: 'No time zone found with key invalid_tz'",
108 |         ),
109 |         (
110 |             "Europe/Warsaw",
111 |             "25:00",
112 |             "Europe/London",
113 |             "Invalid time format. Expected HH:MM [24-hour format]",
114 |         ),
115 |     ],
116 | )
117 | def test_convert_time_errors(source_tz, time_str, target_tz, expected_error):
118 |     time_server = TimeServer()
119 |     with pytest.raises((McpError, ValueError), match=expected_error):
120 |         time_server.convert_time(source_tz, time_str, target_tz)
121 | 
122 | 
123 | @pytest.mark.parametrize(
124 |     "test_time,source_tz,time_str,target_tz,expected",
125 |     [
126 |         # Basic case: Standard time conversion between Warsaw and London (1 hour difference)
127 |         # Warsaw is UTC+1, London is UTC+0
128 |         (
129 |             "2024-01-01 00:00:00+00:00",
130 |             "Europe/Warsaw",
131 |             "12:00",
132 |             "Europe/London",
133 |             {
134 |                 "source": {
135 |                     "timezone": "Europe/Warsaw",
136 |                     "datetime": "2024-01-01T12:00:00+01:00",
137 |                     "is_dst": False,
138 |                 },
139 |                 "target": {
140 |                     "timezone": "Europe/London",
141 |                     "datetime": "2024-01-01T11:00:00+00:00",
142 |                     "is_dst": False,
143 |                 },
144 |                 "time_difference": "-1.0h",
145 |             },
146 |         ),
147 |         # Reverse case of above: London to Warsaw conversion
148 |         # Shows how time difference is positive when going east
149 |         (
150 |             "2024-01-01 00:00:00+00:00",
151 |             "Europe/London",
152 |             "12:00",
153 |             "Europe/Warsaw",
154 |             {
155 |                 "source": {
156 |                     "timezone": "Europe/London",
157 |                     "datetime": "2024-01-01T12:00:00+00:00",
158 |                     "is_dst": False,
159 |                 },
160 |                 "target": {
161 |                     "timezone": "Europe/Warsaw",
162 |                     "datetime": "2024-01-01T13:00:00+01:00",
163 |                     "is_dst": False,
164 |                 },
165 |                 "time_difference": "+1.0h",
166 |             },
167 |         ),
168 |         # Edge case: Different DST periods between Europe and USA
169 |         # Europe ends DST on Oct 27, while USA waits until Nov 3
170 |         # This creates a one-week period where Europe is in standard time but USA still observes DST
171 |         (
172 |             "2024-10-28 00:00:00+00:00",
173 |             "Europe/Warsaw",
174 |             "12:00",
175 |             "America/New_York",
176 |             {
177 |                 "source": {
178 |                     "timezone": "Europe/Warsaw",
179 |                     "datetime": "2024-10-28T12:00:00+01:00",
180 |                     "is_dst": False,
181 |                 },
182 |                 "target": {
183 |                     "timezone": "America/New_York",
184 |                     "datetime": "2024-10-28T07:00:00-04:00",
185 |                     "is_dst": True,
186 |                 },
187 |                 "time_difference": "-5.0h",
188 |             },
189 |         ),
190 |         # Follow-up to previous case: After both regions end DST
191 |         # Shows how time difference increases by 1 hour when USA also ends DST
192 |         (
193 |             "2024-11-04 00:00:00+00:00",
194 |             "Europe/Warsaw",
195 |             "12:00",
196 |             "America/New_York",
197 |             {
198 |                 "source": {
199 |                     "timezone": "Europe/Warsaw",
200 |                     "datetime": "2024-11-04T12:00:00+01:00",
201 |                     "is_dst": False,
202 |                 },
203 |                 "target": {
204 |                     "timezone": "America/New_York",
205 |                     "datetime": "2024-11-04T06:00:00-05:00",
206 |                     "is_dst": False,
207 |                 },
208 |                 "time_difference": "-6.0h",
209 |             },
210 |         ),
211 |         # Edge case: Nepal's unusual UTC+5:45 offset
212 |         # One of the few time zones using 45-minute offset
213 |         (
214 |             "2024-01-01 00:00:00+00:00",
215 |             "Europe/Warsaw",
216 |             "12:00",
217 |             "Asia/Kathmandu",
218 |             {
219 |                 "source": {
220 |                     "timezone": "Europe/Warsaw",
221 |                     "datetime": "2024-01-01T12:00:00+01:00",
222 |                     "is_dst": False,
223 |                 },
224 |                 "target": {
225 |                     "timezone": "Asia/Kathmandu",
226 |                     "datetime": "2024-01-01T16:45:00+05:45",
227 |                     "is_dst": False,
228 |                 },
229 |                 "time_difference": "+4.75h",
230 |             },
231 |         ),
232 |         # Reverse case for Nepal
233 |         # Demonstrates how 45-minute offset works in opposite direction
234 |         (
235 |             "2024-01-01 00:00:00+00:00",
236 |             "Asia/Kathmandu",
237 |             "12:00",
238 |             "Europe/Warsaw",
239 |             {
240 |                 "source": {
241 |                     "timezone": "Asia/Kathmandu",
242 |                     "datetime": "2024-01-01T12:00:00+05:45",
243 |                     "is_dst": False,
244 |                 },
245 |                 "target": {
246 |                     "timezone": "Europe/Warsaw",
247 |                     "datetime": "2024-01-01T07:15:00+01:00",
248 |                     "is_dst": False,
249 |                 },
250 |                 "time_difference": "-4.75h",
251 |             },
252 |         ),
253 |         # Edge case: Lord Howe Island's unique DST rules
254 |         # One of the few places using 30-minute DST shift
255 |         # During summer (DST), they use UTC+11
256 |         (
257 |             "2024-01-01 00:00:00+00:00",
258 |             "Europe/Warsaw",
259 |             "12:00",
260 |             "Australia/Lord_Howe",
261 |             {
262 |                 "source": {
263 |                     "timezone": "Europe/Warsaw",
264 |                     "datetime": "2024-01-01T12:00:00+01:00",
265 |                     "is_dst": False,
266 |                 },
267 |                 "target": {
268 |                     "timezone": "Australia/Lord_Howe",
269 |                     "datetime": "2024-01-01T22:00:00+11:00",
270 |                     "is_dst": True,
271 |                 },
272 |                 "time_difference": "+10.0h",
273 |             },
274 |         ),
275 |         # Second Lord Howe Island case: During their standard time
276 |         # Shows transition to UTC+10:30 after DST ends
277 |         (
278 |             "2024-04-07 00:00:00+00:00",
279 |             "Europe/Warsaw",
280 |             "12:00",
281 |             "Australia/Lord_Howe",
282 |             {
283 |                 "source": {
284 |                     "timezone": "Europe/Warsaw",
285 |                     "datetime": "2024-04-07T12:00:00+02:00",
286 |                     "is_dst": True,
287 |                 },
288 |                 "target": {
289 |                     "timezone": "Australia/Lord_Howe",
290 |                     "datetime": "2024-04-07T20:30:00+10:30",
291 |                     "is_dst": False,
292 |                 },
293 |                 "time_difference": "+8.5h",
294 |             },
295 |         ),
296 |         # Edge case: Date line crossing with Samoa
297 |         # Demonstrates how a single time conversion can result in a date change
298 |         # Samoa is UTC+13, creating almost a full day difference with Warsaw
299 |         (
300 |             "2024-01-01 00:00:00+00:00",
301 |             "Europe/Warsaw",
302 |             "23:00",
303 |             "Pacific/Apia",
304 |             {
305 |                 "source": {
306 |                     "timezone": "Europe/Warsaw",
307 |                     "datetime": "2024-01-01T23:00:00+01:00",
308 |                     "is_dst": False,
309 |                 },
310 |                 "target": {
311 |                     "timezone": "Pacific/Apia",
312 |                     "datetime": "2024-01-02T11:00:00+13:00",
313 |                     "is_dst": False,
314 |                 },
315 |                 "time_difference": "+12.0h",
316 |             },
317 |         ),
318 |         # Edge case: Iran's unusual half-hour offset
319 |         # Demonstrates conversion with Iran's UTC+3:30 timezone
320 |         (
321 |             "2024-03-21 00:00:00+00:00",
322 |             "Europe/Warsaw",
323 |             "12:00",
324 |             "Asia/Tehran",
325 |             {
326 |                 "source": {
327 |                     "timezone": "Europe/Warsaw",
328 |                     "datetime": "2024-03-21T12:00:00+01:00",
329 |                     "is_dst": False,
330 |                 },
331 |                 "target": {
332 |                     "timezone": "Asia/Tehran",
333 |                     "datetime": "2024-03-21T14:30:00+03:30",
334 |                     "is_dst": False,
335 |                 },
336 |                 "time_difference": "+2.5h",
337 |             },
338 |         ),
339 |         # Edge case: Venezuela's unusual -4:30 offset (historical)
340 |         # In 2016, Venezuela moved from -4:30 to -4:00
341 |         # Useful for testing historical dates
342 |         (
343 |             "2016-04-30 00:00:00+00:00",  # Just before the change
344 |             "Europe/Warsaw",
345 |             "12:00",
346 |             "America/Caracas",
347 |             {
348 |                 "source": {
349 |                     "timezone": "Europe/Warsaw",
350 |                     "datetime": "2016-04-30T12:00:00+02:00",
351 |                     "is_dst": True,
352 |                 },
353 |                 "target": {
354 |                     "timezone": "America/Caracas",
355 |                     "datetime": "2016-04-30T05:30:00-04:30",
356 |                     "is_dst": False,
357 |                 },
358 |                 "time_difference": "-6.5h",
359 |             },
360 |         ),
361 |         # Edge case: Israel's variable DST
362 |         # Israel's DST changes don't follow a fixed pattern
363 |         # They often change dates year-to-year based on Hebrew calendar
364 |         (
365 |             "2024-10-27 00:00:00+00:00",
366 |             "Europe/Warsaw",
367 |             "12:00",
368 |             "Asia/Jerusalem",
369 |             {
370 |                 "source": {
371 |                     "timezone": "Europe/Warsaw",
372 |                     "datetime": "2024-10-27T12:00:00+01:00",
373 |                     "is_dst": False,
374 |                 },
375 |                 "target": {
376 |                     "timezone": "Asia/Jerusalem",
377 |                     "datetime": "2024-10-27T13:00:00+02:00",
378 |                     "is_dst": False,
379 |                 },
380 |                 "time_difference": "+1.0h",
381 |             },
382 |         ),
383 |         # Edge case: Antarctica/Troll station
384 |         # Only timezone that uses UTC+0 in winter and UTC+2 in summer
385 |         # One of the few zones with exactly 2 hours DST difference
386 |         (
387 |             "2024-03-31 00:00:00+00:00",
388 |             "Europe/Warsaw",
389 |             "12:00",
390 |             "Antarctica/Troll",
391 |             {
392 |                 "source": {
393 |                     "timezone": "Europe/Warsaw",
394 |                     "datetime": "2024-03-31T12:00:00+02:00",
395 |                     "is_dst": True,
396 |                 },
397 |                 "target": {
398 |                     "timezone": "Antarctica/Troll",
399 |                     "datetime": "2024-03-31T12:00:00+02:00",
400 |                     "is_dst": True,
401 |                 },
402 |                 "time_difference": "+0.0h",
403 |             },
404 |         ),
405 |         # Edge case: Kiribati date line anomaly
406 |         # After skipping Dec 31, 1994, eastern Kiribati is UTC+14
407 |         # The furthest forward timezone in the world
408 |         (
409 |             "2024-01-01 00:00:00+00:00",
410 |             "Europe/Warsaw",
411 |             "23:00",
412 |             "Pacific/Kiritimati",
413 |             {
414 |                 "source": {
415 |                     "timezone": "Europe/Warsaw",
416 |                     "datetime": "2024-01-01T23:00:00+01:00",
417 |                     "is_dst": False,
418 |                 },
419 |                 "target": {
420 |                     "timezone": "Pacific/Kiritimati",
421 |                     "datetime": "2024-01-02T12:00:00+14:00",
422 |                     "is_dst": False,
423 |                 },
424 |                 "time_difference": "+13.0h",
425 |             },
426 |         ),
427 |         # Edge case: Chatham Islands, New Zealand
428 |         # Uses unusual 45-minute offset AND observes DST
429 |         # UTC+12:45 in standard time, UTC+13:45 in DST
430 |         (
431 |             "2024-01-01 00:00:00+00:00",
432 |             "Europe/Warsaw",
433 |             "12:00",
434 |             "Pacific/Chatham",
435 |             {
436 |                 "source": {
437 |                     "timezone": "Europe/Warsaw",
438 |                     "datetime": "2024-01-01T12:00:00+01:00",
439 |                     "is_dst": False,
440 |                 },
441 |                 "target": {
442 |                     "timezone": "Pacific/Chatham",
443 |                     "datetime": "2024-01-02T00:45:00+13:45",
444 |                     "is_dst": True,
445 |                 },
446 |                 "time_difference": "+12.75h",
447 |             },
448 |         ),
449 |     ],
450 | )
451 | def test_convert_time(test_time, source_tz, time_str, target_tz, expected):
452 |     with freeze_time(test_time):
453 |         time_server = TimeServer()
454 |         result = time_server.convert_time(source_tz, time_str, target_tz)
455 | 
456 |         assert result.source.timezone == expected["source"]["timezone"]
457 |         assert result.target.timezone == expected["target"]["timezone"]
458 |         assert result.source.datetime == expected["source"]["datetime"]
459 |         assert result.target.datetime == expected["target"]["datetime"]
460 |         assert result.source.is_dst == expected["source"]["is_dst"]
461 |         assert result.target.is_dst == expected["target"]["is_dst"]
462 |         assert result.time_difference == expected["time_difference"]
463 | 
464 | 
465 | def test_get_local_tz_with_override():
466 |     """Test that timezone override works correctly."""
467 |     result = get_local_tz("America/New_York")
468 |     assert str(result) == "America/New_York"
469 |     assert isinstance(result, ZoneInfo)
470 | 
471 | 
472 | def test_get_local_tz_with_invalid_override():
473 |     """Test that invalid timezone override raises an error."""
474 |     with pytest.raises(Exception):  # ZoneInfo will raise an exception
475 |         get_local_tz("Invalid/Timezone")
476 | 
477 | 
478 | @patch('mcp_server_time.server.get_localzone_name')
479 | def test_get_local_tz_with_valid_iana_name(mock_get_localzone):
480 |     """Test that valid IANA timezone names from tzlocal work correctly."""
481 |     mock_get_localzone.return_value = "Europe/London"
482 |     result = get_local_tz()
483 |     assert str(result) == "Europe/London"
484 |     assert isinstance(result, ZoneInfo)
485 | 
486 | 
487 | @patch('mcp_server_time.server.get_localzone_name')
488 | def test_get_local_tz_when_none_returned(mock_get_localzone):
489 |     """Test default to UTC when tzlocal returns None."""
490 |     mock_get_localzone.return_value = None
491 |     result = get_local_tz()
492 |     assert str(result) == "UTC"
493 | 
494 | 
495 | @patch('mcp_server_time.server.get_localzone_name')
496 | def test_get_local_tz_handles_windows_timezones(mock_get_localzone):
497 |     """Test that tzlocal properly handles Windows timezone names.
498 |     
499 |     Note: tzlocal should convert Windows names like 'Pacific Standard Time'
500 |     to proper IANA names like 'America/Los_Angeles'.
501 |     """
502 |     # tzlocal should return IANA names even on Windows
503 |     mock_get_localzone.return_value = "America/Los_Angeles"
504 |     result = get_local_tz()
505 |     assert str(result) == "America/Los_Angeles"
506 |     assert isinstance(result, ZoneInfo)
507 | 
508 | 
509 | @pytest.mark.parametrize(
510 |     "timezone_name",
511 |     [
512 |         "America/New_York",
513 |         "Europe/Paris", 
514 |         "Asia/Tokyo",
515 |         "Australia/Sydney",
516 |         "Africa/Cairo",
517 |         "America/Sao_Paulo",
518 |         "Pacific/Auckland",
519 |         "UTC",
520 |     ],
521 | )
522 | @patch('mcp_server_time.server.get_localzone_name')
523 | def test_get_local_tz_various_timezones(mock_get_localzone, timezone_name):
524 |     """Test various timezone names that tzlocal might return."""
525 |     mock_get_localzone.return_value = timezone_name
526 |     result = get_local_tz()
527 |     assert str(result) == timezone_name
528 |     assert isinstance(result, ZoneInfo)
529 | 
```
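
The `time_difference` values asserted above follow from the UTC offsets of the two zones at the frozen instant. A quick sketch reproducing the Warsaw-to-Kathmandu case with only the standard library (the `+4.75h` formatting shown is illustrative; the server's exact rendering may differ):

```python
# Reproduce the Europe/Warsaw -> Asia/Kathmandu expectation from the table above.
from datetime import datetime
from zoneinfo import ZoneInfo

source = datetime(2024, 1, 1, 12, 0, tzinfo=ZoneInfo("Europe/Warsaw"))
target = source.astimezone(ZoneInfo("Asia/Kathmandu"))

hours = (target.utcoffset() - source.utcoffset()).total_seconds() / 3600
print(target.isoformat())   # 2024-01-01T16:45:00+05:45
print(f"{hours:+.2f}h")     # +4.75h
```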

--------------------------------------------------------------------------------
/src/filesystem/__tests__/lib.test.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
  2 | import fs from 'fs/promises';
  3 | import path from 'path';
  4 | import os from 'os';
  5 | import {
  6 |   // Pure utility functions
  7 |   formatSize,
  8 |   normalizeLineEndings,
  9 |   createUnifiedDiff,
 10 |   // Security & validation functions
 11 |   validatePath,
 12 |   setAllowedDirectories,
 13 |   // File operations
 14 |   getFileStats,
 15 |   readFileContent,
 16 |   writeFileContent,
 17 |   // Search & filtering functions
 18 |   searchFilesWithValidation,
 19 |   // File editing functions
 20 |   applyFileEdits,
 21 |   tailFile,
 22 |   headFile
 23 | } from '../lib.js';
 24 | 
 25 | // Mock fs module
 26 | vi.mock('fs/promises');
 27 | const mockFs = fs as any;
 28 | 
 29 | describe('Lib Functions', () => {
 30 |   beforeEach(() => {
 31 |     vi.clearAllMocks();
 32 |     // Set up allowed directories for tests
 33 |     const allowedDirs = process.platform === 'win32' ? ['C:\\Users\\test', 'C:\\temp', 'C:\\allowed'] : ['/home/user', '/tmp', '/allowed'];
 34 |     setAllowedDirectories(allowedDirs);
 35 |   });
 36 | 
 37 |   afterEach(() => {
 38 |     vi.restoreAllMocks();
 39 |     // Clear allowed directories after tests
 40 |     setAllowedDirectories([]);
 41 |   });
 42 | 
 43 |   describe('Pure Utility Functions', () => {
 44 |     describe('formatSize', () => {
 45 |       it('formats bytes correctly', () => {
 46 |         expect(formatSize(0)).toBe('0 B');
 47 |         expect(formatSize(512)).toBe('512 B');
 48 |         expect(formatSize(1024)).toBe('1.00 KB');
 49 |         expect(formatSize(1536)).toBe('1.50 KB');
 50 |         expect(formatSize(1048576)).toBe('1.00 MB');
 51 |         expect(formatSize(1073741824)).toBe('1.00 GB');
 52 |         expect(formatSize(1099511627776)).toBe('1.00 TB');
 53 |       });
 54 | 
 55 |       it('handles edge cases', () => {
 56 |         expect(formatSize(1023)).toBe('1023 B');
 57 |         expect(formatSize(1025)).toBe('1.00 KB');
 58 |         expect(formatSize(1048575)).toBe('1024.00 KB');
 59 |       });
 60 | 
 61 |       it('handles very large numbers beyond TB', () => {
 62 |         // The function only supports up to TB, so very large numbers will show as TB
 63 |         expect(formatSize(1024 * 1024 * 1024 * 1024 * 1024)).toBe('1024.00 TB');
 64 |         expect(formatSize(Number.MAX_SAFE_INTEGER)).toContain('TB');
 65 |       });
 66 | 
 67 |       it('handles negative numbers', () => {
 68 |         // Negative numbers will result in NaN for the log calculation
 69 |         expect(formatSize(-1024)).toContain('NaN');
 70 |         expect(formatSize(-0)).toBe('0 B');
 71 |       });
 72 | 
 73 |       it('handles decimal numbers', () => {
 74 |         expect(formatSize(1536.5)).toBe('1.50 KB');
 75 |         expect(formatSize(1023.9)).toBe('1023.9 B');
 76 |       });
 77 | 
 78 |       it('handles very small positive numbers', () => {
 79 |         expect(formatSize(1)).toBe('1 B');
 80 |         expect(formatSize(0.5)).toBe('0.5 B');
 81 |         expect(formatSize(0.1)).toBe('0.1 B');
 82 |       });
 83 |     });
 84 | 
 85 |     describe('normalizeLineEndings', () => {
 86 |       it('converts CRLF to LF', () => {
 87 |         expect(normalizeLineEndings('line1\r\nline2\r\nline3')).toBe('line1\nline2\nline3');
 88 |       });
 89 | 
 90 |       it('leaves LF unchanged', () => {
 91 |         expect(normalizeLineEndings('line1\nline2\nline3')).toBe('line1\nline2\nline3');
 92 |       });
 93 | 
 94 |       it('handles mixed line endings', () => {
 95 |         expect(normalizeLineEndings('line1\r\nline2\nline3\r\n')).toBe('line1\nline2\nline3\n');
 96 |       });
 97 | 
 98 |       it('handles empty string', () => {
 99 |         expect(normalizeLineEndings('')).toBe('');
100 |       });
101 |     });
102 | 
103 |     describe('createUnifiedDiff', () => {
104 |       it('creates diff for simple changes', () => {
105 |         const original = 'line1\nline2\nline3';
106 |         const modified = 'line1\nmodified line2\nline3';
107 |         const diff = createUnifiedDiff(original, modified, 'test.txt');
108 |         
109 |         expect(diff).toContain('--- test.txt');
110 |         expect(diff).toContain('+++ test.txt');
111 |         expect(diff).toContain('-line2');
112 |         expect(diff).toContain('+modified line2');
113 |       });
114 | 
115 |       it('handles CRLF normalization', () => {
116 |         const original = 'line1\r\nline2\r\n';
117 |         const modified = 'line1\nmodified line2\n';
118 |         const diff = createUnifiedDiff(original, modified);
119 |         
120 |         expect(diff).toContain('-line2');
121 |         expect(diff).toContain('+modified line2');
122 |       });
123 | 
124 |       it('handles identical content', () => {
125 |         const content = 'line1\nline2\nline3';
126 |         const diff = createUnifiedDiff(content, content);
127 |         
128 |         // Should not contain any +/- lines for identical content (excluding header lines)
129 |         expect(diff.split('\n').filter((line: string) => line.startsWith('+++') || line.startsWith('---'))).toHaveLength(2);
130 |         expect(diff.split('\n').filter((line: string) => line.startsWith('+') && !line.startsWith('+++'))).toHaveLength(0);
131 |         expect(diff.split('\n').filter((line: string) => line.startsWith('-') && !line.startsWith('---'))).toHaveLength(0);
132 |       });
133 | 
134 |       it('handles empty content', () => {
135 |         const diff = createUnifiedDiff('', '');
136 |         expect(diff).toContain('--- file');
137 |         expect(diff).toContain('+++ file');
138 |       });
139 | 
140 |       it('handles default filename parameter', () => {
141 |         const diff = createUnifiedDiff('old', 'new');
142 |         expect(diff).toContain('--- file');
143 |         expect(diff).toContain('+++ file');
144 |       });
145 | 
146 |       it('handles custom filename', () => {
147 |         const diff = createUnifiedDiff('old', 'new', 'custom.txt');
148 |         expect(diff).toContain('--- custom.txt');
149 |         expect(diff).toContain('+++ custom.txt');
150 |       });
151 |     });
152 |   });
153 | 
154 |   describe('Security & Validation Functions', () => {
155 |     describe('validatePath', () => {
156 |       // Use Windows-compatible paths for testing
157 |       const allowedDirs = process.platform === 'win32' ? ['C:\\Users\\test', 'C:\\temp'] : ['/home/user', '/tmp'];
158 | 
159 |       beforeEach(() => {
160 |         mockFs.realpath.mockImplementation(async (path: any) => path.toString());
161 |       });
162 | 
163 |       it('validates allowed paths', async () => {
164 |         const testPath = process.platform === 'win32' ? 'C:\\Users\\test\\file.txt' : '/home/user/file.txt';
165 |         const result = await validatePath(testPath);
166 |         expect(result).toBe(testPath);
167 |       });
168 | 
169 |       it('rejects disallowed paths', async () => {
170 |         const testPath = process.platform === 'win32' ? 'C:\\Windows\\System32\\file.txt' : '/etc/passwd';
171 |         await expect(validatePath(testPath))
172 |           .rejects.toThrow('Access denied - path outside allowed directories');
173 |       });
174 | 
175 |       it('handles non-existent files by checking parent directory', async () => {
176 |         const newFilePath = process.platform === 'win32' ? 'C:\\Users\\test\\newfile.txt' : '/home/user/newfile.txt';
177 |         const parentPath = process.platform === 'win32' ? 'C:\\Users\\test' : '/home/user';
178 |         
179 |         // Create an error with the ENOENT code that the implementation checks for
180 |         const enoentError = new Error('ENOENT') as NodeJS.ErrnoException;
181 |         enoentError.code = 'ENOENT';
182 |         
183 |         mockFs.realpath
184 |           .mockRejectedValueOnce(enoentError)
185 |           .mockResolvedValueOnce(parentPath);
186 |         
187 |         const result = await validatePath(newFilePath);
188 |         expect(result).toBe(path.resolve(newFilePath));
189 |       });
190 | 
191 |       it('rejects when parent directory does not exist', async () => {
192 |         const newFilePath = process.platform === 'win32' ? 'C:\\Users\\test\\nonexistent\\newfile.txt' : '/home/user/nonexistent/newfile.txt';
193 |         
194 |         // Create errors with the ENOENT code
195 |         const enoentError1 = new Error('ENOENT') as NodeJS.ErrnoException;
196 |         enoentError1.code = 'ENOENT';
197 |         const enoentError2 = new Error('ENOENT') as NodeJS.ErrnoException;
198 |         enoentError2.code = 'ENOENT';
199 |         
200 |         mockFs.realpath
201 |           .mockRejectedValueOnce(enoentError1)
202 |           .mockRejectedValueOnce(enoentError2);
203 |         
204 |         await expect(validatePath(newFilePath))
205 |           .rejects.toThrow('Parent directory does not exist');
206 |       });
207 |     });
208 |   });
209 | 
210 |   describe('File Operations', () => {
211 |     describe('getFileStats', () => {
212 |       it('returns file statistics', async () => {
213 |         const mockStats = {
214 |           size: 1024,
215 |           birthtime: new Date('2023-01-01'),
216 |           mtime: new Date('2023-01-02'),
217 |           atime: new Date('2023-01-03'),
218 |           isDirectory: () => false,
219 |           isFile: () => true,
220 |           mode: 0o644
221 |         };
222 |         
223 |         mockFs.stat.mockResolvedValueOnce(mockStats as any);
224 |         
225 |         const result = await getFileStats('/test/file.txt');
226 |         
227 |         expect(result).toEqual({
228 |           size: 1024,
229 |           created: new Date('2023-01-01'),
230 |           modified: new Date('2023-01-02'),
231 |           accessed: new Date('2023-01-03'),
232 |           isDirectory: false,
233 |           isFile: true,
234 |           permissions: '644'
235 |         });
236 |       });
237 | 
238 |       it('handles directory statistics', async () => {
239 |         const mockStats = {
240 |           size: 4096,
241 |           birthtime: new Date('2023-01-01'),
242 |           mtime: new Date('2023-01-02'),
243 |           atime: new Date('2023-01-03'),
244 |           isDirectory: () => true,
245 |           isFile: () => false,
246 |           mode: 0o755
247 |         };
248 |         
249 |         mockFs.stat.mockResolvedValueOnce(mockStats as any);
250 |         
251 |         const result = await getFileStats('/test/dir');
252 |         
253 |         expect(result.isDirectory).toBe(true);
254 |         expect(result.isFile).toBe(false);
255 |         expect(result.permissions).toBe('755');
256 |       });
257 |     });
258 | 
259 |     describe('readFileContent', () => {
260 |       it('reads file with default encoding', async () => {
261 |         mockFs.readFile.mockResolvedValueOnce('file content');
262 |         
263 |         const result = await readFileContent('/test/file.txt');
264 |         
265 |         expect(result).toBe('file content');
266 |         expect(mockFs.readFile).toHaveBeenCalledWith('/test/file.txt', 'utf-8');
267 |       });
268 | 
269 |       it('reads file with custom encoding', async () => {
270 |         mockFs.readFile.mockResolvedValueOnce('file content');
271 |         
272 |         const result = await readFileContent('/test/file.txt', 'ascii');
273 |         
274 |         expect(result).toBe('file content');
275 |         expect(mockFs.readFile).toHaveBeenCalledWith('/test/file.txt', 'ascii');
276 |       });
277 |     });
278 | 
279 |     describe('writeFileContent', () => {
280 |       it('writes file content', async () => {
281 |         mockFs.writeFile.mockResolvedValueOnce(undefined);
282 |         
283 |         await writeFileContent('/test/file.txt', 'new content');
284 |         
285 |         expect(mockFs.writeFile).toHaveBeenCalledWith('/test/file.txt', 'new content', { encoding: "utf-8", flag: 'wx' });
286 |       });
287 |     });
288 | 
289 |   });
290 | 
291 |   describe('Search & Filtering Functions', () => {
292 |     describe('searchFilesWithValidation', () => {
293 |       beforeEach(() => {
294 |         mockFs.realpath.mockImplementation(async (path: any) => path.toString());
295 |       });
296 | 
297 | 
298 |       it('excludes files matching exclude patterns', async () => {
299 |         const mockEntries = [
300 |           { name: 'test.txt', isDirectory: () => false },
301 |           { name: 'test.log', isDirectory: () => false },
302 |           { name: 'node_modules', isDirectory: () => true }
303 |         ];
304 |         
305 |         mockFs.readdir.mockResolvedValueOnce(mockEntries as any);
306 |         
307 |         const testDir = process.platform === 'win32' ? 'C:\\allowed\\dir' : '/allowed/dir';
308 |         const allowedDirs = process.platform === 'win32' ? ['C:\\allowed'] : ['/allowed'];
309 |         
310 |         // Mock realpath to return the same path for validation to pass
311 |         mockFs.realpath.mockImplementation(async (inputPath: any) => {
312 |           const pathStr = inputPath.toString();
313 |           // Return the path as-is for validation
314 |           return pathStr;
315 |         });
316 |         
317 |         const result = await searchFilesWithValidation(
318 |           testDir,
319 |           '*test*',
320 |           allowedDirs,
321 |           { excludePatterns: ['*.log', 'node_modules'] }
322 |         );
323 |         
324 |         const expectedResult = process.platform === 'win32' ? 'C:\\allowed\\dir\\test.txt' : '/allowed/dir/test.txt';
325 |         expect(result).toEqual([expectedResult]);
326 |       });
327 | 
328 |       it('handles validation errors during search', async () => {
329 |         const mockEntries = [
330 |           { name: 'test.txt', isDirectory: () => false },
331 |           { name: 'invalid_file.txt', isDirectory: () => false }
332 |         ];
333 |         
334 |         mockFs.readdir.mockResolvedValueOnce(mockEntries as any);
335 |         
336 |         // Mock validatePath to throw error for invalid_file.txt
337 |         mockFs.realpath.mockImplementation(async (path: any) => {
338 |           if (path.toString().includes('invalid_file.txt')) {
339 |             throw new Error('Access denied');
340 |           }
341 |           return path.toString();
342 |         });
343 |         
344 |         const testDir = process.platform === 'win32' ? 'C:\\allowed\\dir' : '/allowed/dir';
345 |         const allowedDirs = process.platform === 'win32' ? ['C:\\allowed'] : ['/allowed'];
346 |         
347 |         const result = await searchFilesWithValidation(
348 |           testDir,
349 |           '*test*',
350 |           allowedDirs,
351 |           {}
352 |         );
353 |         
354 |         // Should only return the valid file, skipping the invalid one
355 |         const expectedResult = process.platform === 'win32' ? 'C:\\allowed\\dir\\test.txt' : '/allowed/dir/test.txt';
356 |         expect(result).toEqual([expectedResult]);
357 |       });
358 | 
359 |       it('handles complex exclude patterns with wildcards', async () => {
360 |         const mockEntries = [
361 |           { name: 'test.txt', isDirectory: () => false },
362 |           { name: 'test.backup', isDirectory: () => false },
363 |           { name: 'important_test.js', isDirectory: () => false }
364 |         ];
365 |         
366 |         mockFs.readdir.mockResolvedValueOnce(mockEntries as any);
367 |         
368 |         const testDir = process.platform === 'win32' ? 'C:\\allowed\\dir' : '/allowed/dir';
369 |         const allowedDirs = process.platform === 'win32' ? ['C:\\allowed'] : ['/allowed'];
370 |         
371 |         const result = await searchFilesWithValidation(
372 |           testDir,
373 |           '*test*',
374 |           allowedDirs,
375 |           { excludePatterns: ['*.backup'] }
376 |         );
377 |         
378 |         const expectedResults = process.platform === 'win32' ? [
379 |           'C:\\allowed\\dir\\test.txt',
380 |           'C:\\allowed\\dir\\important_test.js'
381 |         ] : [
382 |           '/allowed/dir/test.txt',
383 |           '/allowed/dir/important_test.js'
384 |         ];
385 |         expect(result).toEqual(expectedResults);
386 |       });
387 |     });
388 |   });
389 | 
390 |   describe('File Editing Functions', () => {
391 |     describe('applyFileEdits', () => {
392 |       beforeEach(() => {
393 |         mockFs.readFile.mockResolvedValue('line1\nline2\nline3\n');
394 |         mockFs.writeFile.mockResolvedValue(undefined);
395 |       });
396 | 
397 |       it('applies simple text replacement', async () => {
398 |         const edits = [
399 |           { oldText: 'line2', newText: 'modified line2' }
400 |         ];
401 |         
402 |         mockFs.rename.mockResolvedValueOnce(undefined);
403 |         
404 |         const result = await applyFileEdits('/test/file.txt', edits, false);
405 |         
406 |         expect(result).toContain('modified line2');
407 |         // Should write to temporary file then rename
408 |         expect(mockFs.writeFile).toHaveBeenCalledWith(
409 |           expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
410 |           'line1\nmodified line2\nline3\n',
411 |           'utf-8'
412 |         );
413 |         expect(mockFs.rename).toHaveBeenCalledWith(
414 |           expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
415 |           '/test/file.txt'
416 |         );
417 |       });
418 | 
419 |       it('handles dry run mode', async () => {
420 |         const edits = [
421 |           { oldText: 'line2', newText: 'modified line2' }
422 |         ];
423 |         
424 |         const result = await applyFileEdits('/test/file.txt', edits, true);
425 |         
426 |         expect(result).toContain('modified line2');
427 |         expect(mockFs.writeFile).not.toHaveBeenCalled();
428 |       });
429 | 
430 |       it('applies multiple edits sequentially', async () => {
431 |         const edits = [
432 |           { oldText: 'line1', newText: 'first line' },
433 |           { oldText: 'line3', newText: 'third line' }
434 |         ];
435 |         
436 |         mockFs.rename.mockResolvedValueOnce(undefined);
437 |         
438 |         await applyFileEdits('/test/file.txt', edits, false);
439 |         
440 |         expect(mockFs.writeFile).toHaveBeenCalledWith(
441 |           expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
442 |           'first line\nline2\nthird line\n',
443 |           'utf-8'
444 |         );
445 |         expect(mockFs.rename).toHaveBeenCalledWith(
446 |           expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
447 |           '/test/file.txt'
448 |         );
449 |       });
450 | 
451 |       it('handles whitespace-flexible matching', async () => {
452 |         mockFs.readFile.mockResolvedValue('  line1\n    line2\n  line3\n');
453 |         
454 |         const edits = [
455 |           { oldText: 'line2', newText: 'modified line2' }
456 |         ];
457 |         
458 |         mockFs.rename.mockResolvedValueOnce(undefined);
459 |         
460 |         await applyFileEdits('/test/file.txt', edits, false);
461 |         
462 |         expect(mockFs.writeFile).toHaveBeenCalledWith(
463 |           expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
464 |           '  line1\n    modified line2\n  line3\n',
465 |           'utf-8'
466 |         );
467 |         expect(mockFs.rename).toHaveBeenCalledWith(
468 |           expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
469 |           '/test/file.txt'
470 |         );
471 |       });
472 | 
473 |       it('throws error for non-matching edits', async () => {
474 |         const edits = [
475 |           { oldText: 'nonexistent line', newText: 'replacement' }
476 |         ];
477 |         
478 |         await expect(applyFileEdits('/test/file.txt', edits, false))
479 |           .rejects.toThrow('Could not find exact match for edit');
480 |       });
481 | 
482 |       it('handles complex multi-line edits with indentation', async () => {
483 |         mockFs.readFile.mockResolvedValue('function test() {\n  console.log("hello");\n  return true;\n}');
484 |         
485 |         const edits = [
486 |           { 
487 |             oldText: '  console.log("hello");\n  return true;', 
488 |             newText: '  console.log("world");\n  console.log("test");\n  return false;' 
489 |           }
490 |         ];
491 |         
492 |         mockFs.rename.mockResolvedValueOnce(undefined);
493 |         
494 |         await applyFileEdits('/test/file.js', edits, false);
495 |         
496 |         expect(mockFs.writeFile).toHaveBeenCalledWith(
497 |           expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/),
498 |           'function test() {\n  console.log("world");\n  console.log("test");\n  return false;\n}',
499 |           'utf-8'
500 |         );
501 |         expect(mockFs.rename).toHaveBeenCalledWith(
502 |           expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/),
503 |           '/test/file.js'
504 |         );
505 |       });
506 | 
507 |       it('handles edits with different indentation patterns', async () => {
508 |         mockFs.readFile.mockResolvedValue('    if (condition) {\n        doSomething();\n    }');
509 |         
510 |         const edits = [
511 |           { 
512 |             oldText: 'doSomething();', 
513 |             newText: 'doSomethingElse();\n        doAnotherThing();' 
514 |           }
515 |         ];
516 |         
517 |         mockFs.rename.mockResolvedValueOnce(undefined);
518 |         
519 |         await applyFileEdits('/test/file.js', edits, false);
520 |         
521 |         expect(mockFs.writeFile).toHaveBeenCalledWith(
522 |           expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/),
523 |           '    if (condition) {\n        doSomethingElse();\n        doAnotherThing();\n    }',
524 |           'utf-8'
525 |         );
526 |         expect(mockFs.rename).toHaveBeenCalledWith(
527 |           expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/),
528 |           '/test/file.js'
529 |         );
530 |       });
531 | 
532 |       it('handles CRLF line endings in file content', async () => {
533 |         mockFs.readFile.mockResolvedValue('line1\r\nline2\r\nline3\r\n');
534 |         
535 |         const edits = [
536 |           { oldText: 'line2', newText: 'modified line2' }
537 |         ];
538 |         
539 |         mockFs.rename.mockResolvedValueOnce(undefined);
540 |         
541 |         await applyFileEdits('/test/file.txt', edits, false);
542 |         
543 |         expect(mockFs.writeFile).toHaveBeenCalledWith(
544 |           expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
545 |           'line1\nmodified line2\nline3\n',
546 |           'utf-8'
547 |         );
548 |         expect(mockFs.rename).toHaveBeenCalledWith(
549 |           expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
550 |           '/test/file.txt'
551 |         );
552 |       });
553 |     });
554 | 
555 |     describe('tailFile', () => {
556 |       it('handles empty files', async () => {
557 |         mockFs.stat.mockResolvedValue({ size: 0 } as any);
558 |         
559 |         const result = await tailFile('/test/empty.txt', 5);
560 |         
561 |         expect(result).toBe('');
562 |         expect(mockFs.open).not.toHaveBeenCalled();
563 |       });
564 | 
565 |       it('calls stat to check file size', async () => {
566 |         mockFs.stat.mockResolvedValue({ size: 100 } as any);
567 |         
568 |         // Mock file handle with proper typing
569 |         const mockFileHandle = {
570 |           read: vi.fn(),
571 |           close: vi.fn()
572 |         } as any;
573 |         
574 |         mockFileHandle.read.mockResolvedValue({ bytesRead: 0 });
575 |         mockFileHandle.close.mockResolvedValue(undefined);
576 |         
577 |         mockFs.open.mockResolvedValue(mockFileHandle);
578 |         
579 |         await tailFile('/test/file.txt', 2);
580 |         
581 |         expect(mockFs.stat).toHaveBeenCalledWith('/test/file.txt');
582 |         expect(mockFs.open).toHaveBeenCalledWith('/test/file.txt', 'r');
583 |       });
584 | 
585 |       it('handles files with content and returns last lines', async () => {
586 |         mockFs.stat.mockResolvedValue({ size: 50 } as any);
587 |         
588 |         const mockFileHandle = {
589 |           read: vi.fn(),
590 |           close: vi.fn()
591 |         } as any;
592 |         
593 |         // Simulate reading file content in chunks
594 |         mockFileHandle.read
595 |           .mockResolvedValueOnce({ bytesRead: 18, buffer: Buffer.from('line3\nline4\nline5\n') })
596 |           .mockResolvedValueOnce({ bytesRead: 0 });
597 |         mockFileHandle.close.mockResolvedValue(undefined);
598 |         
599 |         mockFs.open.mockResolvedValue(mockFileHandle);
600 |         
601 |         const result = await tailFile('/test/file.txt', 2);
602 |         expect(typeof result).toBe('string');
603 |         expect(mockFileHandle.close).toHaveBeenCalled();
604 |       });
605 | 
606 |       it('handles zero-byte reads gracefully', async () => {
607 |         mockFs.stat.mockResolvedValue({ size: 100 } as any);
608 |         
609 |         const mockFileHandle = {
610 |           read: vi.fn(),
611 |           close: vi.fn()
612 |         } as any;
613 |         
614 |         mockFileHandle.read.mockResolvedValue({ bytesRead: 0 });
615 |         mockFileHandle.close.mockResolvedValue(undefined);
616 |         
617 |         mockFs.open.mockResolvedValue(mockFileHandle);
618 |         
619 |         await tailFile('/test/file.txt', 5);
620 |         
621 |         expect(mockFileHandle.close).toHaveBeenCalled();
622 |       });
623 |     });
624 | 
625 |     describe('headFile', () => {
626 |       it('opens file for reading', async () => {
627 |         // Mock file handle with proper typing
628 |         const mockFileHandle = {
629 |           read: vi.fn(),
630 |           close: vi.fn()
631 |         } as any;
632 |         
633 |         mockFileHandle.read.mockResolvedValue({ bytesRead: 0 });
634 |         mockFileHandle.close.mockResolvedValue(undefined);
635 |         
636 |         mockFs.open.mockResolvedValue(mockFileHandle);
637 |         
638 |         await headFile('/test/file.txt', 2);
639 |         
640 |         expect(mockFs.open).toHaveBeenCalledWith('/test/file.txt', 'r');
641 |       });
642 | 
643 |       it('handles files with content and returns first lines', async () => {
644 |         const mockFileHandle = {
645 |           read: vi.fn(),
646 |           close: vi.fn()
647 |         } as any;
648 |         
649 |         // Simulate reading file content with newlines
650 |         mockFileHandle.read
651 |           .mockResolvedValueOnce({ bytesRead: 18, buffer: Buffer.from('line1\nline2\nline3\n') })
652 |           .mockResolvedValueOnce({ bytesRead: 0 });
653 |         mockFileHandle.close.mockResolvedValue(undefined);
654 |         
655 |         mockFs.open.mockResolvedValue(mockFileHandle);
656 |         
657 |         const result = await headFile('/test/file.txt', 2);
658 |         expect(typeof result).toBe('string');
659 |         expect(mockFileHandle.close).toHaveBeenCalled();
660 |       });
661 | 
662 |       it('handles files with leftover content', async () => {
663 |         const mockFileHandle = {
664 |           read: vi.fn(),
665 |           close: vi.fn()
666 |         } as any;
667 |         
668 |         // Simulate reading file content without final newline
669 |         mockFileHandle.read
670 |           .mockResolvedValueOnce({ bytesRead: 15, buffer: Buffer.from('line1\nline2\nend') })
671 |           .mockResolvedValueOnce({ bytesRead: 0 });
672 |         mockFileHandle.close.mockResolvedValue(undefined);
673 |         
674 |         mockFs.open.mockResolvedValue(mockFileHandle);
675 |         
676 |         const result = await headFile('/test/file.txt', 5);
677 |         expect(typeof result).toBe('string');
678 |         expect(mockFileHandle.close).toHaveBeenCalled();
679 |       });
680 | 
681 |       it('handles reaching requested line count', async () => {
682 |         const mockFileHandle = {
683 |           read: vi.fn(),
684 |           close: vi.fn()
685 |         } as any;
686 |         
687 |         // Simulate reading exactly the requested number of lines
688 |         mockFileHandle.read
689 |           .mockResolvedValueOnce({ bytesRead: 12, buffer: Buffer.from('line1\nline2\n') })
690 |           .mockResolvedValueOnce({ bytesRead: 0 });
691 |         mockFileHandle.close.mockResolvedValue(undefined);
692 |         
693 |         mockFs.open.mockResolvedValue(mockFileHandle);
694 |         
695 |         const result = await headFile('/test/file.txt', 2);
696 |         expect(typeof result).toBe('string');
697 |         expect(mockFileHandle.close).toHaveBeenCalled();
698 |       });
699 |     });
700 |   });
701 | });
702 | 
```

--------------------------------------------------------------------------------
/src/filesystem/index.ts:
--------------------------------------------------------------------------------

```typescript
  1 | #!/usr/bin/env node
  2 | 
  3 | import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
  4 | import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
  5 | import {
  6 |   CallToolResult,
  7 |   RootsListChangedNotificationSchema,
  8 |   type Root,
  9 | } from "@modelcontextprotocol/sdk/types.js";
 10 | import fs from "fs/promises";
 11 | import { createReadStream } from "fs";
 12 | import path from "path";
 13 | import { z } from "zod";
 14 | import { minimatch } from "minimatch";
 15 | import { normalizePath, expandHome } from './path-utils.js';
 16 | import { getValidRootDirectories } from './roots-utils.js';
 17 | import {
 18 |   // Function imports
 19 |   formatSize,
 20 |   validatePath,
 21 |   getFileStats,
 22 |   readFileContent,
 23 |   writeFileContent,
 24 |   searchFilesWithValidation,
 25 |   applyFileEdits,
 26 |   tailFile,
 27 |   headFile,
 28 |   setAllowedDirectories,
 29 | } from './lib.js';
 30 | 
 31 | // Command line argument parsing
 32 | const args = process.argv.slice(2);
 33 | if (args.length === 0) {
 34 |   console.error("Usage: mcp-server-filesystem [allowed-directory] [additional-directories...]");
 35 |   console.error("Note: Allowed directories can be provided via:");
 36 |   console.error("  1. Command-line arguments (shown above)");
 37 |   console.error("  2. MCP roots protocol (if client supports it)");
 38 |   console.error("At least one directory must be provided by EITHER method for the server to operate.");
 39 | }
 40 | 
 41 | // Store allowed directories in normalized and resolved form
 42 | let allowedDirectories = await Promise.all(
 43 |   args.map(async (dir) => {
 44 |     const expanded = expandHome(dir);
 45 |     const absolute = path.resolve(expanded);
 46 |     try {
 47 |       // Security: Resolve symlinks in allowed directories during startup
 48 |       // This ensures we know the real paths and can validate against them later
 49 |       const resolved = await fs.realpath(absolute);
 50 |       return normalizePath(resolved);
 51 |     } catch (error) {
 52 |       // If we can't resolve (doesn't exist), use the normalized absolute path
 53 |       // This allows configuring allowed dirs that will be created later
 54 |       return normalizePath(absolute);
 55 |     }
 56 |   })
 57 | );
 58 | 
 59 | // Validate that all directories exist and are accessible
 60 | await Promise.all(allowedDirectories.map(async (dir) => {
 61 |   try {
 62 |     const stats = await fs.stat(dir);
 63 |     if (!stats.isDirectory()) {
 64 |       console.error(`Error: ${dir} is not a directory`);
 65 |       process.exit(1);
 66 |     }
 67 |   } catch (error) {
 68 |     console.error(`Error accessing directory ${dir}:`, error);
 69 |     process.exit(1);
 70 |   }
 71 | }));
 72 | 
 73 | // Initialize the global allowedDirectories in lib.ts
 74 | setAllowedDirectories(allowedDirectories);
 75 | 
 76 | // Schema definitions
 77 | const ReadTextFileArgsSchema = z.object({
 78 |   path: z.string(),
 79 |   tail: z.number().optional().describe('If provided, returns only the last N lines of the file'),
 80 |   head: z.number().optional().describe('If provided, returns only the first N lines of the file')
 81 | });
 82 | 
 83 | const ReadMediaFileArgsSchema = z.object({
 84 |   path: z.string()
 85 | });
 86 | 
 87 | const ReadMultipleFilesArgsSchema = z.object({
 88 |   paths: z
 89 |     .array(z.string())
 90 |     .min(1, "At least one file path must be provided")
 91 |     .describe("Array of file paths to read. Each path must be a string pointing to a valid file within allowed directories."),
 92 | });
 93 | 
 94 | const WriteFileArgsSchema = z.object({
 95 |   path: z.string(),
 96 |   content: z.string(),
 97 | });
 98 | 
 99 | const EditOperation = z.object({
100 |   oldText: z.string().describe('Text to search for - must match exactly'),
101 |   newText: z.string().describe('Text to replace with')
102 | });
103 | 
104 | const EditFileArgsSchema = z.object({
105 |   path: z.string(),
106 |   edits: z.array(EditOperation),
107 |   dryRun: z.boolean().default(false).describe('Preview changes using git-style diff format')
108 | });
109 | 
110 | const CreateDirectoryArgsSchema = z.object({
111 |   path: z.string(),
112 | });
113 | 
114 | const ListDirectoryArgsSchema = z.object({
115 |   path: z.string(),
116 | });
117 | 
118 | const ListDirectoryWithSizesArgsSchema = z.object({
119 |   path: z.string(),
120 |   sortBy: z.enum(['name', 'size']).optional().default('name').describe('Sort entries by name or size'),
121 | });
122 | 
123 | const DirectoryTreeArgsSchema = z.object({
124 |   path: z.string(),
125 |   excludePatterns: z.array(z.string()).optional().default([])
126 | });
127 | 
128 | const MoveFileArgsSchema = z.object({
129 |   source: z.string(),
130 |   destination: z.string(),
131 | });
132 | 
133 | const SearchFilesArgsSchema = z.object({
134 |   path: z.string(),
135 |   pattern: z.string(),
136 |   excludePatterns: z.array(z.string()).optional().default([])
137 | });
138 | 
139 | const GetFileInfoArgsSchema = z.object({
140 |   path: z.string(),
141 | });
142 | 
143 | // Server setup
144 | const server = new McpServer(
145 |   {
146 |     name: "secure-filesystem-server",
147 |     version: "0.2.0",
148 |   }
149 | );
150 | 
151 | // Reads a file as a stream of buffers, concatenates them, and then encodes
152 | // the result to a Base64 string. This is a memory-efficient way to handle
153 | // binary data from a stream before the final encoding.
154 | async function readFileAsBase64Stream(filePath: string): Promise<string> {
155 |   return new Promise((resolve, reject) => {
156 |     const stream = createReadStream(filePath);
157 |     const chunks: Buffer[] = [];
158 |     stream.on('data', (chunk) => {
159 |       chunks.push(chunk as Buffer);
160 |     });
161 |     stream.on('end', () => {
162 |       const finalBuffer = Buffer.concat(chunks);
163 |       resolve(finalBuffer.toString('base64'));
164 |     });
165 |     stream.on('error', (err) => reject(err));
166 |   });
167 | }
168 | 
169 | // Tool registrations
170 | 
171 | // read_file (deprecated) and read_text_file
172 | const readTextFileHandler = async (args: z.infer<typeof ReadTextFileArgsSchema>) => {
173 |   const validPath = await validatePath(args.path);
174 | 
175 |   if (args.head && args.tail) {
176 |     throw new Error("Cannot specify both head and tail parameters simultaneously");
177 |   }
178 | 
179 |   let content: string;
180 |   if (args.tail) {
181 |     content = await tailFile(validPath, args.tail);
182 |   } else if (args.head) {
183 |     content = await headFile(validPath, args.head);
184 |   } else {
185 |     content = await readFileContent(validPath);
186 |   }
187 | 
188 |   return {
189 |     content: [{ type: "text" as const, text: content }],
190 |     structuredContent: { content }
191 |   };
192 | };
193 | 
194 | server.registerTool(
195 |   "read_file",
196 |   {
197 |     title: "Read File (Deprecated)",
198 |     description: "Read the complete contents of a file as text. DEPRECATED: Use read_text_file instead.",
199 |     inputSchema: ReadTextFileArgsSchema.shape,
200 |     outputSchema: { content: z.string() },
201 |     annotations: { readOnlyHint: true }
202 |   },
203 |   readTextFileHandler
204 | );
205 | 
206 | server.registerTool(
207 |   "read_text_file",
208 |   {
209 |     title: "Read Text File",
210 |     description:
211 |       "Read the complete contents of a file from the file system as text. " +
212 |       "Handles various text encodings and provides detailed error messages " +
213 |       "if the file cannot be read. Use this tool when you need to examine " +
214 |       "the contents of a single file. Use the 'head' parameter to read only " +
215 |       "the first N lines of a file, or the 'tail' parameter to read only " +
216 |       "the last N lines of a file. Operates on the file as text regardless of extension. " +
217 |       "Only works within allowed directories.",
218 |     inputSchema: {
219 |       path: z.string(),
220 |       tail: z.number().optional().describe("If provided, returns only the last N lines of the file"),
221 |       head: z.number().optional().describe("If provided, returns only the first N lines of the file")
222 |     },
223 |     outputSchema: { content: z.string() },
224 |     annotations: { readOnlyHint: true }
225 |   },
226 |   readTextFileHandler
227 | );
228 | 
229 | server.registerTool(
230 |   "read_media_file",
231 |   {
232 |     title: "Read Media File",
233 |     description:
234 |       "Read an image or audio file. Returns the base64 encoded data and MIME type. " +
235 |       "Only works within allowed directories.",
236 |     inputSchema: {
237 |       path: z.string()
238 |     },
239 |     outputSchema: {
240 |       content: z.array(z.object({
241 |         type: z.enum(["image", "audio", "blob"]),
242 |         data: z.string(),
243 |         mimeType: z.string()
244 |       }))
245 |     },
246 |     annotations: { readOnlyHint: true }
247 |   },
248 |   async (args: z.infer<typeof ReadMediaFileArgsSchema>) => {
249 |     const validPath = await validatePath(args.path);
250 |     const extension = path.extname(validPath).toLowerCase();
251 |     const mimeTypes: Record<string, string> = {
252 |       ".png": "image/png",
253 |       ".jpg": "image/jpeg",
254 |       ".jpeg": "image/jpeg",
255 |       ".gif": "image/gif",
256 |       ".webp": "image/webp",
257 |       ".bmp": "image/bmp",
258 |       ".svg": "image/svg+xml",
259 |       ".mp3": "audio/mpeg",
260 |       ".wav": "audio/wav",
261 |       ".ogg": "audio/ogg",
262 |       ".flac": "audio/flac",
263 |     };
264 |     const mimeType = mimeTypes[extension] || "application/octet-stream";
265 |     const data = await readFileAsBase64Stream(validPath);
266 | 
267 |     const type = mimeType.startsWith("image/")
268 |       ? "image"
269 |       : mimeType.startsWith("audio/")
270 |         ? "audio"
271 |         // Fallback for other binary types, not officially supported by the spec but has been used for some time
272 |         : "blob";
273 |     const contentItem = { type: type as 'image' | 'audio' | 'blob', data, mimeType };
274 |     return {
275 |       content: [contentItem],
276 |       structuredContent: { content: [contentItem] }
277 |     } as unknown as CallToolResult;
278 |   }
279 | );
280 | 
281 | server.registerTool(
282 |   "read_multiple_files",
283 |   {
284 |     title: "Read Multiple Files",
285 |     description:
286 |       "Read the contents of multiple files simultaneously. This is more " +
287 |       "efficient than reading files one by one when you need to analyze " +
288 |       "or compare multiple files. Each file's content is returned with its " +
289 |       "path as a reference. Failed reads for individual files won't stop " +
290 |       "the entire operation. Only works within allowed directories.",
291 |     inputSchema: {
292 |       paths: z.array(z.string())
293 |         .min(1)
294 |         .describe("Array of file paths to read. Each path must be a string pointing to a valid file within allowed directories.")
295 |     },
296 |     outputSchema: { content: z.string() },
297 |     annotations: { readOnlyHint: true }
298 |   },
299 |   async (args: z.infer<typeof ReadMultipleFilesArgsSchema>) => {
300 |     const results = await Promise.all(
301 |       args.paths.map(async (filePath: string) => {
302 |         try {
303 |           const validPath = await validatePath(filePath);
304 |           const content = await readFileContent(validPath);
305 |           return `${filePath}:\n${content}\n`;
306 |         } catch (error) {
307 |           const errorMessage = error instanceof Error ? error.message : String(error);
308 |           return `${filePath}: Error - ${errorMessage}`;
309 |         }
310 |       }),
311 |     );
312 |     const text = results.join("\n---\n");
313 |     return {
314 |       content: [{ type: "text" as const, text }],
315 |       structuredContent: { content: text }
316 |     };
317 |   }
318 | );
319 | 
320 | server.registerTool(
321 |   "write_file",
322 |   {
323 |     title: "Write File",
324 |     description:
325 |       "Create a new file or completely overwrite an existing file with new content. " +
326 |       "Use with caution as it will overwrite existing files without warning. " +
327 |       "Handles text content with proper encoding. Only works within allowed directories.",
328 |     inputSchema: {
329 |       path: z.string(),
330 |       content: z.string()
331 |     },
332 |     outputSchema: { content: z.string() },
333 |     annotations: { readOnlyHint: false, idempotentHint: true, destructiveHint: true }
334 |   },
335 |   async (args: z.infer<typeof WriteFileArgsSchema>) => {
336 |     const validPath = await validatePath(args.path);
337 |     await writeFileContent(validPath, args.content);
338 |     const text = `Successfully wrote to ${args.path}`;
339 |     return {
340 |       content: [{ type: "text" as const, text }],
341 |       structuredContent: { content: text }
342 |     };
343 |   }
344 | );
345 | 
346 | server.registerTool(
347 |   "edit_file",
348 |   {
349 |     title: "Edit File",
350 |     description:
351 |       "Make line-based edits to a text file. Each edit replaces exact line sequences " +
352 |       "with new content. Returns a git-style diff showing the changes made. " +
353 |       "Only works within allowed directories.",
354 |     inputSchema: {
355 |       path: z.string(),
356 |       edits: z.array(z.object({
357 |         oldText: z.string().describe("Text to search for - must match exactly"),
358 |         newText: z.string().describe("Text to replace with")
359 |       })),
360 |       dryRun: z.boolean().default(false).describe("Preview changes using git-style diff format")
361 |     },
362 |     outputSchema: { content: z.string() },
363 |     annotations: { readOnlyHint: false, idempotentHint: false, destructiveHint: true }
364 |   },
365 |   async (args: z.infer<typeof EditFileArgsSchema>) => {
366 |     const validPath = await validatePath(args.path);
367 |     const result = await applyFileEdits(validPath, args.edits, args.dryRun);
368 |     return {
369 |       content: [{ type: "text" as const, text: result }],
370 |       structuredContent: { content: result }
371 |     };
372 |   }
373 | );
374 | 
375 | server.registerTool(
376 |   "create_directory",
377 |   {
378 |     title: "Create Directory",
379 |     description:
380 |       "Create a new directory or ensure a directory exists. Can create multiple " +
381 |       "nested directories in one operation. If the directory already exists, " +
382 |       "this operation will succeed silently. Perfect for setting up directory " +
383 |       "structures for projects or ensuring required paths exist. Only works within allowed directories.",
384 |     inputSchema: {
385 |       path: z.string()
386 |     },
387 |     outputSchema: { content: z.string() },
388 |     annotations: { readOnlyHint: false, idempotentHint: true, destructiveHint: false }
389 |   },
390 |   async (args: z.infer<typeof CreateDirectoryArgsSchema>) => {
391 |     const validPath = await validatePath(args.path);
392 |     await fs.mkdir(validPath, { recursive: true });
393 |     const text = `Successfully created directory ${args.path}`;
394 |     return {
395 |       content: [{ type: "text" as const, text }],
396 |       structuredContent: { content: text }
397 |     };
398 |   }
399 | );
400 | 
401 | server.registerTool(
402 |   "list_directory",
403 |   {
404 |     title: "List Directory",
405 |     description:
406 |       "Get a detailed listing of all files and directories in a specified path. " +
407 |       "Results clearly distinguish between files and directories with [FILE] and [DIR] " +
408 |       "prefixes. This tool is essential for understanding directory structure and " +
409 |       "finding specific files within a directory. Only works within allowed directories.",
410 |     inputSchema: {
411 |       path: z.string()
412 |     },
413 |     outputSchema: { content: z.string() },
414 |     annotations: { readOnlyHint: true }
415 |   },
416 |   async (args: z.infer<typeof ListDirectoryArgsSchema>) => {
417 |     const validPath = await validatePath(args.path);
418 |     const entries = await fs.readdir(validPath, { withFileTypes: true });
419 |     const formatted = entries
420 |       .map((entry) => `${entry.isDirectory() ? "[DIR]" : "[FILE]"} ${entry.name}`)
421 |       .join("\n");
422 |     return {
423 |       content: [{ type: "text" as const, text: formatted }],
424 |       structuredContent: { content: formatted }
425 |     };
426 |   }
427 | );
428 | 
429 | server.registerTool(
430 |   "list_directory_with_sizes",
431 |   {
432 |     title: "List Directory with Sizes",
433 |     description:
434 |       "Get a detailed listing of all files and directories in a specified path, including sizes. " +
435 |       "Results clearly distinguish between files and directories with [FILE] and [DIR] " +
436 |       "prefixes. This tool is useful for understanding directory structure and " +
437 |       "finding specific files within a directory. Only works within allowed directories.",
438 |     inputSchema: {
439 |       path: z.string(),
440 |       sortBy: z.enum(["name", "size"]).optional().default("name").describe("Sort entries by name or size")
441 |     },
442 |     outputSchema: { content: z.string() },
443 |     annotations: { readOnlyHint: true }
444 |   },
445 |   async (args: z.infer<typeof ListDirectoryWithSizesArgsSchema>) => {
446 |     const validPath = await validatePath(args.path);
447 |     const entries = await fs.readdir(validPath, { withFileTypes: true });
448 | 
449 |     // Get detailed information for each entry
450 |     const detailedEntries = await Promise.all(
451 |       entries.map(async (entry) => {
452 |         const entryPath = path.join(validPath, entry.name);
453 |         try {
454 |           const stats = await fs.stat(entryPath);
455 |           return {
456 |             name: entry.name,
457 |             isDirectory: entry.isDirectory(),
458 |             size: stats.size,
459 |             mtime: stats.mtime
460 |           };
461 |         } catch (error) {
462 |           return {
463 |             name: entry.name,
464 |             isDirectory: entry.isDirectory(),
465 |             size: 0,
466 |             mtime: new Date(0)
467 |           };
468 |         }
469 |       })
470 |     );
471 | 
472 |     // Sort entries based on sortBy parameter
473 |     const sortedEntries = [...detailedEntries].sort((a, b) => {
474 |       if (args.sortBy === 'size') {
475 |         return b.size - a.size; // Descending by size
476 |       }
477 |       // Default sort by name
478 |       return a.name.localeCompare(b.name);
479 |     });
480 | 
481 |     // Format the output
482 |     const formattedEntries = sortedEntries.map(entry =>
483 |       `${entry.isDirectory ? "[DIR]" : "[FILE]"} ${entry.name.padEnd(30)} ${
484 |         entry.isDirectory ? "" : formatSize(entry.size).padStart(10)
485 |       }`
486 |     );
487 | 
488 |     // Add summary
489 |     const totalFiles = detailedEntries.filter(e => !e.isDirectory).length;
490 |     const totalDirs = detailedEntries.filter(e => e.isDirectory).length;
491 |     const totalSize = detailedEntries.reduce((sum, entry) => sum + (entry.isDirectory ? 0 : entry.size), 0);
492 | 
493 |     const summary = [
494 |       "",
495 |       `Total: ${totalFiles} files, ${totalDirs} directories`,
496 |       `Combined size: ${formatSize(totalSize)}`
497 |     ];
498 | 
499 |     const text = [...formattedEntries, ...summary].join("\n");
500 |     const contentBlock = { type: "text" as const, text };
501 |     return {
502 |       content: [contentBlock],
503 |       structuredContent: { content: text } // Matches the declared outputSchema (content: z.string())
504 |     };
505 |   }
506 | );
507 | 
508 | server.registerTool(
509 |   "directory_tree",
510 |   {
511 |     title: "Directory Tree",
512 |     description:
513 |       "Get a recursive tree view of files and directories as a JSON structure. " +
514 |       "Each entry includes 'name', 'type' (file/directory), and 'children' for directories. " +
515 |       "Files have no children array, while directories always have a children array (which may be empty). " +
516 |       "The output is formatted with 2-space indentation for readability. Only works within allowed directories.",
517 |     inputSchema: {
518 |       path: z.string(),
519 |       excludePatterns: z.array(z.string()).optional().default([])
520 |     },
521 |     outputSchema: { content: z.string() },
522 |     annotations: { readOnlyHint: true }
523 |   },
524 |   async (args: z.infer<typeof DirectoryTreeArgsSchema>) => {
525 |     interface TreeEntry {
526 |       name: string;
527 |       type: 'file' | 'directory';
528 |       children?: TreeEntry[];
529 |     }
530 |     const rootPath = args.path;
531 | 
532 |     async function buildTree(currentPath: string, excludePatterns: string[] = []): Promise<TreeEntry[]> {
533 |       const validPath = await validatePath(currentPath);
534 |       const entries = await fs.readdir(validPath, { withFileTypes: true });
535 |       const result: TreeEntry[] = [];
536 | 
537 |       for (const entry of entries) {
538 |         const relativePath = path.relative(rootPath, path.join(currentPath, entry.name));
539 |         const shouldExclude = excludePatterns.some(pattern => {
540 |           if (pattern.includes('*')) {
541 |             return minimatch(relativePath, pattern, { dot: true });
542 |           }
543 |           // For files: match exact name or as part of path
544 |           // For directories: match as directory path
545 |           return minimatch(relativePath, pattern, { dot: true }) ||
546 |             minimatch(relativePath, `**/${pattern}`, { dot: true }) ||
547 |             minimatch(relativePath, `**/${pattern}/**`, { dot: true });
548 |         });
549 |         if (shouldExclude)
550 |           continue;
551 | 
552 |         const entryData: TreeEntry = {
553 |           name: entry.name,
554 |           type: entry.isDirectory() ? 'directory' : 'file'
555 |         };
556 | 
557 |         if (entry.isDirectory()) {
558 |           const subPath = path.join(currentPath, entry.name);
559 |           entryData.children = await buildTree(subPath, excludePatterns);
560 |         }
561 | 
562 |         result.push(entryData);
563 |       }
564 | 
565 |       return result;
566 |     }
567 | 
568 |     const treeData = await buildTree(rootPath, args.excludePatterns);
569 |     const text = JSON.stringify(treeData, null, 2);
570 |     const contentBlock = { type: "text" as const, text };
571 |     return {
572 |       content: [contentBlock],
573 |       structuredContent: { content: text } // Matches the declared outputSchema (content: z.string())
574 |     };
575 |   }
576 | );
577 | 
578 | server.registerTool(
579 |   "move_file",
580 |   {
581 |     title: "Move File",
582 |     description:
583 |       "Move or rename files and directories. Can move files between directories " +
584 |       "and rename them in a single operation. If the destination exists, the " +
585 |       "operation will fail. Works across different directories and can be used " +
586 |       "for simple renaming within the same directory. Both source and destination must be within allowed directories.",
587 |     inputSchema: {
588 |       source: z.string(),
589 |       destination: z.string()
590 |     },
591 |     outputSchema: { content: z.string() },
592 |     annotations: { readOnlyHint: false, idempotentHint: false, destructiveHint: false }
593 |   },
594 |   async (args: z.infer<typeof MoveFileArgsSchema>) => {
595 |     const validSourcePath = await validatePath(args.source);
596 |     const validDestPath = await validatePath(args.destination);
597 |     await fs.rename(validSourcePath, validDestPath);
598 |     const text = `Successfully moved ${args.source} to ${args.destination}`;
599 |     const contentBlock = { type: "text" as const, text };
600 |     return {
601 |       content: [contentBlock],
602 |       structuredContent: { content: text } // Matches the declared outputSchema (content: z.string())
603 |     };
604 |   }
605 | );
606 | 
607 | server.registerTool(
608 |   "search_files",
609 |   {
610 |     title: "Search Files",
611 |     description:
612 |       "Recursively search for files and directories matching a pattern. " +
613 |       "The patterns should be glob-style patterns that match paths relative to the working directory. " +
614 |       "Use pattern like '*.ext' to match files in current directory, and '**/*.ext' to match files in all subdirectories. " +
615 |       "Returns full paths to all matching items. Great for finding files when you don't know their exact location. " +
616 |       "Only searches within allowed directories.",
617 |     inputSchema: {
618 |       path: z.string(),
619 |       pattern: z.string(),
620 |       excludePatterns: z.array(z.string()).optional().default([])
621 |     },
622 |     outputSchema: { content: z.string() },
623 |     annotations: { readOnlyHint: true }
624 |   },
625 |   async (args: z.infer<typeof SearchFilesArgsSchema>) => {
626 |     const validPath = await validatePath(args.path);
627 |     const results = await searchFilesWithValidation(validPath, args.pattern, allowedDirectories, { excludePatterns: args.excludePatterns });
628 |     const text = results.length > 0 ? results.join("\n") : "No matches found";
629 |     return {
630 |       content: [{ type: "text" as const, text }],
631 |       structuredContent: { content: text }
632 |     };
633 |   }
634 | );
635 | 
636 | server.registerTool(
637 |   "get_file_info",
638 |   {
639 |     title: "Get File Info",
640 |     description:
641 |       "Retrieve detailed metadata about a file or directory. Returns comprehensive " +
642 |       "information including size, creation time, last modified time, permissions, " +
643 |       "and type. This tool is perfect for understanding file characteristics " +
644 |       "without reading the actual content. Only works within allowed directories.",
645 |     inputSchema: {
646 |       path: z.string()
647 |     },
648 |     outputSchema: { content: z.string() },
649 |     annotations: { readOnlyHint: true }
650 |   },
651 |   async (args: z.infer<typeof GetFileInfoArgsSchema>) => {
652 |     const validPath = await validatePath(args.path);
653 |     const info = await getFileStats(validPath);
654 |     const text = Object.entries(info)
655 |       .map(([key, value]) => `${key}: ${value}`)
656 |       .join("\n");
657 |     return {
658 |       content: [{ type: "text" as const, text }],
659 |       structuredContent: { content: text }
660 |     };
661 |   }
662 | );
663 | 
664 | server.registerTool(
665 |   "list_allowed_directories",
666 |   {
667 |     title: "List Allowed Directories",
668 |     description:
669 |       "Returns the list of directories that this server is allowed to access. " +
670 |       "Subdirectories within these allowed directories are also accessible. " +
671 |       "Use this to understand which directories and their nested paths are available " +
672 |       "before trying to access files.",
673 |     inputSchema: {},
674 |     outputSchema: { content: z.string() },
675 |     annotations: { readOnlyHint: true }
676 |   },
677 |   async () => {
678 |     const text = `Allowed directories:\n${allowedDirectories.join('\n')}`;
679 |     return {
680 |       content: [{ type: "text" as const, text }],
681 |       structuredContent: { content: text }
682 |     };
683 |   }
684 | );
685 | 
686 | // Updates allowed directories based on MCP client roots
687 | async function updateAllowedDirectoriesFromRoots(requestedRoots: Root[]) {
688 |   const validatedRootDirs = await getValidRootDirectories(requestedRoots);
689 |   if (validatedRootDirs.length > 0) {
690 |     allowedDirectories = [...validatedRootDirs];
691 |     setAllowedDirectories(allowedDirectories); // Update the global state in lib.ts
692 |     console.error(`Updated allowed directories from MCP roots: ${validatedRootDirs.length} valid directories`);
693 |   } else {
694 |     console.error("No valid root directories provided by client");
695 |   }
696 | }
697 | 
698 | // Handles dynamic roots updates at runtime: when the client sends a "roots/list_changed" notification, the server fetches the updated roots and replaces all allowed directories with them.
699 | server.server.setNotificationHandler(RootsListChangedNotificationSchema, async () => {
700 |   try {
701 |     // Request the updated roots list from the client
702 |     const response = await server.server.listRoots();
703 |     if (response && 'roots' in response) {
704 |       await updateAllowedDirectoriesFromRoots(response.roots);
705 |     }
706 |   } catch (error) {
707 |     console.error("Failed to request roots from client:", error instanceof Error ? error.message : String(error));
708 |   }
709 | });
710 | 
711 | // Handles post-initialization setup, specifically checking for and fetching MCP roots.
712 | server.server.oninitialized = async () => {
713 |   const clientCapabilities = server.server.getClientCapabilities();
714 | 
715 |   if (clientCapabilities?.roots) {
716 |     try {
717 |       const response = await server.server.listRoots();
718 |       if (response && 'roots' in response) {
719 |         await updateAllowedDirectoriesFromRoots(response.roots);
720 |       } else {
721 |         console.error("Client returned no roots; keeping current settings");
722 |       }
723 |     } catch (error) {
724 |       console.error("Failed to request initial roots from client:", error instanceof Error ? error.message : String(error));
725 |     }
726 |   } else {
727 |     if (allowedDirectories.length > 0) {
728 |       console.error("Client does not support MCP roots; using allowed directories from server args:", allowedDirectories);
729 |     } else {
730 |       throw new Error(`Server cannot operate: No allowed directories available. Server was started without command-line directories and client either does not support MCP roots protocol or provided empty roots. Please either: 1) Start server with directory arguments, or 2) Use a client that supports MCP roots protocol and provides valid root directories.`);
731 |     }
732 |   }
733 | };
734 | 
735 | // Start server
736 | async function runServer() {
737 |   const transport = new StdioServerTransport();
738 |   await server.connect(transport);
739 |   console.error("Secure MCP Filesystem Server running on stdio");
740 |   if (allowedDirectories.length === 0) {
741 |     console.error("Started without allowed directories - waiting for client to provide roots via MCP protocol");
742 |   }
743 | }
744 | 
745 | runServer().catch((error) => {
746 |   console.error("Fatal error running server:", error);
747 |   process.exit(1);
748 | });
749 | 
```