#
tokens: 23047/50000 5/74 files (page 2/2)
lines: on (toggle) GitHub
raw markdown copy reset
This is page 2 of 2. Use http://codebase.md/rawr-ai/mcp-filesystem?lines=true&page={x} to view the full context.

# Directory Structure

```
├── .ai
│   └── rules
│       └── filesystem-mcp-server-usage.md
├── .cursor
│   └── rules
│       ├── creating-cursor-rules.mdc
│       ├── filesystem-mcp-tools-guide.mdc
│       └── graphiti
│           ├── graphiti-filesystem-schema.mdc
│           ├── graphiti-knowledge-graph-maintenance.mdc
│           └── graphiti-mcp-core-rules.mdc
├── .early.coverage
│   └── v8
│       └── coverage-final.json
├── .github
│   └── workflows
│       └── ci.yml
├── .gitignore
├── .repomixignore
├── ai
│   ├── graph
│   │   ├── entities
│   │   │   ├── .gitkeep
│   │   │   └── Tool.py
│   │   ├── mcp-config.yaml
│   │   └── rools
│   │       ├── orchestrator_SOPs.md
│   │       └── playbooks
│   │           ├── pb_development_logging.md
│   │           ├── pb_discovery_driven_execution.md
│   │           ├── pb_iterative_execution_verification.md
│   │           └── pb_registry.md
│   └── logs
│       ├── dev
│       │   └── 2025-04-06-regex-content-search.md
│       └── introduce_test_suite
│           └── workflow_diagram.md
├── bun.lock
├── bunfig.toml
├── demo
│   ├── archive
│   │   ├── log.txt
│   │   ├── readme.md
│   │   └── subdir
│   │       └── old_data.txt
│   ├── data.json
│   ├── info.txt
│   ├── nested
│   │   ├── deep
│   │   │   └── hidden.json
│   │   └── info.md
│   ├── README.md
│   └── sample.xml
├── Dockerfile
├── examples
│   ├── mcp_cursor.json
│   ├── mcp_docker.json
│   ├── mcp_glama.json
│   ├── mcp_http.json
│   ├── mcp_permissions.json
│   ├── mcp_roo.json
│   ├── mcp_sse.json
│   └── mcp_stdio.json
├── glama.json
├── index.ts
├── package.json
├── README.md
├── repomix.config.json
├── scripts
│   └── run-docker-demo.sh
├── src
│   ├── config
│   │   └── permissions.ts
│   ├── handlers
│   │   ├── directory-handlers.ts
│   │   ├── file-handlers.ts
│   │   ├── index.ts
│   │   ├── json-handlers.ts
│   │   ├── utility-handlers.ts
│   │   └── xml-handlers.ts
│   ├── schemas
│   │   ├── directory-operations.ts
│   │   ├── file-operations.ts
│   │   ├── index.ts
│   │   ├── json-operations.ts
│   │   └── utility-operations.ts
│   └── utils
│       ├── data-utils.ts
│       ├── file-utils.ts
│       ├── path-utils.ts
│       ├── schema-utils.ts
│       └── typebox-zod.ts
├── test
│   ├── json
│   │   └── users.json
│   ├── sample.xml
│   ├── suites
│   │   ├── regex_search_content
│   │   │   ├── basic_search.test.ts
│   │   │   ├── depth_limiting.test.ts
│   │   │   ├── error_handling.test.ts
│   │   │   ├── file_pattern.test.ts
│   │   │   ├── max_filesize.test.ts
│   │   │   ├── max_results.test.ts
│   │   │   ├── path_usage.test.ts
│   │   │   ├── regex_flags.test.ts
│   │   │   └── spec.md
│   │   └── xml_tools
│   │       └── xml_tools.test.ts
│   ├── transports
│   │   ├── network.test.ts
│   │   └── stdio.test.ts
│   └── utils
│       ├── pathUtils.test.ts
│       └── regexUtils.ts
└── tsconfig.json
```

# Files

--------------------------------------------------------------------------------
/src/handlers/file-handlers.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import fs from 'fs/promises';
  2 | import { Permissions } from '../config/permissions.js';
  3 | import { validatePath } from '../utils/path-utils.js';
  4 | import { parseArgs } from '../utils/schema-utils.js';
  5 | import { getFileStats, applyFileEdits } from '../utils/file-utils.js';
  6 | import {
  7 |   ReadFileArgsSchema,
  8 |   ReadMultipleFilesArgsSchema,
  9 |   WriteFileArgsSchema,
 10 |   EditFileArgsSchema,
 11 |   GetFileInfoArgsSchema,
 12 |   MoveFileArgsSchema,
 13 |   DeleteFileArgsSchema,
 14 |   RenameFileArgsSchema,
 15 |   type ReadFileArgs,
 16 |   type ReadMultipleFilesArgs,
 17 |   type WriteFileArgs,
 18 |   type EditFileArgs,
 19 |   type GetFileInfoArgs,
 20 |   type MoveFileArgs,
 21 |   type DeleteFileArgs,
 22 |   type RenameFileArgs
 23 | } from '../schemas/file-operations.js';
 24 | import path from 'path';
 25 | 
 26 | export async function handleReadFile(
 27 |   args: unknown,
 28 |   allowedDirectories: string[],
 29 |   symlinksMap: Map<string, string>,
 30 |   noFollowSymlinks: boolean
 31 | ) {
 32 |   const { path: filePath, maxBytes } = parseArgs(ReadFileArgsSchema, args, 'read_file');
 33 |   const validPath = await validatePath(filePath, allowedDirectories, symlinksMap, noFollowSymlinks);
 34 |   
 35 |   // Check file size before reading
 36 |   const stats = await fs.stat(validPath);
 37 |   const effectiveMaxBytes = maxBytes ?? (10 * 1024); // Default 10KB
 38 |   if (stats.size > effectiveMaxBytes) {
 39 |     throw new Error(`File size (${stats.size} bytes) exceeds the maximum allowed size (${effectiveMaxBytes} bytes).`);
 40 |   }
 41 |   
 42 |   const content = await fs.readFile(validPath, "utf-8");
 43 |   return {
 44 |     content: [{ type: "text", text: content }],
 45 |   };
 46 | }
 47 | 
 48 | export async function handleReadMultipleFiles(
 49 |   args: unknown,
 50 |   allowedDirectories: string[],
 51 |   symlinksMap: Map<string, string>,
 52 |   noFollowSymlinks: boolean
 53 | ) {
 54 |   const { paths, maxBytesPerFile } = parseArgs(ReadMultipleFilesArgsSchema, args, 'read_multiple_files');
 55 |   const effectiveMaxBytes = maxBytesPerFile ?? (10 * 1024); // Default 10KB per file
 56 |   
 57 |   const results = await Promise.all(
 58 |     paths.map(async (filePath: string) => {
 59 |       try {
 60 |         const validPath = await validatePath(filePath, allowedDirectories, symlinksMap, noFollowSymlinks);
 61 |         
 62 |         // Check file size before reading
 63 |         const stats = await fs.stat(validPath);
 64 |         if (stats.size > effectiveMaxBytes) {
 65 |           return `${filePath}: Error - File size (${stats.size} bytes) exceeds the maximum allowed size (${effectiveMaxBytes} bytes).`;
 66 |         }
 67 |         
 68 |         const content = await fs.readFile(validPath, "utf-8");
 69 |         return `${filePath}:\n${content}\n`;
 70 |       } catch (error) {
 71 |         const errorMessage = error instanceof Error ? error.message : String(error);
 72 |         return `${filePath}: Error - ${errorMessage}`;
 73 |       }
 74 |     }),
 75 |   );
 76 |   return {
 77 |     content: [{ type: "text", text: results.join("\n---\n") }],
 78 |   };
 79 | }
 80 | 
 81 | export async function handleCreateFile(
 82 |   args: unknown,
 83 |   permissions: Permissions,
 84 |   allowedDirectories: string[],
 85 |   symlinksMap: Map<string, string>,
 86 |   noFollowSymlinks: boolean
 87 | ) {
 88 |   const data = parseArgs(WriteFileArgsSchema, args, 'create_file');
 89 |   
 90 |   const validPath = await validatePath(
 91 |     data.path,
 92 |     allowedDirectories,
 93 |     symlinksMap,
 94 |     noFollowSymlinks,
 95 |     { checkParentExists: false } // Add this option
 96 |   );
 97 |   
 98 |   // Check if file already exists before writing
 99 |   try {
100 |     await fs.access(validPath);
101 |     // If access succeeds, file exists
102 |     throw new Error(`File already exists: ${data.path}`);
103 |   } catch (error) {
104 |      const msg = error instanceof Error ? error.message : String(error);
105 |      if (!msg.includes('ENOENT')) { // Rethrow if it's not a "file not found" error
106 |        throw error;
107 |      }
108 |      // If ENOENT, proceed with creation
109 |      // Ensure create permission
110 |      if (!permissions.create && !permissions.fullAccess) {
111 |         throw new Error('Cannot create new file: create permission not granted (requires --allow-create)');
112 |      }
113 |      // Ensure parent directory exists
114 |      const parentDir = path.dirname(validPath);
115 |      await fs.mkdir(parentDir, { recursive: true });
116 | 
117 |      await fs.writeFile(validPath, data.content, "utf-8");
118 |      return {
119 |        content: [{ type: "text", text: `Successfully created ${data.path}` }],
120 |      };
121 |   }
122 | }
123 | 
124 | export async function handleModifyFile(
125 |   args: unknown,
126 |   permissions: Permissions,
127 |   allowedDirectories: string[],
128 |   symlinksMap: Map<string, string>,
129 |   noFollowSymlinks: boolean
130 | ) {
131 |   const data = parseArgs(WriteFileArgsSchema, args, 'modify_file');
132 | 
133 |   const validPath = await validatePath(data.path, allowedDirectories, symlinksMap, noFollowSymlinks);
134 |   
135 |   // Check if file exists
136 |   try {
137 |     await fs.access(validPath);
138 |     
139 |     if (!permissions.edit && !permissions.fullAccess) {
140 |       throw new Error('Cannot modify file: edit permission not granted (requires --allow-edit)');
141 |     }
142 |     
143 |     await fs.writeFile(validPath, data.content, "utf-8");
144 |     return {
145 |       content: [{ type: "text", text: `Successfully modified ${data.path}` }],
146 |     };
147 |   } catch (error) {
148 |     throw new Error('Cannot modify file: file does not exist');
149 |   }
150 | }
151 | 
152 | export async function handleEditFile(
153 |   args: unknown,
154 |   permissions: Permissions,
155 |   allowedDirectories: string[],
156 |   symlinksMap: Map<string, string>,
157 |   noFollowSymlinks: boolean
158 | ) {
159 |   const parsed = parseArgs(EditFileArgsSchema, args, 'edit_file');
160 |   
161 |   // Enforce permission checks
162 |   if (!permissions.edit && !permissions.fullAccess) {
163 |     throw new Error('Cannot edit file: edit permission not granted (requires --allow-edit)');
164 |   }
165 |   
166 |   const { path: filePath, edits, dryRun, maxBytes } = parsed;
167 |   const validPath = await validatePath(filePath, allowedDirectories, symlinksMap, noFollowSymlinks);
168 |   
169 |   // Check file size before attempting to read/edit
170 |   const stats = await fs.stat(validPath);
171 |   const effectiveMaxBytes = maxBytes ?? (10 * 1024); // Default 10KB
172 |   if (stats.size > effectiveMaxBytes) {
173 |     throw new Error(`File size (${stats.size} bytes) exceeds the maximum allowed size (${effectiveMaxBytes} bytes) for editing.`);
174 |   }
175 |   
176 |   const result = await applyFileEdits(validPath, edits, dryRun);
177 |   return {
178 |     content: [{ type: "text", text: result }],
179 |   };
180 | }
181 | 
182 | export async function handleGetFileInfo(
183 |   args: unknown,
184 |   allowedDirectories: string[],
185 |   symlinksMap: Map<string, string>,
186 |   noFollowSymlinks: boolean
187 | ) {
188 |   const parsed = parseArgs(GetFileInfoArgsSchema, args, 'get_file_info');
189 |   const validPath = await validatePath(parsed.path, allowedDirectories, symlinksMap, noFollowSymlinks);
190 |   const info = await getFileStats(validPath);
191 |   return {
192 |     content: [{ type: "text", text: Object.entries(info)
193 |       .map(([key, value]) => `${key}: ${value}`)
194 |       .join("\n") }],
195 |   };
196 | }
197 | 
198 | export async function handleMoveFile(
199 |   args: unknown,
200 |   permissions: Permissions,
201 |   allowedDirectories: string[],
202 |   symlinksMap: Map<string, string>,
203 |   noFollowSymlinks: boolean
204 | ) {
205 |   const parsed = parseArgs(MoveFileArgsSchema, args, 'move_file');
206 |   
207 |   // Enforce permission checks
208 |   if (!permissions.move && !permissions.fullAccess) {
209 |     throw new Error('Cannot move file: move permission not granted (requires --allow-move)');
210 |   }
211 |   
212 |   const validSourcePath = await validatePath(parsed.source, allowedDirectories, symlinksMap, noFollowSymlinks); // No option here, source must exist
213 | 
214 |   const validDestPath = await validatePath(
215 |     parsed.destination,
216 |     allowedDirectories,
217 |     symlinksMap,
218 |     noFollowSymlinks,
219 |     { checkParentExists: false } // Add option here for destination
220 |   );
221 |   // Ensure destination parent exists before moving (fs.rename requires parent)
222 |   const destParentDir = path.dirname(validDestPath);
223 |   try {
224 |       await fs.access(destParentDir);
225 |   } catch {
226 |       throw new Error(`Destination parent directory does not exist: ${path.dirname(parsed.destination)}`);
227 |   }
228 | 
229 |   await fs.rename(validSourcePath, validDestPath);
230 |   return {
231 |     content: [{ type: "text", text: `Successfully moved ${parsed.source} to ${parsed.destination}` }],
232 |   };
233 | }
234 | 
235 | export async function handleDeleteFile(
236 |   args: unknown,
237 |   permissions: Permissions,
238 |   allowedDirectories: string[],
239 |   symlinksMap: Map<string, string>,
240 |   noFollowSymlinks: boolean
241 | ) {
242 |   const parsed = parseArgs(DeleteFileArgsSchema, args, 'delete_file');
243 |   
244 |   // Enforce permission checks
245 |   if (!permissions.delete && !permissions.fullAccess) {
246 |     throw new Error('Cannot delete file: delete permission not granted (requires --allow-delete)');
247 |   }
248 |   
249 |   const validPath = await validatePath(parsed.path, allowedDirectories, symlinksMap, noFollowSymlinks);
250 |   
251 |   try {
252 |     // Check if file exists
253 |     await fs.access(validPath);
254 |     await fs.unlink(validPath);
255 |     return {
256 |       content: [{ type: "text", text: `Successfully deleted ${parsed.path}` }],
257 |     };
258 |   } catch (error) {
259 |     throw new Error(`Failed to delete file: ${error instanceof Error ? error.message : String(error)}`);
260 |   }
261 | }
262 | 
263 | export async function handleRenameFile(
264 |   args: unknown,
265 |   permissions: Permissions,
266 |   allowedDirectories: string[],
267 |   symlinksMap: Map<string, string>,
268 |   noFollowSymlinks: boolean
269 | ) {
270 |   const parsed = parseArgs(RenameFileArgsSchema, args, 'rename_file');
271 |   
272 |   // Enforce permission checks - rename requires the rename permission
273 |   if (!permissions.rename && !permissions.fullAccess) {
274 |     throw new Error('Cannot rename file: rename permission not granted (requires --allow-rename)');
275 |   }
276 |   
277 |   const validSourcePath = await validatePath(parsed.path, allowedDirectories, symlinksMap, noFollowSymlinks);
278 |   
279 |   // Get the directory from the source path
280 |   const directory = path.dirname(validSourcePath);
281 |   
282 |   // Create the destination path using the same directory and the new name
283 |   const destinationPath = path.join(directory, parsed.newName);
284 |   
285 |   // Validate the destination path
286 |   const validDestPath = await validatePath(destinationPath, allowedDirectories, symlinksMap, noFollowSymlinks);
287 |   
288 |   // Check if destination already exists
289 |   try {
290 |     await fs.access(validDestPath);
291 |     throw new Error(`Cannot rename file: a file with name "${parsed.newName}" already exists in the directory`);
292 |   } catch (error) {
293 |     // We want this error - it means the destination doesn't exist yet
294 |     if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
295 |       throw error;
296 |     }
297 |   }
298 |   
299 |   // Perform the rename operation
300 |   await fs.rename(validSourcePath, validDestPath);
301 | 
302 |   return {
303 |     content: [{ type: "text", text: `Successfully renamed ${parsed.path} to ${parsed.newName}` }],
304 |   };
305 | }
306 | 
```

--------------------------------------------------------------------------------
/src/handlers/xml-handlers.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import fs from 'fs/promises';
  2 | import * as xpath from 'xpath';
  3 | import { DOMParser as XmldomDOMParser } from '@xmldom/xmldom';
  4 | import { validatePath } from '../utils/path-utils.js';
  5 | import { parseArgs } from '../utils/schema-utils.js';
  6 | import {
  7 |   XmlQueryArgsSchema,
  8 |   XmlStructureArgsSchema,
  9 |   type XmlQueryArgs,
 10 |   type XmlStructureArgs
 11 | } from '../schemas/utility-operations.js';
 12 | 
// Define interfaces for type safety

// JSON-serializable representation of a DOM node (or XPath primitive result)
// as produced by formatNode.
interface XmlNode {
  type: 'element' | 'text' | 'attribute' | 'unknown';
  name?: string;                                      // element/attribute name
  value?: string;                                     // text content or attribute value
  attributes?: Array<{ name: string; value: string }>; // present only when requested and non-empty
  children?: XmlNode[];
  nodeType?: number;                                  // raw DOM nodeType for 'unknown' nodes
}

// One level of the element hierarchy built by buildHierarchy.
interface HierarchyNode {
  name: string;
  hasChildren?: boolean;       // set only when the depth limit cut off recursion
  children?: HierarchyNode[];  // one entry per distinct child element name
}

// Summary returned by extractXmlStructure.
interface XmlStructureInfo {
  rootElement: string | undefined;
  elements: Record<string, number>;      // tag name -> occurrence count
  attributes?: Record<string, number>;   // "tag@attr" -> occurrence count (optional)
  namespaces: Record<string, string>;    // prefix -> namespace URI
  hierarchy?: HierarchyNode;             // present when maxDepth > 0
}
 36 | 
 37 | /**
 38 |  * Handler for executing XPath queries on XML files
 39 |  */
 40 | export async function handleXmlQuery(
 41 |   args: unknown,
 42 |   allowedDirectories: string[],
 43 |   symlinksMap: Map<string, string>,
 44 |   noFollowSymlinks: boolean
 45 | ): Promise<{ content: Array<{ type: string; text: string }> }> {
 46 |   const parsed = parseArgs(XmlQueryArgsSchema, args, 'xml_query');
 47 | 
 48 |   const validPath = await validatePath(
 49 |     parsed.path,
 50 |     allowedDirectories,
 51 |     symlinksMap,
 52 |     noFollowSymlinks
 53 |   );
 54 | 
 55 |   try {
 56 |     const xmlContent = await fs.readFile(validPath, 'utf8');
 57 | 
 58 |     try {
 59 |       const responseLimit =
 60 |         (parsed as any).maxResponseBytes ?? parsed.maxBytes ?? 200 * 1024; // 200KB default
 61 |       const result = processXmlContent(
 62 |         xmlContent,
 63 |         parsed.query,
 64 |         parsed.structureOnly,
 65 |         parsed.includeAttributes,
 66 |         responseLimit
 67 |       );
 68 |       return result;
 69 |     } catch (err) {
 70 |       const errorMessage = err instanceof Error ? err.message : String(err);
 71 |       throw new Error(`Failed to process XML: ${errorMessage}`);
 72 |     }
 73 |   } catch (err) {
 74 |     const errorMessage = err instanceof Error ? err.message : String(err);
 75 |     throw new Error(`Failed to query XML file: ${errorMessage}`);
 76 |   }
 77 | }
 78 | 
/**
 * Handler for extracting XML structure information.
 *
 * Parses the file, summarizes its element/attribute/namespace structure via
 * extractXmlStructure, and serializes the result as pretty-printed JSON.
 * If the JSON would exceed the response byte limit, a compact summary with
 * counts (and a `_meta.truncated` marker) is returned instead.
 *
 * @param args - Raw tool arguments, validated against XmlStructureArgsSchema.
 * @param allowedDirectories - Roots the resolved path must live under.
 * @param symlinksMap - Known symlink mappings used during validation.
 * @param noFollowSymlinks - When true, symlinks are not resolved.
 * @returns MCP-style content payload with the structure JSON.
 * @throws If the file cannot be read or the XML cannot be parsed/analyzed.
 */
export async function handleXmlStructure(
  args: unknown,
  allowedDirectories: string[],
  symlinksMap: Map<string, string>,
  noFollowSymlinks: boolean
): Promise<{ content: Array<{ type: string; text: string }> }> {
  const parsed = parseArgs(XmlStructureArgsSchema, args, 'xml_structure');

  const validPath = await validatePath(
    parsed.path,
    allowedDirectories,
    symlinksMap,
    noFollowSymlinks
  );

  try {
    const xmlContent = await fs.readFile(validPath, 'utf8');

    try {
      const parser = new XmldomDOMParser();
      const doc: any = parser.parseFromString(xmlContent, 'text/xml');
      const structure = extractXmlStructure(
        doc,
        parsed.maxDepth,
        parsed.includeAttributes
      );

      // Prefer maxResponseBytes when supplied; fall back to maxBytes.
      const responseLimit = (parsed as any).maxResponseBytes ?? parsed.maxBytes ?? 200 * 1024; // 200KB default
      let json = JSON.stringify(structure, null, 2);

      if (typeof responseLimit === 'number' && responseLimit > 0) {
        const size = Buffer.byteLength(json, 'utf8');
        if (size > responseLimit) {
          // Fallback to a summarized structure to respect response limit
          const summary = {
            rootElement: structure.rootElement,
            namespaces: structure.namespaces,
            elementTypeCount: Object.keys(structure.elements).length,
            attributeKeyCount: structure.attributes ? Object.keys(structure.attributes).length : 0,
            hierarchy: structure.hierarchy ? { name: structure.hierarchy.name, hasChildren: structure.hierarchy.hasChildren, childrenCount: structure.hierarchy.children?.length ?? 0 } : undefined,
            _meta: {
              truncated: true,
              note: `Full structure omitted to fit response limit of ${responseLimit} bytes`
            }
          };
          json = JSON.stringify(summary, null, 2);
        }
      }

      return {
        content: [{
          type: 'text',
          text: json
        }]
      };
    } catch (err) {
      const errorMessage = err instanceof Error ? err.message : String(err);
      throw new Error(`Failed to extract XML structure: ${errorMessage}`);
    }
  } catch (err) {
    // NOTE(review): inner errors re-enter this catch, producing doubled
    // "Failed to analyze XML structure: Failed to extract XML structure: ..."
    // messages — confirm whether that nesting is intentional.
    const errorMessage = err instanceof Error ? err.message : String(err);
    throw new Error(`Failed to analyze XML structure: ${errorMessage}`);
  }
}
146 | 
/**
 * Process XML content with XPath or structure analysis.
 *
 * Exactly one mode applies:
 *  - structureOnly: returns the distinct tag names, total element count and
 *    root element name.
 *  - query: evaluates the XPath expression and returns the formatted nodes.
 *
 * When maxResponseBytes is a positive number, the serialized JSON is kept
 * within that size by truncating the tag/result lists and recording what was
 * omitted in a meta entry.
 *
 * @param xmlContent - Raw XML text to parse.
 * @param query - XPath expression (required unless structureOnly is true).
 * @param structureOnly - When true, only tag-level structure is reported.
 * @param includeAttributes - Whether formatted element nodes carry attributes.
 * @param maxResponseBytes - Optional byte cap on the serialized response.
 * @throws If neither mode is selected or the XPath result is unexpected.
 */
function processXmlContent(
  xmlContent: string,
  query?: string,
  structureOnly = false,
  includeAttributes = true,
  maxResponseBytes?: number
): { content: Array<{ type: string; text: string }> } {
  const parser = new XmldomDOMParser();
  const doc: any = parser.parseFromString(xmlContent, 'text/xml');

  if (structureOnly) {
    // Extract only structure information
    const tags = new Set<string>();
    const structureQuery = "//*";
    const nodes = xpath.select(structureQuery, doc as any);
    
    if (!Array.isArray(nodes)) {
      throw new Error('Unexpected XPath result type');
    }

    nodes.forEach((node: Node) => {
      if (node.nodeName) {
        tags.add(node.nodeName);
      }
    });

    const base = {
      tags: Array.from(tags),
      count: nodes.length,
      rootElement: doc.documentElement?.nodeName
    };

    let json = JSON.stringify(base, null, 2);
    if (typeof maxResponseBytes === 'number' && maxResponseBytes > 0) {
      if (Buffer.byteLength(json, 'utf8') > maxResponseBytes) {
        // Trim tags list progressively until it fits: binary search for the
        // largest prefix of `tags` whose serialization stays within the cap.
        const all = base.tags;
        let lo = 0;
        let hi = all.length;
        let best = 0;
        while (lo <= hi) {
          const mid = Math.floor((lo + hi) / 2);
          const candidate = { ...base, tags: all.slice(0, mid) };
          const s = JSON.stringify(candidate, null, 2);
          if (Buffer.byteLength(s, 'utf8') <= maxResponseBytes) {
            best = mid;
            lo = mid + 1;
          } else {
            hi = mid - 1;
          }
        }
        // NOTE(review): `_meta` is added after the search, so the final JSON
        // can slightly exceed maxResponseBytes — confirm this is acceptable.
        const truncated = {
          ...base,
          tags: all.slice(0, best),
          _meta: {
            truncated: true,
            omittedTagCount: all.length - best
          }
        } as const;
        json = JSON.stringify(truncated, null, 2);
      }
    }

    return {
      content: [{ type: 'text', text: json }]
    };
  } else if (query) {
    // Execute specific XPath query
    const nodes = xpath.select(query, doc as any);

    // XPath may return a single primitive for non-node-set results;
    // normalize to an array so the loop below handles both cases.
    const asArray: any[] = Array.isArray(nodes) ? nodes as any[] : [nodes as any];
    const results: XmlNode[] = [];
    let omittedCount = 0;
    let currentJson = JSON.stringify(results, null, 2);
    const limit = typeof maxResponseBytes === 'number' && maxResponseBytes > 0 ? maxResponseBytes : undefined;

    // Greedily append formatted nodes while the serialized list still fits.
    for (let i = 0; i < asArray.length; i++) {
      const formatted = formatNode(asArray[i] as any, includeAttributes);
      const tentative = [...results, formatted];
      const serialized = JSON.stringify(tentative, null, 2);
      if (limit && Buffer.byteLength(serialized, 'utf8') > limit) {
        omittedCount = asArray.length - i;
        break;
      }
      results.push(formatted);
      currentJson = serialized;
    }

    // Append a meta entry describing the omission — but only if it also fits.
    if (omittedCount > 0) {
      const meta = { type: 'meta', value: `truncated: omitted ${omittedCount} result(s)` } as const;
      const tentative = [...results, meta as any];
      const serialized = JSON.stringify(tentative, null, 2);
      if (!limit || Buffer.byteLength(serialized, 'utf8') <= limit) {
        currentJson = serialized;
      }
    }

    return {
      content: [{ type: 'text', text: currentJson }]
    };
  } else {
    throw new Error('Either structureOnly or query must be specified');
  }
}
254 | 
255 | /**
256 |  * Format a DOM node for output
257 |  */
258 | function formatNode(node: Node | string | number | boolean | null | undefined, includeAttributes = true): XmlNode {
259 |   if (typeof node === 'string' || typeof node === 'number' || typeof node === 'boolean') {
260 |     return { type: 'text', value: String(node) };
261 |   }
262 | 
263 |   if (!node || typeof node !== 'object' || !('nodeType' in node)) {
264 |     return { type: 'unknown', value: String(node) };
265 |   }
266 | 
267 |   // Text node
268 |   if (node.nodeType === 3) {
269 |     return {
270 |       type: 'text',
271 |       value: node.nodeValue?.trim()
272 |     };
273 |   }
274 | 
275 |   // Element node
276 |   if (node.nodeType === 1) {
277 |     const element = node as Element;
278 |     const result: XmlNode = {
279 |       type: 'element',
280 |       name: element.nodeName,
281 |       value: element.textContent?.trim()
282 |     };
283 | 
284 |     if (includeAttributes && element.attributes && element.attributes.length > 0) {
285 |       result.attributes = Array.from(element.attributes).map((attr) => ({
286 |         name: attr.nodeName,
287 |         value: attr.nodeValue ?? ''
288 |       }));
289 |     }
290 | 
291 |     return result;
292 |   }
293 | 
294 |   // Attribute node
295 |   if (node.nodeType === 2) {
296 |     return {
297 |       type: 'attribute',
298 |       name: (node as Attr).nodeName,
299 |       value: (node as Attr).nodeValue ?? ''
300 |     };
301 |   }
302 | 
303 |   return {
304 |     type: 'unknown',
305 |     nodeType: node.nodeType,
306 |     value: node.toString()
307 |   };
308 | }
309 | 
/**
 * Extract structured information about XML document:
 * root element name, per-tag element counts, optional attribute counts
 * (keyed "tag@attr"), declared namespaces, and — when maxDepth > 0 —
 * a hierarchy of distinct child element names.
 *
 * @param doc - Parsed XML document (xmldom Document).
 * @param maxDepth - How many levels of the element hierarchy to record.
 * @param includeAttributes - Whether to tally attribute occurrences.
 */
function extractXmlStructure(doc: any, maxDepth = 2, includeAttributes = true): XmlStructureInfo {
  const structure: XmlStructureInfo = {
    rootElement: doc.documentElement?.nodeName,
    elements: {},
    attributes: includeAttributes ? {} : undefined,
    namespaces: extractNamespaces(doc),
  };

  // Get all element names and counts
  const elementQuery = "//*";
  const elements = xpath.select(elementQuery, doc) as any[];

  elements.forEach((element) => {
    const el = element as any;
    const name = el.nodeName;
    structure.elements[name] = (structure.elements[name] || 0) + 1;

    // Tally each attribute occurrence under the "elementName@attributeName" key.
    if (includeAttributes && el.attributes && el.attributes.length > 0) {
      for (let i = 0; i < el.attributes.length; i++) {
        const attr = el.attributes[i];
        const attrKey = `${name}@${attr.nodeName}`;
        if (structure.attributes) {
          structure.attributes[attrKey] = (structure.attributes[attrKey] || 0) + 1;
        }
      }
    }
  });

  // Get child relationship structure up to maxDepth
  if (maxDepth > 0 && doc.documentElement) {
    structure.hierarchy = buildHierarchy(doc.documentElement, maxDepth);
  }

  return structure;
}
348 | 
349 | /**
350 |  * Extract namespaces used in the document
351 |  */
352 | function extractNamespaces(doc: any) {
353 |   const namespaces: Record<string, string> = {};
354 |   const nsQuery = "//*[namespace-uri()]";
355 | 
356 |   try {
357 |     const nsNodes = xpath.select(nsQuery, doc) as any[];
358 |     nsNodes.forEach((node) => {
359 |       const el = node as any;
360 |       if (el.namespaceURI) {
361 |         const prefix = el.prefix || '';
362 |         namespaces[prefix] = el.namespaceURI;
363 |       }
364 |     });
365 |   } catch (err) {
366 |     // Some documents might not support namespace queries
367 |     console.error('Error extracting namespaces:', err instanceof Error ? err.message : String(err));
368 |   }
369 | 
370 |   return namespaces;
371 | }
372 | 
373 | /**
374 |  * Build element hierarchy up to maxDepth
375 |  */
376 | function buildHierarchy(element: any, maxDepth: number, currentDepth = 0): HierarchyNode {
377 |   if (currentDepth >= maxDepth) {
378 |     return { name: element.nodeName, hasChildren: element.childNodes.length > 0 };
379 |   }
380 | 
381 |   const result: HierarchyNode = {
382 |     name: element.nodeName,
383 |     children: []
384 |   };
385 | 
386 |   // Only process element nodes (type 1)
387 |   const childElements = Array.from(element.childNodes as any[])
388 |     .filter((node: any) => node && node.nodeType === 1) as any[];
389 | 
390 |   if (childElements.length > 0) {
391 |     const processedChildren = new Set<string>();
392 | 
393 |     childElements.forEach((child: any) => {
394 |       // Only add unique child element types
395 |       if (!processedChildren.has(child.nodeName)) {
396 |         processedChildren.add(child.nodeName);
397 |         result.children!.push(
398 |           buildHierarchy(child, maxDepth, currentDepth + 1)
399 |         );
400 |       }
401 |     });
402 |   }
403 | 
404 |   return result;
405 | }
406 | 
```

--------------------------------------------------------------------------------
/src/utils/file-utils.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import fsPromises from 'fs/promises';
  2 | import { createReadStream, Stats } from 'fs';
  3 | import * as readline from 'readline';
  4 | import type { ReadonlyDeep } from 'type-fest';
  5 | import { createTwoFilesPatch } from 'diff';
  6 | import { minimatch } from 'minimatch';
  7 | import path from 'path';
  8 | 
// Plain snapshot of a path's stat metadata, as produced by getFileStats.
export interface FileInfo {
  size: number;          // size in bytes
  created: Date;         // stat birthtime
  modified: Date;        // stat mtime
  accessed: Date;        // stat atime
  isDirectory: boolean;
  isFile: boolean;
  permissions: string;   // low three octal digits of the mode, e.g. "644"
}

// Deeply read-only view of FileInfo for callers that must not mutate it.
export type ImmutableFileInfo = ReadonlyDeep<FileInfo>;
 20 | 
 21 | export async function getFileStats(filePath: string): Promise<ImmutableFileInfo> {
 22 |   const stats = await fsPromises.stat(filePath);
 23 |   return {
 24 |     size: stats.size,
 25 |     created: stats.birthtime,
 26 |     modified: stats.mtime,
 27 |     accessed: stats.atime,
 28 |     isDirectory: stats.isDirectory(),
 29 |     isFile: stats.isFile(),
 30 |     permissions: stats.mode.toString(8).slice(-3),
 31 |   };
 32 | }
 33 | 
 34 | export async function searchFiles(
 35 |   rootPath: string,
 36 |   pattern: string,
 37 |   excludePatterns: string[] = [],
 38 |   maxDepth: number = 2, // Default depth
 39 |   maxResults: number = 10 // Default results
 40 | ): Promise<ReadonlyArray<string>> {
 41 |   const results: string[] = [];
 42 | 
 43 |   async function search(currentPath: string, currentDepth: number) {
 44 |     // Stop if max depth is reached
 45 |     if (currentDepth >= maxDepth) {
 46 |       return;
 47 |     }
 48 |     
 49 |     // Stop if max results are reached
 50 |     if (results.length >= maxResults) {
 51 |       return;
 52 |     }
 53 |     const entries = await fsPromises.readdir(currentPath, { withFileTypes: true });
 54 | 
 55 |     for (const entry of entries) {
 56 |       const fullPath = path.join(currentPath, entry.name);
 57 | 
 58 |       // Check if path matches any exclude pattern
 59 |       const relativePath = path.relative(rootPath, fullPath);
 60 |       const shouldExclude = excludePatterns.some(pattern => {
 61 |         const globPattern = pattern.includes('*') ? pattern : `**/${pattern}/**`;
 62 |         return minimatch(relativePath, globPattern, { dot: true });
 63 |       });
 64 | 
 65 |       if (shouldExclude) {
 66 |         continue;
 67 |       }
 68 | 
 69 |       if (entry.name.toLowerCase().includes(pattern.toLowerCase())) {
 70 |         if (results.length < maxResults) {
 71 |           results.push(fullPath);
 72 |         }
 73 |         // Check again if max results reached after adding
 74 |         if (results.length >= maxResults) {
 75 |           return; // Stop searching this branch
 76 |         }
 77 |       }
 78 | 
 79 |       if (entry.isDirectory()) {
 80 |         // Check results length before recursing
 81 |         if (results.length < maxResults) {
 82 |           await search(fullPath, currentDepth + 1);
 83 |         }
 84 |       }
 85 |     }
 86 |   }
 87 | 
 88 |   await search(rootPath, 0); // Start search at depth 0
 89 |   return results;
 90 | }
 91 | 
 92 | export async function findFilesByExtension(
 93 |   rootPath: string,
 94 |   extension: string,
 95 |   excludePatterns: string[] = [],
 96 |   maxDepth: number = 2, // Default depth
 97 |   maxResults: number = 10 // Default results
 98 | ): Promise<ReadonlyArray<string>> {
 99 |   const results: string[] = [];
100 |   
101 |   // Normalize the extension (remove leading dot if present)
102 |   let normalizedExtension = extension.toLowerCase();
103 |   if (normalizedExtension.startsWith('.')) {
104 |     normalizedExtension = normalizedExtension.substring(1);
105 |   }
106 |   
107 |   async function searchDirectory(currentPath: string, currentDepth: number) {
108 |     // Stop if max depth is reached
109 |     if (currentDepth >= maxDepth) {
110 |       return;
111 |     }
112 |     
113 |     // Stop if max results are reached
114 |     if (results.length >= maxResults) {
115 |       return;
116 |     }
117 |     const entries = await fsPromises.readdir(currentPath, { withFileTypes: true });
118 | 
119 |     for (const entry of entries) {
120 |       const fullPath = path.join(currentPath, entry.name);
121 | 
122 |       // Check if path matches any exclude pattern
123 |       const relativePath = path.relative(rootPath, fullPath);
124 |       const shouldExclude = excludePatterns.some(pattern => {
125 |         const globPattern = pattern.includes('*') ? pattern : `**/${pattern}/**`;
126 |         return minimatch(relativePath, globPattern, { dot: true });
127 |       });
128 | 
129 |       if (shouldExclude) {
130 |         continue;
131 |       }
132 | 
133 |       if (entry.isFile()) {
134 |         // Check if file has the requested extension
135 |         const fileExtension = path.extname(entry.name).toLowerCase().substring(1);
136 |         if (fileExtension === normalizedExtension) {
137 |           if (results.length < maxResults) {
138 |             results.push(fullPath);
139 |           }
140 |           // Check again if max results reached after adding
141 |           if (results.length >= maxResults) {
142 |             return; // Stop searching this branch
143 |           }
144 |         }
145 |       } else if (entry.isDirectory()) {
146 |         // Recursively search subdirectories
147 |         // Check results length before recursing
148 |         if (results.length < maxResults) {
149 |           await searchDirectory(fullPath, currentDepth + 1);
150 |         }
151 |       }
152 |     }
153 |   }
154 | 
155 |   await searchDirectory(rootPath, 0); // Start search at depth 0
156 |   return results;
157 | }
158 | 
159 | export function normalizeLineEndings(text: string): string {
160 |   return text.replace(/\r\n/g, '\n');
161 | }
162 | 
163 | export function createUnifiedDiff(originalContent: string, newContent: string, filepath: string = 'file'): string {
164 |   // Ensure consistent line endings for diff
165 |   const normalizedOriginal = normalizeLineEndings(originalContent);
166 |   const normalizedNew = normalizeLineEndings(newContent);
167 | 
168 |   return createTwoFilesPatch(
169 |     filepath,
170 |     filepath,
171 |     normalizedOriginal,
172 |     normalizedNew,
173 |     'original',
174 |     'modified'
175 |   );
176 | }
177 | 
/**
 * Apply a sequence of text edits to a file and return a unified diff of the
 * result.
 *
 * Each edit replaces the FIRST occurrence of `oldText` with `newText`.
 * When no exact match exists, a line-by-line match that ignores
 * leading/trailing whitespace is attempted, re-applying the original
 * indentation to the replacement.
 *
 * @param filePath - File to edit (caller must have validated the path).
 * @param edits - Ordered list of { oldText, newText } replacements; each edit
 *   sees the content produced by the previous one.
 * @param dryRun - When true, compute the diff but do not write the file.
 * @returns The diff wrapped in a backtick-fenced "diff" code block.
 * @throws Error when an edit's oldText cannot be located.
 */
export async function applyFileEdits(
  filePath: string,
  edits: ReadonlyArray<ReadonlyDeep<{ oldText: string; newText: string }>>,
  dryRun = false
): Promise<string> {
  // Read file content and normalize line endings
  const content = normalizeLineEndings(await fsPromises.readFile(filePath, 'utf-8'));

  // Apply edits sequentially
  let modifiedContent = content;
  for (const edit of edits) {
    const normalizedOld = normalizeLineEndings(edit.oldText);
    const normalizedNew = normalizeLineEndings(edit.newText);

    // If exact match exists, use it
    // (String.replace with a string pattern replaces only the first hit.)
    if (modifiedContent.includes(normalizedOld)) {
      modifiedContent = modifiedContent.replace(normalizedOld, normalizedNew);
      continue;
    }

    // Otherwise, try line-by-line matching with flexibility for whitespace
    const oldLines = normalizedOld.split('\n');
    const contentLines = modifiedContent.split('\n');
    let matchFound = false;

    for (let i = 0; i <= contentLines.length - oldLines.length; i++) {
      const potentialMatch = contentLines.slice(i, i + oldLines.length);

      // Compare lines with normalized whitespace
      const isMatch = oldLines.every((oldLine, j) => {
        const contentLine = potentialMatch[j];
        return oldLine.trim() === contentLine.trim();
      });

      if (isMatch) {
        // Preserve original indentation of first line
        const originalIndent = contentLines[i].match(/^\s*/)?.[0] || '';
        const newLines = normalizedNew.split('\n').map((line, j) => {
          if (j === 0) return originalIndent + line.trimStart();
          // For subsequent lines, try to preserve relative indentation
          const oldIndent = oldLines[j]?.match(/^\s*/)?.[0] || '';
          const newIndent = line.match(/^\s*/)?.[0] || '';
          if (oldIndent && newIndent) {
            const relativeIndent = newIndent.length - oldIndent.length;
            return originalIndent + ' '.repeat(Math.max(0, relativeIndent)) + line.trimStart();
          }
          return line;
        });

        contentLines.splice(i, oldLines.length, ...newLines);
        modifiedContent = contentLines.join('\n');
        matchFound = true;
        break;
      }
    }

    if (!matchFound) {
      throw new Error(`Could not find exact match for edit:\n${edit.oldText}`);
    }
  }

  // Create unified diff
  const diff = createUnifiedDiff(content, modifiedContent, filePath);

  // Format diff with appropriate number of backticks so backticks inside
  // the diff content can never terminate the fence early.
  let numBackticks = 3;
  while (diff.includes('`'.repeat(numBackticks))) {
    numBackticks++;
  }
  const formattedDiff = `${'`'.repeat(numBackticks)}diff\n${diff}${'`'.repeat(numBackticks)}\n\n`;

  if (!dryRun) {
    await fsPromises.writeFile(filePath, modifiedContent, 'utf-8');
  }

  return formattedDiff;
}
255 | 
/** A single file's regex matches: absolute path plus the matched lines. */
export type RegexSearchResult = ReadonlyDeep<{
  path: string;
  matches: Array<{
    lineNumber: number;  // 1-based
    lineContent: string;
  }>;
}>;

/**
 * Search file contents line-by-line with a regular expression.
 *
 * Files are selected by matching `filePattern` (a glob) against the path
 * relative to `rootPath`, then streamed through readline so large files are
 * never loaded whole into memory. Unreadable directories and files are
 * skipped with a warning rather than failing the whole search.
 *
 * NOTE(review): the default filePattern '*' has no path separator, so it
 * presumably matches only files directly under rootPath, not in
 * subdirectories — confirm this is the intended default.
 *
 * @param rootPath - Directory to search.
 * @param regexPattern - JavaScript regex source; compiled with the 'g' flag.
 * @param filePattern - Glob applied to each file's relative path.
 * @param maxDepth - Maximum directory depth to descend (root is depth 0).
 * @param maxFileSize - Files larger than this many bytes are skipped.
 * @param maxResults - Stop after this many files with matches.
 * @returns One entry per matching file, each with its matched lines.
 * @throws Error when regexPattern is not a valid regular expression.
 */
export async function regexSearchContent(
  rootPath: string,
  regexPattern: string,
  filePattern: string = '*',
  maxDepth: number = 2,
  maxFileSize: number = 10 * 1024 * 1024, // 10MB default
  maxResults: number = 50
): Promise<ReadonlyArray<RegexSearchResult>> {
  const results: RegexSearchResult[] = [];
  let regex: RegExp;

  try {
    regex = new RegExp(regexPattern, 'g'); // Global flag to find all matches
  } catch (error: any) {
    throw new Error(`Invalid regex pattern provided: ${error.message}`);
  }

  async function search(currentPath: string, currentDepth: number) {
    if (currentDepth >= maxDepth || results.length >= maxResults) {
      return;
    }

    let entries;
    try {
      entries = await fsPromises.readdir(currentPath, { withFileTypes: true });
    } catch (error: any) {
      console.warn(`Skipping directory ${currentPath}: ${error.message}`);
      return; // Skip directories we can't read
    }

    for (const entry of entries) {
      if (results.length >= maxResults) return; // Check results limit again

      const fullPath = path.join(currentPath, entry.name);
      const relativePath = path.relative(rootPath, fullPath);

      if (entry.isDirectory()) {
        await search(fullPath, currentDepth + 1);
      } else if (entry.isFile()) {
        // Check if file matches the filePattern glob
        // Match file pattern against the relative path (removed matchBase: true)
        if (!minimatch(relativePath, filePattern, { dot: true })) {
          continue;
        }

        try {
          const stats = await fsPromises.stat(fullPath);
          if (stats.size > maxFileSize) {
            console.warn(`Skipping large file ${fullPath}: size ${stats.size} > max ${maxFileSize}`);
            continue;
          }

          // Use streaming approach for large files
          const fileStream = createReadStream(fullPath, { encoding: 'utf-8' });
          const rl = readline.createInterface({
            input: fileStream,
            crlfDelay: Infinity, // Handle different line endings
          });

          const fileMatches: { lineNumber: number; lineContent: string }[] = [];
          let currentLineNumber = 0;

          // Wrap readline processing in a promise
          await new Promise<void>((resolve, reject) => {
            rl.on('line', (line) => {
              currentLineNumber++;
              // Reset regex lastIndex before each test if using global flag
              regex.lastIndex = 0;
              if (regex.test(line)) {
                fileMatches.push({ lineNumber: currentLineNumber, lineContent: line });
              }
            });

            rl.on('close', () => {
              resolve();
            });

            rl.on('error', (err) => {
              // Don't reject, just warn and resolve to continue processing other files
              console.warn(`Error reading file ${fullPath}: ${err.message}`);
              resolve();
            });

            fileStream.on('error', (err) => {
              // Handle stream errors (e.g., file not found during read)
               console.warn(`Error reading file stream ${fullPath}: ${err.message}`);
               resolve(); // Resolve to allow processing to continue
            });
          });

          if (fileMatches.length > 0) {
            if (results.length < maxResults) {
              results.push({ path: fullPath, matches: fileMatches });
            }
            if (results.length >= maxResults) return; // Stop searching this branch
          }
        } catch (error: any) {
          console.warn(`Skipping file ${fullPath}: ${error.message}`);
          // Continue searching other files even if one fails
        }
      }
    }
  }

  await search(rootPath, 0);
  return results;
}
```

--------------------------------------------------------------------------------
/index.ts:
--------------------------------------------------------------------------------

```typescript
  1 | #!/usr/bin/env bun
  2 | 
  3 | import { FastMCP } from "fastmcp";
  4 | import fs from "fs/promises";
  5 | import path from "path";
  6 | import {
  7 |   expandHome,
  8 |   normalizePath,
  9 |   validatePath,
 10 | } from "./src/utils/path-utils.js";
 11 | import { toolSchemas } from "./src/schemas/index.js";
 12 | import { toZodParameters } from "./src/utils/typebox-zod.js";
 13 | import {
 14 |   handleReadFile,
 15 |   handleReadMultipleFiles,
 16 |   handleCreateFile,
 17 |   handleModifyFile,
 18 |   handleEditFile,
 19 |   handleGetFileInfo,
 20 |   handleMoveFile,
 21 |   handleDeleteFile,
 22 |   handleRenameFile,
 23 | } from "./src/handlers/file-handlers.js";
 24 | import {
 25 |   handleCreateDirectory,
 26 |   handleListDirectory,
 27 |   handleDirectoryTree,
 28 |   handleDeleteDirectory,
 29 | } from "./src/handlers/directory-handlers.js";
 30 | import {
 31 |   handleSearchFiles,
 32 |   handleFindFilesByExtension,
 33 |   handleGetPermissions,
 34 |   handleXmlToJson,
 35 |   handleXmlToJsonString,
 36 |   handleListAllowedDirectories,
 37 |   handleRegexSearchContent,
 38 | } from "./src/handlers/utility-handlers.js";
 39 | import {
 40 |   handleXmlQuery,
 41 |   handleXmlStructure,
 42 | } from "./src/handlers/xml-handlers.js";
 43 | import {
 44 |   handleJsonQuery,
 45 |   handleJsonFilter,
 46 |   handleJsonGetValue,
 47 |   handleJsonTransform,
 48 |   handleJsonStructure,
 49 |   handleJsonSample,
 50 |   handleJsonValidate,
 51 |   handleJsonSearchKv,
 52 | } from "./src/handlers/json-handlers.js";
 53 | 
 54 | // parse command line
 55 | const args = process.argv.slice(2);
 56 | const readonlyFlag = args.includes("--readonly");
 57 | const noFollowSymlinks = args.includes("--no-follow-symlinks");
 58 | const fullAccessFlag = args.includes("--full-access");
 59 | const allowCreate = args.includes("--allow-create");
 60 | const allowEdit = args.includes("--allow-edit");
 61 | const allowMove = args.includes("--allow-move");
 62 | const allowDelete = args.includes("--allow-delete");
 63 | const allowRename = args.includes("--allow-rename");
 64 | const httpFlagIndex = args.indexOf("--http");
 65 | const useHttp = httpFlagIndex !== -1;
 66 | if (useHttp) args.splice(httpFlagIndex, 1);
 67 | let port = 8080;
 68 | const portIndex = args.indexOf("--port");
 69 | if (portIndex !== -1) {
 70 |   port = parseInt(args[portIndex + 1], 10);
 71 |   args.splice(portIndex, 2);
 72 | }
 73 | 
 74 | if (readonlyFlag) args.splice(args.indexOf("--readonly"), 1);
 75 | if (noFollowSymlinks) args.splice(args.indexOf("--no-follow-symlinks"), 1);
 76 | if (fullAccessFlag) args.splice(args.indexOf("--full-access"), 1);
 77 | if (allowCreate) args.splice(args.indexOf("--allow-create"), 1);
 78 | if (allowEdit) args.splice(args.indexOf("--allow-edit"), 1);
 79 | if (allowMove) args.splice(args.indexOf("--allow-move"), 1);
 80 | if (allowDelete) args.splice(args.indexOf("--allow-delete"), 1);
 81 | if (allowRename) args.splice(args.indexOf("--allow-rename"), 1);
 82 | 
 83 | const useCwdFlag = args.includes("--cwd");
 84 | if (useCwdFlag) {
 85 |   args.splice(args.indexOf("--cwd"), 1);
 86 | }
 87 | 
 88 | // If no explicit allowed directories, use cwd if --cwd was passed or no directory was passed at all
 89 | let allowedDirectories: string[];
 90 | if (args.length === 0 || useCwdFlag) {
 91 |   allowedDirectories = [normalizePath(process.cwd())];
 92 | } else {
 93 |   allowedDirectories = args.map((dir) =>
 94 |     normalizePath(path.resolve(expandHome(dir))),
 95 |   );
 96 | }
 97 | 
 98 | if (!useCwdFlag && args.length === 0) {
 99 |   console.warn(
100 |     "No allowed directory specified. Using current working directory as root.",
101 |   );
102 |   console.warn(
103 |     "Usage: mcp-server-filesystem [flags] <allowed-directory> [additional-directories...]\n       mcp-server-filesystem --cwd [flags]",
104 |   );
105 | }
106 | 
107 | // duplicate declaration removed; `allowedDirectories` already defined above
// Maps a directory's real (symlink-resolved) path back to the path the user
// supplied, so validation can treat both forms as allowed.
const symlinksMap = new Map<string, string>();

// Verify every allowed directory exists and is a directory, and pre-register
// symlink-resolved forms. Hard failures (missing path, not a directory)
// terminate the process.
await Promise.all(
  allowedDirectories.map(async (dir) => {
    try {
      const stats = await fs.stat(dir);
      if (!stats.isDirectory()) {
        console.error(`Error: ${dir} is not a directory`);
        process.exit(1);
      }
      try {
        const realPath = await fs.realpath(dir);
        if (realPath !== dir) {
          // The supplied path resolves elsewhere (symlink involved):
          // remember the mapping and also allow the resolved location.
          const normalizedDir = normalizePath(path.resolve(expandHome(dir)));
          const normalizedRealPath = normalizePath(realPath);
          symlinksMap.set(normalizedRealPath, normalizedDir);
          if (!allowedDirectories.includes(normalizedRealPath))
            allowedDirectories.push(normalizedRealPath);
          await validatePath(
            normalizedRealPath,
            allowedDirectories,
            symlinksMap,
            noFollowSymlinks,
          );
        }
        await validatePath(
          dir,
          allowedDirectories,
          symlinksMap,
          noFollowSymlinks,
        );
      } catch (error) {
        // Non-fatal: the directory exists but its real path could not be
        // resolved or validated; warn and continue.
        console.error(
          `Warning: Could not resolve real path for ${dir}:`,
          error,
        );
      }
    } catch (error) {
      console.error(`Error accessing directory ${dir}:`, error);
      process.exit(1);
    }
  }),
);
151 | 
// Effective permission set: --readonly overrides everything; --full-access
// enables every write operation; otherwise each --allow-* flag is honored
// individually.
const permissions = {
  create: !readonlyFlag && (fullAccessFlag || allowCreate),
  edit: !readonlyFlag && (fullAccessFlag || allowEdit),
  move: !readonlyFlag && (fullAccessFlag || allowMove),
  delete: !readonlyFlag && (fullAccessFlag || allowDelete),
  rename: !readonlyFlag && (fullAccessFlag || allowRename),
  fullAccess: !readonlyFlag && fullAccessFlag,
};

const server = new FastMCP({
  name: "secure-filesystem-server",
  version: "0.2.0",
});
165 | 
// Maps tool name -> handler closure. Each closure forwards the raw argument
// object plus the server's security context (allowed directories, symlink
// map, symlink policy); write operations additionally receive the
// permission set so the handler can enforce it.
const toolHandlers = {
  read_file: (a: unknown) =>
    handleReadFile(a, allowedDirectories, symlinksMap, noFollowSymlinks),
  read_multiple_files: (a: unknown) =>
    handleReadMultipleFiles(
      a,
      allowedDirectories,
      symlinksMap,
      noFollowSymlinks,
    ),
  create_file: (a: unknown) =>
    handleCreateFile(
      a,
      permissions,
      allowedDirectories,
      symlinksMap,
      noFollowSymlinks,
    ),
  modify_file: (a: unknown) =>
    handleModifyFile(
      a,
      permissions,
      allowedDirectories,
      symlinksMap,
      noFollowSymlinks,
    ),
  edit_file: (a: unknown) =>
    handleEditFile(
      a,
      permissions,
      allowedDirectories,
      symlinksMap,
      noFollowSymlinks,
    ),
  create_directory: (a: unknown) =>
    handleCreateDirectory(
      a,
      permissions,
      allowedDirectories,
      symlinksMap,
      noFollowSymlinks,
    ),
  list_directory: (a: unknown) =>
    handleListDirectory(a, allowedDirectories, symlinksMap, noFollowSymlinks),
  directory_tree: (a: unknown) =>
    handleDirectoryTree(a, allowedDirectories, symlinksMap, noFollowSymlinks),
  move_file: (a: unknown) =>
    handleMoveFile(
      a,
      permissions,
      allowedDirectories,
      symlinksMap,
      noFollowSymlinks,
    ),
  rename_file: (a: unknown) =>
    handleRenameFile(
      a,
      permissions,
      allowedDirectories,
      symlinksMap,
      noFollowSymlinks,
    ),
  delete_directory: (a: unknown) =>
    handleDeleteDirectory(
      a,
      permissions,
      allowedDirectories,
      symlinksMap,
      noFollowSymlinks,
    ),
  search_files: (a: unknown) =>
    handleSearchFiles(a, allowedDirectories, symlinksMap, noFollowSymlinks),
  find_files_by_extension: (a: unknown) =>
    handleFindFilesByExtension(
      a,
      allowedDirectories,
      symlinksMap,
      noFollowSymlinks,
    ),
  get_file_info: (a: unknown) =>
    handleGetFileInfo(a, allowedDirectories, symlinksMap, noFollowSymlinks),
  list_allowed_directories: (a: unknown) =>
    handleListAllowedDirectories(a, allowedDirectories),
  get_permissions: (a: unknown) =>
    handleGetPermissions(
      a,
      permissions,
      readonlyFlag,
      noFollowSymlinks,
      allowedDirectories,
    ),
  xml_query: (a: unknown) =>
    handleXmlQuery(a, allowedDirectories, symlinksMap, noFollowSymlinks),
  xml_structure: (a: unknown) =>
    handleXmlStructure(a, allowedDirectories, symlinksMap, noFollowSymlinks),
  xml_to_json: (a: unknown) =>
    handleXmlToJson(
      a,
      permissions,
      allowedDirectories,
      symlinksMap,
      noFollowSymlinks,
    ),
  xml_to_json_string: (a: unknown) =>
    handleXmlToJsonString(a, allowedDirectories, symlinksMap, noFollowSymlinks),
  delete_file: (a: unknown) =>
    handleDeleteFile(
      a,
      permissions,
      allowedDirectories,
      symlinksMap,
      noFollowSymlinks,
    ),
  json_query: (a: unknown) =>
    handleJsonQuery(a, allowedDirectories, symlinksMap, noFollowSymlinks),
  json_structure: (a: unknown) =>
    handleJsonStructure(a, allowedDirectories, symlinksMap, noFollowSymlinks),
  json_filter: (a: unknown) =>
    handleJsonFilter(a, allowedDirectories, symlinksMap, noFollowSymlinks),
  json_get_value: (a: unknown) =>
    handleJsonGetValue(a, allowedDirectories, symlinksMap, noFollowSymlinks),
  json_transform: (a: unknown) =>
    handleJsonTransform(a, allowedDirectories, symlinksMap, noFollowSymlinks),
  json_sample: (a: unknown) =>
    handleJsonSample(a, allowedDirectories, symlinksMap, noFollowSymlinks),
  json_validate: (a: unknown) =>
    handleJsonValidate(a, allowedDirectories, symlinksMap, noFollowSymlinks),
  json_search_kv: (a: unknown) =>
    handleJsonSearchKv(a, allowedDirectories, symlinksMap, noFollowSymlinks),
  regex_search_content: (a: unknown) =>
    handleRegexSearchContent(
      a,
      allowedDirectories,
      symlinksMap,
      noFollowSymlinks,
    ),
} as const;
303 | 
// Catalog of every tool the server can expose. Which entries are actually
// registered is decided by the permission filter below.
const allTools = [
  { name: "read_file", description: "Read file contents" },
  { name: "read_multiple_files", description: "Read multiple files" },
  { name: "list_directory", description: "List directory contents" },
  { name: "directory_tree", description: "Directory tree view" },
  { name: "search_files", description: "Search files by name" },
  { name: "find_files_by_extension", description: "Find files by extension" },
  { name: "get_file_info", description: "Get file metadata" },
  { name: "list_allowed_directories", description: "List allowed directories" },
  { name: "get_permissions", description: "Get server permissions" },
  { name: "create_file", description: "Create a new file" },
  { name: "modify_file", description: "Replace file contents" },
  { name: "edit_file", description: "Edit part of a file" },
  { name: "create_directory", description: "Create a directory" },
  { name: "move_file", description: "Move a file" },
  { name: "rename_file", description: "Rename a file" },
  { name: "delete_directory", description: "Delete a directory" },
  { name: "xml_query", description: "Query XML" },
  { name: "xml_structure", description: "Analyze XML structure" },
  { name: "xml_to_json", description: "Convert XML to JSON" },
  { name: "xml_to_json_string", description: "XML to JSON string" },
  { name: "delete_file", description: "Delete a file" },
  { name: "json_query", description: "Query JSON" },
  { name: "json_structure", description: "JSON structure" },
  { name: "json_filter", description: "Filter JSON" },
  { name: "json_get_value", description: "Get value from JSON" },
  { name: "json_transform", description: "Transform JSON" },
  { name: "json_sample", description: "Sample JSON data" },
  { name: "json_validate", description: "Validate JSON" },
  { name: "json_search_kv", description: "Search key/value in JSON" },
  {
    name: "regex_search_content",
    description: "Search file content with regex",
  },
];
339 | 
// Select the tools to register: read-only tools are always exposed; write
// tools are added per granted permission. With --full-access every tool is
// exposed unconditionally.
const tools = !permissions.fullAccess
  ? allTools.filter((t) => {
      // Read-only tools, always available regardless of permissions.
      if (
        [
          "read_file",
          "read_multiple_files",
          "list_directory",
          "directory_tree",
          "search_files",
          "find_files_by_extension",
          "get_file_info",
          "list_allowed_directories",
          "xml_to_json_string",
          "get_permissions",
          "xml_query",
          "xml_structure",
          "json_query",
          "json_filter",
          "json_get_value",
          "json_transform",
          "json_structure",
          "json_sample",
          "json_validate",
          "json_search_kv",
          "regex_search_content",
        ].includes(t.name)
      ) {
        return true;
      }
      // Write tools, gated by the corresponding permission flags.
      if (
        permissions.create &&
        ["create_file", "create_directory", "xml_to_json"].includes(t.name)
      )
        return true;
      if (permissions.edit && ["modify_file", "edit_file"].includes(t.name))
        return true;
      if (permissions.move && t.name === "move_file") return true;
      if (permissions.rename && t.name === "rename_file") return true;
      if (
        permissions.delete &&
        ["delete_file", "delete_directory"].includes(t.name)
      )
        return true;
      return false;
    })
  : allTools;
386 | 
// Register each permitted tool, converting its TypeBox schema to the Zod
// parameters FastMCP expects.
for (const tool of tools) {
  const execute = toolHandlers[tool.name as keyof typeof toolHandlers];
  const schema = (toolSchemas as Record<string, any>)[tool.name];
  server.addTool({
    name: tool.name,
    description: tool.description,
    parameters: toZodParameters(schema as any) as any,
    execute: async (a) => execute(a) as any,
  });
}
397 | 
/**
 * Start the server on the chosen transport (HTTP stream or stdio) and
 * report the effective configuration. All status output goes to stderr so
 * stdout stays clean for the stdio transport.
 */
async function runServer() {
  if (useHttp) {
    await server.start({ transportType: "httpStream", httpStream: { port } });
    console.error(
      `Secure MCP Filesystem Server running on HTTP stream port ${port}`,
    );
  } else {
    await server.start({ transportType: "stdio" });
    console.error("Secure MCP Filesystem Server running on stdio");
  }
  console.error("Allowed directories:", allowedDirectories);
  const permState = [] as string[];
  if (readonlyFlag) {
    console.error(
      "Server running in read-only mode (--readonly flag overrides all other permissions)",
    );
  } else if (permissions.fullAccess) {
    console.error(
      "Server running with full access (all operations enabled via --full-access)",
    );
  } else {
    // Collect the individually granted permissions for the status line.
    if (permissions.create) permState.push("create");
    if (permissions.edit) permState.push("edit");
    if (permissions.move) permState.push("move");
    if (permissions.rename) permState.push("rename");
    if (permissions.delete) permState.push("delete");
    if (permState.length === 0) {
      console.error(
        "Server running in default read-only mode (use --full-access or specific --allow-* flags to enable write operations)",
      );
    } else {
      console.error(
        `Server running with specific permissions enabled: ${permState.join(", ")}`,
      );
    }
  }
  if (noFollowSymlinks) {
    console.error("Server running with symlink following disabled");
  }
}

runServer().catch((error) => {
  console.error("Fatal error running server:", error);
  process.exit(1);
});
443 | 
```

--------------------------------------------------------------------------------
/src/handlers/json-handlers.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import fs from 'fs/promises';
  2 | import { JSONPath } from 'jsonpath-plus';
  3 | import {
  4 |   isPlainObject,
  5 |   pickBy,
  6 |   size,
  7 |   values,
  8 |   filter,
  9 |   map,
 10 |   get as getProp,
 11 |   isEqual,
 12 |   some,
 13 |   every,
 14 |   groupBy,
 15 |   orderBy,
 16 |   flattenDeep,
 17 |   pick,
 18 |   omit,
 19 |   isEmpty,
 20 |   sampleSize,
 21 |   take,
 22 |   transform
 23 | } from '../utils/data-utils.js';
 24 | import AjvModule, { ErrorObject } from 'ajv';
 25 | const Ajv = AjvModule.default || AjvModule;
 26 | import path from 'path';
 27 | import { validatePath } from '../utils/path-utils.js';
 28 | import { parseArgs } from '../utils/schema-utils.js';
 29 | import {
 30 |   JsonQueryArgsSchema,
 31 |   JsonFilterArgsSchema,
 32 |   JsonGetValueArgsSchema,
 33 |   JsonTransformArgsSchema,
 34 |   JsonStructureArgsSchema,
 35 |   JsonSampleArgsSchema,
 36 |   JsonValidateArgsSchema,
 37 |   JsonSearchKvArgsSchema,
 38 |   type JsonQueryArgs,
 39 |   type JsonFilterArgs,
 40 |   type JsonGetValueArgs,
 41 |   type JsonTransformArgs,
 42 |   type JsonStructureArgs,
 43 |   type JsonSampleArgs,
 44 |   type JsonValidateArgs,
 45 |   type JsonSearchKvArgs
 46 | } from '../schemas/json-operations.js';
 47 | 
 48 | /**
 49 |  * Read and parse a JSON file
 50 |  */
 51 | async function readJsonFile(filePath: string, maxBytesInput?: number): Promise<any> {
 52 |   const effectiveMaxBytes = maxBytesInput ?? (10 * 1024); // Default 10KB
 53 |   try {
 54 |     // Check file size before reading
 55 |     const stats = await fs.stat(filePath);
 56 |     if (stats.size > effectiveMaxBytes) {
 57 |       throw new Error(`File size (${stats.size} bytes) exceeds the maximum allowed size (${effectiveMaxBytes} bytes).`);
 58 |     }
 59 |     
 60 |     // Read file content up to the limit
 61 |     const content = await fs.readFile(filePath, {
 62 |       encoding: 'utf-8',
 63 |       // Note: fs.readFile doesn't have a 'length' option like createReadStream's 'end'.
 64 |       // We rely on the pre-check above. If the file is slightly larger but within limits
 65 |       // for parsing start, it might still work, but the size check prevents huge files.
 66 |     });
 67 |     // Attempt to parse only up to maxBytes (approximate)
 68 |     // This is imperfect as JSON parsing needs the full structure. The main protection is the size check.
 69 |     return JSON.parse(content.substring(0, effectiveMaxBytes));
 70 |   } catch (error) {
 71 |     if (error instanceof Error) {
 72 |       throw new Error(`Failed to read or parse JSON file: ${error.message}`);
 73 |     }
 74 |     throw error;
 75 |   }
 76 | }
 77 | 
 78 | /**
 79 |  * Handle JSONPath query operations
 80 |  */
 81 | export async function handleJsonQuery(
 82 |   args: unknown,
 83 |   allowedDirectories: string[],
 84 |   symlinksMap: Map<string, string>,
 85 |   noFollowSymlinks: boolean
 86 | ) {
 87 |   const parsed = parseArgs(JsonQueryArgsSchema, args, 'json_query');
 88 | 
 89 |   const validPath = await validatePath(parsed.path, allowedDirectories, symlinksMap, noFollowSymlinks);
 90 |   const jsonData = await readJsonFile(validPath, parsed.maxBytes);
 91 | 
 92 |   try {
 93 |     const result = JSONPath({
 94 |       path: parsed.query,
 95 |       json: jsonData,
 96 |       wrap: false // Don't wrap single results in an array
 97 |     });
 98 |     return {
 99 |       content: [{ 
100 |         type: "text", 
101 |         text: JSON.stringify(result, null, 2)
102 |       }],
103 |     };
104 |   } catch (error) {
105 |     if (error instanceof Error) {
106 |       throw new Error(`JSONPath query failed: ${error.message}`);
107 |     }
108 |     throw error;
109 |   }
110 | }
111 | 
/**
 * Handle JSON filtering operations.
 *
 * Filters the elements of an array inside a JSON file against a list of
 * field/operator/value conditions. The target array is either addressed
 * explicitly via `arrayPath` (a JSONPath expression) or auto-detected when
 * the document is itself an array, or an object with exactly one top-level
 * array property.
 *
 * @param args - Raw tool arguments; validated against JsonFilterArgsSchema.
 * @param allowedDirectories - Directories the server is allowed to access.
 * @param symlinksMap - Known symlink mappings consumed by validatePath.
 * @param noFollowSymlinks - When true, symlinks are not followed.
 * @returns MCP content payload containing the filtered array as pretty JSON.
 * @throws If the path is invalid, no suitable array can be located, or
 *   filtering fails; all errors are wrapped as "JSON filtering failed: ...".
 */
export async function handleJsonFilter(
  args: unknown,
  allowedDirectories: string[],
  symlinksMap: Map<string, string>,
  noFollowSymlinks: boolean
) {
  const parsed = parseArgs(JsonFilterArgsSchema, args, 'json_filter');

  const validPath = await validatePath(parsed.path, allowedDirectories, symlinksMap, noFollowSymlinks);
  const jsonData = await readJsonFile(validPath, parsed.maxBytes);

  try {
    let dataToFilter: any[] = [];
    
    // Check if arrayPath is provided
    if (parsed.arrayPath) {
      // Use JSONPath to locate the target array
      const targetArray = JSONPath({
        path: parsed.arrayPath,
        json: jsonData,
        wrap: false
      });

      if (!Array.isArray(targetArray)) {
        throw new Error(`Path "${parsed.arrayPath}" did not resolve to an array`);
      }
      
      dataToFilter = targetArray;
    } 
    // No arrayPath provided, use automatic detection for simple cases
    else {
      if (Array.isArray(jsonData)) {
        // Direct array case
        dataToFilter = jsonData;
      } else if (isPlainObject(jsonData)) {
        // Find all array properties at the top level
        const arrayProps = pickBy(jsonData, Array.isArray);

        if (size(arrayProps) === 1) {
          // If exactly one array property, use it automatically
          dataToFilter = values(arrayProps)[0] as any[];
        } else if (size(arrayProps) > 1) {
          // Multiple arrays found, can't automatically determine which to use
          throw new Error(
            'Multiple arrays found in the JSON data. ' +
            'Please provide the "arrayPath" parameter to specify which array to filter. ' +
            'Example: "$.items" or "$.data.resources"'
          );
        } else {
          // No arrays found at the top level
          throw new Error(
            'No arrays found in the JSON data. ' +
            'Please provide the "arrayPath" parameter to specify the path to the array to filter. ' +
            'Example: "$.items" or "$.data.resources"'
          );
        }
      } else {
        // Not an object or array
        throw new Error(
          'The JSON data is not an array or an object containing arrays. ' +
          'Please provide valid JSON data with arrays to filter.'
        );
      }
    }
    
    // If we still couldn't find an array to filter, throw a helpful error
    // NOTE(review): isEmpty() makes an *empty* target array land here too, so
    // filtering an empty array reports an error instead of returning [] —
    // confirm this is intended.
    if (!Array.isArray(dataToFilter) || isEmpty(dataToFilter)) {
      throw new Error(
        'Could not find a valid array to filter in the JSON data. ' +
        'Please make sure the file contains an array or specify the correct arrayPath parameter.'
      );
    }

    // Now filter the array using predicates
    const filtered = filter(dataToFilter, (item) => {
      // Evaluate each condition against this item; condition.field is passed
      // to getProp, so nested lookups follow whatever path syntax it supports.
      const results = map(parsed.conditions, condition => {
        const value = getProp(item, condition.field);
        
        switch (condition.operator) {
          case 'eq':
            return isEqual(value, condition.value);
          case 'neq':
            return !isEqual(value, condition.value);
          case 'gt':
            return value > condition.value;
          case 'gte':
            return value >= condition.value;
          case 'lt':
            return value < condition.value;
          case 'lte':
            return value <= condition.value;
          case 'contains':
            // Strings: substring test; arrays: deep-equality membership test.
            return typeof value === 'string'
              ? value.includes(String(condition.value))
              : Array.isArray(value) && some(value, v => isEqual(v, condition.value));
          case 'startsWith':
            return typeof value === 'string' && value.startsWith(String(condition.value));
          case 'endsWith':
            return typeof value === 'string' && value.endsWith(String(condition.value));
          case 'exists':
            return value !== undefined;
          case 'type':
            return typeof value === condition.value;
          default:
            // Unknown operators never match.
            return false;
        }
      });

      // 'all' requires every condition to pass; any other match mode accepts
      // an item when at least one condition passes.
      return parsed.match === 'all'
        ? every(results, Boolean)
        : some(results, Boolean);
    });

    return {
      content: [{ 
        type: "text", 
        text: JSON.stringify(filtered, null, 2)
      }],
    };
  } catch (error) {
    if (error instanceof Error) {
      throw new Error(`JSON filtering failed: ${error.message}`);
    }
    throw error;
  }
}
241 | 
242 | /**
243 |  * Handle getting a specific value from a JSON file
244 |  */
245 | export async function handleJsonGetValue(
246 |   args: unknown,
247 |   allowedDirectories: string[],
248 |   symlinksMap: Map<string, string>,
249 |   noFollowSymlinks: boolean
250 | ) {
251 |   const parsed = parseArgs(JsonGetValueArgsSchema, args, 'json_get_value');
252 | 
253 |   const validPath = await validatePath(parsed.path, allowedDirectories, symlinksMap, noFollowSymlinks);
254 |   const jsonData = await readJsonFile(validPath, parsed.maxBytes);
255 | 
256 |   try {
257 |     const value = getProp(jsonData, parsed.field);
258 |     if (value === undefined) {
259 |       throw new Error(`Field "${parsed.field}" not found in JSON data`);
260 |     }
261 | 
262 |     return {
263 |       content: [{ 
264 |         type: "text", 
265 |         text: JSON.stringify(value, null, 2)
266 |       }],
267 |     };
268 |   } catch (error) {
269 |     if (error instanceof Error) {
270 |       throw new Error(`Failed to get JSON value: ${error.message}`);
271 |     }
272 |     throw error;
273 |   }
274 | }
275 | 
/**
 * Handle JSON transformation operations.
 *
 * Applies the parsed `operations` list to the file's data *in sequence*, each
 * op consuming the previous op's output:
 *  - map:     array only; projects each element to `field` (via getProp).
 *  - groupBy: array only; groups elements into an object keyed by `field`.
 *  - sort:    array only; orders by `field`, direction `order` ('asc' default).
 *  - flatten: array only; deep-flattens nested arrays.
 *  - pick:    keeps only `fields` (per-element when data is an array).
 *  - omit:    drops `fields` (per-element when data is an array).
 *
 * @param args - Raw tool arguments; validated against JsonTransformArgsSchema.
 * @param allowedDirectories - Directories the server is allowed to access.
 * @param symlinksMap - Known symlink mappings consumed by validatePath.
 * @param noFollowSymlinks - When true, symlinks are not followed.
 * @returns MCP content payload with the transformed data as pretty JSON.
 * @throws If an op's preconditions fail; wrapped as "JSON transformation failed: ...".
 */
export async function handleJsonTransform(
  args: unknown,
  allowedDirectories: string[],
  symlinksMap: Map<string, string>,
  noFollowSymlinks: boolean
) {
  const parsed = parseArgs(JsonTransformArgsSchema, args, 'json_transform');

  const validPath = await validatePath(parsed.path, allowedDirectories, symlinksMap, noFollowSymlinks);
  let jsonData = await readJsonFile(validPath, parsed.maxBytes);

  try {
    // Apply operations in sequence
    for (const op of parsed.operations) {
      switch (op.type) {
        case 'map':
          if (!Array.isArray(jsonData)) {
            throw new Error('Data must be an array for map operation');
          }
          if (!op.field) {
            throw new Error('Field is required for map operation');
          }
          jsonData = jsonData.map(item => getProp(item, op.field!));
          break;

        case 'groupBy':
          if (!Array.isArray(jsonData)) {
            throw new Error('Data must be an array for groupBy operation');
          }
          if (!op.field) {
            throw new Error('Field is required for groupBy operation');
          }
          // Result becomes an object: { groupKey: element[] }.
          jsonData = groupBy(jsonData, op.field);
          break;

        case 'sort':
          if (!Array.isArray(jsonData)) {
            throw new Error('Data must be an array for sort operation');
          }
          if (!op.field) {
            throw new Error('Field is required for sort operation');
          }
          jsonData = orderBy(
            jsonData,
            op.field,
            [op.order || 'asc']
          );
          break;

        case 'flatten':
          if (!Array.isArray(jsonData)) {
            throw new Error('Data must be an array for flatten operation');
          }
          jsonData = flattenDeep(jsonData);
          break;

        case 'pick':
          if (!op.fields || !op.fields.length) {
            throw new Error('Fields array is required for pick operation');
          }
          // Arrays are handled per element; plain objects are picked directly.
          if (Array.isArray(jsonData)) {
            jsonData = jsonData.map(item => pick(item, op.fields!));
          } else {
            jsonData = pick(jsonData, op.fields);
          }
          break;

        case 'omit':
          if (!op.fields || !op.fields.length) {
            throw new Error('Fields array is required for omit operation');
          }
          // Arrays are handled per element; plain objects are omitted directly.
          if (Array.isArray(jsonData)) {
            jsonData = jsonData.map(item => omit(item, op.fields!));
          } else {
            jsonData = omit(jsonData, op.fields);
          }
          break;
      }
    }

    return {
      content: [{ 
        type: "text", 
        text: JSON.stringify(jsonData, null, 2)
      }],
    };
  } catch (error) {
    if (error instanceof Error) {
      throw new Error(`JSON transformation failed: ${error.message}`);
    }
    throw error;
  }
}
372 | 
/**
 * Get the structure of a JSON file with configurable depth and array type analysis.
 *
 * Produces a type skeleton instead of the data itself: each value is reported
 * as 'string' | 'number' | 'boolean' | 'object' | 'array<...>' | 'null' |
 * 'undefined'. Objects are expanded recursively up to `maxDepth` (default 2);
 * arrays are summarized as `array<elementType>`, where `detailedArrayTypes`
 * switches between inspecting every element (union like `array<string|number>`)
 * and inspecting only the first element.
 *
 * @param args - Raw tool arguments; validated against JsonStructureArgsSchema.
 * @param allowedDirectories - Directories the server is allowed to access.
 * @param symlinksMap - Known symlink mappings consumed by validatePath.
 * @param noFollowSymlinks - When true, symlinks are not followed.
 * @returns MCP content payload with the structure description as pretty JSON.
 */
export async function handleJsonStructure(
  args: unknown,
  allowedDirectories: string[],
  symlinksMap: Map<string, string>,
  noFollowSymlinks: boolean
) {
  const parsed = parseArgs(JsonStructureArgsSchema, args, 'json_structure');

  const validPath = await validatePath(parsed.path, allowedDirectories, symlinksMap, noFollowSymlinks);
  const jsonData = await readJsonFile(validPath, parsed.maxBytes);
  const { maxDepth, detailedArrayTypes = false } = parsed;
  const effectiveMaxDepth = maxDepth ?? 2; // Default depth 2

  try {
    // Define a type that includes our custom type strings
    type ValueType = 'string' | 'number' | 'boolean' | 'object' | 'array' | `array<${string}>` | 'null' | 'undefined';
    
    /**
     * Analyze the type of a value, including detailed array analysis if requested.
     * Closes over `detailedArrayTypes` and `effectiveMaxDepth` from the handler.
     */
    function analyzeType(value: any, currentDepth: number = 0): { type: ValueType; structure?: Record<string, any> } {
      // Handle null and undefined
      if (value === null) return { type: 'null' };
      if (value === undefined) return { type: 'undefined' };

      // Handle arrays
      if (Array.isArray(value)) {
        if (value.length === 0) return { type: 'array<empty>' as ValueType };
        
        if (detailedArrayTypes) {
          // Analyze all elements for mixed types
          const elementTypes = new Set<string>();
          value.forEach(item => {
            const itemType = analyzeType(item, currentDepth + 1);
            elementTypes.add(itemType.type);
          });
          
          // Set preserves insertion order, so the union reflects first occurrence.
          const typeString = Array.from(elementTypes).join('|');
          return { type: `array<${typeString}>` as ValueType };
        } else {
          // Just analyze first element
          const firstType = analyzeType(value[0], currentDepth + 1);
          return { type: `array<${firstType.type}>` as ValueType };
        }
      }

      // Handle objects
      if (isPlainObject(value)) {
        const type = 'object' as ValueType;
        // If we haven't reached depth limit and object isn't empty, analyze structure
        if (currentDepth < effectiveMaxDepth && !isEmpty(value)) { // Use effectiveMaxDepth
          const structure: Record<string, any> = {};
          for (const [key, val] of Object.entries(value)) {
            structure[key] = analyzeType(val, currentDepth + 1);
          }
          return { type, structure };
        }
        // Past the depth limit (or empty): report the bare 'object' tag only.
        return { type };
      }

      // Handle primitives
      if (typeof value === 'string') return { type: 'string' };
      if (typeof value === 'number') return { type: 'number' };
      if (typeof value === 'boolean') return { type: 'boolean' };

      // Fallback
      return { type: typeof value as ValueType };
    }

    // Analyze the root structure.
    // Array root: wrapped as { type: 'array', elements: <analysis of the array> }.
    // Object root: one analysis entry per top-level key.
    const structure = Array.isArray(jsonData)
      ? { type: 'array', elements: analyzeType(jsonData, 0) }
      : transform(
          jsonData,
          (result: Record<string, any>, value: unknown, key: string) => {
            result[key] = analyzeType(value, 0);
          },
          {} as Record<string, any>
        );

    return {
      content: [{ 
        type: "text", 
        text: JSON.stringify(structure, null, 2)
      }],
    };
  } catch (error) {
    if (error instanceof Error) {
      throw new Error(`JSON structure analysis failed: ${error.message}`);
    }
    throw error;
  }
}
469 | 
470 | /**
471 |  * Handle JSON array sampling operations
472 |  */
473 | export async function handleJsonSample(
474 |   args: unknown,
475 |   allowedDirectories: string[],
476 |   symlinksMap: Map<string, string>,
477 |   noFollowSymlinks: boolean
478 | ) {
479 |   const parsed = parseArgs(JsonSampleArgsSchema, args, 'json_sample');
480 | 
481 |   const validPath = await validatePath(parsed.path, allowedDirectories, symlinksMap, noFollowSymlinks);
482 |   const jsonData = await readJsonFile(validPath, parsed.maxBytes);
483 | 
484 |   try {
485 |     // Use JSONPath to locate the target array
486 |     const targetArray = JSONPath({
487 |       path: parsed.arrayPath,
488 |       json: jsonData,
489 |       wrap: false
490 |     });
491 | 
492 |     if (!Array.isArray(targetArray)) {
493 |       throw new Error(`Path "${parsed.arrayPath}" did not resolve to an array`);
494 |     }
495 | 
496 |     if (targetArray.length === 0) {
497 |       return {
498 |         content: [{
499 |           type: "text",
500 |           text: JSON.stringify([], null, 2)
501 |         }],
502 |       };
503 |     }
504 | 
505 |     let sampledData: any[];
506 |     if (parsed.method === 'random') {
507 |       sampledData = sampleSize(targetArray, Math.min(parsed.count, targetArray.length));
508 |     } else {
509 |       sampledData = take(targetArray, parsed.count);
510 |     }
511 | 
512 |     return {
513 |       content: [{
514 |         type: "text",
515 |         text: JSON.stringify(sampledData, null, 2)
516 |       }],
517 |     };
518 |   } catch (error) {
519 |     if (error instanceof Error) {
520 |       throw new Error(`JSON sampling failed: ${error.message}`);
521 |     }
522 |     throw error;
523 |   }
524 | }
525 | 
/**
 * Handle JSON Schema validation operations.
 *
 * Loads a data file and a schema file (both path-validated and size-capped),
 * compiles the schema with Ajv, and returns { isValid, errors } where
 * `errors` is null on success or a simplified list of Ajv error objects.
 *
 * @param args - Raw tool arguments; validated against JsonValidateArgsSchema.
 * @param allowedDirectories - Directories the server is allowed to access.
 * @param symlinksMap - Known symlink mappings consumed by validatePath.
 * @param noFollowSymlinks - When true, symlinks are not followed.
 * @returns MCP content payload with the validation result as pretty JSON.
 * @throws If either file is unreadable or the schema itself fails to compile;
 *   compile failures surface as
 *   "JSON validation failed: Schema validation failed: ..." (double wrap from
 *   the nested try/catch below).
 */
export async function handleJsonValidate(
  args: unknown,
  allowedDirectories: string[],
  symlinksMap: Map<string, string>,
  noFollowSymlinks: boolean
) {
  const parsed = parseArgs(JsonValidateArgsSchema, args, 'json_validate');

  const validPath = await validatePath(parsed.path, allowedDirectories, symlinksMap, noFollowSymlinks);
  const validSchemaPath = await validatePath(parsed.schemaPath, allowedDirectories, symlinksMap, noFollowSymlinks);

  try {
    // Read both the data and schema files
    // NOTE(review): the schema read uses readJsonFile's default 10KB cap,
    // not parsed.maxBytes — confirm large schemas are not expected.
    const [jsonData, schemaData] = await Promise.all([
      readJsonFile(validPath, parsed.maxBytes),
      readJsonFile(validSchemaPath)
    ]);

    // Configure Ajv instance
    const ajv = new Ajv({
      allErrors: parsed.allErrors,
      strict: parsed.strict,
      validateSchema: true, // Validate the schema itself
      verbose: true // Include more detailed error information
    });

    try {
      // Compile and validate the schema itself first
      const validateSchema = ajv.compile(schemaData);
      
      // Validate the data
      const isValid = validateSchema(jsonData);

      // Prepare the validation result: keep only the fields callers need
      // from Ajv's verbose ErrorObject entries.
      const result = {
        isValid,
        errors: isValid ? null : (validateSchema.errors as ErrorObject[])?.map(error => ({
          path: error.instancePath,
          keyword: error.keyword,
          message: error.message,
          params: error.params,
          schemaPath: error.schemaPath
        }))
      };

      return {
        content: [{
          type: "text",
          text: JSON.stringify(result, null, 2)
        }],
      };
    } catch (validationError) {
      // Handle schema compilation errors
      if (validationError instanceof Error) {
        throw new Error(`Schema validation failed: ${validationError.message}`);
      }
      throw validationError;
    }
  } catch (error) {
    if (error instanceof Error) {
      throw new Error(`JSON validation failed: ${error.message}`);
    }
    throw error;
  }
}
594 | 
/**
 * Handle searching for JSON files containing specific key/value pairs.
 *
 * Walks a directory tree (depth-limited, default 2 levels), reads every
 * `.json` file found, and reports which files contain the target `key`
 * (optionally with a matching `value`). Matches are returned as dot-joined
 * paths into each document (array indices appear as numeric segments).
 * Files that cannot be read or parsed are silently skipped.
 *
 * @param args - Raw tool arguments; validated against JsonSearchKvArgsSchema.
 * @param allowedDirectories - Directories the server is allowed to access.
 * @param symlinksMap - Known symlink mappings consumed by validatePath.
 * @param noFollowSymlinks - When true, symlinks are not followed.
 * @returns MCP content payload: { totalFiles, matchingFiles, results }.
 */
export async function handleJsonSearchKv(
  args: unknown,
  allowedDirectories: string[],
  symlinksMap: Map<string, string>,
  noFollowSymlinks: boolean
) {
  const parsed = parseArgs(JsonSearchKvArgsSchema, args, 'json_search_kv');

  const validDirPath = await validatePath(parsed.directoryPath, allowedDirectories, symlinksMap, noFollowSymlinks);
  const { key, value, recursive = true, matchType = 'exact', maxBytes, maxResults = 10, maxDepth } = parsed;
  const effectiveMaxDepth = maxDepth ?? 2; // Default depth 2

  /**
   * Check if a value matches the search criteria.
   * No `value` given means any value matches; string/string pairs honor
   * matchType (contains/startsWith/endsWith/exact); everything else falls
   * back to deep equality.
   */
  function isValueMatch(foundValue: any): boolean {
    if (value === undefined) return true;
    
    if (typeof foundValue === 'string' && typeof value === 'string') {
      switch (matchType) {
        case 'contains':
          return foundValue.includes(value);
        case 'startsWith':
          return foundValue.startsWith(value);
        case 'endsWith':
          return foundValue.endsWith(value);
        default:
          return foundValue === value;
      }
    }
    
    return isEqual(foundValue, value);
  }

  /**
   * Search for key/value pairs in a JSON object.
   * Recurses through nested objects and arrays (unbounded by maxDepth,
   * which only limits the *directory* walk) and returns dot-joined paths
   * for every matching key.
   */
  function searchInObject(obj: any, currentPath: string[] = []): string[] {
    const matches: string[] = [];

    if (isPlainObject(obj)) {
      for (const [k, v] of Object.entries(obj)) {
        const newPath = [...currentPath, k];
        
        // Check if this key matches
        if (k === key && isValueMatch(v)) {
          matches.push(newPath.join('.'));
        }
        
        // Recursively search in nested objects and arrays
        if (isPlainObject(v) || Array.isArray(v)) {
          matches.push(...searchInObject(v, newPath));
        }
      }
    } else if (Array.isArray(obj)) {
      obj.forEach((item, index) => {
        const newPath = [...currentPath, index.toString()];
        matches.push(...searchInObject(item, newPath));
      });
    }

    return matches;
  }

  /**
   * Process a single JSON file.
   * Returns the file's match list, or null when there are no matches or the
   * file is unreadable / not valid JSON (errors are deliberately swallowed
   * so one bad file doesn't abort the whole search).
   */
  async function processFile(filePath: string): Promise<{ file: string; matches: string[] } | null> {
    try {
      // Pass maxBytes from parsed args to readJsonFile
      // Use the maxBytes variable destructured earlier
      const jsonData = await readJsonFile(filePath, maxBytes);
      const matches = searchInObject(jsonData);
      return matches.length > 0 ? { file: filePath, matches } : null;
    } catch (error) {
      // Skip files that can't be read or aren't valid JSON
      return null;
    }
  }

  /**
   * Recursively get all JSON files in directory.
   * Descends into subdirectories only when `recursive` is true, stops at
   * effectiveMaxDepth levels, and re-validates every path before use.
   */
  async function getJsonFiles(dir: string, currentDepth: number): Promise<string[]> {
    // Check depth limit
    if (currentDepth >= effectiveMaxDepth) {
      return [];
    }
    const entries = await fs.readdir(dir, { withFileTypes: true });
    const files: string[] = [];

    for (const entry of entries) {
      const fullPath = path.join(dir, entry.name);
      
      if (entry.isDirectory() && recursive) {
        const validSubPath = await validatePath(fullPath, allowedDirectories, symlinksMap, noFollowSymlinks);
        files.push(...await getJsonFiles(validSubPath, currentDepth + 1));
      } else if (entry.isFile() && entry.name.endsWith('.json')) {
        const validFilePath = await validatePath(fullPath, allowedDirectories, symlinksMap, noFollowSymlinks);
        files.push(validFilePath);
      }
    }

    return files;
  }

  try {
    // Get all JSON files in the directory
    const jsonFiles = await getJsonFiles(validDirPath, 0); // Start at depth 0
    
    // Process files and collect results, stopping once maxResults matching
    // files have been found (remaining files are not even opened).
    const results = [];
    for (const file of jsonFiles) {
      if (results.length >= maxResults) break;
      
      const result = await processFile(file);
      if (result) {
        results.push(result);
      }
    }

    return {
      content: [{
        type: "text",
        text: JSON.stringify({
          totalFiles: jsonFiles.length,
          matchingFiles: results.length,
          results
        }, null, 2)
      }],
    };
  } catch (error) {
    if (error instanceof Error) {
      throw new Error(`JSON key/value search failed: ${error.message}`);
    }
    throw error;
  }
} 
```
Page 2/2FirstPrevNextLast