# MCP-Backup-Server — Packed Repository Snapshot

_Generated repository dump: 13/13 files, ~28,273 tokens._
# Directory Structure

```
├── .gitignore
├── CODE_OF_CONDUCT.md
├── Dockerfile
├── LICENSE
├── package.json
├── README.md
├── smithery.yaml
├── src
│   ├── index.ts
│   ├── toolDescriptions.ts
│   ├── types.ts
│   └── utils.ts
├── test scripts
│   └── test_client.js
├── test_list_all_backups.js
└── tsconfig.json
```

# Files

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------

```
 1 | # Dependencies
 2 | node_modules/
 3 | npm-debug.log
 4 | yarn-debug.log
 5 | yarn-error.log
 6 | package-lock.json
 7 | yarn.lock
 8 | 
 9 | # Build output
10 | dist/
11 | build/
12 | *.tsbuildinfo
13 | 
14 | # Environment variables
15 | .env
16 | .env.local
17 | .env.development.local
18 | .env.test.local
19 | .env.production.local
20 | 
21 | # IDE and editor files
22 | .idea/
23 | .vscode/
24 | *.swp
25 | *.swo
26 | .DS_Store
27 | Thumbs.db
28 | 
29 | # Backup files
30 | .code_backups/
31 | .code_emergency_backups/
32 | 
33 | # Logs
34 | logs/
35 | *.log
36 | npm-debug.log*
37 | yarn-debug.log*
38 | yarn-error.log*
39 | 
40 | # Test coverage
41 | coverage/
42 | .nyc_output/
43 | 
44 | # Temporary files
45 | tmp/
46 | temp/
47 | 
```

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------

```markdown
  1 | [![MseeP.ai Security Assessment Badge](https://mseep.net/pr/hexitex-mcp-backup-server-badge.png)](https://mseep.ai/app/hexitex-mcp-backup-server)
  2 | 
  3 | # MCP Backup Server
  4 | [![smithery badge](https://smithery.ai/badge/@hexitex/MCP-Backup-Server)](https://smithery.ai/server/@hexitex/MCP-Backup-Server)
  5 | 
  6 | A specialized MCP server that provides backup and restoration capabilities for AI agents and code editing tools. Tested in both Cursor and Windsurf editors.
  7 | 
  8 | Repository: [https://github.com/hexitex/MCP-Backup-Server](https://github.com/hexitex/MCP-Backup-Server)
  9 | 
 10 | ## Why Use This (Not Git)
 11 | 
 12 | This system serves a different purpose than Git:
 13 | 
 14 | **Pros:**
 15 | - Creates instant, targeted backups with agent context
 16 | - Simpler than Git for single-operation safety
 17 | - Preserves thought process and intent in backups
 18 | - No commit messages or branching required
 19 | - Better for AI agents making critical changes
 20 | - Works without repository initialization
 21 | - Faster for emergency "save points" during edits
 22 | 
 23 | **Cons:**
 24 | - Not for long-term version tracking 
 25 | - Limited collaboration features
 26 | - No merging or conflict resolution
 27 | - No distributed backup capabilities
 28 | - Not a replacement for proper version control
 29 | - Stores complete file copies rather than diffs
 30 | 
 31 | **When to use:** Before risky edits, folder restructuring, or when you need quick safety backups with context.
 32 | 
 33 | **When to use Git instead:** For proper version history, collaboration, and project management.
 34 | 
 35 | ## Features
 36 | - Preserves agent context and reasoning
 37 | - Creates targeted, minimal backups
 38 | - Supports file and folder operations
 39 | - Maintains version history
 40 | - Provides restore safety
 41 | - Uses pattern filtering
 42 | - Tracks operations
 43 | - Allows cancellation
 44 | 
 45 | ## Setup
 46 | 
 47 | ### Installing via Smithery
 48 | 
 49 | To install Backup Server for Claude Desktop automatically via [Smithery](https://smithery.ai/server/@hexitex/MCP-Backup-Server):
 50 | 
 51 | ```bash
 52 | npx -y @smithery/cli install @hexitex/MCP-Backup-Server --client claude
 53 | ```
 54 | 
 55 | ### Installing Manually
 56 | ```bash
 57 | # Install dependencies
 58 | npm install
 59 | 
 60 | # Build TypeScript files
 61 | npm run build
 62 | 
 63 | # Start the backup server
 64 | npm start
 65 | ```
 66 | 
 67 | ## Config
 68 | 
 69 | Env:
 70 | - `BACKUP_DIR`: Backup directory (./.code_backups)
 71 | - `EMERGENCY_BACKUP_DIR`: Emergency backups (./.code_emergency_backups)
 72 | - `MAX_VERSIONS`: Version limit (10)
 73 | 
 74 | Configure in editor:
 75 | 
 76 | Windsurf MCP config:
 77 | ```json
 78 | {
 79 |   "mcpServers": {
 80 |     "backup": {
 81 |       "command": "node",
 82 |       "args": ["./dist/index.js"],
 83 |       "env": {
 84 |         "BACKUP_DIR": "./.code_backups",
 85 |         "EMERGENCY_BACKUP_DIR": "./.code_emergency_backups",
 86 |         "MAX_VERSIONS": "20"
 87 |       }
 88 |     }
 89 |   }
 90 | }
 91 | ```
 92 | 
 93 | Cursor: Create `.cursor/mcp.json` with similar config.
 94 | 
 95 | ## Tools
 96 | 
 97 | ### File Operations
 98 | - `backup_create`: Create backup with context
 99 | - `backup_list`: List available backups
100 | - `backup_restore`: Restore with safety backup
101 | 
102 | ### Folder Operations  
103 | - `backup_folder_create`: Backup with pattern filtering
104 | - `backup_folder_list`: List folder backups
105 | - `backup_folder_restore`: Restore folder structure
106 | 
107 | ### Management
108 | - `backup_list_all`: List all backups
109 | - `mcp_cancel`: Cancel operations
110 | 
111 | ## When to Use Backups
112 | 
113 | Only create backups when truly needed:
114 | 
115 | 1. **Before Refactoring**: When changing important code
116 | 2. **Before Removing Folders**: When reorganizing project structure
117 | 3. **Multiple Related Changes**: When updating several connected files
118 | 4. **Resuming Major Work**: When continuing significant changes
119 | 5. **Before Restores**: Create safety backup before restoring
120 | 
121 | Keep backups minimal and purposeful. Document why each backup is needed.
122 | 
123 | ## Rules for Copy-Paste
124 | 
125 | ```
126 | Always try to use the backup MCP server for operations that require a backup, listing backups and restoring backups.
127 | Only backup before critical code changes, folder removal, changes to multiple related files, resuming major work, or restoring files.
128 | Keep backups minimal and focused only on files being changed.
129 | Always provide clear context for why a backup is being created.
130 | Use pattern filters to exclude irrelevant files from folder backups.
131 | Use relative file paths when creating backups.
132 | Create emergency backups before restore operations.
133 | Clean up old backups to maintain system efficiency.
134 | Backup tools: backup_create, backup_list, backup_restore, backup_folder_create, backup_folder_list, backup_folder_restore, backup_list_all, mcp_cancel.
135 | ```
136 | 
137 | ## For Human Users
138 | 
139 | Use simple commands like these; early in a session you may need to explicitly mention the MCP tool:
140 | 
141 | ```
142 | # Back up an important file
143 | "Back up my core file before refactoring"
144 | 
145 | # Back up a folder before changes
146 | "Create backup of the API folder before restructuring"
147 | 
148 | # Find previous backups
149 | "Show me my recent backups"
150 | 
151 | # Restore a previous version
152 | "Restore my core file from this morning"
153 | ```
154 | 
155 | ## Agent Examples
156 | 
157 | ### Quick Backups
158 | ```json
159 | // Before project changes
160 | {
161 |   "name": "mcp0_backup_folder_create",
162 |   "parameters": {
163 |     "folder_path": "./src",
164 |     "include_pattern": "*.{js,ts}",
165 |     "exclude_pattern": "{node_modules,dist,test}/**",
166 |     "agent_context": "Start auth changes"
167 |   }
168 | }
169 | 
170 | // Before core fix
171 | {
172 |   "name": "mcp0_backup_create",
173 |   "parameters": {
174 |     "file_path": "./src/core.js",
175 |     "agent_context": "Fix validation"
176 |   }
177 | }
178 | ```
179 | 
180 | ### Resume Session
181 | ```json
182 | // View recent work
183 | {
184 |   "name": "mcp0_backup_list_all",
185 |   "parameters": {
186 |     "include_pattern": "src/**/*.js"
187 |   }
188 | }
189 | 
190 | // Get last version
191 | {
192 |   "name": "mcp0_backup_restore",
193 |   "parameters": {
194 |     "file_path": "./src/core.js",
195 |     "timestamp": "20250310-055950-000",
196 |     "create_emergency_backup": true
197 |   }
198 | }
199 | ```
200 | 
201 | ### Core Changes
202 | ```json
203 | // Critical update
204 | {
205 |   "name": "mcp0_backup_create",
206 |   "parameters": {
207 |     "file_path": "./src/core.js",
208 |     "agent_context": "Add validation"
209 |   }
210 | }
211 | 
212 | // Module update
213 | {
214 |   "name": "mcp0_backup_folder_create",
215 |   "parameters": {
216 |     "folder_path": "./src/api",
217 |     "include_pattern": "*.js",
218 |     "exclude_pattern": "test/**",
219 |     "agent_context": "Refactor modules"
220 |   }
221 | }
222 | ```
223 | 
224 | ### Restore Points
225 | ```json
226 | // Check versions
227 | {
228 |   "name": "mcp0_backup_list",
229 |   "parameters": {
230 |     "file_path": "./src/core.js"
231 |   }
232 | }
233 | 
234 | {
235 |   "name": "mcp0_backup_folder_list",
236 |   "parameters": {
237 |     "folder_path": "./src/api"
238 |   }
239 | }
240 | 
241 | // File restore
242 | {
243 |   "name": "mcp0_backup_restore",
244 |   "parameters": {
245 |     "file_path": "./src/core.js",
246 |     "timestamp": "20250310-055950-000",
247 |     "create_emergency_backup": true
248 |   }
249 | }
250 | 
251 | // Folder restore
252 | {
253 |   "name": "mcp0_backup_folder_restore",
254 |   "parameters": {
255 |     "folder_path": "./src/api",
256 |     "timestamp": "20250310-055950-000",
257 |     "create_emergency_backup": true
258 |   }
259 | }
260 | ```
261 | 
262 | ### Manage
263 | ```json
264 | // List recent
265 | {
266 |   "name": "mcp0_backup_list_all",
267 |   "parameters": {
268 |     "include_pattern": "src/**/*.js"
269 |   }
270 | }
271 | 
272 | // Stop backup
273 | {
274 |   "name": "mcp0_mcp_cancel",
275 |   "parameters": {
276 |     "operationId": "backup_1234"
277 |   }
278 | }
279 | ```
280 | 
281 | ## License
282 | MIT
283 | 
```

--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------

```markdown
  1 | # Contributor Covenant Code of Conduct
  2 | 
  3 | ## Our Pledge
  4 | 
  5 | We as members, contributors, and leaders pledge to make participation in our
  6 | community a harassment-free experience for everyone, regardless of age, body
  7 | size, visible or invisible disability, ethnicity, sex characteristics, gender
  8 | identity and expression, level of experience, education, socio-economic status,
  9 | nationality, personal appearance, race, religion, or sexual identity
 10 | and orientation.
 11 | 
 12 | We pledge to act and interact in ways that contribute to an open, welcoming,
 13 | diverse, inclusive, and healthy community.
 14 | 
 15 | ## Our Standards
 16 | 
 17 | Examples of behavior that contributes to a positive environment for our
 18 | community include:
 19 | 
 20 | * Demonstrating empathy and kindness toward other people
 21 | * Being respectful of differing opinions, viewpoints, and experiences
 22 | * Giving and gracefully accepting constructive feedback
 23 | * Accepting responsibility and apologizing to those affected by our mistakes,
 24 |   and learning from the experience
 25 | * Focusing on what is best not just for us as individuals, but for the
 26 |   overall community
 27 | 
 28 | Examples of unacceptable behavior include:
 29 | 
 30 | * The use of sexualized language or imagery, and sexual attention or
 31 |   advances of any kind
 32 | * Trolling, insulting or derogatory comments, and personal or political attacks
 33 | * Public or private harassment
 34 | * Publishing others' private information, such as a physical or email
 35 |   address, without their explicit permission
 36 | * Other conduct which could reasonably be considered inappropriate in a
 37 |   professional setting
 38 | 
 39 | ## Enforcement Responsibilities
 40 | 
 41 | Community leaders are responsible for clarifying and enforcing our standards of
 42 | acceptable behavior and will take appropriate and fair corrective action in
 43 | response to any behavior that they deem inappropriate, threatening, offensive,
 44 | or harmful.
 45 | 
 46 | Community leaders have the right and responsibility to remove, edit, or reject
 47 | comments, commits, code, wiki edits, issues, and other contributions that are
 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation
 49 | decisions when appropriate.
 50 | 
 51 | ## Scope
 52 | 
 53 | This Code of Conduct applies within all community spaces, and also applies when
 54 | an individual is officially representing the community in public spaces.
 55 | Examples of representing our community include using an official e-mail address,
 56 | posting via an official social media account, or acting as an appointed
 57 | representative at an online or offline event.
 58 | 
 59 | ## Enforcement
 60 | 
 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
 62 | reported to the community leaders responsible for enforcement via the
 63 | project's GitHub issue tracker.
 64 | All complaints will be reviewed and investigated promptly and fairly.
 65 | 
 66 | All community leaders are obligated to respect the privacy and security of the
 67 | reporter of any incident.
 68 | 
 69 | ## Enforcement Guidelines
 70 | 
 71 | Community leaders will follow these Community Impact Guidelines in determining
 72 | the consequences for any action they deem in violation of this Code of Conduct:
 73 | 
 74 | ### 1. Correction
 75 | 
 76 | **Community Impact**: Use of inappropriate language or other behavior deemed
 77 | unprofessional or unwelcome in the community.
 78 | 
 79 | **Consequence**: A private, written warning from community leaders, providing
 80 | clarity around the nature of the violation and an explanation of why the
 81 | behavior was inappropriate. A public apology may be requested.
 82 | 
 83 | ### 2. Warning
 84 | 
 85 | **Community Impact**: A violation through a single incident or series
 86 | of actions.
 87 | 
 88 | **Consequence**: A warning with consequences for continued behavior. No
 89 | interaction with the people involved, including unsolicited interaction with
 90 | those enforcing the Code of Conduct, for a specified period of time. This
 91 | includes avoiding interactions in community spaces as well as external channels
 92 | like social media. Violating these terms may lead to a temporary or
 93 | permanent ban.
 94 | 
 95 | ### 3. Temporary Ban
 96 | 
 97 | **Community Impact**: A serious violation of community standards, including
 98 | sustained inappropriate behavior.
 99 | 
100 | **Consequence**: A temporary ban from any sort of interaction or public
101 | communication with the community for a specified period of time. No public or
102 | private interaction with the people involved, including unsolicited interaction
103 | with those enforcing the Code of Conduct, is allowed during this period.
104 | Violating these terms may lead to a permanent ban.
105 | 
106 | ### 4. Permanent Ban
107 | 
108 | **Community Impact**: Demonstrating a pattern of violation of community
109 | standards, including sustained inappropriate behavior, harassment of an
110 | individual, or aggression toward or disparagement of classes of individuals.
111 | 
112 | **Consequence**: A permanent ban from any sort of public interaction within
113 | the community.
114 | 
115 | ## Attribution
116 | 
117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118 | version 2.0, available at
119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
120 | 
121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct
122 | enforcement ladder](https://github.com/mozilla/diversity).
123 | 
124 | [homepage]: https://www.contributor-covenant.org
125 | 
126 | For answers to common questions about this code of conduct, see the FAQ at
127 | https://www.contributor-covenant.org/faq. Translations are available at
128 | https://www.contributor-covenant.org/translations.
129 | 
```

--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------

```json
 1 | {
 2 |   "compilerOptions": {
 3 |     "target": "ES2020",
 4 |     "module": "NodeNext",
 5 |     "moduleResolution": "NodeNext",
 6 |     "declaration": true,
 7 |     "outDir": "./dist",
 8 |     "strict": true,
 9 |     "esModuleInterop": true,
10 |     "skipLibCheck": true,
11 |     "forceConsistentCasingInFileNames": true
12 |   },
13 |   "include": ["src/**/*"],
14 |   "exclude": ["node_modules", "**/*.test.ts"]
15 | }
16 | 
```

--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------

```dockerfile
 1 | # Generated by https://smithery.ai. See: https://smithery.ai/docs/config#dockerfile
 2 | # syntax=docker/dockerfile:1
 3 | 
 4 | # Builder stage: install dependencies and build TypeScript
 5 | FROM node:lts-alpine AS builder
 6 | WORKDIR /app
 7 | 
 8 | # Install dependencies and build
 9 | COPY package.json tsconfig.json ./
10 | COPY src ./src
11 | RUN npm install --ignore-scripts && npm run build
12 | 
13 | # Final stage: runtime image
14 | FROM node:lts-alpine AS runner
15 | WORKDIR /app
16 | 
17 | # Copy built artifacts and dependencies
18 | COPY --from=builder /app/dist ./dist
19 | COPY --from=builder /app/node_modules ./node_modules
20 | 
21 | # Default command to start the MCP server
22 | CMD ["node", "dist/index.js"]
23 | 
```

--------------------------------------------------------------------------------
/src/types.ts:
--------------------------------------------------------------------------------

```typescript
 1 | // Define interfaces for backup operations
 2 | export interface BackupMetadata {
 3 |   original_path: string;
 4 |   original_filename: string;
 5 |   timestamp: string;
 6 |   created_at: string;
 7 |   backup_path: string;
 8 |   relative_path: string;
 9 |   agent_context?: string; // Optional field for agent conversational context
10 | }
11 | 
12 | export interface BackupFolderMetadata {
13 |   original_path: string;
14 |   original_foldername: string;
15 |   timestamp: string;
16 |   backup_path: string;
17 |   include_pattern: string | null;
18 |   exclude_pattern: string | null;
19 |   agent_context?: string; // Optional field for agent conversational context
20 | }
21 | 
22 | export interface BackupResult {
23 |   success?: boolean;
24 |   timestamp?: string;
25 |   original_path?: string;
26 |   original_filename?: string;
27 |   original_foldername?: string;
28 |   backup_path?: string;
29 |   operation_id?: string;
30 |   error?: string;
31 | }
32 | 
33 | export interface Operation {
34 |   id: string;
35 |   type: string;
36 |   progress: number;
37 |   cancelled: boolean;
38 |   status: string;
39 | }
40 | 
```

--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------

```json
 1 | {
 2 |   "name": "@modelcontextprotocol/server-backup",
 3 |   "version": "1.0.0",
 4 |   "description": "MCP server for file backup and restoration",
 5 |   "license": "MIT",
 6 |   "type": "module",
 7 |   "bin": {
 8 |     "@modelcontextprotocol/server-backup": "dist/index.js",
 9 |     "mcp-server-backup": "dist/index.js"
10 |   },
11 |   "main": "dist/index.js",
12 |   "files": [
13 |     "dist"
14 |   ],
15 |   "engines": {
16 |     "node": ">=16"
17 |   },
18 |   "scripts": {
19 |     "build": "tsc && shx chmod +x dist/index.js",
20 |     "prepare": "npm run build",
21 |     "start": "node dist/index.js",
22 |     "watch": "tsc --watch",
23 |     "test": "node \"test scripts/test_client.js\""
24 |   },
25 |   "keywords": [
26 |     "mcp",
27 |     "backup",
28 |     "modelcontextprotocol"
29 |   ],
30 |   "author": "Rob MCGlade",
31 |   "dependencies": {
32 |     "@modelcontextprotocol/sdk": "0.5.0",
33 |     "@types/minimatch": "^5.1.2",
34 |     "minimatch": "^10.0.1",
35 |     "zod-to-json-schema": "^3.24.3"
36 |   },
37 |   "devDependencies": {
38 |     "@types/node": "^22",
39 |     "shx": "^0.3.4",
40 |     "typescript": "^5.3.3"
41 |   }
42 | }
43 | 
```

--------------------------------------------------------------------------------
/smithery.yaml:
--------------------------------------------------------------------------------

```yaml
 1 | # Smithery configuration file: https://smithery.ai/docs/config#smitheryyaml
 2 | 
 3 | startCommand:
 4 |   type: stdio
 5 |   configSchema:
 6 |     # JSON Schema defining the configuration options for the MCP.
 7 |     type: object
 8 |     properties:
 9 |       backupDir:
10 |         type: string
11 |         default: ./.code_backups
12 |         description: Directory to store regular backups
13 |       emergencyBackupDir:
14 |         type: string
15 |         default: ./.code_emergency_backups
16 |         description: Directory to store emergency backups
17 |       maxVersions:
18 |         type: number
19 |         default: 10
20 |         description: Maximum number of backup versions to keep per file/folder
21 |   commandFunction:
22 |     # A JS function that produces the CLI command based on the given config to start the MCP on stdio.
23 |     |-
24 |     (config) => ({ command: 'node', args: ['dist/index.js'], env: { BACKUP_DIR: config.backupDir, EMERGENCY_BACKUP_DIR: config.emergencyBackupDir, MAX_VERSIONS: String(config.maxVersions) } })
25 |   exampleConfig:
26 |     backupDir: ./.code_backups
27 |     emergencyBackupDir: ./.code_emergency_backups
28 |     maxVersions: 20
29 | 
```

--------------------------------------------------------------------------------
/test_list_all_backups.js:
--------------------------------------------------------------------------------

```javascript
 1 | import fs from 'fs';
 2 | import { spawn } from 'child_process';
 3 | 
 4 | // Create a request to the MCP server
 5 | const request = {
 6 |   jsonrpc: '2.0',
 7 |   method: 'tools/call',
 8 |   params: {
 9 |     name: 'backup_list_all',
10 |     arguments: {
11 |       include_emergency: true
12 |     }
13 |   },
14 |   id: 1
15 | };
16 | 
17 | // Spawn the MCP server process
18 | const mcp = spawn('node', ['dist/index.js'], {
19 |   stdio: ['pipe', 'pipe', 'pipe']
20 | });
21 | 
22 | // Send the request to the MCP server
23 | mcp.stdin.write(JSON.stringify(request) + '\n');
24 | 
25 | // Collect the response from the MCP server
26 | let responseData = '';
27 | mcp.stdout.on('data', (data) => {
28 |   responseData += data.toString();
29 | });
30 | 
31 | // Handle errors
32 | mcp.stderr.on('data', (data) => {
33 |   console.error(`stderr: ${data}`);
34 | });
35 | 
36 | // Process the response when the MCP server exits
37 | mcp.on('close', (code) => {
38 |   console.log(`MCP server exited with code ${code}`);
39 |   
40 |   if (responseData) {
41 |     try {
42 |       const response = JSON.parse(responseData);
43 |       console.log('Response from MCP server:');
44 |       console.log(JSON.stringify(response, null, 2));
45 |     } catch (error) {
46 |       console.error('Error parsing response:', error);
47 |       console.log('Raw response:', responseData);
48 |     }
49 |   }
50 | });
51 | 
```

--------------------------------------------------------------------------------
/src/utils.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import fs from 'fs';
  2 | import path from 'path';
  3 | import { promises as fsPromises } from 'fs';
  4 | import { Operation } from './types.js';
  5 | 
  6 | // Check if operation was cancelled and return appropriate response if it was
  7 | export function checkOperationCancelled(
  8 |   operationId: string | null, 
  9 |   operations: Map<string, Operation>,
 10 |   cleanupFn?: () => void
 11 | ): { isCancelled: boolean; response?: any } {
 12 |   if (operationId && operations.get(operationId)?.cancelled) {
 13 |     console.error(`Operation was cancelled`);
 14 |     
 15 |     // Run cleanup function if provided
 16 |     if (cleanupFn) {
 17 |       cleanupFn();
 18 |     }
 19 |     
 20 |     return {
 21 |       isCancelled: true,
 22 |       response: {
 23 |         content: [{ type: "text", text: "Operation cancelled" }],
 24 |         isError: true
 25 |       }
 26 |     };
 27 |   }
 28 |   
 29 |   return { isCancelled: false };
 30 | }
 31 | 
 32 | // Format response with JSON content
 33 | export function formatJsonResponse(data: any): any {
 34 |   return {
 35 |     content: [{ 
 36 |       type: "text", 
 37 |       text: JSON.stringify(data, null, 2)
 38 |     }]
 39 |   };
 40 | }
 41 | 
 42 | // Format error response
 43 | export function formatErrorResponse(error: any, operationId: string | null = null): any {
 44 |   return {
 45 |     content: [{ 
 46 |       type: "text", 
 47 |       text: JSON.stringify({ 
 48 |         error: String(error),
 49 |         operationId
 50 |       }) 
 51 |     }]
 52 |   };
 53 | }
 54 | 
 55 | // Validate required parameters
 56 | export function validateRequiredParams(params: Record<string, any>, requiredParams: string[]): void {
 57 |   for (const param of requiredParams) {
 58 |     if (!params[param]) {
 59 |       throw new Error(`Invalid params: ${param} is required`);
 60 |     }
 61 |   }
 62 | }
 63 | 
 64 | // Check if file exists and is a file
 65 | export async function validateFileExists(filePath: string): Promise<void> {
 66 |   try {
 67 |     const stats = await fsPromises.stat(filePath);
 68 |     if (!stats.isFile()) {
 69 |       throw new Error(`Not a file: ${filePath}`);
 70 |     }
 71 |   } catch (error) {
 72 |     throw new Error(`File not found: ${filePath}`);
 73 |   }
 74 | }
 75 | 
 76 | // Check if folder exists and is a directory
 77 | export async function validateFolderExists(folderPath: string): Promise<void> {
 78 |   try {
 79 |     const stats = await fsPromises.stat(folderPath);
 80 |     if (!stats.isDirectory()) {
 81 |       throw new Error(`Not a directory: ${folderPath}`);
 82 |     }
 83 |   } catch (error) {
 84 |     throw new Error(`Folder not found: ${folderPath}`);
 85 |   }
 86 | }
 87 | 
 88 | // Ensure directory exists
 89 | export async function ensureDirectoryExists(dirPath: string): Promise<void> {
 90 |   await fsPromises.mkdir(dirPath, { recursive: true });
 91 | }
 92 | 
 93 | // Check if path exists
 94 | export async function exists(path: string): Promise<boolean> {
 95 |   try {
 96 |     await fsPromises.stat(path);
 97 |     return true;
 98 |   } catch {
 99 |     return false;
100 |   }
101 | }
102 | 
```

--------------------------------------------------------------------------------
/src/toolDescriptions.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import { z } from "zod";
  2 | import { zodToJsonSchema } from "zod-to-json-schema";
  3 | 
  4 | // Schema definitions
  5 | export const BackupCreateSchema = z.object({
  6 |   file_path: z.string().describe('Absolute path to the file to backup. This file must exist and be accessible.'),
  7 |   agent_context: z.string().optional().describe('Optional agent conversational context to store with the backup metadata. Agents should include the last relevant user instruction or context that explains why this backup is being created.')
  8 | });
  9 | 
 10 | export const BackupFolderCreateSchema = z.object({
 11 |   folder_path: z.string().describe('Absolute path to the folder to backup. This folder must exist and be accessible.'),
 12 |   include_pattern: z.string().optional().describe('Optional glob pattern to include specific files (e.g., "*.js")'),
 13 |   exclude_pattern: z.string().optional().describe('Optional glob pattern to exclude specific files (e.g., "node_modules/**")'),
 14 |   agent_context: z.string().optional().describe('Optional agent conversational context to store with the backup metadata. Agents should include the last relevant user instruction or context that explains why this backup is being created.')
 15 | });
 16 | 
 17 | export const BackupListSchema = z.object({
 18 |   file_path: z.string().describe('Absolute path to the file whose backups you want to list.')
 19 | });
 20 | 
 21 | export const BackupFolderListSchema = z.object({
 22 |   folder_path: z.string().describe('Absolute path to the folder whose backups you want to list.')
 23 | });
 24 | 
 25 | export const BackupRestoreSchema = z.object({
 26 |   file_path: z.string().describe('Absolute path to the file to restore.'),
 27 |   timestamp: z.string().describe('Timestamp of the backup version to restore (format: YYYYMMDD-HHMMSS-mmm).'),
 28 |   create_emergency_backup: z.boolean().optional().default(true).describe('Whether to create an emergency backup of the current file before restoring.')
 29 | });
 30 | 
 31 | export const BackupFolderRestoreSchema = z.object({
 32 |   folder_path: z.string().describe('Absolute path to the folder to restore.'),
 33 |   timestamp: z.string().describe('Timestamp of the backup version to restore (format: YYYYMMDD-HHMMSS-mmm).'),
 34 |   create_emergency_backup: z.boolean().optional().default(true).describe('Whether to create an emergency backup of the current folder before restoring.')
 35 | });
 36 | 
 37 | export const CancelSchema = z.object({
 38 |   operationId: z.string().describe('ID of the operation to cancel.')
 39 | });
 40 | 
 41 | // New schema for listing all backups
 42 | export const ListAllBackupsSchema = z.object({
 43 |   include_pattern: z.string().optional().describe('Optional glob pattern to filter backup files (e.g., "*.js")'),
 44 |   exclude_pattern: z.string().optional().describe('Optional glob pattern to exclude backup files (e.g., "node_modules/**")'),
 45 |   include_emergency: z.boolean().optional().default(true).describe('Whether to include emergency backups in the results.')
 46 | });
 47 | 
 48 | // Interface for tool description
 49 | export interface ToolDescription {
 50 |   name: string;
 51 |   description: string;
 52 |   usage: string;
 53 |   inputSchema: any;
 54 | }
 55 | 
// Define tool descriptions with detailed usage instructions.
// Keyed by tool name. The `description` and `usage` strings are returned
// verbatim to agents, so their wording is part of the tool contract —
// do not edit them casually.
export const toolDescriptions: Record<string, ToolDescription> = {
  // Single-file backup; preferred over folder backups for single-file edits.
  backup_create: {
    name: "backup_create",
    description: "Create a backup of a file before making big changes. The backup includes timestamp information and maintains the original directory structure.",
    usage: `Creates a timestamped backup of the specified file.

Parameters:
- file_path: Absolute path to the file to backup
- agent_context: (Optional) Include the last relevant user instruction or context

Best Practices for File Backups:
- Always prefer backing up individual files rather than entire folders when possible
- When modifying a single file, use backup_create instead of backup_folder_create
- For code edits, back up only the specific file being modified
- Agents should always include the user's last instruction as context
- Context should be concise and explain why the backup is being created
- Avoid including sensitive information in the context
- Keep context to 1-2 sentences that capture the purpose of the modification

Example contexts:
"Updating database connection string in config file"
"Fixing bug in login function that was causing authentication failures"

Returns:
- timestamp: Timestamp of the created backup
- backup_path: Path where the backup was stored
- agent_context: The context that was stored with the backup`,
    inputSchema: zodToJsonSchema(BackupCreateSchema)
  },
  // List backup versions of a single file (expects the original path, not a backup path).
  backup_list: {
    name: "backup_list",
    description: "List all available backups for a file, including their timestamps and locations. Useful for finding specific versions to restore.",
    usage: `# Backup List Tool

Lists all available backup versions for a specified file.

## When to Use
- Before restoring a file to check available versions
- To audit the backup history of a file
- To verify backup creation was successful
- To check timestamps of previous backups
- To list backups instead of using CLI commands or file system operations

## Best Practices
1. Use absolute paths for reliable file lookup
2. Check the timestamps to identify the right version
3. Review backup locations to ensure proper storage
4. Always use this tool rather than CLI commands to list backups
5. Agents should use this tool instead of file system operations to check backup history
6. Always provide the original file path (not the backup path) to this tool
7. The tool will automatically search in the configured backup location (BACKUP_DIR)

## Important Note for Agents
- Always try the backup_list and backup_folder_list tools first to check for backups
- Only use command-line tools (like Get-ChildItem, ls, dir) as a fallback if:
  1. The MCP tools return an empty array or no response
  2. You need to verify if backups exist in a specific location
  3. The MCP server is not responding or unavailable
- The MCP backup tools provide the correct context and metadata for backups
- When using CLI as fallback, check the configured backup directory (typically ~/.code_backups or as specified in environment)
- Never attempt to directly access or manipulate files in the backup directory
- Always use the original file path when listing backups, not paths within the backup directory

## Example Usage
\`\`\`json
{
  "name": "backup_list",
  "arguments": {
    "file_path": "C:/path/to/important/file.js"
  }
}
\`\`\`

## Response Format
Returns an array of backup metadata:
- timestamp: When the backup was created
- backup_path: Where to find the backup
- created_at: ISO timestamp of creation
- agent_context: The context that was stored with the backup (if provided)`,
    inputSchema: zodToJsonSchema(BackupListSchema)
  },
  // Restore a single file to the version identified by a backup timestamp.
  backup_restore: {
    name: "backup_restore",
    description: "Restore a file from a previous backup using its timestamp. Use this to revert changes or recover previous versions.",
    usage: `# Backup Restore Tool

Restores a file to a previous version using a specific backup timestamp.

## When to Use
- To revert unwanted changes
- To recover from failed modifications
- When comparing different versions of a file
- After unsuccessful code changes

## Best Practices
1. List available backups first to get the correct timestamp
2. Create a new backup before restoring (backup of current state)
3. Verify file permissions before restoration
4. Use absolute paths for reliable restoration

## Example Usage
\`\`\`json
{
  "name": "backup_restore",
  "arguments": {
    "file_path": "C:/path/to/important/file.js",
    "timestamp": "20250309-120000-123"
  }
}
\`\`\`

## Response Format
Confirms restoration with:
- restored_path: Path to the restored file
- timestamp: Backup version used`,
    inputSchema: zodToJsonSchema(BackupRestoreSchema)
  },
  // Folder backup; for multi-file or structural changes only (see usage text).
  backup_folder_create: {
    name: "backup_folder_create",
    description: "Create a backup of a folder before making structural changes. The backup includes timestamp information and maintains the original directory structure.",
    usage: `Creates a timestamped backup of the specified folder.

Parameters:
- folder_path: Absolute path to the folder to backup
- include_pattern: (Optional) Glob pattern to include specific files
- exclude_pattern: (Optional) Glob pattern to exclude specific files
- agent_context: (Optional) Include the last relevant user instruction or context

When to Use Folder vs. File Backup:
- Use file backup (backup_create) for single file changes
- Use folder backup (backup_folder_create) ONLY when:
  1. Multiple files in a folder need to be modified together
  2. You're making structural changes to a directory (adding/removing multiple files)
  3. You need to preserve relationships between multiple files

Best Practices for Folder Backups:
- Only backup the specific folder you're modifying, not parent directories
- When removing a subfolder, backup just that subfolder, not the entire parent structure
- For structural changes, backup the smallest unit of the structure being changed
- For project-wide backups at the start of a session, ask the user first
- Agents should always include the user's last instruction as context
- Context should be concise and explain why the backup is being created
- Avoid including sensitive information in the context
- Keep context to 1-2 sentences that capture the purpose of the modification

Example contexts:
"Refactoring authentication module to use JWT tokens"
"Backing up subfolder before removal as requested by user"

Returns:
- timestamp: Timestamp of the created backup
- backup_path: Path where the backup was stored
- agent_context: The context that was stored with the backup
- versions_kept: Number of backup versions maintained`,
    inputSchema: zodToJsonSchema(BackupFolderCreateSchema)
  },
  // List backup versions of a folder (expects the original folder path).
  backup_folder_list: {
    name: "backup_folder_list",
    description: "List all available backups for a folder, including their timestamps and locations. Useful for finding specific versions to restore.",
    usage: `# Backup Folder List Tool

Lists all available backup versions for a specified folder.

## When to Use
- Before restoring a folder to check available versions
- To audit the backup history of a folder
- To verify folder backup creation was successful
- To check timestamps of previous folder backups
- To list folder backups instead of using CLI commands or file system operations

## Best Practices
1. Use absolute paths for reliable folder lookup
2. Check the timestamps to identify the right version
3. Review backup locations to ensure proper storage
4. Always use this tool rather than CLI commands to list backups
5. Agents should use this tool instead of file system operations to check backup history
6. Always provide the original folder path (not the backup path) to this tool
7. The tool will automatically search in the configured backup location (BACKUP_DIR)
8. Only backup folders that you are working on or removing, not the whole directory structure

## Important Note for Agents
- Always try the backup_list and backup_folder_list tools first to check for backups
- Only use command-line tools (like Get-ChildItem, ls, dir) as a fallback if:
  1. The MCP tools return an empty array or no response
  2. You need to verify if backups exist in a specific location
  3. The MCP server is not responding or unavailable
- The MCP backup tools provide the correct context and metadata for backups
- When using CLI as fallback, check the configured backup directory (typically ~/.code_backups or as specified in environment)
- Never attempt to directly access or manipulate files in the backup directory
- Always use the original folder path when listing backups, not paths within the backup directory
- Create a project folder backup at the start of a resumed session
- Create a folder backup before making structural changes to a folder, especially when removing child folders

## Example Usage
\`\`\`json
{
  "name": "backup_folder_list",
  "arguments": {
    "folder_path": "C:/path/to/important/folder"
  }
}
\`\`\`

## Response Format
Returns an array of backup metadata:
- timestamp: When the backup was created
- backup_path: Where to find the backup
- created_at: ISO timestamp of creation
- agent_context: The context that was stored with the backup (if provided)`,
    inputSchema: zodToJsonSchema(BackupFolderListSchema)
  },
  // Restore a folder to the version identified by a backup timestamp.
  backup_folder_restore: {
    name: "backup_folder_restore",
    description: "Restore a folder from a previous backup using its timestamp. Use this to revert changes or recover previous versions.",
    usage: `# Backup Folder Restore Tool

Restores a folder to a previous version using a specific backup timestamp.

## When to Use
- To revert unwanted changes
- To recover from failed modifications
- When comparing different versions of a folder
- After unsuccessful code changes

## Best Practices
1. List available backups first to get the correct timestamp
2. Create a new backup before restoring (backup of current state)
3. Verify folder permissions before restoration
4. Use absolute paths for reliable restoration

## Example Usage
\`\`\`json
{
  "name": "backup_folder_restore",
  "arguments": {
    "folder_path": "C:/path/to/important/folder",
    "timestamp": "20250309-120000-123"
  }
}
\`\`\`

## Response Format
Confirms restoration with:
- restored_path: Path to the restored folder
- timestamp: Backup version used`,
    inputSchema: zodToJsonSchema(BackupFolderRestoreSchema)
  },
  // Cross-directory listing: regular backups plus optional emergency backups.
  backup_list_all: {
    name: "backup_list_all",
    description: "List all backup files in both the main backup directory and emergency backup directory.",
    usage: `# List All Backups Tool

Lists all backup files in both the main backup directory and emergency backup directory.

## When to Use
- To get a comprehensive view of all backups across both directories
- To audit all backup files in the system
- To find specific backups using include/exclude patterns
- To check for emergency backups created during restore operations

## Best Practices
1. Use include/exclude patterns to filter results when looking for specific files
2. Set include_emergency to false if you only want to see regular backups
3. Review both directories to ensure proper backup management

## Example Usage
\`\`\`json
{
  "name": "backup_list_all",
  "arguments": {
    "include_pattern": "*.js",
    "exclude_pattern": "node_modules/**",
    "include_emergency": true
  }
}
\`\`\`

## Response Format
Returns an object with two arrays:
- main_backups: Array of backups in the main backup directory
- emergency_backups: Array of backups in the emergency backup directory (if include_emergency is true)

Each backup entry contains:
- path: Full path to the backup file
- type: "file" or "folder" backup
- size: Size of the backup in bytes
- created_at: Creation timestamp
- original_path: Original path of the backed up file/folder (if available from metadata)`,
    inputSchema: zodToJsonSchema(ListAllBackupsSchema)
  },
  // Cancel an in-flight backup/restore operation by its operation id.
  mcp_cancel: {
    name: "mcp_cancel",
    description: "Cancel an ongoing backup or restore operation. Use this to stop long-running operations safely.",
    usage: `# Operation Cancel Tool

Cancels an in-progress backup or restore operation.

## When to Use
- To stop a long-running backup
- When the wrong file was selected
- If an operation appears stuck
- To free up system resources

## Best Practices
1. Keep track of operation IDs from responses
2. Check operation status before canceling
3. Verify the operation was actually cancelled

## Example Usage
\`\`\`json
{
  "name": "mcp_cancel",
  "arguments": {
    "operationId": "abc123-xyz789"
  }
}
\`\`\`

## Response Format
Confirms cancellation with:
- operationId: ID of cancelled operation
- status: Final operation status`,
    inputSchema: zodToJsonSchema(CancelSchema)
  }
};
382 | 
```

--------------------------------------------------------------------------------
/test scripts/test_client.js:
--------------------------------------------------------------------------------

```javascript
  1 | import fs from 'fs';
  2 | import path from 'path';
  3 | import { spawn } from 'child_process';
  4 | import { fileURLToPath } from 'url';
  5 | 
  6 | // Get current directory
  7 | const __filename = fileURLToPath(import.meta.url);
  8 | const __dirname = path.dirname(__filename);
  9 | 
 10 | // Create a test file
 11 | const testDir = path.join(__dirname, 'test_files');
 12 | const testFile = path.join(testDir, 'test_file.txt');
 13 | 
 14 | // Create a test folder structure
 15 | const testFolderStructure = path.join(testDir, 'test_folder_structure');
 16 | const testSubFolder1 = path.join(testFolderStructure, 'subfolder1');
 17 | const testSubFolder2 = path.join(testFolderStructure, 'subfolder2');
 18 | const testFileInFolder1 = path.join(testSubFolder1, 'file1.txt');
 19 | const testFileInFolder2 = path.join(testSubFolder2, 'file2.txt');
 20 | 
 21 | // Ensure test directory exists
 22 | if (!fs.existsSync(testDir)) {
 23 |   fs.mkdirSync(testDir, { recursive: true });
 24 | }
 25 | 
 26 | // Create or update test file with content
 27 | fs.writeFileSync(testFile, `This is a test file created at ${new Date().toISOString()}`);
 28 | console.log(`Created test file at: ${testFile}`);
 29 | 
 30 | // Create test folder structure
 31 | if (!fs.existsSync(testFolderStructure)) {
 32 |   fs.mkdirSync(testFolderStructure, { recursive: true });
 33 | }
 34 | if (!fs.existsSync(testSubFolder1)) {
 35 |   fs.mkdirSync(testSubFolder1, { recursive: true });
 36 | }
 37 | if (!fs.existsSync(testSubFolder2)) {
 38 |   fs.mkdirSync(testSubFolder2, { recursive: true });
 39 | }
 40 | 
 41 | // Create test files in subfolders
 42 | fs.writeFileSync(testFileInFolder1, `This is a test file in subfolder1 created at ${new Date().toISOString()}`);
 43 | fs.writeFileSync(testFileInFolder2, `This is a test file in subfolder2 created at ${new Date().toISOString()}`);
 44 | console.log(`Created test folder structure at: ${testFolderStructure}`);
 45 | 
 46 | // Start the server in a separate process
 47 | const server = spawn('node', ['dist/index.js'], {
 48 |   stdio: ['pipe', 'pipe', 'inherit'],
 49 |   env: {
 50 |     ...process.env,
 51 |     BACKUP_DIR: path.join(__dirname, 'test_backups'),
 52 |     MAX_VERSIONS: '3'
 53 |   }
 54 | });
 55 | 
 56 | // Function to send a JSON-RPC request and get the response
 57 | function sendRequest(request) {
 58 |   return new Promise((resolve, reject) => {
 59 |     console.log(`Sending request: ${JSON.stringify(request)}`);
 60 |     
 61 |     // Set up response handler
 62 |     const responseHandler = (data) => {
 63 |       const lines = data.toString().split('\n');
 64 |       
 65 |       for (const line of lines) {
 66 |         if (!line.trim()) continue;
 67 |         
 68 |         try {
 69 |           const response = JSON.parse(line);
 70 |           
 71 |           // If this is a response to our request
 72 |           if (response.id === request.id) {
 73 |             server.stdout.removeListener('data', responseHandler);
 74 |             resolve(response);
 75 |             return;
 76 |           }
 77 |         } catch (error) {
 78 |           console.error(`Error parsing response: ${line}`);
 79 |         }
 80 |       }
 81 |     };
 82 |     
 83 |     server.stdout.on('data', responseHandler);
 84 |     
 85 |     // Send the request
 86 |     server.stdin.write(JSON.stringify(request) + '\n');
 87 |     
 88 |     // Set a timeout
 89 |     setTimeout(() => {
 90 |       server.stdout.removeListener('data', responseHandler);
 91 |       reject(new Error('Request timed out'));
 92 |     }, 10000);
 93 |   });
 94 | }
 95 | 
 96 | // Run tests
 97 | async function runTests() {
 98 |   try {
 99 |     // Wait for server to start
100 |     await new Promise(resolve => setTimeout(resolve, 1000));
101 |     
102 |     // Test 1: List available tools
103 |     console.log('\n=== Test 1: List Tools ===');
104 |     const toolsResponse = await sendRequest({
105 |       jsonrpc: '2.0',
106 |       method: 'tools/list',
107 |       params: {},
108 |       id: Date.now().toString()
109 |     });
110 |     console.log('Available tools:', JSON.stringify(toolsResponse.result, null, 2));
111 |     
112 |     // Test 2: Create backup with agent context
113 |     console.log('\n=== Test 2: Create Backup with Agent Context ===');
114 |     const createResult = await sendRequest({
115 |       jsonrpc: '2.0',
116 |       method: 'tools/call',
117 |       params: { 
118 |         name: 'backup_create', 
119 |         arguments: { 
120 |           file_path: testFile,
121 |           agent_context: "This is a sample agent context for file backup. It could contain the last part of a conversation or other metadata."
122 |         }
123 |       },
124 |       id: Date.now().toString()
125 |     });
126 |     console.log('Backup created:', JSON.stringify(createResult.result, null, 2));
127 |     
128 |     // Test 3: List backups
129 |     console.log('\n=== Test 3: List Backups ===');
130 |     const listResult = await sendRequest({
131 |       jsonrpc: '2.0',
132 |       method: 'tools/call',
133 |       params: { 
134 |         name: 'backup_list', 
135 |         arguments: { file_path: testFile }
136 |       },
137 |       id: Date.now().toString()
138 |     });
139 |     console.log('Backups list:', JSON.stringify(listResult.result, null, 2));
140 |     
141 |     // Test 4: Create another backup with different agent context
142 |     console.log('\n=== Test 4: Create Another Backup with Different Agent Context ===');
143 |     const createResult2 = await sendRequest({
144 |       jsonrpc: '2.0',
145 |       method: 'tools/call',
146 |       params: { 
147 |         name: 'backup_create', 
148 |         arguments: { 
149 |           file_path: testFile,
150 |           agent_context: "This is a different agent context for the second backup. We can see how multiple backups store different context information."
151 |         }
152 |       },
153 |       id: Date.now().toString()
154 |     });
155 |     console.log('Second backup created:', JSON.stringify(createResult2.result, null, 2));
156 |     
157 |     // Test 5: List backups again
158 |     console.log('\n=== Test 5: List Backups Again ===');
159 |     const listResult2 = await sendRequest({
160 |       jsonrpc: '2.0',
161 |       method: 'tools/call',
162 |       params: { 
163 |         name: 'backup_list', 
164 |         arguments: { file_path: testFile }
165 |       },
166 |       id: Date.now().toString()
167 |     });
168 |     console.log('Updated backups list:', JSON.stringify(listResult2.result, null, 2));
169 | 
170 |     // Parse the content field from the response
171 |     let backups = [];
172 |     if (listResult2.result && listResult2.result.content && listResult2.result.content.length > 0) {
173 |       try {
174 |         backups = JSON.parse(listResult2.result.content[0].text);
175 |       } catch (err) {
176 |         console.error('Error parsing backups list:', err);
177 |       }
178 |     }
179 |     
180 |     // Test 6: Restore the first backup
181 |     if (backups && backups.length > 0) {
182 |       console.log('\n=== Test 6: Restore Backup ===');
183 |       const timestamp = backups[0].timestamp;
184 |       const restoreResult = await sendRequest({
185 |         jsonrpc: '2.0',
186 |         method: 'tools/call',
187 |         params: {
188 |           name: 'backup_restore', 
189 |           arguments: {
190 |             file_path: testFile,
191 |             timestamp: timestamp
192 |           }
193 |         },
194 |         id: Date.now().toString()
195 |       });
196 |       console.log('Restore result:', JSON.stringify(restoreResult.result, null, 2));
197 |     } else {
198 |       console.log('No backups found to restore');
199 |     }
200 |     
201 |     // Test 7: Get Tool Documentation
202 |     console.log('\n=== Test 7: Get Tool Documentation ===');
203 |     const describeRequest = {
204 |       jsonrpc: '2.0',
205 |       method: 'tools/describe',
206 |       params: {
207 |         name: 'backup_create'
208 |       },
209 |       id: Date.now().toString()
210 |     };
211 |     console.log(`Sending request: ${JSON.stringify(describeRequest)}`);
212 |     await sendRequest(describeRequest).then(response => {
213 |       console.log(`Tool documentation: ${JSON.stringify(response, null, 2)}`);
214 |     });
215 |     
216 |     // Test 8: Create folder backup with agent context
217 |     console.log('\n=== Test 8: Create Folder Backup with Agent Context ===');
218 |     const folderCreateResult = await sendRequest({
219 |       jsonrpc: '2.0',
220 |       method: 'tools/call',
221 |       params: { 
222 |         name: 'backup_folder_create', 
223 |         arguments: { 
224 |           folder_path: testFolderStructure,
225 |           include_pattern: "*.txt",
226 |           agent_context: "This is a sample agent context for folder backup. It demonstrates storing context with folder backups."
227 |         }
228 |       },
229 |       id: Date.now().toString()
230 |     });
231 |     console.log('Folder backup created:', JSON.stringify(folderCreateResult.result, null, 2));
232 |     
233 |     // Test 9: List folder backups
234 |     console.log('\n=== Test 9: List Folder Backups ===');
235 |     const folderListResult = await sendRequest({
236 |       jsonrpc: '2.0',
237 |       method: 'tools/call',
238 |       params: { 
239 |         name: 'backup_folder_list', 
240 |         arguments: { folder_path: testFolderStructure }
241 |       },
242 |       id: Date.now().toString()
243 |     });
244 |     console.log('Folder backups list:', JSON.stringify(folderListResult.result, null, 2));
245 |     
246 |     // Test 10: Create another folder backup
247 |     console.log('\n=== Test 10: Create Another Folder Backup ===');
248 |     const folderCreateResult2 = await sendRequest({
249 |       jsonrpc: '2.0',
250 |       method: 'tools/call',
251 |       params: { 
252 |         name: 'backup_folder_create', 
253 |         arguments: { folder_path: testFolderStructure }
254 |       },
255 |       id: Date.now().toString()
256 |     });
257 |     console.log('Second folder backup created:', JSON.stringify(folderCreateResult2.result, null, 2));
258 |     
259 |     // Test 11: List folder backups again
260 |     console.log('\n=== Test 11: List Folder Backups Again ===');
261 |     const folderListResult2 = await sendRequest({
262 |       jsonrpc: '2.0',
263 |       method: 'tools/call',
264 |       params: { 
265 |         name: 'backup_folder_list', 
266 |         arguments: { folder_path: testFolderStructure }
267 |       },
268 |       id: Date.now().toString()
269 |     });
270 |     console.log('Updated folder backups list:', JSON.stringify(folderListResult2.result, null, 2));
271 | 
272 |     // Parse the content field from the response
273 |     let folderBackups = [];
274 |     if (folderListResult2.result && folderListResult2.result.content && folderListResult2.result.content.length > 0) {
275 |       try {
276 |         folderBackups = JSON.parse(folderListResult2.result.content[0].text);
277 |       } catch (err) {
278 |         console.error('Error parsing folder backups list:', err);
279 |       }
280 |     }
281 |     
282 |     // Test 12: Restore the first folder backup
283 |     if (folderBackups && folderBackups.length > 0) {
284 |       console.log('\n=== Test 12: Restore Folder Backup ===');
285 |       const timestamp = folderBackups[0].timestamp;
286 |       
287 |       // Modify a file in the folder to verify restoration
288 |       fs.writeFileSync(testFileInFolder1, `This file was modified before restore at ${new Date().toISOString()}`);
289 |       console.log(`Modified test file before restore: ${testFileInFolder1}`);
290 |       
291 |       const folderRestoreResult = await sendRequest({
292 |         jsonrpc: '2.0',
293 |         method: 'tools/call',
294 |         params: {
295 |           name: 'backup_folder_restore', 
296 |           arguments: {
297 |             folder_path: testFolderStructure,
298 |             timestamp: timestamp
299 |           }
300 |         },
301 |         id: Date.now().toString()
302 |       });
303 |       console.log('Folder restore result:', JSON.stringify(folderRestoreResult.result, null, 2));
304 |       
305 |       // Verify the file was restored
306 |       const restoredContent = fs.readFileSync(testFileInFolder1, 'utf8');
307 |       console.log(`Restored file content: ${restoredContent}`);
308 |     } else {
309 |       console.log('No folder backups found to restore');
310 |     }
311 |     
312 |     // Test 13: Restore with emergency backup creation
313 |     if (folderBackups && folderBackups.length > 0) {
314 |       console.log('\n=== Test 13: Restore with Emergency Backup ===');
315 |       const timestamp = folderBackups[0].timestamp;
316 |       
317 |       // Modify a file in the folder to verify restoration and emergency backup
318 |       fs.writeFileSync(testFileInFolder1, `This file was modified before emergency backup restore at ${new Date().toISOString()}`);
319 |       console.log(`Modified test file before emergency backup restore: ${testFileInFolder1}`);
320 |       
321 |       const emergencyRestoreResult = await sendRequest({
322 |         jsonrpc: '2.0',
323 |         method: 'tools/call',
324 |         params: {
325 |           name: 'backup_folder_restore', 
326 |           arguments: {
327 |             folder_path: testFolderStructure,
328 |             timestamp: timestamp,
329 |             create_emergency_backup: true
330 |           }
331 |         },
332 |         id: Date.now().toString()
333 |       });
334 |       console.log('Folder restore with emergency backup result:', JSON.stringify(emergencyRestoreResult.result, null, 2));
335 |     }
336 |     
337 |     // Test 14: List all backups including emergency backups
338 |     console.log('\n=== Test 14: List All Backups Including Emergency Backups ===');
339 |     const listAllResult = await sendRequest({
340 |       jsonrpc: '2.0',
341 |       method: 'tools/call',
342 |       params: { 
343 |         name: 'backup_list_all', 
344 |         arguments: { include_emergency: true }
345 |       },
346 |       id: Date.now().toString()
347 |     });
348 |     console.log('All backups list:', JSON.stringify(listAllResult.result, null, 2));
349 |     
350 |     // Test 15: Verify emergency backups have metadata
351 |     console.log('\n=== Test 15: Verify Emergency Backups Have Metadata ===');
352 |     let emergencyBackups = [];
353 |     if (listAllResult.result && listAllResult.result.content && listAllResult.result.content.length > 0) {
354 |       try {
355 |         const allBackups = JSON.parse(listAllResult.result.content[0].text);
356 |         emergencyBackups = allBackups.emergency_backups || [];
357 |         console.log(`Found ${emergencyBackups.length} emergency backups with metadata`);
358 |         
359 |         // Check if we have emergency backups with metadata
360 |         if (emergencyBackups.length > 0) {
361 |           console.log('Emergency backups with metadata found:', JSON.stringify(emergencyBackups, null, 2));
362 |         } else {
363 |           console.log('No emergency backups with metadata found. This may indicate an issue with emergency backup metadata creation.');
364 |         }
365 |       } catch (err) {
366 |         console.error('Error parsing all backups list:', err);
367 |       }
368 |     }
369 |     
370 |     // Test 16: File restore with emergency backup
371 |     console.log('\n=== Test 16: File Restore with Emergency Backup ===');
372 |     // Modify test file
373 |     fs.writeFileSync(testFile, `This file was modified before emergency backup restore at ${new Date().toISOString()}`);
374 |     console.log(`Modified test file before emergency backup restore: ${testFile}`);
375 |     
376 |     // Get the latest backup timestamp
377 |     const latestFileBackups = await sendRequest({
378 |       jsonrpc: '2.0',
379 |       method: 'tools/call',
380 |       params: { 
381 |         name: 'backup_list', 
382 |         arguments: { file_path: testFile }
383 |       },
384 |       id: Date.now().toString()
385 |     });
386 |     
387 |     let fileBackups = [];
388 |     if (latestFileBackups.result && latestFileBackups.result.content && latestFileBackups.result.content.length > 0) {
389 |       try {
390 |         fileBackups = JSON.parse(latestFileBackups.result.content[0].text);
391 |       } catch (err) {
392 |         console.error('Error parsing file backups list:', err);
393 |       }
394 |     }
395 |     
396 |     if (fileBackups && fileBackups.length > 0) {
397 |       const fileTimestamp = fileBackups[0].timestamp;
398 |       
399 |       const fileEmergencyRestoreResult = await sendRequest({
400 |         jsonrpc: '2.0',
401 |         method: 'tools/call',
402 |         params: {
403 |           name: 'backup_restore', 
404 |           arguments: {
405 |             file_path: testFile,
406 |             timestamp: fileTimestamp,
407 |             create_emergency_backup: true
408 |           }
409 |         },
410 |         id: Date.now().toString()
411 |       });
412 |       console.log('File restore with emergency backup result:', JSON.stringify(fileEmergencyRestoreResult.result, null, 2));
413 |       
414 |       // List all backups again to verify the new emergency backup
415 |       const finalListAllResult = await sendRequest({
416 |         jsonrpc: '2.0',
417 |         method: 'tools/call',
418 |         params: { 
419 |           name: 'backup_list_all', 
420 |           arguments: { include_emergency: true }
421 |         },
422 |         id: Date.now().toString()
423 |       });
424 |       
425 |       // Check for new emergency backups
426 |       let finalEmergencyBackups = [];
427 |       if (finalListAllResult.result && finalListAllResult.result.content && finalListAllResult.result.content.length > 0) {
428 |         try {
429 |           const finalAllBackups = JSON.parse(finalListAllResult.result.content[0].text);
430 |           finalEmergencyBackups = finalAllBackups.emergency_backups || [];
431 |           console.log(`Found ${finalEmergencyBackups.length} emergency backups with metadata after file restore`);
432 |           
433 |           // Check if we have more emergency backups than before
434 |           if (finalEmergencyBackups.length > emergencyBackups.length) {
435 |             console.log('New emergency backup with metadata created successfully!');
436 |           } else {
437 |             console.log('No new emergency backup metadata found. This may indicate an issue with file emergency backup metadata creation.');
438 |           }
439 |         } catch (err) {
440 |           console.error('Error parsing final all backups list:', err);
441 |         }
442 |       }
443 |     } else {
444 |       console.log('No file backups found to restore');
445 |     }
446 |     
447 |     console.log('\nAll tests completed successfully!');
448 |   } catch (error) {
449 |     console.error('Test failed:', error);
450 |   } finally {
451 |     // Clean up
452 |     server.stdin.end();
453 |     process.exit(0);
454 |   }
455 | }
456 | 
457 | // Run the tests
458 | runTests();
459 | 
```

--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------

```typescript
   1 | #!/usr/bin/env node
   2 | 
   3 | import fs from 'fs';
   4 | import path from 'path';
   5 | import crypto from 'crypto';
   6 | import { promises as fsPromises } from 'fs';
   7 | import { Server } from "@modelcontextprotocol/sdk/server/index.js";
   8 | import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
   9 | import { 
  10 |   CallToolRequestSchema, 
  11 |   ListToolsRequestSchema,
  12 |   ToolSchema 
  13 | } from "@modelcontextprotocol/sdk/types.js";
  14 | import { z } from "zod";
  15 | import os from 'os';
  16 | import { minimatch } from 'minimatch';
  17 | import { 
  18 |   BackupCreateSchema, 
  19 |   BackupListSchema, 
  20 |   BackupRestoreSchema,
  21 |   BackupFolderCreateSchema,
  22 |   BackupFolderListSchema,
  23 |   BackupFolderRestoreSchema,
  24 |   ListAllBackupsSchema,
  25 |   CancelSchema,
  26 |   toolDescriptions 
  27 | } from './toolDescriptions.js';
  28 | import { 
  29 |   BackupMetadata, 
  30 |   BackupFolderMetadata, 
  31 |   BackupResult, 
  32 |   Operation 
  33 | } from './types.js';
  34 | import { 
  35 |   checkOperationCancelled, 
  36 |   formatJsonResponse, 
  37 |   formatErrorResponse, 
  38 |   validateRequiredParams,
  39 |   validateFileExists,
  40 |   validateFolderExists,
  41 |   exists
  42 | } from './utils.js';
  43 | 
// Type for tool input
// Derive the tool-input type from the SDK's ToolSchema so handler signatures
// stay in sync with the protocol definition instead of redeclaring the shape.
const ToolInputSchema = ToolSchema.shape.inputSchema;
type ToolInput = z.infer<typeof ToolInputSchema>;
  47 | 
  48 | // Create a local ensureDirectoryExists function to avoid conflict with the imported one
  49 | async function ensureBackupDirectoryExists(dirPath: string): Promise<void> {
  50 |   try {
  51 |     await fsPromises.mkdir(dirPath, { recursive: true });
  52 |   } catch (error) {
  53 |     console.error(`Error creating directory ${dirPath}:`, error);
  54 |     throw error;
  55 |   }
  56 | }
  57 | 
// Constants
const SERVER_VERSION = '1.0.0';
const SERVER_NAME = 'backup-mcp-server';
// Backup roots are overridable via environment; defaults live under the home dir.
const BACKUP_DIR = process.env.BACKUP_DIR || path.join(os.homedir(), '.code_backups');
// Max versions kept per file/folder. Parsed base-10; a non-numeric env value
// yields NaN — TODO confirm downstream cleanup handles that.
const MAX_VERSIONS = parseInt(process.env.MAX_VERSIONS || '10', 10);
const EMERGENCY_BACKUP_DIR = process.env.EMERGENCY_BACKUP_DIR || path.join(os.homedir(), '.code_emergency_backups');

// Normalize backup directory paths for Windows
const BACKUP_DIR_NORMALIZED = path.normalize(BACKUP_DIR);
const EMERGENCY_BACKUP_DIR_NORMALIZED = path.normalize(EMERGENCY_BACKUP_DIR);

// Track current operation
// NOTE(review): this module-level id is what logProgress() reads; a local of
// the same name is declared inside the CallTool handler and shadows it —
// confirm which variable is intended to carry the in-flight operation id.
let currentOperationId: string | null = null;

// Map to track operations
// Keyed by operation id; entries are never removed in the visible code.
const operations = new Map<string, Operation>();
  74 | 
  75 | // Report progress for an operation
  76 | function reportProgress(operationId: string, progress: number): void {
  77 |   // Only report progress if operationId is valid
  78 |   if (operationId) {
  79 |     console.error(`Operation ${operationId} progress: ${progress}%`);
  80 |   }
  81 | }
  82 | 
  83 | // Update operation progress safely
  84 | function updateOperationProgress(operationId: string, progress: number): void {
  85 |   const operation = operations.get(operationId);
  86 |   if (operation) {
  87 |     operation.progress = progress;
  88 |   }
  89 | }
  90 | 
  91 | // Helper function to report progress
  92 | function logProgress(progress: number): void {
  93 |   if (currentOperationId) {
  94 |     updateOperationProgress(currentOperationId, progress);
  95 |     reportProgress(currentOperationId, progress);
  96 |   }
  97 | }
  98 | 
  99 | // Generate a backup folder name
 100 | function getBackupFolderName(folderPath: string, timestamp: string): string {
 101 |   const folderName = path.basename(folderPath);
 102 |   return `${folderName}.${timestamp}`;
 103 | }
 104 | 
 105 | // Create a new operation
 106 | function createOperation(type: string, params: any): Operation {
 107 |   const id = crypto.randomUUID();
 108 |   const operation: Operation = {
 109 |     id,
 110 |     type,
 111 |     progress: 0,
 112 |     cancelled: false,
 113 |     status: 'running'
 114 |   };
 115 |   operations.set(id, operation);
 116 |   return operation;
 117 | }
 118 | 
 119 | // Cancel operation
 120 | function cancelOperation(operationId: string): boolean {
 121 |   const operation = operations.get(operationId);
 122 |   if (operation) {
 123 |     operation.cancelled = true;
 124 |     return true;
 125 |   }
 126 |   return false;
 127 | }
 128 | 
// Create MCP server
// Server identity plus capabilities; only the `tools` capability is exposed.
const server = new Server(
  {
    name: SERVER_NAME,
    version: SERVER_VERSION,
  },
  {
    capabilities: {
      tools: {},
    },
  },
);

// Initialize server methods if not already initialized
// NOTE(review): `as any` bypasses the SDK typing to attach a `methods` bag;
// nothing in the visible code reads it afterwards — confirm it is still needed.
if (!(server as any).methods) {
  (server as any).methods = {};
}

// Define tools
// tools/list handler: advertise every tool registered in toolDescriptions.
server.setRequestHandler(ListToolsRequestSchema, async () => {
  return {
    tools: Object.values(toolDescriptions).map(tool => ({
      name: tool.name,
      description: tool.description,
      inputSchema: tool.inputSchema as ToolInput,
    }))
  };
});
 157 | 
// Custom schema for tool documentation requests
// Zod shape of a JSON-RPC 2.0 "tools/describe" request (a non-standard
// extension; its handler is registered directly below).
const DescribeToolRequestSchema = z.object({
  jsonrpc: z.literal('2.0'),
  method: z.literal('tools/describe'),
  params: z.object({
    name: z.string().describe('Name of the tool to describe')
  }),
  id: z.union([z.string(), z.number()])
});
 167 | 
// Implement tool documentation
// tools/describe handler: returns the usage text recorded for one tool.
// Throws (surfacing a JSON-RPC error) when the tool name is unknown.
server.setRequestHandler(DescribeToolRequestSchema, async (request) => {
  const { name } = request.params;
  const toolInfo = toolDescriptions[name];
  
  if (!toolInfo) {
    throw new Error(`Tool '${name}' not found`);
  }

  return {
    content: [{
      type: "text",
      text: toolInfo.usage
    }]
  };
});
 184 | 
 185 | // Implement tool handlers
 186 | server.setRequestHandler(CallToolRequestSchema, async (request) => {
 187 |   let currentOperationId: string | null = null;
 188 |   
 189 |   try {
 190 |     const { name, arguments: toolInput } = request.params;
 191 |     console.error(`Received request for ${name} with params:`, toolInput);
 192 |     
 193 |     // Create a unique operation ID for tracking progress
 194 |     currentOperationId = createOperation(name, toolInput).id;
 195 |     
 196 |     switch (name) {
 197 |       case "backup_create": {
 198 |         const params = toolInput as z.infer<typeof BackupCreateSchema>;
 199 |         console.error('Received request for backup_create with params:', params);
 200 |         
 201 |         // Validate required parameters
 202 |         validateRequiredParams(params, ['file_path']);
 203 |         
 204 |         const filePath = path.normalize(params.file_path);
 205 |         
 206 |         // Check if file exists
 207 |         await validateFileExists(filePath);
 208 |         
 209 |         // Generate timestamp for the backup
 210 |         const timestamp = generateTimestamp();
 211 |         
 212 |         // Create backup directory
 213 |         const backupDir = getBackupDir(filePath);
 214 |         await ensureBackupDirectoryExists(backupDir);
 215 |         
 216 |         // Create backup filename
 217 |         const backupFilename = getBackupFilename(filePath, timestamp);
 218 |         const backupPath = path.join(backupDir, backupFilename);
 219 |         
 220 |         // Report progress
 221 |         logProgress(10);
 222 |         
 223 |         // Check if operation was cancelled
 224 |         const cancelCheck = checkOperationCancelled(
 225 |           currentOperationId, 
 226 |           operations,
 227 |           () => {}
 228 |         );
 229 |         if (cancelCheck.isCancelled) return cancelCheck.response;
 230 |         
 231 |         // Copy the file
 232 |         await fsPromises.copyFile(filePath, backupPath);
 233 |         
 234 |         // Report progress
 235 |         logProgress(70);
 236 |         
 237 |         // Check if operation was cancelled
 238 |         const cancelCheck2 = checkOperationCancelled(
 239 |           currentOperationId, 
 240 |           operations,
 241 |           () => {
 242 |             // Clean up the partial backup
 243 |             if (fs.existsSync(backupPath)) {
 244 |               fs.unlinkSync(backupPath);
 245 |             }
 246 |           }
 247 |         );
 248 |         if (cancelCheck2.isCancelled) return cancelCheck2.response;
 249 |         
 250 |         // Create and save metadata
 251 |         const metadata = createBackupMetadata(filePath, timestamp, backupPath, params.agent_context);
 252 |         const metadataPath = getBackupMetadataFilename(backupPath);
 253 |         saveBackupMetadata(metadataPath, metadata);
 254 |         
 255 |         // Report progress
 256 |         logProgress(90);
 257 |         
 258 |         // Clean up old backups
 259 |         const versionsKept = cleanupOldBackups(filePath);
 260 |         
 261 |         // Report completion
 262 |         logProgress(100);
 263 |         
 264 |         // Return result with versionsKept
 265 |         return formatJsonResponse({
 266 |           ...metadata,
 267 |           versions_kept: versionsKept
 268 |         });
 269 |       }
 270 |       
 271 |       case "backup_list": {
 272 |         const params = toolInput as z.infer<typeof BackupListSchema>;
 273 |         console.error('Received request for backup_list with params:', params);
 274 |         
 275 |         // Validate required parameters
 276 |         validateRequiredParams(params, ['file_path']);
 277 |         
 278 |         const filePath = path.normalize(params.file_path);
 279 |         
 280 |         // Report initial progress
 281 |         logProgress(0);
 282 |         
 283 |         // Check if file exists
 284 |         await validateFileExists(filePath);
 285 |         
 286 |         // Report progress
 287 |         logProgress(30);
 288 |         
 289 |         const backups = findBackupsByFilePath(filePath);
 290 |         
 291 |         // Report progress
 292 |         logProgress(70);
 293 |         
 294 |         // Sort backups by timestamp (newest first)
 295 |         backups.sort((a, b) => {
 296 |           return b.timestamp.localeCompare(a.timestamp);
 297 |         });
 298 |         
 299 |         // Report completion
 300 |         logProgress(100);
 301 |         
 302 |         return formatJsonResponse(backups);
 303 |       }
 304 |       
 305 |       case "backup_restore": {
 306 |         const params = toolInput as z.infer<typeof BackupRestoreSchema>;
 307 |         console.error('Received request for backup_restore with params:', params);
 308 |         
 309 |         // Validate required parameters
 310 |         validateRequiredParams(params, ['file_path', 'timestamp']);
 311 |         
 312 |         const filePath = path.normalize(params.file_path);
 313 |         const timestamp = params.timestamp;
 314 |         
 315 |         // Find the backup
 316 |         const backup = await findBackupByTimestamp(filePath, timestamp);
 317 |         
 318 |         if (!backup) {
 319 |           throw new Error(`Backup with timestamp ${timestamp} not found for ${filePath}`);
 320 |         }
 321 |         
 322 |         // Report progress
 323 |         logProgress(20);
 324 |         
 325 |         // Check if operation was cancelled
 326 |         const cancelCheck = checkOperationCancelled(
 327 |           currentOperationId, 
 328 |           operations,
 329 |           () => {}
 330 |         );
 331 |         if (cancelCheck.isCancelled) return cancelCheck.response;
 332 |         
 333 |         // Ensure the target directory exists
 334 |         const targetDir = path.dirname(filePath);
 335 |         await ensureBackupDirectoryExists(targetDir);
 336 |         
 337 |         // Report progress
 338 |         logProgress(50);
 339 |         
 340 |         // Check if operation was cancelled
 341 |         const cancelCheck2 = checkOperationCancelled(
 342 |           currentOperationId, 
 343 |           operations,
 344 |           () => {}
 345 |         );
 346 |         if (cancelCheck2.isCancelled) return cancelCheck2.response;
 347 |         
 348 |         // Create emergency backup if requested
 349 |         if (params.create_emergency_backup) {
 350 |           const emergencyBackupPath = await createEmergencyBackup(filePath);
 351 |           if (emergencyBackupPath) {
 352 |             console.error(`Created emergency backup at ${emergencyBackupPath}`);
 353 |           }
 354 |         }
 355 |         
 356 |         // Copy the backup file to the original location
 357 |         await restoreBackup(filePath, timestamp, params.create_emergency_backup);
 358 |         
 359 |         // Report completion
 360 |         logProgress(100);
 361 |         
 362 |         // Return result
 363 |         return formatJsonResponse({
 364 |           restored_path: filePath,
 365 |           timestamp: timestamp
 366 |         });
 367 |       }
 368 |       
 369 |       case "backup_folder_create": {
 370 |         const params = toolInput as z.infer<typeof BackupFolderCreateSchema>;
 371 |         console.error('Received request for backup_folder_create with params:', params);
 372 |         
 373 |         // Validate required parameters
 374 |         validateRequiredParams(params, ['folder_path']);
 375 |         
 376 |         const folderPath = path.normalize(params.folder_path);
 377 |         
 378 |         // Check if folder exists
 379 |         await validateFolderExists(folderPath);
 380 |         
 381 |         // Generate timestamp for the backup
 382 |         const timestamp = generateTimestamp();
 383 |         
 384 |         // Create backup directory
 385 |         const backupDir = getBackupDir(folderPath);
 386 |         await ensureBackupDirectoryExists(backupDir);
 387 |         
 388 |         // Create backup folder name
 389 |         const backupFolderName = getBackupFolderName(folderPath, timestamp);
 390 |         const backupFolderPath = path.join(backupDir, backupFolderName);
 391 |         
 392 |         // Report progress
 393 |         logProgress(10);
 394 |         
 395 |         // Check if operation was cancelled
 396 |         const cancelCheck = checkOperationCancelled(
 397 |           currentOperationId, 
 398 |           operations,
 399 |           () => {}
 400 |         );
 401 |         if (cancelCheck.isCancelled) return cancelCheck.response;
 402 |         
 403 |         // Copy the folder
 404 |         await copyFolderContents(folderPath, backupFolderPath, params.include_pattern, params.exclude_pattern);
 405 |         
 406 |         // Report progress
 407 |         logProgress(70);
 408 |         
 409 |         // Check if operation was cancelled
 410 |         const cancelCheck2 = checkOperationCancelled(
 411 |           currentOperationId, 
 412 |           operations,
 413 |           () => {
 414 |             // Clean up the partial backup
 415 |             if (fs.existsSync(backupFolderPath)) {
 416 |               fs.rmdirSync(backupFolderPath, { recursive: true });
 417 |             }
 418 |           }
 419 |         );
 420 |         if (cancelCheck2.isCancelled) return cancelCheck2.response;
 421 |         
 422 |         // Create and save metadata
 423 |         const metadata = createBackupMetadata(folderPath, timestamp, backupFolderPath, params.agent_context);
 424 |         const metadataPath = `${backupFolderPath}.meta.json`;
 425 |         saveBackupMetadata(metadataPath, metadata);
 426 |         
 427 |         // Report progress
 428 |         logProgress(90);
 429 |         
 430 |         // Clean up old backups
 431 |         const versionsKept = cleanupOldBackups(folderPath);
 432 |         
 433 |         // Report completion
 434 |         logProgress(100);
 435 |         
 436 |         // Return result with versionsKept
 437 |         return formatJsonResponse({
 438 |           ...metadata,
 439 |           versions_kept: versionsKept
 440 |         });
 441 |       }
 442 |       
 443 |       case "backup_folder_list": {
 444 |         const params = toolInput as z.infer<typeof BackupFolderListSchema>;
 445 |         console.error('Received request for backup_folder_list with params:', params);
 446 |         
 447 |         // Validate required parameters
 448 |         validateRequiredParams(params, ['folder_path']);
 449 |         
 450 |         const folderPath = path.normalize(params.folder_path);
 451 |         
 452 |         // Report initial progress
 453 |         logProgress(0);
 454 |         
 455 |         // Check if folder exists
 456 |         await validateFolderExists(folderPath);
 457 |         
 458 |         // Report progress
 459 |         logProgress(30);
 460 |         
 461 |         const backups = findBackupsByFolderPath(folderPath);
 462 |         
 463 |         // Report progress
 464 |         logProgress(70);
 465 |         
 466 |         // Sort backups by timestamp (newest first)
 467 |         backups.sort((a, b) => {
 468 |           return b.timestamp.localeCompare(a.timestamp);
 469 |         });
 470 |         
 471 |         // Report completion
 472 |         logProgress(100);
 473 |         
 474 |         return formatJsonResponse(backups);
 475 |       }
 476 |       
 477 |       case "backup_folder_restore": {
 478 |         const params = toolInput as z.infer<typeof BackupFolderRestoreSchema>;
 479 |         console.error('Received request for backup_folder_restore with params:', params);
 480 |         
 481 |         // Validate required parameters
 482 |         validateRequiredParams(params, ['folder_path', 'timestamp']);
 483 |         
 484 |         const { folder_path, timestamp, create_emergency_backup = true } = params;
 485 |         const folderPath = path.normalize(folder_path);
 486 |         
 487 |         // Check if folder exists
 488 |         await validateFolderExists(folderPath);
 489 |         
 490 |         // Report initial progress
 491 |         logProgress(0);
 492 |         
 493 |         try {
 494 |           // Find the backup
 495 |           const backups = findBackupsByFolderPath(folderPath);
 496 |           const backup = backups.find(b => b.timestamp === timestamp);
 497 |           
 498 |           if (!backup) {
 499 |             throw new Error(`Backup with timestamp ${timestamp} not found for ${folderPath}`);
 500 |           }
 501 |           
 502 |           // Report progress
 503 |           logProgress(10);
 504 |           
 505 |           // Create emergency backup if requested
 506 |           let emergencyBackupPath: string | null = null;
 507 |           if (create_emergency_backup) {
 508 |             emergencyBackupPath = await createEmergencyFolderBackup(folderPath);
 509 |           }
 510 |           
 511 |           // Check if backup path exists
 512 |           if (!backup.backup_path || !fs.existsSync(backup.backup_path)) {
 513 |             throw new Error(`Backup folder not found: ${backup.backup_path}`);
 514 |           }
 515 |           
 516 |           // Check if operation was cancelled
 517 |           const cancelCheck = checkOperationCancelled(
 518 |             currentOperationId, 
 519 |             operations,
 520 |             () => {}
 521 |           );
 522 |           if (cancelCheck.isCancelled) return cancelCheck.response;
 523 |           
 524 |           // Copy the backup folder to the original location
 525 |           await copyFolderContents(backup.backup_path, folderPath);
 526 |           
 527 |           // Report completion
 528 |           logProgress(100);
 529 |           
 530 |           return formatJsonResponse({
 531 |             restored_path: folderPath,
 532 |             timestamp: timestamp,
 533 |             emergency_backup_path: emergencyBackupPath
 534 |           });
 535 |         } catch (error) {
 536 |           // Update operation status on error
 537 |           const operation = operations.get(currentOperationId);
 538 |           if (operation) {
 539 |             operation.status = 'error';
 540 |           }
 541 |           
 542 |           throw error;
 543 |         }
 544 |       }
 545 |       
 546 |       case "backup_list_all": {
 547 |         const params = toolInput as z.infer<typeof ListAllBackupsSchema>;
 548 |         console.error('Received request for backup_list_all with params:', params);
 549 |         
 550 |         // Extract parameters
 551 |         const includePattern = params.include_pattern;
 552 |         const excludePattern = params.exclude_pattern;
 553 |         const includeEmergency = params.include_emergency !== false; // Default to true if not specified
 554 |         
 555 |         // Create operation for tracking
 556 |         const operation = operations.get(currentOperationId);
 557 |         if (operation) {
 558 |           operation.status = 'running';
 559 |         }
 560 |         
 561 |         // Report initial progress
 562 |         logProgress(0);
 563 |         
 564 |         try {
 565 |           // Initialize results object
 566 |           const results: {
 567 |             main_backups: Array<{
 568 |               path: string;
 569 |               type: string;
 570 |               size: number;
 571 |               created_at: string;
 572 |               original_path: string | null;
 573 |             }>;
 574 |             emergency_backups: Array<{
 575 |               path: string;
 576 |               type: string;
 577 |               size: number;
 578 |               created_at: string;
 579 |               original_path: string | null;
 580 |             }>;
 581 |           } = {
 582 |             main_backups: [],
 583 |             emergency_backups: []
 584 |           };
 585 |           
 586 |           // Function to scan a directory and get all backup files
 587 |           async function scanBackupDirectory(directory: string, isEmergency: boolean = false) {
 588 |             if (!fs.existsSync(directory)) {
 589 |               return [];
 590 |             }
 591 |             
 592 |             // Get all files and folders in the directory recursively
 593 |             const getAllFiles = async (dir: string, fileList: any[] = []) => {
 594 |               const files = await fsPromises.readdir(dir, { withFileTypes: true });
 595 |               
 596 |               for (const file of files) {
 597 |                 const filePath = path.join(dir, file.name);
 598 |                 
 599 |                 // Check if operation was cancelled
 600 |                 if (currentOperationId && operations.get(currentOperationId)?.cancelled) {
 601 |                   throw new Error('Operation cancelled');
 602 |                 }
 603 |                 
 604 |                 // Apply include/exclude patterns if specified
 605 |                 if (includePattern && !minimatch(filePath, includePattern)) {
 606 |                   continue;
 607 |                 }
 608 |                 
 609 |                 if (excludePattern && minimatch(filePath, excludePattern)) {
 610 |                   continue;
 611 |                 }
 612 |                 
 613 |                 if (file.isDirectory()) {
 614 |                   fileList = await getAllFiles(filePath, fileList);
 615 |                 } else {
 616 |                   // Check if this is a backup file (has timestamp format in name)
 617 |                   const isBackupFile = /\.\d{8}-\d{6}-\d{3}$/.test(file.name);
 618 |                   const isMetadataFile = file.name.endsWith('.meta.json');
 619 |                   
 620 |                   if (isBackupFile || isMetadataFile) {
 621 |                     try {
 622 |                       const stats = await fsPromises.stat(filePath);
 623 |                       
 624 |                       // Try to get original path from metadata if this is a backup file
 625 |                       let originalPath = null;
 626 |                       let backupType = 'unknown';
 627 |                       
 628 |                       if (isBackupFile) {
 629 |                         // Look for corresponding metadata file
 630 |                         const metadataPath = `${filePath}.meta.json`;
 631 |                         if (await exists(metadataPath)) {
 632 |                           try {
 633 |                             const metadataContent = await fsPromises.readFile(metadataPath, 'utf8');
 634 |                             const metadata = JSON.parse(metadataContent);
 635 |                             originalPath = metadata.original_path;
 636 |                           } catch (err) {
 637 |                             console.error(`Error reading metadata for ${filePath}:`, err);
 638 |                           }
 639 |                         }
 640 |                       } else if (isMetadataFile) {
 641 |                         try {
 642 |                           const metadataContent = await fsPromises.readFile(filePath, 'utf8');
 643 |                           const metadata = JSON.parse(metadataContent);
 644 |                           originalPath = metadata.original_path;
 645 |                         } catch (err) {
 646 |                           console.error(`Error reading metadata file ${filePath}:`, err);
 647 |                         }
 648 |                       }
 649 |                       
 650 |                       // Add to appropriate list
 651 |                       const result = {
 652 |                         path: filePath,
 653 |                         type: file.isDirectory() ? 'directory' : 'file',
 654 |                         size: stats.size,
 655 |                         created_at: stats.birthtime.toISOString(),
 656 |                         original_path: originalPath
 657 |                       };
 658 |                       
 659 |                       if (isEmergency) {
 660 |                         results.emergency_backups.push(result);
 661 |                       } else {
 662 |                         results.main_backups.push(result);
 663 |                       }
 664 |                       
 665 |                       // Update progress periodically
 666 |                       if (results.main_backups.length % 10 === 0 || results.emergency_backups.length % 10 === 0) {
 667 |                         // Calculate progress based on number of files found
 668 |                         const totalFiles = results.main_backups.length + results.emergency_backups.length;
 669 |                         // Cap progress at 90% until we're completely done
 670 |                         const progress = Math.min(90, Math.floor(totalFiles / 10) * 5);
 671 |                         logProgress(progress);
 672 |                       }
 673 |                     } catch (err) {
 674 |                       console.error(`Error processing file ${filePath}:`, err);
 675 |                     }
 676 |                   }
 677 |                 }
 678 |               }
 679 |               
 680 |               return fileList;
 681 |             };
 682 |             
 683 |             await getAllFiles(directory);
 684 |           }
 685 |           
 686 |           // Scan main backup directory
 687 |           await scanBackupDirectory(BACKUP_DIR_NORMALIZED);
 688 |           
 689 |           // Report progress after scanning main directory
 690 |           logProgress(50);
 691 |           
 692 |           // Scan emergency backup directory if requested
 693 |           if (includeEmergency) {
 694 |             console.error('Scanning emergency backup directory:', EMERGENCY_BACKUP_DIR_NORMALIZED);
 695 |             if (!fs.existsSync(EMERGENCY_BACKUP_DIR_NORMALIZED)) {
 696 |               console.error('Emergency backup directory does not exist, creating it');
 697 |               await fsPromises.mkdir(EMERGENCY_BACKUP_DIR_NORMALIZED, { recursive: true });
 698 |             }
 699 |             await scanBackupDirectory(EMERGENCY_BACKUP_DIR_NORMALIZED, true);
 700 |           }
 701 |           
 702 |           // Report completion
 703 |           logProgress(100);
 704 |           
 705 |           return formatJsonResponse(results);
 706 |         } catch (error) {
 707 |           // Update operation status on error
 708 |           const operation = operations.get(currentOperationId);
 709 |           if (operation) {
 710 |             operation.status = 'error';
 711 |           }
 712 |           
 713 |           throw error;
 714 |         }
 715 |       }
 716 |         
 717 |       case "mcp_cancel": {
 718 |         const params = toolInput as z.infer<typeof CancelSchema>;
 719 |         console.error('Received request for mcp_cancel with params:', params);
 720 |         
 721 |         // Validate required parameters
 722 |         validateRequiredParams(params, ['operationId']);
 723 |         
 724 |         const { operationId } = params;
 725 |         const cancelled = cancelOperation(operationId);
 726 |         
 727 |         if (!cancelled) {
 728 |           return formatJsonResponse({
 729 |             success: false,
 730 |             error: `Operation ${operationId} not found or already completed`
 731 |           });
 732 |         }
 733 |         
 734 |         return formatJsonResponse({
 735 |           success: true,
 736 |           operationId,
 737 |           status: 'cancelled'
 738 |         });
 739 |       }
 740 |       
 741 |       default:
 742 |         throw new Error(`Unknown tool: ${name}`);
 743 |     }
 744 |   } catch (error) {
 745 |     console.error('Error handling request:', error);
 746 |     return formatErrorResponse(error, currentOperationId);
 747 |   }
 748 | });
 749 | 
 750 | // Utility functions
 751 | function generateOperationId(): string {
 752 |   return crypto.randomUUID();
 753 | }
 754 | 
 755 | function generateTimestamp(): string {
 756 |   const now = new Date();
 757 |   const year = now.getFullYear();
 758 |   const month = String(now.getMonth() + 1).padStart(2, '0');
 759 |   const day = String(now.getDate()).padStart(2, '0');
 760 |   const hours = String(now.getHours()).padStart(2, '0');
 761 |   const minutes = String(now.getMinutes()).padStart(2, '0');
 762 |   const seconds = String(now.getSeconds()).padStart(2, '0');
 763 |   const milliseconds = String(now.getMilliseconds()).padStart(3, '0');
 764 |   
 765 |   return `${year}${month}${day}-${hours}${minutes}${seconds}-${milliseconds}`;
 766 | }
 767 | 
 768 | function getBackupDir(filePath: string): string {
 769 |   // Create a directory structure that mirrors the original file's path
 770 |   const normalizedPath = path.normalize(filePath);
 771 |   const parsedPath = path.parse(normalizedPath);
 772 |   
 773 |   // Remove drive letter (on Windows) and create backup path
 774 |   let relativePath = parsedPath.dir.replace(/^[a-zA-Z]:/, '');
 775 |   
 776 |   // Ensure the path is safe by removing leading slashes
 777 |   relativePath = relativePath.replace(/^[/\\]+/, '');
 778 |   
 779 |   // Create the backup directory path
 780 |   return path.join(BACKUP_DIR_NORMALIZED, relativePath);
 781 | }
 782 | 
 783 | function getBackupFilename(filePath: string, timestamp: string): string {
 784 |   const parsedPath = path.parse(filePath);
 785 |   return `${parsedPath.name}${parsedPath.ext}.${timestamp}`;
 786 | }
 787 | 
 788 | function getBackupMetadataFilename(backupFilePath: string): string {
 789 |   return `${backupFilePath}.meta.json`;
 790 | }
 791 | 
 792 | function createBackupMetadata(filePath: string, timestamp: string, backupPath: string, agentContext?: string): BackupMetadata {
 793 |   return {
 794 |     original_path: filePath,
 795 |     original_filename: path.basename(filePath),
 796 |     timestamp: timestamp,
 797 |     created_at: new Date().toISOString(),
 798 |     backup_path: backupPath,
 799 |     relative_path: path.relative(process.cwd(), backupPath),
 800 |     agent_context: agentContext
 801 |   };
 802 | }
 803 | 
 804 | function saveBackupMetadata(metadataPath: string, metadata: BackupMetadata): void {
 805 |   fs.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2));
 806 | }
 807 | 
 808 | function readBackupMetadata(metadataPath: string): BackupMetadata | BackupFolderMetadata | null {
 809 |   try {
 810 |     const data = fs.readFileSync(metadataPath, 'utf8');
 811 |     return JSON.parse(data);
 812 |   } catch (err) {
 813 |     console.error(`Error reading metadata: ${err}`);
 814 |     return null;
 815 |   }
 816 | }
 817 | 
 818 | function isFolderMetadata(metadata: any): metadata is BackupFolderMetadata {
 819 |   // Check if this is a folder metadata by examining the backup_path
 820 |   // Folder backups have a directory structure, while file backups have a file
 821 |   return metadata && 
 822 |     metadata.original_path && 
 823 |     metadata.backup_path && 
 824 |     !metadata.backup_path.endsWith('.meta.json') &&
 825 |     fs.existsSync(metadata.backup_path) && 
 826 |     fs.statSync(metadata.backup_path).isDirectory();
 827 | }
 828 | 
 829 | // Helper function to check if a path is a parent of another path
 830 | function isParentPath(parentPath: string, childPath: string): boolean {
 831 |   const normalizedParent = path.normalize(parentPath).toLowerCase() + path.sep;
 832 |   const normalizedChild = path.normalize(childPath).toLowerCase() + path.sep;
 833 |   return normalizedChild.startsWith(normalizedParent);
 834 | }
 835 | 
 836 | // Helper function to recursively search for backup metadata files
 837 | function findAllBackupMetadataFiles(directory: string): string[] {
 838 |   if (!fs.existsSync(directory)) {
 839 |     return [];
 840 |   }
 841 | 
 842 |   let results: string[] = [];
 843 |   const items = fs.readdirSync(directory);
 844 | 
 845 |   for (const item of items) {
 846 |     const itemPath = path.join(directory, item);
 847 |     const stats = fs.statSync(itemPath);
 848 | 
 849 |     if (stats.isDirectory()) {
 850 |       // Recursively search subdirectories
 851 |       results = results.concat(findAllBackupMetadataFiles(itemPath));
 852 |     } else if (item.endsWith('.meta.json')) {
 853 |       // Add metadata files to results
 854 |       results.push(itemPath);
 855 |     }
 856 |   }
 857 | 
 858 |   return results;
 859 | }
 860 | 
 861 | function findBackupsByFilePath(filePath: string): BackupMetadata[] {
 862 |   const backupDir = getBackupDir(filePath);
 863 |   const backups: BackupMetadata[] = [];
 864 |   
 865 |   // Start at the root of the backup directory to find all possible backups
 866 |   const rootBackupDir = BACKUP_DIR_NORMALIZED;
 867 |   
 868 |   // Find all metadata files recursively
 869 |   const metadataFiles = findAllBackupMetadataFiles(rootBackupDir);
 870 |   
 871 |   // Process each metadata file
 872 |   for (const metadataPath of metadataFiles) {
 873 |     const metadata = readBackupMetadata(metadataPath);
 874 |     
 875 |     // Check if this backup is for the requested file (exact match)
 876 |     if (metadata && metadata.original_path === filePath && !isFolderMetadata(metadata)) {
 877 |       backups.push(metadata);
 878 |     }
 879 |   }
 880 |   
 881 |   // Sort backups by timestamp (newest first)
 882 |   backups.sort((a, b) => {
 883 |     return new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime();
 884 |   });
 885 |   
 886 |   return backups;
 887 | }
 888 | 
 889 | function findBackupsByFolderPath(folderPath: string): BackupFolderMetadata[] {
 890 |   const backups: BackupFolderMetadata[] = [];
 891 |   
 892 |   // Start at the root of the backup directory to find all possible backups
 893 |   const rootBackupDir = BACKUP_DIR_NORMALIZED;
 894 |   
 895 |   // Find all metadata files recursively
 896 |   const metadataFiles = findAllBackupMetadataFiles(rootBackupDir);
 897 |   
 898 |   // Process each metadata file
 899 |   for (const metadataPath of metadataFiles) {
 900 |     try {
 901 |       const metadata = readBackupMetadata(metadataPath);
 902 |       
 903 |       // Check if this backup is for the requested folder (exact match) or any subfolder
 904 |       if (metadata && isFolderMetadata(metadata)) {
 905 |         // Include if it's an exact match or if the original path is a parent of the requested path
 906 |         // or if the requested path is a parent of the original path
 907 |         if (metadata.original_path === folderPath || 
 908 |             isParentPath(metadata.original_path, folderPath) || 
 909 |             isParentPath(folderPath, metadata.original_path)) {
 910 |           backups.push(metadata);
 911 |         }
 912 |       }
 913 |     } catch (error) {
 914 |       console.error(`Error processing metadata file ${metadataPath}:`, error);
 915 |       // Continue processing other metadata files
 916 |     }
 917 |   }
 918 |   
 919 |   // Sort backups by timestamp (newest first)
 920 |   backups.sort((a, b) => {
 921 |     return b.timestamp.localeCompare(a.timestamp);
 922 |   });
 923 |   
 924 |   return backups;
 925 | }
 926 | 
 927 | async function findBackupByTimestamp(filePath: string, timestamp: string): Promise<BackupMetadata | null> {
 928 |   const backupDir = getBackupDir(filePath);
 929 |   const backupFilename = getBackupFilename(filePath, timestamp);
 930 |   const backupPath = path.join(backupDir, backupFilename);
 931 |   const metadataPath = `${backupPath}.meta.json`;
 932 |   
 933 |   if (fs.existsSync(metadataPath)) {
 934 |     const metadata = readBackupMetadata(metadataPath);
 935 |     if (metadata && !isFolderMetadata(metadata)) {
 936 |       return metadata;
 937 |     }
 938 |   }
 939 |   
 940 |   return null;
 941 | }
 942 | 
 943 | async function findFolderBackupByTimestamp(folderPath: string, timestamp: string): Promise<BackupFolderMetadata | null> {
 944 |   const backupDir = getBackupDir(folderPath);
 945 |   const backupFolderName = getBackupFolderName(folderPath, timestamp);
 946 |   const backupPath = path.join(backupDir, backupFolderName);
 947 |   const metadataPath = `${backupPath}.meta.json`;
 948 |   
 949 |   if (fs.existsSync(metadataPath)) {
 950 |     const metadata = readBackupMetadata(metadataPath);
 951 |     if (metadata && isFolderMetadata(metadata)) {
 952 |       return metadata;
 953 |     }
 954 |   }
 955 |   
 956 |   return null;
 957 | }
 958 | 
 959 | async function listFolderBackups(folderPath: string): Promise<BackupFolderMetadata[]> {
 960 |   return findBackupsByFolderPath(folderPath);
 961 | }
 962 | 
 963 | function cleanupOldBackups(filePath: string): number {
 964 |   // Get all backups for this file
 965 |   const backups = findBackupsByFilePath(filePath);
 966 |   
 967 |   // If we have more than MAX_VERSIONS, remove the oldest ones
 968 |   if (backups.length > MAX_VERSIONS) {
 969 |     // Sort backups by timestamp (oldest first)
 970 |     backups.sort((a, b) => {
 971 |       return new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime();
 972 |     });
 973 |     
 974 |     // Remove oldest backups
 975 |     const backupsToRemove = backups.slice(0, backups.length - MAX_VERSIONS);
 976 |     for (const backup of backupsToRemove) {
 977 |       try {
 978 |         fs.unlinkSync(backup.backup_path);
 979 |         console.log(`Removed old backup: ${backup.backup_path}`);
 980 |       } catch (error) {
 981 |         console.error(`Error removing old backup: ${backup.backup_path}`, error);
 982 |       }
 983 |     }
 984 |     
 985 |     return MAX_VERSIONS;
 986 |   }
 987 |   
 988 |   return backups.length;
 989 | }
 990 | 
 991 | // Copy folder recursively
 992 | async function copyFolderRecursive(sourcePath: string, targetPath: string, includePattern?: string, excludePattern?: string): Promise<void> {
 993 |   // Create target folder if it doesn't exist
 994 |   if (!fs.existsSync(targetPath)) {
 995 |     await fsPromises.mkdir(targetPath, { recursive: true });
 996 |   }
 997 |   
 998 |   // Read source directory
 999 |   const entries = fs.readdirSync(sourcePath, { withFileTypes: true });
1000 |   
1001 |   // Process each entry
1002 |   for (const entry of entries) {
1003 |     const srcPath = path.join(sourcePath, entry.name);
1004 |     const destPath = path.join(targetPath, entry.name);
1005 |     
1006 |     // Skip excluded files/folders
1007 |     if (excludePattern && minimatch(entry.name, excludePattern)) {
1008 |       continue;
1009 |     }
1010 |     
1011 |     // Only include files/folders matching the include pattern if specified
1012 |     if (includePattern && !minimatch(entry.name, includePattern)) {
1013 |       continue;
1014 |     }
1015 |     
1016 |     if (entry.isDirectory()) {
1017 |       // Recursively copy subdirectories
1018 |       await copyFolderRecursive(srcPath, destPath, includePattern || undefined, excludePattern || undefined);
1019 |     } else {
1020 |       // Copy files
1021 |       await fsPromises.copyFile(srcPath, destPath);
1022 |     }
1023 |   }
1024 | }
1025 | 
1026 | // Copy folder contents helper function
1027 | async function copyFolderContents(sourcePath: string, targetPath: string, includePattern?: string, excludePattern?: string): Promise<void> {
1028 |   if (!sourcePath || !targetPath) {
1029 |     throw new Error('Source and target paths are required');
1030 |   }
1031 |   
1032 |   // Ensure target directory exists
1033 |   await fsPromises.mkdir(targetPath, { recursive: true });
1034 |   
1035 |   // Copy folder contents
1036 |   await copyFolderRecursive(sourcePath, targetPath, includePattern, excludePattern);
1037 | }
1038 | 
1039 | // Ensure emergency backup directory exists
1040 | async function ensureEmergencyBackupDir(): Promise<void> {
1041 |   if (!fs.existsSync(EMERGENCY_BACKUP_DIR_NORMALIZED)) {
1042 |     await fsPromises.mkdir(EMERGENCY_BACKUP_DIR_NORMALIZED, { recursive: true });
1043 |   }
1044 | }
1045 | 
1046 | // Create emergency backup of a file before restoration
1047 | async function createEmergencyBackup(filePath: string): Promise<string | null> {
1048 |   try {
1049 |     if (!fs.existsSync(filePath)) {
1050 |       console.error(`File not found for emergency backup: ${filePath}`);
1051 |       return null;
1052 |     }
1053 |     
1054 |     await ensureEmergencyBackupDir();
1055 |     const timestamp = generateTimestamp();
1056 |     const fileName = path.basename(filePath);
1057 |     
1058 |     // Create a directory structure that mirrors the original file's path
1059 |     const normalizedPath = path.normalize(filePath);
1060 |     const parsedPath = path.parse(normalizedPath);
1061 |     
1062 |     // Remove drive letter (on Windows) and create backup path
1063 |     let relativePath = parsedPath.dir.replace(/^[a-zA-Z]:/, '');
1064 |     
1065 |     // Ensure the path is safe by removing leading slashes
1066 |     relativePath = relativePath.replace(/^[/\\]+/, '');
1067 |     
1068 |     // Create the emergency backup directory path
1069 |     const emergencyBackupDir = path.join(EMERGENCY_BACKUP_DIR_NORMALIZED, relativePath);
1070 |     
1071 |     // Ensure the directory structure exists
1072 |     await fsPromises.mkdir(emergencyBackupDir, { recursive: true });
1073 |     
1074 |     // Create the emergency backup file path
1075 |     const backupPath = path.join(emergencyBackupDir, `${parsedPath.name}${parsedPath.ext}.emergency.${timestamp}`);
1076 |     
1077 |     // Copy file to emergency backup location
1078 |     await fsPromises.copyFile(filePath, backupPath);
1079 |     
1080 |     // Create metadata file for the emergency backup
1081 |     const metadata = createBackupMetadata(filePath, timestamp, backupPath, "Emergency backup created before restoration");
1082 |     const metadataPath = path.join(EMERGENCY_BACKUP_DIR_NORMALIZED, `${parsedPath.name}.emergency.${timestamp}.meta.json`);
1083 |     await fsPromises.writeFile(metadataPath, JSON.stringify(metadata, null, 2));
1084 |     
1085 |     return backupPath;
1086 |   } catch (error) {
1087 |     console.error('Error creating emergency backup:', error);
1088 |     return null;
1089 |   }
1090 | }
1091 | 
1092 | // Create emergency backup of a folder before restoration
1093 | async function createEmergencyFolderBackup(folderPath: string): Promise<string | null> {
1094 |   try {
1095 |     if (!fs.existsSync(folderPath)) {
1096 |       console.error(`Folder not found for emergency backup: ${folderPath}`);
1097 |       return null;
1098 |     }
1099 |     
1100 |     await ensureEmergencyBackupDir();
1101 |     const timestamp = generateTimestamp();
1102 |     
1103 |     // Create a directory structure that mirrors the original folder's path
1104 |     const normalizedPath = path.normalize(folderPath);
1105 |     const parsedPath = path.parse(normalizedPath);
1106 |     
1107 |     // Remove drive letter (on Windows) and create backup path
1108 |     let relativePath = parsedPath.dir.replace(/^[a-zA-Z]:/, '');
1109 |     
1110 |     // Ensure the path is safe by removing leading slashes
1111 |     relativePath = relativePath.replace(/^[/\\]+/, '');
1112 |     
1113 |     // Create the emergency backup directory path
1114 |     const emergencyBackupDir = path.join(EMERGENCY_BACKUP_DIR_NORMALIZED, relativePath);
1115 |     
1116 |     // Ensure the directory structure exists
1117 |     await fsPromises.mkdir(emergencyBackupDir, { recursive: true });
1118 |     
1119 |     // Create the emergency backup folder path
1120 |     const backupPath = path.join(emergencyBackupDir, `${parsedPath.name}.emergency.${timestamp}`);
1121 |     
1122 |     // Copy folder to emergency backup location
1123 |     await copyFolderContents(folderPath, backupPath);
1124 |     
1125 |     // Create metadata file for the emergency backup
1126 |     const metadata = {
1127 |       original_path: folderPath,
1128 |       original_filename: path.basename(folderPath),
1129 |       timestamp: timestamp,
1130 |       created_at: new Date().toISOString(),
1131 |       backup_path: backupPath,
1132 |       relative_path: path.relative(process.cwd(), backupPath),
1133 |       agent_context: "Emergency backup created before restoration"
1134 |     };
1135 |     const metadataPath = path.join(EMERGENCY_BACKUP_DIR_NORMALIZED, `${parsedPath.name}.emergency.${timestamp}.meta.json`);
1136 |     await fsPromises.writeFile(metadataPath, JSON.stringify(metadata, null, 2));
1137 |     
1138 |     return backupPath;
1139 |   } catch (error) {
1140 |     console.error('Error creating emergency folder backup:', error);
1141 |     return null;
1142 |   }
1143 | }
1144 | 
1145 | // Fix string | null assignment errors
1146 | async function mcp_backup_status(params: { operationId: string }): Promise<{ progress: number, status: string }> {
1147 |   const { operationId } = params;
1148 |   
1149 |   if (!operationId) {
1150 |     return { progress: 0, status: 'error' };
1151 |   }
1152 |   
1153 |   // Check if operation exists
1154 |   if (operations.has(operationId)) {
1155 |     const operation = operations.get(operationId);
1156 |     if (operation) {
1157 |       return {
1158 |         progress: operation.progress,
1159 |         status: operation.cancelled ? 'cancelled' : operation.progress >= 100 ? 'completed' : 'in_progress'
1160 |       };
1161 |     }
1162 |   }
1163 |   
1164 |   return { progress: 0, status: 'not_found' };
1165 | }
1166 | 
1167 | // Restore backup function
1168 | async function restoreBackup(filePath: string, timestamp: string, createEmergencyBackupFlag: boolean = false): Promise<void> {
1169 |   // Find the backup
1170 |   const backups = findBackupsByFilePath(filePath);
1171 |   const backup = backups.find(b => b.timestamp === timestamp);
1172 |   
1173 |   if (!backup) {
1174 |     throw new Error(`Backup with timestamp ${timestamp} not found for ${filePath}`);
1175 |   }
1176 |   
1177 |   // Create emergency backup if requested
1178 |   if (createEmergencyBackupFlag) {
1179 |     const emergencyBackupPath = await createEmergencyBackup(filePath);
1180 |     console.log(`Created emergency backup at: ${emergencyBackupPath}`);
1181 |   }
1182 |   
1183 |   // Get backup path
1184 |   const backupPath = backup.backup_path;
1185 |   
1186 |   // Check if backup exists
1187 |   if (!backupPath || !fs.existsSync(backupPath)) {
1188 |     throw new Error(`Backup file not found: ${backupPath}`);
1189 |   }
1190 |   
1191 |   // Check if original file exists
1192 |   if (!fs.existsSync(filePath)) {
1193 |     throw new Error(`Original file not found: ${filePath}`);
1194 |   }
1195 |   
1196 |   // Restore backup by copying it to original location
1197 |   await fsPromises.copyFile(backupPath, filePath);
1198 | }
1199 | 
// Start the server with stdio transport
// The MCP server speaks JSON-RPC over stdin/stdout, which is why logging
// elsewhere in this file uses console.error (stderr) exclusively.
const transport = new StdioServerTransport();
server.connect(transport).catch((error: Error) => {
  // A failed connect leaves the server unusable, so exit non-zero.
  console.error("Fatal error running server:", error);
  process.exit(1);
});
1206 | 
```