# Directory Structure

```
├── .gitignore
├── LICENSE
├── package-lock.json
├── package.json
├── README.md
├── src
│   ├── index.ts
│   ├── tools
│   │   ├── definitions.ts
│   │   └── handlers.ts
│   └── utils
│       ├── ffmpeg.ts
│       └── file.ts
└── tsconfig.json
```

# Files

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------

```
node_modules/
build/
*.log
.env*
*.mp3
*.jpg
*.png
*.mp4
*.avi

```

--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------

```json
{
  "compilerOptions": {
    "target": "ES2022",
    "module": "Node16",
    "moduleResolution": "Node16",
    "outDir": "./build",
    "rootDir": "./src",
    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true
  },
  "include": ["src/**/*"],
  "exclude": ["node_modules"]
}

```

--------------------------------------------------------------------------------
/src/utils/file.ts:
--------------------------------------------------------------------------------

```typescript
import { existsSync } from "fs";
import { dirname } from "path";
import { mkdir } from "fs/promises";

/**
 * Helper function to ensure a directory exists
 */
export async function ensureDirectoryExists(filePath: string): Promise<void> {
  const dir = dirname(filePath);
  try {
    await mkdir(dir, { recursive: true });
  } catch (error) {
    // Ignore "already exists" errors; rethrow anything else
    if ((error as any).code !== 'EEXIST') {
      throw error;
    }
  }
}

/**
 * Helper function to validate file path
 */
export function validatePath(path: string, isInput: boolean = false): string {
  if (!path) {
    throw new Error("File path is required");
  }
  
  if (isInput && !existsSync(path)) {
    throw new Error(`Input file does not exist: ${path}`);
  }
  
  return path;
}

```
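
A minimal usage sketch (not a file in the repository) of how these helpers fit together: validate the input up front, then make sure the output file's directory exists before FFmpeg writes to it. The paths are placeholders.

```typescript
import { ensureDirectoryExists, validatePath } from "./utils/file.js";

// Placeholder paths; any readable input video and writable output location will do.
const input = validatePath("media/source.mp4", true);  // throws if the file does not exist
const output = validatePath("build/output/clip.mp4");  // only checks that a path was supplied

await ensureDirectoryExists(output);                    // creates build/output/ if it is missing
```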

--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------

```json
{
  "name": "@sworddut/mcp-ffmpeg-helper",
  "version": "0.1.0",
  "description": "A Model Context Protocol (MCP) helper for FFmpeg video processing operations",
  "type": "module",
  "bin": {
    "mcp-ffmpeg-helper": "./build/index.js"
  },
  "files": [
    "build"
  ],
  "scripts": {
    "build": "tsc && node -e \"require('fs').chmodSync('build/index.js', '755')\"",
    "prepare": "npm run build",
    "watch": "tsc --watch",
    "inspector": "npx @modelcontextprotocol/inspector build/index.js"
  },
  "dependencies": {
    "@modelcontextprotocol/sdk": "0.6.0",
    "ffmpeg": "^0.0.4"
  },
  "devDependencies": {
    "@types/node": "^20.11.24",
    "typescript": "^5.3.3"
  },
  "keywords": [
    "ffmpeg",
    "video",
    "mcp",
    "model-context-protocol",
    "video-processing",
    "watermark",
    "trim",
    "convert"
  ],
  "author": "Your Name",
  "license": "MIT",
  "repository": {
    "type": "git",
    "url": "https://github.com/yourusername/mcp-ffmpeg-helper.git"
  },
  "bugs": {
    "url": "https://github.com/yourusername/mcp-ffmpeg-helper/issues"
  },
  "homepage": "https://github.com/yourusername/mcp-ffmpeg-helper#readme",
  "engines": {
    "node": ">=14.0.0"
  }
}

```
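
Because the package exposes a `mcp-ffmpeg-helper` bin pointing at `build/index.js`, an MCP client can launch it as a stdio server. A hedged sketch of a client entry (this is the `mcpServers` shape used by Claude Desktop; other clients differ, and the path is a placeholder):

```json
{
  "mcpServers": {
    "ffmpeg-helper": {
      "command": "node",
      "args": ["/path/to/mcp-ffmpeg-helper/build/index.js"]
    }
  }
}
```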

--------------------------------------------------------------------------------
/src/utils/ffmpeg.ts:
--------------------------------------------------------------------------------

```typescript
import { exec } from "child_process";
import { promisify } from "util";
import { validatePath } from "./file.js";

const execPromise = promisify(exec);

/**
 * Helper function to run FFmpeg commands with better error handling
 */
export async function runFFmpegCommand(command: string): Promise<string> {
  try {
    // Log to stderr; stdout belongs to the MCP stdio transport
    console.error(`Running FFmpeg command: ffmpeg ${command}`);
    const { stdout, stderr } = await execPromise(`ffmpeg ${command}`);
    return stdout || stderr;
  } catch (error: any) {
    console.error("FFmpeg error:", error.message);
    if (error.stderr) {
      return error.stderr;
    }
    throw new Error(`FFmpeg error: ${error.message}`);
  }
}

/**
 * Helper function to get information about a video file
 */
export async function getVideoInfo(filePath: string): Promise<string> {
  try {
    validatePath(filePath, true);
    console.error(`Getting video info for: ${filePath}`);
    const { stdout, stderr } = await execPromise(`ffprobe -v error -show_format -show_streams -print_format json "${filePath}"`);
    return stdout || stderr;
  } catch (error: any) {
    console.error("FFprobe error:", error.message);
    if (error.stderr) {
      return error.stderr;
    }
    throw new Error(`FFprobe error: ${error.message}`);
  }
}

```
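
A rough usage sketch (not part of the repository): `getVideoInfo` returns ffprobe's output as a string, so callers typically parse it before inspecting streams. The path is a placeholder, and the sketch assumes the probe succeeds (on failure the function may return non-JSON stderr text).

```typescript
import { getVideoInfo } from "./utils/ffmpeg.js";

const raw = await getVideoInfo("media/sample.mp4"); // placeholder path
const info = JSON.parse(raw);                       // ffprobe -print_format json output

// ffprobe lists one entry per stream; pull the video stream's codec and resolution.
const video = info.streams?.find((s: any) => s.codec_type === "video");
console.log(`codec=${video?.codec_name}, size=${video?.width}x${video?.height}`);
```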

--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------

```typescript
#!/usr/bin/env node

/**
 * FFmpeg Helper MCP Server
 * A simple MCP server that provides FFmpeg functionality through tools.
 * It allows video operations like:
 * - Getting video information
 * - Converting video formats
 * - Extracting audio from video
 * - Creating video from image sequences
 * - Trimming videos
 * - Adding watermarks
 * - Applying filters
 */

import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import {
  CallToolRequestSchema,
  ListToolsRequestSchema,
} from "@modelcontextprotocol/sdk/types.js";

// Import our modularized code
import { toolDefinitions } from "./tools/definitions.js";
import { handleToolCall } from "./tools/handlers.js";

/**
 * Create an MCP server with capabilities for tools to interact with FFmpeg
 */
const server = new Server(
  {
    name: "mcp-ffmpeg-helper",
    version: "0.2.0",
  },
  {
    capabilities: {
      tools: {},
    },
  }
);

/**
 * Handler that lists available tools.
 * Exposes FFmpeg-related tools for video operations.
 */
server.setRequestHandler(ListToolsRequestSchema, async () => {
  return {
    tools: toolDefinitions
  };
});

/**
 * Handler for FFmpeg tools.
 * Implements various video operations using FFmpeg.
 */
server.setRequestHandler(CallToolRequestSchema, async (request) => {
  try {
    return await handleToolCall(request.params.name, request.params.arguments);
  } catch (error: any) {
    console.error("Tool execution error:", error.message);
    return {
      content: [{
        type: "text",
        text: `Error: ${error.message}`
      }]
    };
  }
});

/**
 * Start the server using stdio transport.
 * This allows the server to communicate via standard input/output streams.
 */
async function main() {
  console.log("Starting MCP FFmpeg Helper server...");
  const transport = new StdioServerTransport();
  await server.connect(transport);
  console.log("MCP FFmpeg Helper server connected and ready");
}

main().catch((error) => {
  console.error("Server error:", error);
  process.exit(1);
});

```
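
Over the stdio transport the client and server exchange JSON-RPC messages. A simplified sketch of a `tools/call` request for `get_video_info` (the envelope follows the MCP spec; the id and file path are placeholders):

```json
{
  "jsonrpc": "2.0",
  "id": 1,
  "method": "tools/call",
  "params": {
    "name": "get_video_info",
    "arguments": { "filePath": "media/sample.mp4" }
  }
}
```

The handler registered for `CallToolRequestSchema` replies with a `content` array holding a single text item: either the ffprobe JSON or an `Error: ...` message.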

--------------------------------------------------------------------------------
/src/tools/definitions.ts:
--------------------------------------------------------------------------------

```typescript
/**
 * Tool definitions for FFmpeg operations
 * Defines the available tools and their input schemas
 */
export const toolDefinitions = [
  {
    name: "get_video_info",
    description: "Get detailed information about a video file",
    inputSchema: {
      type: "object",
      properties: {
        filePath: {
          type: "string",
          description: "Path to the video file"
        }
      },
      required: ["filePath"]
    }
  },
  {
    name: "convert_video",
    description: "Convert a video file to a different format",
    inputSchema: {
      type: "object",
      properties: {
        inputPath: {
          type: "string",
          description: "Path to the input video file"
        },
        outputPath: {
          type: "string",
          description: "Path for the output video file"
        },
        options: {
          type: "string",
          description: "Additional FFmpeg options (optional)"
        }
      },
      required: ["inputPath", "outputPath"]
    }
  },
  {
    name: "extract_audio",
    description: "Extract audio from a video file",
    inputSchema: {
      type: "object",
      properties: {
        inputPath: {
          type: "string",
          description: "Path to the input video file"
        },
        outputPath: {
          type: "string",
          description: "Path for the output audio file"
        },
        format: {
          type: "string",
          description: "Audio format (mp3, aac, etc.)"
        }
      },
      required: ["inputPath", "outputPath", "format"]
    }
  },
  {
    name: "create_video_from_images",
    description: "Create a video from a sequence of images",
    inputSchema: {
      type: "object",
      properties: {
        inputPattern: {
          type: "string",
          description: "Pattern for input images (e.g., 'img%03d.jpg' or 'folder/*.png')"
        },
        outputPath: {
          type: "string",
          description: "Path for the output video file"
        },
        framerate: {
          type: "number",
          description: "Frames per second (default: 25)"
        },
        codec: {
          type: "string",
          description: "Video codec to use (default: libx264)"
        },
        pixelFormat: {
          type: "string",
          description: "Pixel format (default: yuv420p)"
        },
        extraOptions: {
          type: "string",
          description: "Additional FFmpeg options"
        }
      },
      required: ["inputPattern", "outputPath"]
    }
  },
  {
    name: "trim_video",
    description: "Trim a video to a specific duration",
    inputSchema: {
      type: "object",
      properties: {
        inputPath: {
          type: "string",
          description: "Path to the input video file"
        },
        outputPath: {
          type: "string",
          description: "Path for the output video file"
        },
        startTime: {
          type: "string",
          description: "Start time (format: HH:MM:SS.mmm or seconds)"
        },
        duration: {
          type: "string",
          description: "Duration (format: HH:MM:SS.mmm or seconds)"
        },
        endTime: {
          type: "string",
          description: "End time (format: HH:MM:SS.mmm or seconds)"
        }
      },
      required: ["inputPath", "outputPath"]
    }
  },
  {
    name: "add_watermark",
    description: "Add a watermark to a video",
    inputSchema: {
      type: "object",
      properties: {
        inputPath: {
          type: "string",
          description: "Path to the input video file"
        },
        watermarkPath: {
          type: "string",
          description: "Path to the watermark image"
        },
        outputPath: {
          type: "string",
          description: "Path for the output video file"
        },
        position: {
          type: "string",
          description: "Position of watermark (topleft, topright, bottomleft, bottomright, center)"
        },
        opacity: {
          type: "number",
          description: "Opacity of watermark (0.0-1.0)"
        }
      },
      required: ["inputPath", "watermarkPath", "outputPath"]
    }
  },
  {
    name: "trim_audio",
    description: "Trim an audio file to a specific duration",
    inputSchema: {
      type: "object",
      properties: {
        inputPath: {
          type: "string",
          description: "Path to the input audio file"
        },
        outputPath: {
          type: "string",
          description: "Path for the output audio file"
        },
        startTime: {
          type: "string",
          description: "Start time (format: HH:MM:SS.mmm or seconds)"
        },
        duration: {
          type: "string",
          description: "Duration (format: HH:MM:SS.mmm or seconds)"
        },
        endTime: {
          type: "string",
          description: "End time (format: HH:MM:SS.mmm or seconds)"
        },
        format: {
          type: "string",
          description: "Audio format for output (mp3, aac, etc.)"
        }
      },
      required: ["inputPath", "outputPath"]
    }
  },
  {
    name: "extract_frames",
    description: "Extract frames from a video as sequential image files",
    inputSchema: {
      type: "object",
      properties: {
        inputPath: {
          type: "string",
          description: "Path to the input video file"
        },
        outputDir: {
          type: "string",
          description: "Directory to save the extracted frames (default: 'output')"
        },
        frameRate: {
          type: "string",
          description: "Frame extraction rate (e.g., '1' for every frame, '0.5' for every 2nd frame, '1/30' for 1 frame per 30 seconds)"
        },
        format: {
          type: "string",
          description: "Output image format (jpg, png, etc., default: jpg)"
        },
        quality: {
          type: "number",
          description: "Image quality for jpg format (1-100, default: 95)"
        },
        startTime: {
          type: "string",
          description: "Start time to begin extraction (format: HH:MM:SS.mmm or seconds)"
        },
        duration: {
          type: "string",
          description: "Duration to extract frames (format: HH:MM:SS.mmm or seconds)"
        }
      },
      required: ["inputPath"]
    }
  }
];

```
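
For illustration, a `trim_video` call that keeps ten seconds starting at the 30-second mark could carry arguments like these (paths are placeholders; either `duration` or `endTime` may be supplied, and the handler prefers `duration` when both are present):

```json
{
  "inputPath": "media/source.mp4",
  "outputPath": "media/clips/intro.mp4",
  "startTime": "00:00:30",
  "duration": "10"
}
```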

--------------------------------------------------------------------------------
/src/tools/handlers.ts:
--------------------------------------------------------------------------------

```typescript
import { ensureDirectoryExists, validatePath } from "../utils/file.js";
import { getVideoInfo, runFFmpegCommand } from "../utils/ffmpeg.js";
import { join } from "path";

/**
 * Handles all FFmpeg tool requests
 */
export async function handleToolCall(toolName: string, args: any) {
  switch (toolName) {
    case "get_video_info": {
      const filePath = validatePath(String(args?.filePath), true);
      const info = await getVideoInfo(filePath);
      return {
        content: [{
          type: "text",
          text: info
        }]
      };
    }

    case "convert_video": {
      const inputPath = validatePath(String(args?.inputPath), true);
      const outputPath = validatePath(String(args?.outputPath));
      const options = String(args?.options || "");
      
      await ensureDirectoryExists(outputPath);
      // Place -y before the input: FFmpeg ignores options that trail the last output file
      const command = `-y -i "${inputPath}" ${options} "${outputPath}"`;
      const result = await runFFmpegCommand(command);
      
      return {
        content: [{
          type: "text",
          text: `Video conversion completed: ${inputPath} → ${outputPath}\n\n${result}`
        }]
      };
    }

    case "extract_audio": {
      const inputPath = validatePath(String(args?.inputPath), true);
      const outputPath = validatePath(String(args?.outputPath));
      const format = String(args?.format || "mp3");
      
      await ensureDirectoryExists(outputPath);
      const command = `-y -i "${inputPath}" -vn -acodec ${format} "${outputPath}"`;
      const result = await runFFmpegCommand(command);
      
      return {
        content: [{
          type: "text",
          text: `Audio extraction completed: ${inputPath} → ${outputPath}\n\n${result}`
        }]
      };
    }

    case "create_video_from_images": {
      const inputPattern = String(args?.inputPattern);
      const outputPath = validatePath(String(args?.outputPath));
      const framerate = Number(args?.framerate || 25);
      const codec = String(args?.codec || "libx264");
      const pixelFormat = String(args?.pixelFormat || "yuv420p");
      const extraOptions = String(args?.extraOptions || "");
      
      if (!inputPattern) {
        throw new Error("Input pattern is required");
      }
      
      await ensureDirectoryExists(outputPath);
      const command = `-y -framerate ${framerate} -i "${inputPattern}" -c:v ${codec} -pix_fmt ${pixelFormat} ${extraOptions} "${outputPath}"`;
      const result = await runFFmpegCommand(command);
      
      return {
        content: [{
          type: "text",
          text: `Video creation completed: ${inputPattern} → ${outputPath}\n\n${result}`
        }]
      };
    }

    case "trim_video": {
      const inputPath = validatePath(String(args?.inputPath), true);
      const outputPath = validatePath(String(args?.outputPath));
      const startTime = String(args?.startTime || "0");
      const duration = String(args?.duration || "");
      const endTime = String(args?.endTime || "");
      
      await ensureDirectoryExists(outputPath);
      
      let command = `-y -i "${inputPath}" -ss ${startTime}`;
      if (duration) {
        command += ` -t ${duration}`;
      } else if (endTime) {
        command += ` -to ${endTime}`;
      }
      command += ` -c copy "${outputPath}"`;
      
      const result = await runFFmpegCommand(command);
      
      return {
        content: [{
          type: "text",
          text: `Video trimming completed: ${inputPath} → ${outputPath}\n\n${result}`
        }]
      };
    }

    case "add_watermark": {
      const inputPath = validatePath(String(args?.inputPath), true);
      const watermarkPath = validatePath(String(args?.watermarkPath), true);
      const outputPath = validatePath(String(args?.outputPath));
      const position = String(args?.position || "bottomright");
      const opacity = Number(args?.opacity || 0.5);
      
      await ensureDirectoryExists(outputPath);
      
      // Determine overlay position
      let overlayPosition = "";
      switch (position.toLowerCase()) {
        case "topleft":
          overlayPosition = "10:10";
          break;
        case "topright":
          overlayPosition = "W-w-10:10";
          break;
        case "bottomleft":
          overlayPosition = "10:H-h-10";
          break;
        case "center":
          overlayPosition = "(W-w)/2:(H-h)/2";
          break;
        case "bottomright":
        default:
          overlayPosition = "W-w-10:H-h-10";
          break;
      }
      
      // Improved command with better handling of watermark opacity and format
      const command = `-y -i "${inputPath}" -i "${watermarkPath}" -filter_complex "[1:v]format=rgba,colorchannelmixer=aa=${opacity}[watermark];[0:v][watermark]overlay=${overlayPosition}:format=auto,format=yuv420p" -codec:a copy "${outputPath}"`;
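      // For example, position "topright" with opacity 0.5 expands to roughly (illustrative paths):
      //   ffmpeg -y -i in.mp4 -i logo.png -filter_complex \
      //     "[1:v]format=rgba,colorchannelmixer=aa=0.5[watermark];[0:v][watermark]overlay=W-w-10:10:format=auto,format=yuv420p" \
      //     -codec:a copy out.mp4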
      const result = await runFFmpegCommand(command);
      
      return {
        content: [{
          type: "text",
          text: `Watermark added: ${inputPath} → ${outputPath}\n\n${result}`
        }]
      };
    }

    case "trim_audio": {
      const inputPath = validatePath(String(args?.inputPath), true);
      const outputPath = validatePath(String(args?.outputPath));
      const startTime = String(args?.startTime || "0");
      const duration = String(args?.duration || "");
      const endTime = String(args?.endTime || "");
      const format = String(args?.format || "");
      
      await ensureDirectoryExists(outputPath);
      
      // Build the FFmpeg command
      let command = `-y -i "${inputPath}" -ss ${startTime}`;
      
      // Add duration or end time if provided
      if (duration) {
        command += ` -t ${duration}`;
      } else if (endTime) {
        command += ` -to ${endTime}`;
      }
      
      // Add format if specified, otherwise use copy codec
      if (format) {
        command += ` -acodec ${format}`;
      } else {
        command += ` -acodec copy`;
      }
      
      command += ` "${outputPath}" -y`;
      
      const result = await runFFmpegCommand(command);
      
      return {
        content: [{
          type: "text",
          text: `Audio trimming completed: ${inputPath} → ${outputPath}\n\n${result}`
        }]
      };
    }

    case "extract_frames": {
      const inputPath = validatePath(String(args?.inputPath), true);
      const outputDir = String(args?.outputDir || "output");
      const frameRate = String(args?.frameRate || "1");
      const format = String(args?.format || "jpg");
      const quality = Number(args?.quality || 95);
      const startTime = args?.startTime ? String(args?.startTime) : "";
      const duration = args?.duration ? String(args?.duration) : "";
      
      // Create output directory if it doesn't exist
      await ensureDirectoryExists(join(outputDir, "dummy.txt"));
      
      // Build the FFmpeg command
      let command = `-y -i "${inputPath}"`;
      
      // Add start time if provided
      if (startTime) {
        command += ` -ss ${startTime}`;
      }
      
      // Add duration if provided
      if (duration) {
        command += ` -t ${duration}`;
      }
      
      // Set frame extraction rate
      command += ` -vf "fps=${frameRate}"`;
      
      // Set quality based on format
      if (format.toLowerCase() === "jpg" || format.toLowerCase() === "jpeg") {
        // For JPEG, use a better quality setting (lower values = higher quality in FFmpeg's scale)
        // Convert 1-100 scale to FFmpeg's 1-31 scale (inverted, where 1 is best quality)
        const ffmpegQuality = Math.max(1, Math.min(31, Math.round(31 - ((quality / 100) * 30))));
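        // Worked example: quality 95 gives 31 - (0.95 * 30) = 2.5, which Math.round turns into 3 (near-best quality), well inside the [1, 31] clamp.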
        command += ` -q:v ${ffmpegQuality}`;
      } else if (format.toLowerCase() === "png") {
        // For PNG, use compression level (0-9, where 0 is no compression)
        const compressionLevel = Math.min(9, Math.max(0, Math.round(9 - ((quality / 100) * 9))));
        command += ` -compression_level ${compressionLevel}`;
      }
      
      // Set output pattern with 5-digit numbering
      const outputPattern = join(outputDir, `%05d.${format}`);
      command += ` "${outputPattern}" -y`;
      
      const result = await runFFmpegCommand(command);
      
      return {
        content: [{
          type: "text",
          text: `Frames extracted from video: ${inputPath} → ${outputDir}/*.${format}\n\n${result}`
        }]
      };
    }

    default:
      throw new Error(`Unknown tool: ${toolName}`);
  }
}

```
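
A minimal sketch of exercising the handler directly, outside the MCP transport, assuming FFmpeg is on PATH and the placeholder paths exist. The `trim_video` case above would build roughly `ffmpeg -y -i "media/source.mp4" -ss 00:00:30 -t 10 -c copy "media/clips/intro.mp4"`.

```typescript
import { handleToolCall } from "./tools/handlers.js";

// Placeholder paths; trim_video stream-copies, so the cut snaps to the nearest keyframe.
const result = await handleToolCall("trim_video", {
  inputPath: "media/source.mp4",
  outputPath: "media/clips/intro.mp4",
  startTime: "00:00:30",
  duration: "10",
});

console.log(result.content[0].text); // "Video trimming completed: ... → ..."
```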