# Directory Structure
```
├── .gitignore
├── Dockerfile
├── pyproject.toml
├── README.md
├── smithery.yaml
└── src
└── mcp_server_vegalite
├── __init__.py
└── server.py
```
# Files
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
```
logs/*
.env
.venv
src/mcp_server_vegalite/__pycache__
uv.lock
.DS_Store
```
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
```markdown
[](https://mseep.ai/app/isaacwasserman-mcp-vegalite-server)
# Data Visualization MCP Server
[](https://smithery.ai/server/mcp-server-vegalite)
## Overview
A Model Context Protocol (MCP) server implementation that provides the LLM an interface for visualizing data using Vega-Lite syntax.
## Components
### Tools
The server offers two core tools:
- `save_data`
- Save a table of data aggregations to the server for later visualization
- Input:
- `name` (string): Name of the data table to be saved
- `data` (array): Array of objects representing the data table
- Returns: success message
- `visualize_data`
- Visualize a table of data using Vega-Lite syntax
- Input:
- `data_name` (string): Name of the data table to be visualized
- `vegalite_specification` (string): JSON string representing the Vega-Lite specification
- Returns: If the `--output_type` is set to `text`, returns a success message with an additional `artifact` key containing the complete Vega-Lite specification with data. If the `--output_type` is set to `png`, returns a base64 encoded PNG image of the visualization using the MCP `ImageContent` container.
## Usage with Claude Desktop
```python
# Add the server to your claude_desktop_config.json
{
"mcpServers": {
"datavis": {
"command": "uv",
"args": [
"--directory",
"/absolute/path/to/mcp-datavis-server",
"run",
"mcp_server_vegalite",
"--output_type",
"png" # or "text"
]
}
}
}
```
```
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
```toml
[project]
name = "mcp-server-vegalite"
version = "0.0.1"
description = "A simple Data Visualization MCP server using Vega-Lite"
readme = "README.md"
requires-python = ">=3.10"
dependencies = [
"mcp>=1.0.0",
"vl-convert-python"
]
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[tool.uv]
dev-dependencies = ["pyright>=1.1.389"]
[project.scripts]
mcp_server_vegalite = "mcp_server_vegalite:main"
```
--------------------------------------------------------------------------------
/smithery.yaml:
--------------------------------------------------------------------------------
```yaml
# Smithery configuration file: https://smithery.ai/docs/config#smitheryyaml
startCommand:
type: stdio
configSchema:
# JSON Schema defining the configuration options for the MCP.
type: object
required:
- outputType
properties:
outputType:
type: string
description: "The format of the visualization output: 'text' or 'png'"
commandFunction:
# A function that produces the CLI command to start the MCP on stdio.
|-
(config) => ({ command: 'uv', args: ['run', 'mcp_server_vegalite', '--output_type', config.outputType] })
```
--------------------------------------------------------------------------------
/src/mcp_server_vegalite/__init__.py:
--------------------------------------------------------------------------------
```python
from . import server
import asyncio
import argparse
def main():
    """CLI entry point: parse arguments and launch the MCP server.

    Accepts both ``--output-type`` and ``--output_type`` spellings: the
    README, Dockerfile, and smithery.yaml all invoke the underscore form,
    while argparse previously declared only the dash form (argparse does
    not treat ``-`` and ``_`` as interchangeable, so the documented
    invocation would fail without the alias).
    """
    parser = argparse.ArgumentParser(description="Data Visualization MCP Server")
    parser.add_argument(
        "--output-type",
        "--output_type",  # alias so documented invocations keep working
        dest="output_type",
        default="png",
        choices=["text", "png"],
        help="Format of the output",
    )
    args = parser.parse_args()
    asyncio.run(server.main(output_type=args.output_type))


# Optionally expose other important items at package level
__all__ = ["main", "server"]
```
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
```dockerfile
# Generated by https://smithery.ai. See: https://smithery.ai/docs/config#dockerfile
# Use a Python image with uv pre-installed
FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv
# Install the project into /app
WORKDIR /app
# Enable bytecode compilation
ENV UV_COMPILE_BYTECODE=1
# Copy from the cache instead of linking since it's a mounted volume
ENV UV_LINK_MODE=copy
# Install the project's dependencies using the lockfile and settings
RUN --mount=type=cache,target=/root/.cache/uv --mount=type=bind,source=pyproject.toml,target=pyproject.toml uv sync --frozen --no-install-project --no-dev --no-editable
# Then, add the rest of the project source code and install it
# Installing separately from its dependencies allows optimal layer caching
ADD . /app
RUN --mount=type=cache,target=/root/.cache/uv uv sync --frozen --no-dev --no-editable
FROM python:3.12-slim-bookworm
WORKDIR /app
COPY --from=uv /root/.local /root/.local
COPY --from=uv --chown=app:app /app/.venv /app/.venv
# Place executables in the environment at the front of the path
ENV PATH="/app/.venv/bin:$PATH"
# when running the container, add --output_type and a bind mount to the host's db file
ENTRYPOINT ["uv", "run", "mcp_server_vegalite"]
```
--------------------------------------------------------------------------------
/src/mcp_server_vegalite/server.py:
--------------------------------------------------------------------------------
```python
import base64
import json
import logging
import os
from typing import Any

import mcp.server.stdio
import mcp.types as types
import vl_convert as vlc
from mcp.server import NotificationOptions, Server
from mcp.server.models import InitializationOptions
from pydantic import AnyUrl
# Ensure the log directory exists before attaching a FileHandler:
# logging.FileHandler raises FileNotFoundError when "logs/" is absent,
# and the directory is gitignored, so a fresh clone does not have it.
os.makedirs("logs", exist_ok=True)
logging.basicConfig(
    level=logging.INFO,  # Set the log level
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
    handlers=[
        logging.FileHandler("logs/mcp_vegalite_server.log"),  # Log file path
        logging.StreamHandler(),  # Optional: still output to the console
    ],
)
logger = logging.getLogger("mcp_vegalite_server")
logger.info("Starting MCP Vega-Lite Server")
# In-memory store of named data tables: table name -> list of row dicts.
# Pre-seeded with a small sample so visualize_data can be exercised
# immediately; entries are added/overwritten by the save_data tool.
saved_data = {
    "sample_data": [
        {"name": "Alice", "age": 25, "city": "New York"},
        {"name": "Bob", "age": 30, "city": "San Francisco"},
        {"name": "Charlie", "age": 35, "city": "Los Angeles"},
    ]
}

# Tool description surfaced to the LLM via list_tools for save_data.
SAVE_DATA_TOOL_DESCRIPTION = """
A tool which allows you to save data to a named table for later use in visualizations.
When to use this tool:
- Use this tool when you have data that you want to visualize later.
How to use this tool:
- Provide the name of the table to save the data to (for later reference) and the data itself.
""".strip()

# Tool description surfaced to the LLM via list_tools for visualize_data.
VISUALIZE_DATA_TOOL_DESCRIPTION = """
A tool which allows you to produce a data visualization using the Vega-Lite grammar.
When to use this tool:
- At times, it will be advantageous to provide the user with a visual representation of some data, rather than just a textual representation.
- This tool is particularly useful when the data is complex or has many dimensions, making it difficult to understand in a tabular format. It is not useful for singular data points.
How to use this tool:
- Prior to visualization, data must be saved to a named table using the save_data tool.
- After saving the data, use this tool to visualize the data by providing the name of the table with the saved data and a Vega-Lite specification.
""".strip()
async def main(output_type: str):
    """Run the Vega-Lite MCP server over stdio until the transport closes.

    Args:
        output_type: "png" to return rendered charts as base64 ImageContent;
            any other value falls through to the text path, which returns a
            success message carrying the full spec as an ``artifact``.
    """
    logger.info("Starting Vega-Lite MCP Server")
    server = Server("vegalite-manager")

    # Register handlers
    logger.debug("Registering handlers")

    @server.list_resources()
    async def handle_list_resources() -> list[types.Resource]:
        """This server exposes no resources."""
        logger.debug("Handling list_resources request")
        return []

    @server.read_resource()
    async def handle_read_resource(uri: AnyUrl) -> str:
        """No resources exist, so every read is an error."""
        logger.debug(f"Handling read_resource request for URI: {uri}")
        path = str(uri).replace("memo://", "")
        raise ValueError(f"Unknown resource path: {path}")

    @server.list_prompts()
    async def handle_list_prompts() -> list[types.Prompt]:
        """This server exposes no prompts."""
        logger.debug("Handling list_prompts request")
        return []

    @server.get_prompt()
    async def handle_get_prompt(name: str, arguments: dict[str, str] | None) -> types.GetPromptResult:
        """No prompts exist, so every get is an error."""
        logger.debug(f"Handling get_prompt request for {name} with args {arguments}")
        raise ValueError(f"Unknown prompt: {name}")

    @server.list_tools()
    async def handle_list_tools() -> list[types.Tool]:
        """List available tools"""
        return [
            types.Tool(
                name="save_data",
                description=SAVE_DATA_TOOL_DESCRIPTION,
                inputSchema={
                    "type": "object",
                    "properties": {
                        "name": {"type": "string", "description": "The name of the table to save the data to"},
                        "data": {
                            "type": "array",
                            "items": {"type": "object", "description": "Row of the table as a dictionary/object"},
                            "description": "The data to save",
                        },
                    },
                    "required": ["name", "data"],
                },
            ),
            types.Tool(
                name="visualize_data",
                description=VISUALIZE_DATA_TOOL_DESCRIPTION,
                inputSchema={
                    "type": "object",
                    "properties": {
                        "data_name": {
                            "type": "string",
                            "description": "The name of the data table to visualize",
                        },
                        "vegalite_specification": {
                            "type": "string",
                            "description": "The vegalite v5 specification for the visualization. Do not include the data field, as this will be added automatically.",
                        },
                    },
                    "required": ["data_name", "vegalite_specification"],
                },
            ),
        ]

    @server.call_tool()
    async def handle_call_tool(
        name: str, arguments: dict[str, Any] | None
    ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
        """Handle tool execution requests"""
        logger.info(f"Handling tool execution request for {name} with args {arguments}")
        try:
            if name == "save_data":
                save_name = arguments["name"]
                saved_data[save_name] = arguments["data"]
                return [types.TextContent(type="text", text=f"Data saved successfully to table {save_name}")]
            elif name == "visualize_data":
                data_name = arguments["data_name"]
                # SECURITY: the spec arrives as a client-supplied JSON string
                # (per the inputSchema above). Parse it with json.loads rather
                # than eval() so arbitrary Python in the argument cannot run.
                vegalite_specification = json.loads(arguments["vegalite_specification"])
                data = saved_data[data_name]  # KeyError -> reported via except below
                vegalite_specification["data"] = {"values": data}
                if output_type == "png":
                    png = vlc.vegalite_to_png(vl_spec=vegalite_specification, scale=2)
                    png = base64.b64encode(png).decode("utf-8")
                    return [types.ImageContent(type="image", data=png, mimeType="image/png")]
                else:
                    return [
                        types.TextContent(
                            type="text",
                            text=f"Visualized data from table {data_name} with provided spec.",
                            artifact=vegalite_specification,
                        )
                    ]
            else:
                raise ValueError(f"Unknown tool: {name}")
        except Exception as e:
            # Surface any failure to the client as a text error rather than
            # crashing the server loop.
            return [types.TextContent(type="text", text=f"Error: {str(e)}")]

    async with mcp.server.stdio.stdio_server() as (read_stream, write_stream):
        logger.info("Server running with stdio transport")
        await server.run(
            read_stream,
            write_stream,
            InitializationOptions(
                server_name="vegalite",
                server_version="0.1.0",
                capabilities=server.get_capabilities(
                    notification_options=NotificationOptions(),
                    experimental_capabilities={},
                ),
            ),
        )
```