# Directory Structure
```
├── .gitignore
├── Dockerfile
├── pyproject.toml
├── README.md
├── smithery.yaml
└── src
└── mcp_server_vegalite
├── __init__.py
└── server.py
```
# Files
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
```
1 | logs/*
2 | .env
3 | .venv
4 | src/mcp_server_vegalite/__pycache__
5 | uv.lock
6 | .DS_Store
```
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
```markdown
1 | [](https://mseep.ai/app/isaacwasserman-mcp-vegalite-server)
2 |
3 | # Data Visualization MCP Server
4 | [](https://smithery.ai/server/mcp-server-vegalite)
5 |
6 | ## Overview
7 | A Model Context Protocol (MCP) server implementation that provides LLMs with an interface for visualizing data using Vega-Lite syntax.
8 |
9 | ## Components
10 |
11 | ### Tools
12 | The server offers two core tools:
13 |
14 | - `save_data`
15 |   - Save a table of data aggregations to the server for later visualization
16 | - Input:
17 | - `name` (string): Name of the data table to be saved
18 | - `data` (array): Array of objects representing the data table
19 | - Returns: success message
20 | - `visualize_data`
21 | - Visualize a table of data using Vega-Lite syntax
22 | - Input:
23 | - `data_name` (string): Name of the data table to be visualized
24 | - `vegalite_specification` (string): JSON string representing the Vega-Lite specification
25 |   - Returns: If `--output_type` is set to `text`, returns a success message with an additional `artifact` key containing the complete Vega-Lite specification (with data attached). If `--output_type` is set to `png`, returns a base64-encoded PNG image of the visualization inside an MCP `ImageContent` container.
26 |
27 | ## Usage with Claude Desktop
28 |
29 | ```python
30 | # Add the server to your claude_desktop_config.json
31 | {
32 | "mcpServers": {
33 | "datavis": {
34 | "command": "uv",
35 | "args": [
36 | "--directory",
37 |         "/absolute/path/to/mcp-vegalite-server",
38 |         "run",
39 |         "mcp_server_vegalite",
40 | "--output_type",
41 | "png" # or "text"
42 | ]
43 | }
44 | }
45 | }
46 | ```
47 |
```
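
A minimal sketch of the arguments a client might pass to the two tools described in the README above; the table name, rows, and bar-chart spec are illustrative, not part of the repository:

```python
import json

# Hypothetical arguments for save_data: a named table plus its rows.
save_data_args = {
    "name": "city_ages",
    "data": [
        {"city": "New York", "mean_age": 25},
        {"city": "San Francisco", "mean_age": 30},
    ],
}

# Hypothetical arguments for visualize_data: the saved table's name plus a
# Vega-Lite spec as a JSON string. The server injects the "data" field itself,
# so the spec omits it.
bar_chart_spec = {
    "mark": "bar",
    "encoding": {
        "x": {"field": "city", "type": "nominal"},
        "y": {"field": "mean_age", "type": "quantitative"},
    },
}
visualize_data_args = {
    "data_name": "city_ages",
    "vegalite_specification": json.dumps(bar_chart_spec),
}

print(json.dumps(visualize_data_args, indent=2))
```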
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
```toml
1 | [project]
2 | name = "mcp-server-vegalite"
3 | version = "0.0.1"
4 | description = "A simple Data Visualization MCP server using Vega-Lite"
5 | readme = "README.md"
6 | requires-python = ">=3.10"
7 | dependencies = [
8 | "mcp>=1.0.0",
9 | "vl-convert-python"
10 | ]
11 |
12 | [build-system]
13 | requires = ["hatchling"]
14 | build-backend = "hatchling.build"
15 |
16 | [tool.uv]
17 | dev-dependencies = ["pyright>=1.1.389"]
18 |
19 | [project.scripts]
20 | mcp_server_vegalite = "mcp_server_vegalite:main"
21 |
```
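
The `[project.scripts]` table above wires a `mcp_server_vegalite` console command to `mcp_server_vegalite:main`. Assuming the package is installed in the active environment, running that command is roughly equivalent to:

```python
# Rough equivalent of the console script declared in [project.scripts]:
# import the package and call its main() entry point (defined in __init__.py).
from mcp_server_vegalite import main

if __name__ == "__main__":
    main()
```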
--------------------------------------------------------------------------------
/smithery.yaml:
--------------------------------------------------------------------------------
```yaml
1 | # Smithery configuration file: https://smithery.ai/docs/config#smitheryyaml
2 |
3 | startCommand:
4 | type: stdio
5 | configSchema:
6 | # JSON Schema defining the configuration options for the MCP.
7 | type: object
8 | required:
9 | - outputType
10 | properties:
11 | outputType:
12 | type: string
13 | description: "The format of the visualization output: 'text' or 'png'"
14 | commandFunction:
15 | # A function that produces the CLI command to start the MCP on stdio.
16 | |-
17 | (config) => ({ command: 'uv', args: ['run', 'mcp_server_vegalite', '--output_type', config.outputType] })
18 |
```
--------------------------------------------------------------------------------
/src/mcp_server_vegalite/__init__.py:
--------------------------------------------------------------------------------
```python
1 | from . import server
2 | import asyncio
3 | import argparse
4 |
5 |
6 | def main():
7 | """Main entry point for the package."""
8 | parser = argparse.ArgumentParser(description="Data Visualization MCP Server")
9 | # parser.add_argument(
10 | # "--language", default="vegalite", choices=["vegalite"], help="The visualization language/grammar/framework to use"
11 | # )
12 |     parser.add_argument("--output_type", default="png", choices=["text", "png"], help="Format of the output")
13 |
14 | args = parser.parse_args()
15 | asyncio.run(server.main(output_type=args.output_type))
16 |
17 |
18 | # Optionally expose other important items at package level
19 | __all__ = ["main", "server"]
20 |
```
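
A quick standalone check of how the flag parses, mirroring the parser above and assuming the `--output_type` spelling used in the README and smithery.yaml; the sample argv values are illustrative:

```python
import argparse

# Mirror of the parser in __init__.py, showing how the flag maps to args.output_type.
parser = argparse.ArgumentParser(description="Data Visualization MCP Server")
parser.add_argument("--output_type", default="png", choices=["text", "png"], help="Format of the output")

args = parser.parse_args(["--output_type", "text"])
assert args.output_type == "text"

args = parser.parse_args([])  # no flag given: falls back to the "png" default
assert args.output_type == "png"
```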
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
```dockerfile
1 | # Generated by https://smithery.ai. See: https://smithery.ai/docs/config#dockerfile
2 | # Use a Python image with uv pre-installed
3 | FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv
4 |
5 | # Install the project into /app
6 | WORKDIR /app
7 |
8 | # Enable bytecode compilation
9 | ENV UV_COMPILE_BYTECODE=1
10 |
11 | # Copy from the cache instead of linking since it's a mounted volume
12 | ENV UV_LINK_MODE=copy
13 |
14 | # Install the project's dependencies using the lockfile and settings
15 | RUN --mount=type=cache,target=/root/.cache/uv --mount=type=bind,source=pyproject.toml,target=pyproject.toml uv sync --frozen --no-install-project --no-dev --no-editable
16 |
17 | # Then, add the rest of the project source code and install it
18 | # Installing separately from its dependencies allows optimal layer caching
19 | ADD . /app
20 | RUN --mount=type=cache,target=/root/.cache/uv uv sync --frozen --no-dev --no-editable
21 |
22 | FROM python:3.12-slim-bookworm
23 |
24 | WORKDIR /app
25 |
26 | # Copy the virtual environment built in the uv stage
27 | COPY --from=uv /app/.venv /app/.venv
28 |
29 | # Place executables in the environment at the front of the path
30 | ENV PATH="/app/.venv/bin:$PATH"
31 |
32 | # when running the container, pass --output_type ("text" or "png") as needed
33 | ENTRYPOINT ["mcp_server_vegalite"]
34 |
```
--------------------------------------------------------------------------------
/src/mcp_server_vegalite/server.py:
--------------------------------------------------------------------------------
```python
1 | import logging
2 | from mcp.server.models import InitializationOptions
3 | import mcp.types as types
4 | from mcp.server import NotificationOptions, Server
5 | import mcp.server.stdio
6 | from pydantic import AnyUrl
7 | from typing import Any
8 | import vl_convert as vlc
9 | import base64
10 | import json
11 | logging.basicConfig(
12 | level=logging.INFO, # Set the log level
13 | format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
14 | handlers=[
15 |         logging.FileHandler("logs/mcp_vegalite_server.log"),  # Log file path; the logs/ directory must already exist
16 | logging.StreamHandler(), # Optional: still output to the console
17 | ],
18 | )
19 |
20 | logger = logging.getLogger("mcp_vegalite_server")
21 | logger.info("Starting MCP Vega-Lite Server")
22 |
23 | saved_data = {
24 | "sample_data": [
25 | {"name": "Alice", "age": 25, "city": "New York"},
26 | {"name": "Bob", "age": 30, "city": "San Francisco"},
27 | {"name": "Charlie", "age": 35, "city": "Los Angeles"},
28 | ]
29 | }
30 |
31 | SAVE_DATA_TOOL_DESCRIPTION = """
32 | A tool which allows you to save data to a named table for later use in visualizations.
33 | When to use this tool:
34 | - Use this tool when you have data that you want to visualize later.
35 | How to use this tool:
36 | - Provide the name of the table to save the data to (for later reference) and the data itself.
37 | """.strip()
38 |
39 | VISUALIZE_DATA_TOOL_DESCRIPTION = """
40 | A tool which allows you to produce a data visualization using the Vega-Lite grammar.
41 | When to use this tool:
42 | - At times, it will be advantageous to provide the user with a visual representation of some data, rather than just a textual representation.
43 | - This tool is particularly useful when the data is complex or has many dimensions, making it difficult to understand in a tabular format. It is not useful for singular data points.
44 | How to use this tool:
45 | - Prior to visualization, data must be saved to a named table using the save_data tool.
46 | - After saving the data, use this tool to visualize the data by providing the name of the table with the saved data and a Vega-Lite specification.
47 | """.strip()
48 |
49 |
50 | async def main(output_type: str):
51 | logger.info("Starting Vega-Lite MCP Server")
52 |
53 | server = Server("vegalite-manager")
54 |
55 | # Register handlers
56 | logger.debug("Registering handlers")
57 |
58 | @server.list_resources()
59 | async def handle_list_resources() -> list[types.Resource]:
60 | logger.debug("Handling list_resources request")
61 | return []
62 |
63 | @server.read_resource()
64 | async def handle_read_resource(uri: AnyUrl) -> str:
65 | logger.debug(f"Handling read_resource request for URI: {uri}")
66 | path = str(uri).replace("memo://", "")
67 | raise ValueError(f"Unknown resource path: {path}")
68 |
69 | @server.list_prompts()
70 | async def handle_list_prompts() -> list[types.Prompt]:
71 | logger.debug("Handling list_prompts request")
72 | return []
73 |
74 | @server.get_prompt()
75 | async def handle_get_prompt(name: str, arguments: dict[str, str] | None) -> types.GetPromptResult:
76 | logger.debug(f"Handling get_prompt request for {name} with args {arguments}")
77 | raise ValueError(f"Unknown prompt: {name}")
78 |
79 | @server.list_tools()
80 | async def handle_list_tools() -> list[types.Tool]:
81 | """List available tools"""
82 | return [
83 | types.Tool(
84 | name="save_data",
85 | description=SAVE_DATA_TOOL_DESCRIPTION,
86 | inputSchema={
87 | "type": "object",
88 | "properties": {
89 | "name": {"type": "string", "description": "The name of the table to save the data to"},
90 | "data": {
91 | "type": "array",
92 | "items": {"type": "object", "description": "Row of the table as a dictionary/object"},
93 | "description": "The data to save",
94 | },
95 | },
96 | "required": ["name", "data"],
97 | },
98 | ),
99 | types.Tool(
100 | name="visualize_data",
101 | description=VISUALIZE_DATA_TOOL_DESCRIPTION,
102 | inputSchema={
103 | "type": "object",
104 | "properties": {
105 | "data_name": {
106 | "type": "string",
107 | "description": "The name of the data table to visualize",
108 | },
109 | "vegalite_specification": {
110 | "type": "string",
111 | "description": "The vegalite v5 specification for the visualization. Do not include the data field, as this will be added automatically.",
112 | },
113 | },
114 | "required": ["data_name", "vegalite_specification"],
115 | },
116 | ),
117 | ]
118 |
119 | @server.call_tool()
120 | async def handle_call_tool(
121 | name: str, arguments: dict[str, Any] | None
122 | ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
123 | """Handle tool execution requests"""
124 | logger.info(f"Handling tool execution request for {name} with args {arguments}")
125 | try:
126 | if name == "save_data":
127 | save_name = arguments["name"]
128 | saved_data[save_name] = arguments["data"]
129 | return [types.TextContent(type="text", text=f"Data saved successfully to table {save_name}")]
130 | elif name == "visualize_data":
131 | data_name = arguments["data_name"]
132 |                 vegalite_specification = json.loads(arguments["vegalite_specification"])
133 | data = saved_data[data_name]
134 | vegalite_specification["data"] = {"values": data}
135 |
136 | if output_type == "png":
137 | png = vlc.vegalite_to_png(vl_spec=vegalite_specification, scale=2)
138 | png = base64.b64encode(png).decode("utf-8")
139 | return [types.ImageContent(type="image", data=png, mimeType="image/png")]
140 | else:
141 | return [
142 | types.TextContent(
143 | type="text",
144 | text=f"Visualized data from table {data_name} with provided spec.",
145 | artifact=vegalite_specification,
146 | )
147 | ]
148 | else:
149 | raise ValueError(f"Unknown tool: {name}")
150 |
151 | except Exception as e:
152 | return [types.TextContent(type="text", text=f"Error: {str(e)}")]
153 |
154 | async with mcp.server.stdio.stdio_server() as (read_stream, write_stream):
155 | logger.info("Server running with stdio transport")
156 | await server.run(
157 | read_stream,
158 | write_stream,
159 | InitializationOptions(
160 | server_name="vegalite",
161 | server_version="0.1.0",
162 | capabilities=server.get_capabilities(
163 | notification_options=NotificationOptions(),
164 | experimental_capabilities={},
165 | ),
166 | ),
167 | )
168 |
```
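
For reference, a standalone sketch of the `visualize_data` path in `handle_call_tool` above: parse the spec string, inject the saved rows as the `data` field, render to PNG with vl-convert, then base64-encode. The sample rows and spec are illustrative; it assumes `vl-convert-python` is installed.

```python
import base64
import json

import vl_convert as vlc

# Illustrative saved rows and a Vega-Lite spec passed as a JSON string,
# matching the shape the visualize_data tool expects.
rows = [
    {"name": "Alice", "age": 25},
    {"name": "Bob", "age": 30},
]
spec_string = json.dumps({
    "mark": "bar",
    "encoding": {
        "x": {"field": "name", "type": "nominal"},
        "y": {"field": "age", "type": "quantitative"},
    },
})

# Same steps as the server: parse, attach data, render, encode.
spec = json.loads(spec_string)
spec["data"] = {"values": rows}

png_bytes = vlc.vegalite_to_png(vl_spec=spec, scale=2)
png_b64 = base64.b64encode(png_bytes).decode("utf-8")
print(f"PNG payload: {len(png_b64)} base64 characters")
```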