# Directory Structure
```
├── .github
│ └── workflows
│ ├── main.yml
│ └── publish.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .python-version
├── .zed
│ └── settings.json
├── glama.json
├── LICENSE
├── logfire_mcp
│ ├── __init__.py
│ ├── __main__.py
│ └── main.py
├── Makefile
├── pyproject.toml
├── README.md
├── tests
│ ├── __init__.py
│ ├── cassettes
│ │ ├── test_logfire_link
│ │ │ └── test_logfire_link.yaml
│ │ └── test_schema_reference
│ │ └── test_schema_reference.yaml
│ ├── conftest.py
│ ├── README.md.jinja
│ ├── test_logfire_link.py
│ ├── test_readme.py
│ └── test_schema_reference.py
└── uv.lock
```
# Files
--------------------------------------------------------------------------------
/.python-version:
--------------------------------------------------------------------------------
```
1 | 3.12
2 |
```
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
```
1 | # Python-generated files
2 | __pycache__/
3 | *.py[oc]
4 | build/
5 | dist/
6 | wheels/
7 | *.egg-info
8 |
9 | # Virtual environments
10 | .venv
11 |
12 | .envrc
13 | .env
14 | .claude
15 |
```
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
```yaml
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v4.3.0
4 | hooks:
5 | - id: no-commit-to-branch # prevent direct commits to the `main` branch
6 | - id: check-yaml
7 | - id: check-toml
8 | - id: end-of-file-fixer
9 | - id: trailing-whitespace
10 |
11 | - repo: https://github.com/sirosen/texthooks
12 | rev: 0.6.8
13 | hooks:
14 | - id: fix-smartquotes
15 | exclude: "cassettes/"
16 | - id: fix-spaces
17 | exclude: "cassettes/"
18 | - id: fix-ligatures
19 | exclude: "cassettes/"
20 |
21 | - repo: https://github.com/codespell-project/codespell
22 | # Configuration for codespell is in pyproject.toml
23 | rev: v2.3.0
24 | hooks:
25 | - id: codespell
26 | args: ["--skip", "tests/cassettes/*"]
27 | additional_dependencies:
28 | - tomli
29 |
30 | - repo: local
31 | hooks:
32 | - id: format
33 | name: Format
34 | entry: make
35 | args: [format]
36 | language: system
37 | types: [python]
38 | pass_filenames: false
39 | - id: lint
40 | name: Lint
41 | entry: make
42 | args: [lint]
43 | types: [python]
44 | language: system
45 | pass_filenames: false
46 | - id: typecheck
47 | name: Typecheck
48 | entry: make
49 | args: [typecheck]
50 | language: system
51 | types: [python]
52 | pass_filenames: false
53 |
```
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
```markdown
1 | <!-- DO NOT MODIFY THIS FILE DIRECTLY, IT IS GENERATED BY THE TESTS! -->
2 |
3 | # Pydantic Logfire MCP Server
4 |
5 | This repository contains a Model Context Protocol (MCP) server with tools that can access the OpenTelemetry traces and
6 | metrics you've sent to Pydantic Logfire.
7 |
8 | <a href="https://glama.ai/mcp/servers/@pydantic/logfire-mcp">
9 | <img width="380" height="200" src="https://glama.ai/mcp/servers/@pydantic/logfire-mcp/badge" alt="Pydantic Logfire Server MCP server" />
10 | </a>
11 |
12 | This MCP server enables LLMs to retrieve your application's telemetry data, analyze distributed
13 | traces, and make use of the results of arbitrary SQL queries executed using the Pydantic Logfire APIs.
14 |
15 | ## Available Tools
16 |
17 | * `find_exceptions_in_file` - Get the details about the 10 most recent exceptions in the file.
18 | * Arguments:
19 | * `filepath` (string) - The path to the file to find exceptions in.
20 | * `age` (integer) - Number of minutes to look back, e.g. 30 for last 30 minutes. Maximum allowed value is 7 days.
21 |
22 | * `arbitrary_query` - Run an arbitrary query on the Pydantic Logfire database.
23 | * Arguments:
24 | * `query` (string) - The query to run, as a SQL string.
25 | * `age` (integer) - Number of minutes to look back, e.g. 30 for last 30 minutes. Maximum allowed value is 7 days.
26 |
27 | * `logfire_link` - Creates a link to help the user to view the trace in the Logfire UI.
28 | * Arguments:
29 | * `trace_id` (string) - The trace ID to link to.
30 |
31 | * `schema_reference` - The database schema for the Logfire DataFusion database.
32 |
33 |
34 | ## Setup
35 |
36 | ### Install `uv`
37 |
38 | The first thing to do is make sure `uv` is installed, as `uv` is used to run the MCP server.
39 |
40 | For installation instructions, see the [`uv` installation docs](https://docs.astral.sh/uv/getting-started/installation/).
41 |
42 | If you already have an older version of `uv` installed, you might need to update it with `uv self update`.
43 |
44 | ### Obtain a Pydantic Logfire read token
45 | In order to make requests to the Pydantic Logfire APIs, the Pydantic Logfire MCP server requires a "read token".
46 |
47 | You can create one under the "Read Tokens" section of your project settings in Pydantic Logfire:
48 | https://logfire.pydantic.dev/-/redirect/latest-project/settings/read-tokens
49 |
50 | > [!IMPORTANT]
51 | > Pydantic Logfire read tokens are project-specific, so you need to create one for the specific project you want to expose to the Pydantic Logfire MCP server.
52 |
53 | ### Manually run the server
54 |
55 | Once you have `uv` installed and have a Pydantic Logfire read token, you can manually run the MCP server using `uvx` (which is provided by `uv`).
56 |
57 | You can specify your read token using the `LOGFIRE_READ_TOKEN` environment variable:
58 |
59 | ```bash
60 | LOGFIRE_READ_TOKEN=YOUR_READ_TOKEN uvx logfire-mcp@latest
61 | ```
62 |
63 | You can also set `LOGFIRE_READ_TOKEN` in a `.env` file:
64 |
65 | ```bash
66 | LOGFIRE_READ_TOKEN=pylf_v1_us_...
67 | ```
68 |
69 | **NOTE:** for this to work, the MCP server needs to run with the directory containing the `.env` file in its working directory.
70 |
71 | or using the `--read-token` flag:
72 |
73 | ```bash
74 | uvx logfire-mcp@latest --read-token=YOUR_READ_TOKEN
75 | ```
76 | > [!NOTE]
77 | > If you are using Cursor, Claude Desktop, Cline, or other MCP clients that manage your MCP servers for you, you **_do
78 | NOT_** need to manually run the server yourself. The next section will show you how to configure these clients to make
79 | use of the Pydantic Logfire MCP server.
80 |
81 | ### Base URL
82 |
83 | If you are running Logfire in a self-hosted environment, you need to specify the base URL.
84 | This can be done using the `LOGFIRE_BASE_URL` environment variable:
85 |
86 | ```bash
87 | LOGFIRE_BASE_URL=https://logfire.my-company.com uvx logfire-mcp@latest --read-token=YOUR_READ_TOKEN
88 | ```
89 |
90 | You can also use the `--base-url` argument:
91 |
92 | ```bash
93 | uvx logfire-mcp@latest --base-url=https://logfire.my-company.com --read-token=YOUR_READ_TOKEN
94 | ```
95 |
96 | ## Configuration with well-known MCP clients
97 |
98 | ### Configure for Cursor
99 |
100 | Create a `.cursor/mcp.json` file in your project root:
101 |
102 | ```json
103 | {
104 | "mcpServers": {
105 | "logfire": {
106 | "command": "uvx",
107 | "args": ["logfire-mcp@latest", "--read-token=YOUR-TOKEN"]
108 | }
109 | }
110 | }
111 | ```
112 |
113 | Cursor doesn't accept the `env` field, so you need to use the `--read-token` flag instead.
114 |
115 | ### Configure for Claude Code
116 |
117 | Run the following command:
118 |
119 | ```bash
120 | claude mcp add logfire -e LOGFIRE_READ_TOKEN=YOUR_TOKEN -- uvx logfire-mcp@latest
121 | ```
122 |
123 | ### Configure for Claude Desktop
124 |
125 | Add to your Claude settings:
126 |
127 | ```json
128 | {
129 | "command": ["uvx"],
130 | "args": ["logfire-mcp@latest"],
131 | "type": "stdio",
132 | "env": {
133 | "LOGFIRE_READ_TOKEN": "YOUR_TOKEN"
134 | }
135 | }
136 | ```
137 |
138 | ### Configure for Cline
139 |
140 | Add to your Cline settings in `cline_mcp_settings.json`:
141 |
142 | ```json
143 | {
144 | "mcpServers": {
145 | "logfire": {
146 | "command": "uvx",
147 | "args": ["logfire-mcp@latest"],
148 | "env": {
149 | "LOGFIRE_READ_TOKEN": "YOUR_TOKEN"
150 | },
151 | "disabled": false,
152 | "autoApprove": []
153 | }
154 | }
155 | }
156 | ```
157 |
158 | ### Configure for VS Code
159 |
160 | Make sure you [enabled MCP support in VS Code](https://code.visualstudio.com/docs/copilot/chat/mcp-servers#_enable-mcp-support-in-vs-code).
161 |
162 | Create a `.vscode/mcp.json` file in your project's root directory:
163 |
164 | ```json
165 | {
166 | "servers": {
167 | "logfire": {
168 | "type": "stdio",
169 | "command": "uvx", // or the absolute /path/to/uvx
170 | "args": ["logfire-mcp@latest"],
171 | "env": {
172 | "LOGFIRE_READ_TOKEN": "YOUR_TOKEN"
173 | }
174 | }
175 | }
176 | }
177 | ```
178 |
179 | ### Configure for Zed
180 |
181 | Create a `.zed/settings.json` file in your project's root directory:
182 |
183 | ```json
184 | {
185 | "context_servers": {
186 | "logfire": {
187 | "source": "custom",
188 | "command": "uvx",
189 | "args": ["logfire-mcp@latest"],
190 | "env": {
191 | "LOGFIRE_READ_TOKEN": "YOUR_TOKEN"
192 | },
193 | "enabled": true
194 | }
195 | }
196 | }
197 | ```
198 |
199 | ## Example Interactions
200 |
201 | 1. Get details about exceptions from traces in a specific file:
202 | ```json
203 | {
204 | "name": "find_exceptions_in_file",
205 | "arguments": {
206 | "filepath": "app/api.py",
207 | "age": 1440
208 | }
209 | }
210 | ```
211 |
212 | Response:
213 | ```json
214 | [
215 | {
216 | "created_at": "2024-03-20T10:30:00Z",
217 | "message": "Failed to process request",
218 | "exception_type": "ValueError",
219 | "exception_message": "Invalid input format",
220 | "function_name": "process_request",
221 | "line_number": "42",
222 | "attributes": {
223 | "service.name": "api-service",
224 | "code.filepath": "app/api.py"
225 | },
226 | "trace_id": "1234567890abcdef"
227 | }
228 | ]
229 | ```
230 |
231 | 2. Run a custom query on traces:
232 | ```json
233 | {
234 | "name": "arbitrary_query",
235 | "arguments": {
236 | "query": "SELECT trace_id, message, created_at, attributes->>'service.name' as service FROM records WHERE severity_text = 'ERROR' ORDER BY created_at DESC LIMIT 10",
237 | "age": 1440
238 | }
239 | }
240 | ```
241 |
242 | ## Examples of Questions for Claude
243 |
244 | 1. "What exceptions occurred in traces from the last hour across all services?"
245 | 2. "Show me the recent errors in the file 'app/api.py' with their trace context"
246 | 3. "How many errors were there in the last 24 hours per service?"
247 | 4. "What are the most common exception types in my traces, grouped by service name?"
248 | 5. "Get me the OpenTelemetry schema for traces and metrics"
249 | 6. "Find all errors from yesterday and show their trace contexts"
250 |
251 | ## Getting Started
252 |
253 | 1. First, obtain a Pydantic Logfire read token from:
254 | https://logfire.pydantic.dev/-/redirect/latest-project/settings/read-tokens
255 |
256 | 2. Run the MCP server:
257 | ```bash
258 | uvx logfire-mcp@latest --read-token=YOUR_TOKEN
259 | ```
260 |
261 | 3. Configure your preferred client (Cursor, Claude Desktop, or Cline) using the configuration examples above
262 |
263 | 4. Start using the MCP server to analyze your OpenTelemetry traces and metrics!
264 |
265 | ## Contributing
266 |
267 | We welcome contributions to help improve the Pydantic Logfire MCP server. Whether you want to add new trace analysis tools, enhance metrics querying functionality, or improve documentation, your input is valuable.
268 |
269 | For examples of other MCP servers and implementation patterns, see the [Model Context Protocol servers repository](https://github.com/modelcontextprotocol/servers).
270 |
271 | ## License
272 |
273 | Pydantic Logfire MCP is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License.
274 |
```
--------------------------------------------------------------------------------
/logfire_mcp/__init__.py:
--------------------------------------------------------------------------------
```python
1 |
```
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
```python
1 |
```
--------------------------------------------------------------------------------
/glama.json:
--------------------------------------------------------------------------------
```json
1 | {
2 | "$schema": "https://glama.ai/mcp/schemas/server.json",
3 | "maintainers": [
4 | "Kludex",
5 | "samuelcolvin"
6 | ]
7 | }
8 |
```
--------------------------------------------------------------------------------
/.zed/settings.json:
--------------------------------------------------------------------------------
```json
1 | {
2 | "context_servers": {
3 | "logfire": {
4 | "source": "custom",
5 | // use uv run logfire-mcp, not uvx so we use the local version
6 | "command": "uv",
7 | "args": ["run", "logfire-mcp"],
8 | "enabled": true
9 | }
10 | }
11 | }
12 |
```
--------------------------------------------------------------------------------
/tests/test_logfire_link.py:
--------------------------------------------------------------------------------
```python
1 | import pytest
2 | from mcp.client.session import ClientSession
3 | from mcp.types import TextContent
4 |
5 | pytestmark = [pytest.mark.vcr, pytest.mark.anyio]
6 |
7 |
async def test_logfire_link(session: ClientSession) -> None:
    """The `logfire_link` tool should return a Logfire UI URL filtered to the trace ID."""
    result = await session.call_tool('logfire_link', {'trace_id': '019837e6ba8ab0ede383b398b6706f28'})

    expected = TextContent(
        type='text',
        text='https://logfire-us.pydantic.dev/kludex/logfire-mcp?q=trace_id%3D%27019837e6ba8ab0ede383b398b6706f28%27',
    )
    assert result.content == [expected]
17 |
```
--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
```yaml
1 | name: Publishing
2 |
3 | on:
4 | release:
5 | types: [published]
6 |
7 | jobs:
8 | build:
9 | name: Build distribution
10 | runs-on: ubuntu-latest
11 | steps:
12 | - uses: actions/checkout@v4
13 |
14 | - uses: astral-sh/setup-uv@v6
15 | with:
16 | enable-cache: true
17 |
18 | - name: Build
19 | run: uv build
20 |
21 | - name: Upload artifacts
22 | uses: actions/upload-artifact@v4
23 | with:
24 | name: release-dists
25 | path: dist/
26 |
27 | pypi-publish:
28 | name: Upload release to PyPI
29 | runs-on: ubuntu-latest
30 | environment: release
31 | needs: [build]
32 | permissions:
33 | id-token: write # IMPORTANT: this permission is mandatory for trusted publishing
34 |
35 | steps:
36 | - name: Retrieve release distributions
37 | uses: actions/download-artifact@v4
38 | with:
39 | name: release-dists
40 | path: dist/
41 |
42 | - uses: astral-sh/setup-uv@v6
43 | - run: uv publish --trusted-publishing always
44 |
```
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
```python
1 | import os
2 | from collections.abc import AsyncGenerator
3 |
4 | import pytest
5 | from mcp.client.session import ClientSession
6 | from mcp.server.fastmcp import FastMCP
7 | from mcp.shared.memory import create_connected_server_and_client_session
8 |
9 | from logfire_mcp.__main__ import app_factory
10 |
11 |
@pytest.fixture
def anyio_backend():
    """Run all anyio-marked tests on the asyncio backend only."""
    return 'asyncio'
15 |
16 |
@pytest.fixture
def vcr_config():
    """VCR settings: strip the authorization header so read tokens never land in cassettes."""
    return {'filter_headers': [('authorization', None)]}
20 |
21 |
@pytest.fixture
def logfire_read_token() -> str:
    """Read token used against the Logfire API.

    Uses LOGFIRE_READ_TOKEN when set (needed when re-recording cassettes); the
    placeholder is fine for cassette playback since auth headers are filtered out.
    To get a read token, go to https://logfire-us.pydantic.dev/kludex/logfire-mcp/settings/read-tokens/.
    """
    # Previously declared `async def` for no reason — it returns a plain string and is
    # consumed by the *sync* `app` fixture; a sync fixture works from both contexts.
    return os.getenv('LOGFIRE_READ_TOKEN', 'fake-token')
26 |
27 |
@pytest.fixture
def app(logfire_read_token: str) -> FastMCP:
    """FastMCP application under test, built from the read-token fixture."""
    server = app_factory(logfire_read_token)
    return server
31 |
32 |
@pytest.fixture
async def session(app: FastMCP) -> AsyncGenerator[ClientSession]:
    """Client session connected to the app over an in-memory transport (no subprocess)."""
    # FastMCP has no public accessor for the low-level server, hence the private attribute.
    mcp_server = app._mcp_server  # type: ignore
    async with create_connected_server_and_client_session(mcp_server, raise_exceptions=True) as _session:
        yield _session
38 |
```
--------------------------------------------------------------------------------
/.github/workflows/main.yml:
--------------------------------------------------------------------------------
```yaml
1 | name: CI
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | pull_request: {}
8 |
9 | env:
10 | COLUMNS: 150
11 | UV_PYTHON: 3.12
12 | UV_FROZEN: "1"
13 |
14 | jobs:
15 | lint:
16 | runs-on: ubuntu-latest
17 | steps:
18 | - uses: actions/checkout@v4
19 |
20 | - uses: astral-sh/setup-uv@v6
21 | with:
22 | enable-cache: true
23 |
24 | - run: uv sync
25 |
26 | - uses: actions/cache@v4
27 | with:
28 | path: ~/.cache/pre-commit
29 | key: pre-commit|${{ env.UV_PYTHON }}|${{ hashFiles('.pre-commit-config.yaml') }}
30 |
31 | - run: uvx pre-commit run --color=always --all-files --verbose
32 | env:
33 | SKIP: no-commit-to-branch
34 |
35 | test:
36 | runs-on: ubuntu-latest
37 | strategy:
38 | matrix:
39 | python-version: ["3.11", "3.12", "3.13"]
40 | steps:
41 | - uses: actions/checkout@v4
42 |
43 | - uses: astral-sh/setup-uv@v6
44 | with:
45 | python-version: ${{ matrix.python-version }}
46 | enable-cache: true
47 |
48 | - run: uv sync --frozen
49 |
50 | - run: uv run pytest
51 |
52 | # https://github.com/marketplace/actions/alls-green#why used for branch protection checks
53 | check:
54 | if: always()
55 | needs: [lint, test]
56 | runs-on: ubuntu-latest
57 | steps:
58 | - uses: re-actors/alls-green@release/v1
59 | with:
60 | jobs: ${{ toJSON(needs) }}
61 |
```
--------------------------------------------------------------------------------
/tests/test_readme.py:
--------------------------------------------------------------------------------
```python
1 | from pathlib import Path
2 | from typing import TypedDict
3 |
4 | import pytest
5 | from jinja2 import Environment, FileSystemLoader
6 | from mcp.client.session import ClientSession
7 |
# Jinja environment rooted at the tests directory, so the README template is found
# regardless of the current working directory.
env = Environment(loader=FileSystemLoader(Path(__file__).parent))
template = env.get_template('README.md.jinja')


pytestmark = [pytest.mark.vcr, pytest.mark.anyio]
13 |
14 |
class Argument(TypedDict):
    """One tool argument as rendered into the README."""

    name: str  # key from the tool's input schema `properties`
    description: str  # the schema's `description` field
    type: str  # the schema's `type` field, e.g. 'string' or 'integer'
19 |
20 |
class Tool(TypedDict):
    """One MCP tool as rendered into the README."""

    name: str  # the tool's registered name
    description: str  # first line of the tool's description only
    arguments: list[Argument]  # one entry per input-schema property
25 |
26 |
async def test_generate_readme(session: ClientSession) -> None:
    """Regenerate README.md from the live tool list so the docs never drift from the code."""
    mcp_tools = await session.list_tools()

    tools: list[Tool] = []
    for tool in mcp_tools.tools:
        assert tool.description
        # Only the first line of the description becomes the README bullet.
        description = tool.description.split('\n', 1)[0].strip()

        arguments: list[Argument] = [
            {'name': name, 'description': schema['description'], 'type': schema['type']}
            for name, schema in tool.inputSchema['properties'].items()
        ]
        tools.append({'name': tool.name, 'description': description, 'arguments': arguments})

    with open('README.md', 'w') as f:
        f.write(template.render(tools=tools))
45 |
```
--------------------------------------------------------------------------------
/tests/cassettes/test_logfire_link/test_logfire_link.yaml:
--------------------------------------------------------------------------------
```yaml
1 | interactions:
2 | - request:
3 | body: ''
4 | headers:
5 | accept:
6 | - '*/*'
7 | accept-encoding:
8 | - gzip, deflate
9 | connection:
10 | - keep-alive
11 | host:
12 | - logfire-us.pydantic.dev
13 | user-agent:
14 | - logfire-mcp/0.0.1
15 | method: GET
16 | uri: https://logfire-us.pydantic.dev/api/read-token-info
17 | response:
18 | body:
19 | string: !!binary |
20 | H4sIAAAAAAAAA1yNSQrDMAwA/6JzBN4S2/lMsS0puFkcQgqlpX/vJYfS68DMvOFsM2+3SjBCCr0u
21 | YgkDeYuOs2D2VqPW3gWT+hy5hw7aMaWtvtJZ2yUaoiKKBxRKCp0vgoGiYLAmqaDUECJBB/vR7lzO
22 | a9ZrTqwMRi2CTgbBmKNHUkNmU4JT0fzPtrQyjDAvD+LnT/DiS5ukHoxr2eHzBQAA//8DAGdhjOza
23 | AAAA
24 | headers:
25 | CF-RAY:
26 | - 9643f8370dddf5dc-AMS
27 | Connection:
28 | - keep-alive
29 | Content-Encoding:
30 | - gzip
31 | Content-Type:
32 | - application/json
33 | Date:
34 | - Thu, 24 Jul 2025 14:04:56 GMT
35 | NEL:
36 | - '{"success_fraction":0,"report_to":"cf-nel","max_age":604800}'
37 | Report-To:
38 | - '{"endpoints":[{"url":"https:\/\/a.nel.cloudflare.com\/report\/v4?s=VUHjqy%2BTrdTOsaOXBzrBG9FuRkXpOwjRA%2FxWrdx0nSl%2BXScKANX0WkbEpjBsbCTgwWOd%2Frk0%2FjuqV3cex4dPjNbjFciTk%2BIEkmBhrwxOEBIu3pPQdTf8ia7Htc8EMWlnpT0SPCfxfM3v"}],"group":"cf-nel","max_age":604800}'
39 | Server:
40 | - cloudflare
41 | Transfer-Encoding:
42 | - chunked
43 | access-control-expose-headers:
44 | - traceresponse
45 | cf-cache-status:
46 | - DYNAMIC
47 | server-timing:
48 | - cfL4;desc="?proto=TCP&rtt=12945&min_rtt=12595&rtt_var=4973&sent=4&recv=6&lost=0&retrans=0&sent_bytes=2843&recv_bytes=859&delivery_rate=229932&cwnd=252&unsent_bytes=0&cid=f1a8bab48a8921f3&ts=186&x=0"
49 | traceresponse:
50 | - 00-01983cc05eb814b18b2b449cd2e76a6d-8432e93ab1a5c2bc-01
51 | via:
52 | - 1.1 google
53 | x-api-version:
54 | - 8B3/58p72Z+yU8ZrVkBW+6WjdphoLsyQ1sXK9c1305Y=
55 | status:
56 | code: 200
57 | message: OK
58 | version: 1
59 |
```
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
```toml
1 | [build-system]
2 | requires = ["hatchling", "uv-dynamic-versioning"]
3 | build-backend = "hatchling.build"
4 |
5 | [tool.hatch.version]
6 | source = "uv-dynamic-versioning"
7 |
8 | [tool.uv-dynamic-versioning]
9 | vcs = "git"
10 | style = "pep440"
11 | bump = true
12 |
13 | [project]
14 | name = "logfire-mcp"
15 | dynamic = ["version"]
16 | description = "The Pydantic Logfire MCP server! 🔍"
17 | authors = [
18 | { name = "Marcelo Trylesinski", email = "[email protected]" },
19 | { name = "Samuel Colvin", email = "[email protected]" },
20 | ]
21 | readme = "README.md"
22 | requires-python = ">=3.11"
23 | license = "MIT"
24 | license-files = ["LICENSE"]
25 | classifiers = [
26 | "Development Status :: 4 - Beta",
27 | "Intended Audience :: Developers",
28 | "Programming Language :: Python :: 3",
29 | "Programming Language :: Python :: 3.11",
30 | "Programming Language :: Python :: 3.12",
31 | "Programming Language :: Python :: 3.13",
32 | ]
33 | dependencies = ["logfire>=3.7.1", "mcp[cli]>=1.10.0", "python-dotenv>=1.1.1"]
34 |
35 | [project.scripts]
36 | logfire-mcp = "logfire_mcp.__main__:main"
37 |
38 | [project.urls]
39 | Homepage = "https://github.com/pydantic/logfire-mcp"
40 | Repository = "https://github.com/pydantic/logfire-mcp"
41 | Issues = "https://github.com/pydantic/logfire-mcp/issues"
42 |
43 | [dependency-groups]
44 | dev = [
45 | "devtools>=0.12.2",
46 | "inline-snapshot[black]>=0.24.0",
47 | "jinja2>=3.1.6",
48 | "pyright>=1.1.403",
49 | "pytest-recording>=0.13.4",
50 | "ruff",
51 | ]
52 |
53 | [tool.ruff]
54 | line-length = 120
55 |
56 | [tool.ruff.lint]
57 | extend-select = [
58 | "Q",
59 | "RUF100",
60 | "RUF018", # https://docs.astral.sh/ruff/rules/assignment-in-assert/
61 | "C90",
62 | "UP",
63 | "I",
64 | "TID251",
65 | ]
66 | ignore = ["UP031"] # https://docs.astral.sh/ruff/rules/printf-string-formatting/
67 | flake8-quotes = { inline-quotes = "single", multiline-quotes = "double" }
68 | isort = { combine-as-imports = true }
69 |
70 | [tool.ruff.format]
71 | # don't format python in docstrings, pytest-examples takes care of it
72 | docstring-code-format = false
73 | quote-style = "single"
74 |
75 | [tool.inline-snapshot]
76 | format-command = "ruff format --stdin-filename {filename}"
77 |
78 | [tool.inline-snapshot.shortcuts]
79 | snap-fix = ["create", "fix"]
80 | snap = ["create"]
81 |
```
--------------------------------------------------------------------------------
/tests/cassettes/test_schema_reference/test_schema_reference.yaml:
--------------------------------------------------------------------------------
```yaml
1 | interactions:
2 | - request:
3 | body: ''
4 | headers:
5 | accept:
6 | - '*/*'
7 | accept-encoding:
8 | - gzip, deflate
9 | connection:
10 | - keep-alive
11 | host:
12 | - logfire-us.pydantic.dev
13 | user-agent:
14 | - logfire-mcp/0.3.2.dev4+ae94fe7
15 | method: GET
16 | uri: https://logfire-us.pydantic.dev/v1/schemas
17 | response:
18 | body:
19 | string: !!binary |
20 | H4sIAI0k1GgC/+2YbW/bIBDHv0rk1/0Eeddu1hapbao206ZVFSL4YtNgYHBEzaJ89+GkbZYHLw7Q
21 | SdP6ztjm57vjuOPvRYZ0LMBm/ftFJmkNWT8zwJQpbHaWFWCZ4Rq5kv6+v2FZBTXN+ouMIho+dthM
22 | XWQFRUpwrpvpo/zbyL8qnRANOuujcbDHWp79hiCPVkmygQfhmAGKUBCKe4jBVX43Or+66X0djD73
23 | mmHv+/A638JOqLAHuQWd7wI/no+6TgYt1LwGiQTkjBslm+tgHwtn6Hq4Y9Dwy8Vl3ru5zT8M7gbD
24 | 604wkAVBXoNFWuu0MYMnBqsx8XhLSwj2eEPyZrIpGspSwNYzAzEVovaOYaWKOIYBq5W00PiGzhKm
25 | ij2rBtej/FN+ewJU+S0VbBe35DVKu5CL4fAyP++WXFMujwenNX8EzEB0iUQ7QZVkrIp5cCC6Jm6r
26 | BQpBEO+HxPASuWIILqeRCJ9oyhkGJEHZXgEtUzo1bd1+4jkzMPZA9p6Iat+RQaDYOqipaZqI1VQS
27 | Hl52tFHMW0L0PuOUQuMxj8CwiyWt+8OCmXGfklz6EMnmItytF1anFDpqUUPxgY5oNC+k2FTsutzt
28 | LjWAyKggNfhWBwWk5cHacf/QKT5+g4FvxGZObDElgsrSxWyybVxUPdpGxebB6tgTlQjOCDLxrwWb
29 | 0AA0xSoK8MOBCW3Jy+XZq0RpIsvZMYlSlgZKuj7vQa2VoYLjPIFeeZc8f0/ywJNfOkGNjSBoUnGL
30 | qjS0JnKVEjMgY8emgL7DuwMntOdm2LEMdfkCUZOJBYzputuf0cryt3Xk8BeSO2IZFZAO9xOMWpua
31 | mImVP0ZXShRJ1PcGnnD9NtDoAGxQPhSCM45eTzlZ2GPen2xqTZ8SR7TmMjHRujoJ0cvrWkmFSnIW
32 | I6/XDZBsPQgWuitU3CHxGdLpt8qfFfO7VP035dz6rzHE/lJsVUK+SVBDZlQ4SLIT3+VnB04C6fe/
33 | SzcnOYbmxHL5sPwF6X85hKIZAAA=
34 | headers:
35 | CF-RAY:
36 | - 9843dc15cb69cba0-LAX
37 | Connection:
38 | - keep-alive
39 | Content-Encoding:
40 | - gzip
41 | Content-Length:
42 | - '761'
43 | Content-Type:
44 | - application/json
45 | Date:
46 | - Wed, 24 Sep 2025 17:04:13 GMT
47 | Nel:
48 | - '{"report_to":"cf-nel","success_fraction":0.0,"max_age":604800}'
49 | Report-To:
50 | - '{"group":"cf-nel","max_age":604800,"endpoints":[{"url":"https://a.nel.cloudflare.com/report/v4?s=PzOyGdfLWRnle1nd8OxmfFcCYNg7p4jnLwzUghIyvIkSPowyFzWQt1xCSwl77Pi7c5k4tJOTJnp0aXIaT7c2AdQYy4ZfGJxsI5IUFBvuNAqb%2F1gYuFo%3D"}]}'
51 | Server:
52 | - cloudflare
53 | access-control-expose-headers:
54 | - traceresponse
55 | cf-cache-status:
56 | - DYNAMIC
57 | traceresponse:
58 | - 00-01997caec9c993425dabd0264fe732ed-2c46a3fa819c4259-01
59 | vary:
60 | - Accept-Encoding
61 | via:
62 | - 1.1 google
63 | x-api-version:
64 | - 54zyD1FXxO9eq0NYG+0f6iMvkItkOgIG55xblSjRFPw=
65 | status:
66 | code: 200
67 | message: OK
68 | version: 1
69 |
```
--------------------------------------------------------------------------------
/tests/test_schema_reference.py:
--------------------------------------------------------------------------------
```python
1 | import pytest
2 | from inline_snapshot import snapshot
3 | from mcp.client.session import ClientSession
4 | from mcp.types import TextContent
5 |
6 | pytestmark = [pytest.mark.vcr, pytest.mark.anyio]
7 |
8 |
async def test_schema_reference(session: ClientSession) -> None:
    """Pin the exact DDL text returned by the `schema_reference` tool.

    The expected value is an inline-snapshot; run pytest with `--inline-snapshot=fix`
    to regenerate it when the upstream Logfire schema changes.
    """
    result = await session.call_tool('schema_reference')

    assert result.content == snapshot(
        [
            TextContent(
                type='text',
                text="""\
CREATE TABLE records (
    attributes TEXT,
    attributes_json_schema TEXT,
    created_at TIMESTAMP WITH TIME ZONE NOT NULL,
    day DATE NOT NULL,
    deployment_environment TEXT,
    duration DOUBLE PRECISION,
    end_timestamp TIMESTAMP WITH TIME ZONE NOT NULL,
    exception_message TEXT,
    exception_stacktrace TEXT,
    exception_type TEXT,
    http_method TEXT,
    http_response_status_code INTEGER,
    http_route TEXT,
    is_exception BOOLEAN,
    kind TEXT NOT NULL,
    level INTEGER NOT NULL,
    log_body TEXT,
    message TEXT NOT NULL,
    otel_events TEXT,
    otel_links TEXT,
    otel_resource_attributes TEXT,
    otel_scope_attributes TEXT,
    otel_scope_name TEXT,
    otel_scope_version TEXT,
    otel_status_code TEXT,
    otel_status_message TEXT,
    parent_span_id TEXT,
    process_pid INTEGER,
    project_id TEXT NOT NULL,
    service_instance_id TEXT,
    service_name TEXT NOT NULL,
    service_namespace TEXT,
    service_version TEXT,
    span_id TEXT NOT NULL,
    span_name TEXT NOT NULL,
    start_timestamp TIMESTAMP WITH TIME ZONE NOT NULL,
    tags TEXT[],
    telemetry_sdk_language TEXT,
    telemetry_sdk_name TEXT,
    telemetry_sdk_version TEXT,
    trace_id TEXT NOT NULL,
    url_full TEXT,
    url_path TEXT,
    url_query TEXT
);

CREATE TABLE metrics (
    aggregation_temporality TEXT,
    attributes TEXT,
    attributes_json_schema TEXT,
    created_at TIMESTAMP WITH TIME ZONE NOT NULL,
    day DATE NOT NULL,
    deployment_environment TEXT,
    exemplars TEXT,
    exp_histogram_negative_bucket_counts INTEGER[],
    exp_histogram_negative_bucket_counts_offset INTEGER,
    exp_histogram_positive_bucket_counts INTEGER[],
    exp_histogram_positive_bucket_counts_offset INTEGER,
    exp_histogram_scale INTEGER,
    exp_histogram_zero_count INTEGER,
    exp_histogram_zero_threshold DOUBLE PRECISION,
    histogram_bucket_counts INTEGER[],
    histogram_count INTEGER,
    histogram_explicit_bounds DOUBLE PRECISION[],
    histogram_max DOUBLE PRECISION,
    histogram_min DOUBLE PRECISION,
    histogram_sum DOUBLE PRECISION,
    is_monotonic BOOLEAN,
    metric_description TEXT,
    metric_name TEXT NOT NULL,
    metric_type TEXT NOT NULL,
    otel_resource_attributes TEXT,
    otel_scope_attributes TEXT,
    otel_scope_name TEXT,
    otel_scope_version TEXT,
    process_pid INTEGER,
    project_id TEXT NOT NULL,
    recorded_timestamp TIMESTAMP WITH TIME ZONE,
    scalar_value DOUBLE PRECISION,
    service_instance_id TEXT,
    service_name TEXT NOT NULL,
    service_namespace TEXT,
    service_version TEXT,
    start_timestamp TIMESTAMP WITH TIME ZONE,
    telemetry_sdk_language TEXT,
    telemetry_sdk_name TEXT,
    telemetry_sdk_version TEXT,
    unit TEXT NOT NULL
);\
""",
            )
        ]
    )
111 |
```
--------------------------------------------------------------------------------
/logfire_mcp/__main__.py:
--------------------------------------------------------------------------------
```python
1 | import argparse
2 | import asyncio
3 | import os
4 | import sys
5 |
6 | from dotenv import dotenv_values, find_dotenv
7 | from mcp import ClientSession, StdioServerParameters, stdio_client
8 | from mcp.types import TextContent
9 |
10 | from .main import __version__, app_factory
11 |
12 |
def main():
    """CLI entry point: parse arguments, resolve the read token, then run (or test) the server."""
    name_version = f'Logfire MCP v{__version__}'
    parser = argparse.ArgumentParser(
        prog='logfire-mcp',
        description=f'{name_version}\n\nSee github.com/pydantic/logfire-mcp',
        formatter_class=argparse.RawTextHelpFormatter,
    )
    parser.add_argument(
        '--read-token',
        type=str,
        help='Pydantic Logfire read token. Can also be set via LOGFIRE_READ_TOKEN environment variable.',
    )
    parser.add_argument(
        '--base-url',
        type=str,
        required=False,
        help='Pydantic Logfire base URL. Can also be set via LOGFIRE_BASE_URL environment variable.',
    )
    parser.add_argument('--test', action='store_true', help='Test the MCP server and exit')
    parser.add_argument('--version', action='store_true', help='Show version and exit')
    args = parser.parse_args()

    if args.version:
        print(name_version)
        return

    # The token may come from the CLI, the environment, or a .env file.
    token, source = get_read_token(args)
    if not token:
        parser.error(
            'Pydantic Logfire read token must be provided either via --read-token argument '
            'or LOGFIRE_READ_TOKEN environment variable'
        )

    base_url = args.base_url or os.getenv('LOGFIRE_BASE_URL')
    if args.test:
        asyncio.run(test(token, base_url, source))
        return
    app_factory(token, base_url).run(transport='stdio')
52 |
53 |
async def test(logfire_read_token: str, logfire_base_url: str | None, source: str):
    """Smoke-test the MCP server end to end.

    Spawns the server as a subprocess (via `python -m logfire_mcp`), lists its
    tools and resources, then calls the argument-free tools and prints a
    summary of each response.
    """
    print('testing Logfire MCP server:\n')
    print(f'logfire_read_token: `{logfire_read_token[:12]}...{logfire_read_token[-5:]}` from {source}\n')

    args = ['-m', 'logfire_mcp', '--read-token', logfire_read_token]
    if logfire_base_url:
        print(f'logfire_base_url: `{logfire_base_url}`')
        args += ['--base-url', logfire_base_url]

    server_params = StdioServerParameters(command=sys.executable, args=args)
    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()

            tools = await session.list_tools()
            print('tools:')
            for tool in tools.tools:
                print(f'  - {tool.name}')

            list_resources = await session.list_resources()
            print('resources:')
            for resource in list_resources.resources:
                print(f'  - {resource.name}')

            # `schema_reference` is the only registered tool that takes no
            # arguments (see `app_factory` in logfire_mcp/main.py). The names
            # used previously (`sql_reference`, `get_logfire_records_schema`)
            # are not registered, so those calls always errored.
            for tool in ('schema_reference',):
                print(f'\ncalling `{tool}`:')
                output = await session.call_tool(tool)
                content = output.content[0]
                assert isinstance(content, TextContent), f'Expected TextContent, got {type(content)}'
                if len(content.text) < 200:
                    print(f'> {content.text.strip()}')
                else:
                    # Long output: show only the first line plus the remaining size.
                    first_line = content.text.strip().split('\n', 1)[0]
                    print(f'> {first_line}... ({len(content.text) - len(first_line)} more characters)\n')
89 |
90 |
91 | def get_read_token(args: argparse.Namespace) -> tuple[str | None, str]:
92 | if args.read_token:
93 | return args.read_token, 'CLI argument'
94 | elif token := os.getenv('LOGFIRE_READ_TOKEN'):
95 | return token, 'environment variable'
96 | else:
97 | return dotenv_values(dotenv_path=find_dotenv(usecwd=True)).get('LOGFIRE_READ_TOKEN'), 'dotenv file'
98 |
99 |
# Entry point when executed as `python -m logfire_mcp`.
if __name__ == '__main__':
    main()
102 |
```
--------------------------------------------------------------------------------
/logfire_mcp/main.py:
--------------------------------------------------------------------------------
```python
1 | from collections.abc import AsyncIterator
2 | from contextlib import asynccontextmanager
3 | from dataclasses import dataclass
4 | from datetime import UTC, datetime, timedelta
5 | from importlib.metadata import version
6 | from typing import Annotated, Any, TypedDict, cast
7 |
8 | from logfire.experimental.query_client import AsyncLogfireQueryClient
9 | from mcp.server.fastmcp import Context, FastMCP
10 | from mcp.server.session import ServerSession
11 | from pydantic import Field, WithJsonSchema
12 |
13 |
@dataclass
class MCPState:
    """State created for the server lifespan and shared with every tool call."""

    # Authenticated client used by the tools to query the Logfire API.
    logfire_client: AsyncLogfireQueryClient
17 |
18 |
# Durations expressed in minutes, used to bound the `Age` parameter below.
HOUR = 60  # minutes
DAY = 24 * HOUR

# Package version, read from the installed distribution metadata.
__version__ = version('logfire-mcp')

# Look-back window (in minutes) accepted by the time-bounded tools, capped at
# 7 days. `WithJsonSchema` replaces the generated JSON schema with a plain
# integer type so MCP clients see a simple parameter.
Age = Annotated[
    int,
    Field(
        ge=0,
        le=7 * DAY,
        description='Number of minutes to look back, e.g. 30 for last 30 minutes. Maximum allowed value is 7 days.',
    ),
    WithJsonSchema({'type': 'integer'}),
]
33 |
34 |
async def find_exceptions_in_file(
    ctx: Context[ServerSession, MCPState],
    filepath: Annotated[str, Field(description='The path to the file to find exceptions in.')],
    age: Age,
) -> list[Any]:
    """Get the details about the 10 most recent exceptions on the file."""
    # (Docstring above doubles as the MCP tool description shown to clients.)
    # Returns up to 10 rows, newest first, each with the record timestamp,
    # message, and the exception type/message/stacktrace.
    logfire_client = ctx.request_context.lifespan_context.logfire_client
    min_timestamp = datetime.now(UTC) - timedelta(minutes=age)
    # Escape single quotes so a filepath containing `'` cannot break out of the
    # LIKE string literal (SQL injection) or produce an invalid query.
    # NOTE(review): `%` and `_` in the path still act as LIKE wildcards.
    escaped_filepath = filepath.replace("'", "''")
    result = await logfire_client.query_json_rows(
        f"""\
        SELECT
            created_at,
            message,
            exception_type,
            exception_message,
            exception_stacktrace
        FROM records
        WHERE is_exception = true
            AND exception_stacktrace like '%{escaped_filepath}%'
        ORDER BY created_at DESC
        LIMIT 10
        """,
        min_timestamp=min_timestamp,
    )
    return result['rows']
60 |
61 |
async def arbitrary_query(
    ctx: Context[ServerSession, MCPState],
    query: Annotated[str, Field(description='The query to run, as a SQL string.')],
    age: Age,
) -> list[Any]:
    """Run an arbitrary query on the Pydantic Logfire database.

    The SQL reference is available via the `schema_reference` tool.
    """
    # This docstring is the tool description shown to MCP clients. It previously
    # pointed at a `sql_reference` tool that is not registered in `app_factory`;
    # `schema_reference` is the registered tool that provides the schema/SQL notes.
    logfire_client = ctx.request_context.lifespan_context.logfire_client
    # Logfire restricts the query to records newer than `min_timestamp`.
    min_timestamp = datetime.now(UTC) - timedelta(minutes=age)
    result = await logfire_client.query_json_rows(query, min_timestamp=min_timestamp)
    return result['rows']
75 |
76 |
async def schema_reference(ctx: Context[ServerSession, MCPState]) -> str:
    """The database schema for the Logfire DataFusion database.

    This includes all tables, columns, and their types as well as descriptions.
    For example:

    ```sql
    -- The records table contains spans and logs.
    CREATE TABLE records (
        message TEXT, -- The message of the record
        span_name TEXT, -- The name of the span, message is usually templated from this
        trace_id TEXT, -- The trace ID, identifies a group of spans in a trace
        exception_type TEXT, -- The type of the exception
        exception_message TEXT, -- The message of the exception
        -- other columns...
    );
    ```
    The SQL syntax is similar to Postgres, although the query engine is actually Apache DataFusion.

    To access nested JSON fields e.g. in the `attributes` column use the `->` and `->>` operators.
    You may need to cast the result of these operators e.g. `(attributes->'cost')::float + 10`.

    You should apply as much filtering as reasonable to reduce the amount of data queried.
    Filters on `start_timestamp`, `service_name`, `span_name`, `metric_name`, `trace_id` are efficient.
    """
    # NOTE: the docstring above doubles as the MCP tool description shown to clients.
    logfire_client = ctx.request_context.lifespan_context.logfire_client
    # Fetch the live schema as JSON from the Logfire API.
    response = await logfire_client.client.get('/v1/schemas')
    schema_data = response.json()

    def schema_to_sql(schema_json: dict[str, Any]) -> str:
        # Render the JSON schema as one `CREATE TABLE` statement per table,
        # with a trailing `-- description` comment on each documented column.
        sql_commands: list[str] = []
        for table in schema_json.get('tables', []):
            table_name = table['name']
            columns: list[str] = []

            for col_name, col_info in table['schema'].items():
                data_type = col_info['data_type']
                nullable = col_info.get('nullable', True)  # columns default to nullable
                description = col_info.get('description', '').strip()

                column_def = f'{col_name} {data_type}'
                if not nullable:
                    column_def += ' NOT NULL'
                if description:
                    column_def += f' -- {description}'

                columns.append(column_def)

            create_table = f'CREATE TABLE {table_name} (\n    ' + ',\n    '.join(columns) + '\n);'
            sql_commands.append(create_table)

        return '\n\n'.join(sql_commands)

    return schema_to_sql(schema_data)
131 |
132 |
async def logfire_link(
    ctx: Context[ServerSession, MCPState],
    trace_id: Annotated[str, Field(description='The trace ID to link to.')],
) -> str:
    """Creates a link to help the user to view the trace in the Logfire UI."""
    client = ctx.request_context.lifespan_context.logfire_client
    response = await client.client.get('/api/read-token-info')
    token_info = cast(ReadTokenInfo, response.json())

    # The project page lives at <base>/<org>/<project>; the `q` parameter
    # pre-fills the query box so the trace is selected on load.
    project_url = client.client.base_url.join(
        f'{token_info["organization_name"]}/{token_info["project_name"]}'
    )
    return str(project_url.copy_add_param('q', f"trace_id='{trace_id}'"))
148 |
149 |
def app_factory(logfire_read_token: str, logfire_base_url: str | None = None) -> FastMCP:
    """Build the FastMCP server with all Logfire tools registered.

    The query client is opened for the duration of the server lifespan and
    shared with every tool call through `MCPState`.
    """

    @asynccontextmanager
    async def lifespan(server: FastMCP) -> AsyncIterator[MCPState]:
        # Identify this MCP server (and its version) to the Logfire API.
        user_agent = {'User-Agent': f'logfire-mcp/{__version__}'}
        async with AsyncLogfireQueryClient(
            logfire_read_token, headers=user_agent, base_url=logfire_base_url
        ) as client:
            yield MCPState(logfire_client=client)

    mcp = FastMCP('Logfire', lifespan=lifespan)
    for tool in (find_exceptions_in_file, arbitrary_query, logfire_link, schema_reference):
        mcp.tool()(tool)

    return mcp
165 |
166 |
class ReadTokenInfo(TypedDict):
    """Shape of the response from the `/api/read-token-info` endpoint."""

    token_id: str
    organization_id: str
    project_id: str
    # Human-readable names, used to build links into the Logfire UI.
    organization_name: str
    project_name: str
173 |
```