# Directory Structure
```
├── .gitignore
├── .python-version
├── Dockerfile
├── LICENSE
├── pyproject.toml
├── README.md
├── smithery.yaml
├── src
│   └── kagi_mcp
│       ├── __init__.py
│       ├── config.py
│       ├── kagi.py
│       ├── server.py
│       └── test_api.py
└── uv.lock
```
# Files
--------------------------------------------------------------------------------
/.python-version:
--------------------------------------------------------------------------------
```
1 | 3.12
2 |
```
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
```
1 | # Python-generated files
2 | __pycache__/
3 | *.py[oc]
4 | build/
5 | dist/
6 | wheels/
7 | *.egg-info
8 |
9 | # Virtual environments
10 | .venv
11 |
12 | # Env vars
13 | .env
```
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
````markdown
1 | # Kagi MCP server
2 |
3 | [Kagi MCP on Smithery](https://smithery.ai/client/kagi-mcp)
4 | MCP server that allows searching the web using the Kagi API
5 |
6 | <a href="https://glama.ai/mcp/servers/rl6yu8g58l"><img width="380" height="200" src="https://glama.ai/mcp/servers/rl6yu8g58l/badge" alt="Kagi Server MCP server" /></a>
7 |
8 | ## Components
9 |
10 | ### Resources
11 |
12 | The server implements calls to the following Kagi [API methods](https://help.kagi.com/kagi/api/overview.html):
13 | - fastgpt
14 | - enrich/web
15 | - enrich/news
16 |
17 | ### Prompts
18 |
19 | The server doesn't provide any prompts.
20 |
21 | ### Tools
22 |
23 | The server implements several tools:
24 | - `ask_fastgpt` to search the web and find an answer
25 | - `enrich_web` to enrich model context with web content
26 | - `enrich_news` to enrich model context with the latest news
27 |
28 | ## Configuration
29 |
30 | ## Quickstart
31 |
32 | ### Install
33 |
34 | ### Installing via Smithery
35 |
36 | To install the Kagi MCP server for Claude Desktop automatically via [Smithery](https://smithery.ai/client/kagi-mcp):
37 |
38 | ```bash
39 | npx -y @smithery/cli install kagi-mcp --client claude
40 | ```
41 |
42 | #### Claude Desktop
43 |
44 | On macOS: `~/Library/Application Support/Claude/claude_desktop_config.json`
45 |
46 | <details>
47 | <summary>Development/Unpublished Servers Configuration</summary>
48 | ```
49 | "mcpServers": {
50 | "kagi-mcp": {
51 | "command": "uv",
52 | "args": [
53 | "--directory",
54 | "path_to_project",
55 | "run",
56 | "kagi-mcp"
57 | ],
58 | "env": {
59 | "KAGI_API_KEY": "YOUR API KEY"
60 | }
61 | }
62 | }
63 | ```
64 | </details>
65 |
66 | ## Development
67 |
68 | ### Building and Publishing
69 |
70 | To prepare the package for distribution:
71 |
72 | 1. Sync dependencies and update lockfile:
73 | ```bash
74 | uv sync
75 | ```
76 |
77 | 2. Build package distributions:
78 | ```bash
79 | uv build
80 | ```
81 |
82 | This will create source and wheel distributions in the `dist/` directory.
83 |
84 | 3. Publish to PyPI:
85 | ```bash
86 | uv publish
87 | ```
88 |
89 | Note: You'll need to set PyPI credentials via environment variables or command flags:
90 | - Token: `--token` or `UV_PUBLISH_TOKEN`
91 | - Or username/password: `--username`/`UV_PUBLISH_USERNAME` and `--password`/`UV_PUBLISH_PASSWORD`
92 |
93 | ### Debugging
94 |
95 | ```bash
96 | npx @modelcontextprotocol/inspector uv --directory path_to_project run kagi-mcp
97 | ```
98 |
````
--------------------------------------------------------------------------------
/src/kagi_mcp/__init__.py:
--------------------------------------------------------------------------------
```python
1 | from . import server
2 | import asyncio
3 |
4 | def main():
5 | """Main entry point for the package."""
6 | asyncio.run(server.main())
7 |
8 | # Optionally expose other important items at package level
9 | __all__ = ['main', 'server']
```
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
```toml
1 | [project]
2 | name = "kagi-mcp"
3 | version = "0.1.0"
4 | description = "MCP server that allows searching the web using the Kagi API"
5 | readme = "README.md"
6 | requires-python = ">=3.12"
7 | dependencies = [
8 | "httpx>=0.28.1",
9 | "mcp>=1.1.1",
10 | "python-dotenv>=1.0.1",
11 | ]
12 |
13 | [[project.authors]]
14 | name = "Aleksey Pridachin"
15 | email = "[email protected]"
16 |
17 | [build-system]
18 | requires = [ "hatchling",]
19 | build-backend = "hatchling.build"
20 |
21 | [project.scripts]
22 | kagi-mcp = "kagi_mcp:main"
23 |
```
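The `[project.scripts]` table maps the `kagi-mcp` command to `kagi_mcp:main`. Roughly, the console script generated on install behaves like the following sketch (illustrative only, not a file in this repository):

```python
# Illustrative sketch of what the generated `kagi-mcp` console script does:
# import the package entry point declared in [project.scripts] and call it.
from kagi_mcp import main

if __name__ == "__main__":
    main()
```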
--------------------------------------------------------------------------------
/src/kagi_mcp/test_api.py:
--------------------------------------------------------------------------------
```python
1 | import asyncio
2 | import json
3 |
4 | from kagi_mcp.config import Config
5 | from kagi_mcp.kagi import ask_fastgpt, enrich_web, enrich_news
6 |
7 | config = Config()  # instantiating early surfaces a missing KAGI_API_KEY right away
8 |
9 | if __name__ == "__main__":
10 |     ask = asyncio.run(ask_fastgpt("current price of NVDA stock on NYSE"))
11 |     web_result = asyncio.run(enrich_web("NVDA stock on NYSE"))
12 |     news_result = asyncio.run(enrich_news("NVDA stock on NYSE"))
13 |     print(json.dumps(ask, indent=2))
14 |     print(json.dumps(news_result, indent=2))
15 |     print(json.dumps(web_result, indent=2))
16 |
```
--------------------------------------------------------------------------------
/smithery.yaml:
--------------------------------------------------------------------------------
```yaml
1 | # Smithery configuration file: https://smithery.ai/docs/config#smitheryyaml
2 |
3 | startCommand:
4 |   type: stdio
5 |   configSchema:
6 |     # JSON Schema defining the configuration options for the MCP.
7 |     type: object
8 |     required:
9 |       - kagiApiKey
10 |     properties:
11 |       kagiApiKey:
12 |         type: string
13 |         description: The API key for the Kagi Search server.
14 |   commandFunction:
15 |     # A function that produces the CLI command to start the MCP on stdio.
16 |     |-
17 |     (config) => ({command:'uv', args:['run', 'kagi-mcp'], env:{KAGI_API_KEY:config.kagiApiKey}})
```
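For readers less familiar with Smithery's `commandFunction`, here is a hypothetical Python rendering of what the JavaScript snippet above returns. Smithery does not run this code; it is only meant to make the shape of the start command explicit:

```python
# Hypothetical Python equivalent of the JS commandFunction above (illustration only).
def command_function(config: dict) -> dict:
    """Turn a validated Smithery config into a stdio start command."""
    return {
        "command": "uv",
        "args": ["run", "kagi-mcp"],
        "env": {"KAGI_API_KEY": config["kagiApiKey"]},
    }


print(command_function({"kagiApiKey": "YOUR API KEY"}))
```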
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
```dockerfile
1 | # Use the official Python image with uv pre-installed
2 | FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv
3 |
4 | # Set working directory
5 | WORKDIR /app
6 |
7 | # Copy necessary files for installation
8 | COPY pyproject.toml uv.lock README.md /app/
9 |
10 | # Install the project's dependencies using uv
11 | RUN --mount=type=cache,target=/root/.cache/uv \
12 |     uv sync --frozen --no-install-project --no-dev --no-editable
13 |
14 | # Copy the project source code
15 | COPY src/ /app/src/
16 |
17 | # Install the project itself
18 | RUN --mount=type=cache,target=/root/.cache/uv \
19 |     uv sync --frozen --no-dev --no-editable
20 |
21 | # Final stage
22 | FROM python:3.12-slim-bookworm
23 |
24 | WORKDIR /app
25 |
26 | # COPY --from=uv /root/.local /root/.local
27 | COPY --from=uv --chown=app:app /app/.venv /app/.venv
28 |
29 | # Put the virtual environment on PATH and set a placeholder API key (override at runtime)
30 | ENV PATH="/app/.venv/bin:$PATH" \
31 |     KAGI_API_KEY=YOUR_API_KEY_HERE
32 |
33 | # Command to run the server
34 | ENTRYPOINT ["/app/.venv/bin/kagi-mcp"]
```
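One way to exercise the image locally is through the docker CLI; the sketch below drives it from Python with `subprocess`. The image tag `kagi-mcp` is an assumption, and the API key shown is a placeholder passed at runtime so the placeholder baked into the image is not used:

```python
# Sketch: build the image and start the stdio server in a container.
# Assumptions: docker CLI available, image tag "kagi-mcp", placeholder API key.
import subprocess

subprocess.run(["docker", "build", "-t", "kagi-mcp", "."], check=True)

# -i keeps stdin open because the server speaks MCP over stdio; the process
# will wait for an MCP client (e.g. Claude Desktop or the inspector) on stdin.
subprocess.run(
    ["docker", "run", "-i", "--rm", "-e", "KAGI_API_KEY=YOUR API KEY", "kagi-mcp"],
    check=True,
)
```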
--------------------------------------------------------------------------------
/src/kagi_mcp/config.py:
--------------------------------------------------------------------------------
```python
1 | import os
2 | from dotenv import load_dotenv
3 |
4 | load_dotenv()
5 |
6 |
7 | class Config:
8 |     """
9 |     Configuration class to manage environment variables.
10 |     """
11 |
12 |     def __init__(self):
13 |         self.KAGI_API_KEY = self.get_env_var("KAGI_API_KEY", required=True)
14 |         self.LOG_LEVEL = self.get_env_var("LOG_LEVEL", default="INFO")
15 |         self.KAGI_URL = self.get_env_var("KAGI_URL", default="https://kagi.com/api/v0/")
16 |
17 |     @staticmethod
18 |     def get_env_var(var_name, default=None, required=False):
19 |         """
20 |         Fetches an environment variable.
21 |
22 |         :param var_name: Name of the environment variable
23 |         :param default: Default value if the variable is not set (optional)
24 |         :param required: Whether the variable is required. Raises an error if not found (default: False)
25 |         :return: The value of the environment variable
26 |         """
27 |         value = os.getenv(var_name, default)
28 |         if required and not value:
29 |             raise ValueError(
30 |                 f"{var_name} environment variable is required but not set."
31 |             )
32 |         return value
33 |
```
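A minimal usage sketch of `Config`, assuming the API key is supplied via the environment (the value below is a placeholder); `Config()` raises `ValueError` if `KAGI_API_KEY` is missing:

```python
# Minimal sketch: exercise Config with a placeholder key (illustration only).
import os

from kagi_mcp.config import Config

os.environ.setdefault("KAGI_API_KEY", "YOUR API KEY")  # placeholder for illustration

config = Config()
print(config.LOG_LEVEL)  # "INFO" unless the LOG_LEVEL env var overrides it
print(config.KAGI_URL)   # "https://kagi.com/api/v0/" unless KAGI_URL overrides it
```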
--------------------------------------------------------------------------------
/src/kagi_mcp/kagi.py:
--------------------------------------------------------------------------------
```python
1 | from urllib.parse import urljoin
2 |
3 | import httpx
4 | from mcp import McpError, types as types
5 |
6 | from kagi_mcp.config import Config
7 |
8 | config = Config()
9 |
10 |
11 | async def _call_kagi(
12 |     method: str, url: str, params: dict | None = None, json: dict | None = None
13 | ) -> httpx.Response:
14 |     try:
15 |         async with httpx.AsyncClient() as client:
16 |             response = await client.request(
17 |                 method=method,
18 |                 url=urljoin(config.KAGI_URL, url),
19 |                 headers={
20 |                     "Authorization": f"Bot {config.KAGI_API_KEY}",
21 |                     "Content-Type": "application/json",
22 |                 },
23 |                 params=params,
24 |                 json=json,
25 |             )
26 |             response.raise_for_status()
27 |
28 |             return response
29 |
30 |     except httpx.HTTPError as e:
31 |         raise McpError(types.INTERNAL_ERROR, f"Kagi API error: {str(e)}")
32 |
33 |
34 | async def ask_fastgpt(query: str) -> str:
35 |     response = await _call_kagi(method="POST", url="fastgpt", json={"query": query})
36 |     data = response.json()
37 |     return data["data"]["output"]
38 |
39 |
40 | async def enrich_web(query: str) -> str:
41 |     response = await _call_kagi(
42 |         method="GET",
43 |         url="enrich/web",
44 |         params={"q": query},
45 |     )
46 |     return response.text
47 |
48 |
49 | async def enrich_news(query: str) -> str:
50 |     response = await _call_kagi(
51 |         method="GET",
52 |         url="enrich/news",
53 |         params={"q": query},
54 |     )
55 |     return response.text
56 |
```
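The wrappers can also be called directly outside the MCP server, as `test_api.py` does. Below is a minimal sketch, assuming `KAGI_API_KEY` is set and using a made-up query; `_call_kagi` converts HTTP failures into `McpError`, so that is the exception to catch:

```python
# Minimal sketch: call ask_fastgpt directly and handle Kagi API failures.
import asyncio

from mcp import McpError

from kagi_mcp.kagi import ask_fastgpt


async def demo() -> None:
    try:
        answer = await ask_fastgpt("What is the Kagi FastGPT API?")
        print(answer)
    except McpError as exc:
        print(f"Kagi request failed: {exc}")


if __name__ == "__main__":
    asyncio.run(demo())
```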
--------------------------------------------------------------------------------
/src/kagi_mcp/server.py:
--------------------------------------------------------------------------------
```python
1 | import logging
2 |
3 | import mcp.server.stdio
4 | import mcp.types as types
5 | from mcp.server import NotificationOptions, Server
6 | from mcp.server.models import InitializationOptions
7 |
8 | from kagi_mcp.config import Config
9 | from kagi_mcp.kagi import ask_fastgpt, enrich_web, enrich_news
10 |
11 | config = Config()
12 | logging.basicConfig(level=config.LOG_LEVEL)
13 | logger = logging.getLogger("kagi-mcp")
14 | server = Server("kagi-mcp")
15 |
16 |
17 | @server.list_tools()
18 | async def handle_list_tools() -> list[types.Tool]:
19 |     pattern = r"^\s*(\b\w+\b\s*){1,3}$"  # enrich queries are limited to one to three words
20 |     return [
21 |         types.Tool(
22 |             name="ask_fastgpt",
23 |             description="Ask fastgpt to search web and give an answer with references",
24 |             inputSchema={
25 |                 "type": "object",
26 |                 "properties": {
27 |                     "query": {"type": "string"},
28 |                 },
29 |                 "required": ["query"],
30 |             },
31 |         ),
32 |         types.Tool(
33 |             name="enrich_web",
34 |             description="Enrich context with web content focused on general, non-commercial web content.",
35 |             inputSchema={
36 |                 "type": "object",
37 |                 "properties": {
38 |                     "query": {"type": "string", "pattern": pattern},
39 |                 },
40 |                 "required": ["query"],
41 |             },
42 |         ),
43 |         types.Tool(
44 |             name="enrich_news",
45 |             description="Enrich context with web content focused on non-commercial news and discussions.",
46 |             inputSchema={
47 |                 "type": "object",
48 |                 "properties": {
49 |                     "query": {"type": "string", "pattern": pattern},
50 |                 },
51 |                 "required": ["query"],
52 |             },
53 |         ),
54 |     ]
55 |
56 |
57 | @server.call_tool()
58 | async def handle_call_tool(
59 |     name: str,
60 |     arguments: dict,
61 | ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
62 |     tools = {
63 |         "ask_fastgpt": ask_fastgpt,
64 |         "enrich_web": enrich_web,
65 |         "enrich_news": enrich_news,
66 |     }
67 |     if name not in tools:
68 |         raise ValueError(f"Unknown tool: {name}")
69 |
70 |     if not arguments:
71 |         raise ValueError("Missing arguments")
72 |
73 |     query = arguments.get("query")
74 |
75 |     if not query:
76 |         raise ValueError("Missing query")
77 |
78 |     tool_function = tools[name]
79 |     result = await tool_function(query)
80 |
81 |     return [
82 |         types.TextContent(
83 |             type="text",
84 |             text=result,
85 |         )
86 |     ]
87 |
88 |
89 | async def main():
90 |     async with mcp.server.stdio.stdio_server() as (read_stream, write_stream):
91 |         await server.run(
92 |             read_stream,
93 |             write_stream,
94 |             InitializationOptions(
95 |                 server_name="kagi-mcp",
96 |                 server_version="0.1.0",
97 |                 capabilities=server.get_capabilities(
98 |                     notification_options=NotificationOptions(),
99 |                     experimental_capabilities={},
100 |                 ),
101 |             ),
102 |         )
103 |
```
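To see the tool plumbing end to end, the server can be driven with the MCP Python client over stdio. The sketch below assumes the `mcp` client API (`ClientSession`, `StdioServerParameters`, `stdio_client`) and reuses the placeholders from the README (`path_to_project`, `YOUR API KEY`); it is an illustration, not part of the repository:

```python
# Sketch: connect to the kagi-mcp server over stdio and call one tool.
# Placeholders: path_to_project, YOUR API KEY.
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client


async def demo() -> None:
    params = StdioServerParameters(
        command="uv",
        args=["--directory", "path_to_project", "run", "kagi-mcp"],
        env={"KAGI_API_KEY": "YOUR API KEY"},
    )
    async with stdio_client(params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            tools = await session.list_tools()
            print([tool.name for tool in tools.tools])
            result = await session.call_tool("ask_fastgpt", {"query": "latest NVDA news"})
            print(result)


if __name__ == "__main__":
    asyncio.run(demo())
```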