# Directory Structure
```
├── .gitignore
├── .python-version
├── Dockerfile
├── LICENSE
├── pyproject.toml
├── README.md
├── smithery.yaml
├── src
│   └── kagi_mcp
│       ├── __init__.py
│       ├── config.py
│       ├── kagi.py
│       ├── server.py
│       └── test_api.py
└── uv.lock
```
# Files
--------------------------------------------------------------------------------
/.python-version:
--------------------------------------------------------------------------------
```
3.12
```
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
```
# Python-generated files
__pycache__/
*.py[oc]
build/
dist/
wheels/
*.egg-info
# Virtual environments
.venv
# Env vars
.env
```
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
```markdown
# Kagi MCP server
[Smithery](https://smithery.ai/client/kagi-mcp)
MCP server that lets you search the web using the Kagi API.
<a href="https://glama.ai/mcp/servers/rl6yu8g58l"><img width="380" height="200" src="https://glama.ai/mcp/servers/rl6yu8g58l/badge" alt="Kagi Server MCP server" /></a>
## Components
### Resources
The server implements calls to the following Kagi [API methods](https://help.kagi.com/kagi/api/overview.html):
- fastgpt
- enrich/web
- enrich/news
### Prompts
The server doesn't provide any prompts.
### Tools
The server implements several tools:
- `ask_fastgpt`: search the web and find an answer with references
- `enrich_web`: enrich model context with web content
- `enrich_news`: enrich model context with the latest news
An example client-side invocation is sketched below.
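The sketch is hypothetical and assumes the `mcp` Python package is available, `uv` is on PATH, `path_to_project` points at this repository, and `KAGI_API_KEY` holds a valid key:
```python
import asyncio
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

# Spawn the server over stdio, the same way Claude Desktop does.
server_params = StdioServerParameters(
    command="uv",
    args=["--directory", "path_to_project", "run", "kagi-mcp"],
    env={"KAGI_API_KEY": "YOUR API KEY"},
)


async def main():
    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            # List the tools exposed by the server.
            tools = await session.list_tools()
            print([tool.name for tool in tools.tools])
            # Call one of the tools listed above.
            result = await session.call_tool("ask_fastgpt", {"query": "what is MCP?"})
            print(result.content[0].text)


asyncio.run(main())
```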
## Configuration
The server is configured via environment variables (a local `.env` file is also loaded; see `src/kagi_mcp/config.py`): `KAGI_API_KEY` is required, while `LOG_LEVEL` (default `INFO`) and `KAGI_URL` (default `https://kagi.com/api/v0/`) are optional.
## Quickstart
### Install
#### Installing via Smithery
To install the Kagi MCP server for Claude Desktop automatically via [Smithery](https://smithery.ai/client/kagi-mcp):
```bash
npx -y @smithery/cli install kagi-mcp --client claude
```
#### Claude Desktop
On macOS: `~/Library/Application Support/Claude/claude_desktop_config.json`
<details>
<summary>Development/Unpublished Servers Configuration</summary>
```
"mcpServers": {
"kagi-mcp": {
"command": "uv",
"args": [
"--directory",
"path_to_project",
"run",
"kagi-mcp"
],
"env": {
"KAGI_API_KEY": "YOUR API KEY"
}
}
}
```
</details>
## Development
### Building and Publishing
To prepare the package for distribution:
1. Sync dependencies and update lockfile:
```bash
uv sync
```
2. Build package distributions:
```bash
uv build
```
This will create source and wheel distributions in the `dist/` directory.
3. Publish to PyPI:
```bash
uv publish
```
Note: You'll need to set PyPI credentials via environment variables or command flags:
- Token: `--token` or `UV_PUBLISH_TOKEN`
- Or username/password: `--username`/`UV_PUBLISH_USERNAME` and `--password`/`UV_PUBLISH_PASSWORD`
### Debugging
```bash
npx @modelcontextprotocol/inspector uv --directory path_to_project run kagi-mcp
```
```
--------------------------------------------------------------------------------
/src/kagi_mcp/__init__.py:
--------------------------------------------------------------------------------
```python
from . import server
import asyncio


def main():
    """Main entry point for the package."""
    asyncio.run(server.main())


# Optionally expose other important items at package level
__all__ = ['main', 'server']
```
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
```toml
[project]
name = "kagi-mcp"
version = "0.1.0"
description = "MCP server that lets you search the web using the Kagi API"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
    "httpx>=0.28.1",
    "mcp>=1.1.1",
    "python-dotenv>=1.0.1",
]
[[project.authors]]
name = "Aleksey Pridachin"
email = "[email protected]"
[build-system]
requires = [ "hatchling",]
build-backend = "hatchling.build"
[project.scripts]
kagi-mcp = "kagi_mcp:main"
```
--------------------------------------------------------------------------------
/src/kagi_mcp/test_api.py:
--------------------------------------------------------------------------------
```python
import asyncio

from kagi_mcp.config import Config
from kagi_mcp.kagi import ask_fastgpt, enrich_web, enrich_news

config = Config()

if __name__ == "__main__":
    # Each helper returns plain text, so the results are printed directly.
    ask_result = asyncio.run(ask_fastgpt("current price of NVDA stock on NYSE"))
    web_result = asyncio.run(enrich_web("NVDA stock on NYSE"))
    news_result = asyncio.run(enrich_news("NVDA stock on NYSE"))
    print(ask_result)
    print(news_result)
    print(web_result)
```
--------------------------------------------------------------------------------
/smithery.yaml:
--------------------------------------------------------------------------------
```yaml
# Smithery configuration file: https://smithery.ai/docs/config#smitheryyaml
startCommand:
  type: stdio
  configSchema:
    # JSON Schema defining the configuration options for the MCP.
    type: object
    required:
      - kagiApiKey
    properties:
      kagiApiKey:
        type: string
        description: The API key for the Kagi Search server.
  commandFunction:
    # A function that produces the CLI command to start the MCP on stdio.
    |-
    (config) => ({command:'uv', args:['run', 'kagi-mcp'], env:{KAGI_API_KEY:config.kagiApiKey}})
```
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
```dockerfile
# Use the official Python image with uv pre-installed
FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv
# Set working directory
WORKDIR /app
# Copy necessary files for installation
COPY pyproject.toml uv.lock README.md /app/
# Install the project's dependencies using uv
RUN --mount=type=cache,target=/root/.cache/uv \
    uv sync --frozen --no-install-project --no-dev --no-editable
# Copy the project source code
COPY src/ /app/src/
# Install the project itself
RUN --mount=type=cache,target=/root/.cache/uv \
    uv sync --frozen --no-dev --no-editable
# Final stage
FROM python:3.12-slim-bookworm
WORKDIR /app
# Copy the virtual environment built in the uv stage
COPY --from=uv /app/.venv /app/.venv
# Put the venv on PATH; KAGI_API_KEY is only a placeholder and should be overridden at runtime
ENV PATH="/app/.venv/bin:$PATH" \
    KAGI_API_KEY=YOUR_API_KEY_HERE
# Command to run the server
ENTRYPOINT ["/app/.venv/bin/kagi-mcp"]
```
--------------------------------------------------------------------------------
/src/kagi_mcp/config.py:
--------------------------------------------------------------------------------
```python
import os

from dotenv import load_dotenv

load_dotenv()


class Config:
    """
    Configuration class to manage environment variables.
    """

    def __init__(self):
        self.KAGI_API_KEY = self.get_env_var("KAGI_API_KEY", required=True)
        self.LOG_LEVEL = self.get_env_var("LOG_LEVEL", default="INFO")
        self.KAGI_URL = self.get_env_var("KAGI_URL", default="https://kagi.com/api/v0/")

    @staticmethod
    def get_env_var(var_name, default=None, required=False):
        """
        Fetches an environment variable.

        :param var_name: Name of the environment variable
        :param default: Default value if the variable is not set (optional)
        :param required: Whether the variable is required. Raises an error if not found (default: False)
        :return: The value of the environment variable
        """
        value = os.getenv(var_name, default)
        if required and not value:
            raise ValueError(
                f"{var_name} environment variable is required but not set."
            )
        return value
```
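As a quick, hypothetical illustration of how `Config` resolves values (not part of the package): `KAGI_API_KEY` must come from the environment or a `.env` file, while the other settings fall back to their defaults.
```python
import os

from kagi_mcp.config import Config

# Config() raises ValueError if KAGI_API_KEY is missing,
# so provide one (here a placeholder) before constructing it.
os.environ.setdefault("KAGI_API_KEY", "YOUR API KEY")

config = Config()
print(config.KAGI_URL)   # https://kagi.com/api/v0/ unless overridden
print(config.LOG_LEVEL)  # INFO unless overridden
```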
--------------------------------------------------------------------------------
/src/kagi_mcp/kagi.py:
--------------------------------------------------------------------------------
```python
from urllib.parse import urljoin

import httpx
from mcp import McpError, types
from kagi_mcp.config import Config

config = Config()


async def _call_kagi(
    method: str, url: str, params: dict | None = None, json: dict | None = None
) -> httpx.Response:
    try:
        async with httpx.AsyncClient() as client:
            response = await client.request(
                method=method,
                url=urljoin(config.KAGI_URL, url),
                headers={
                    "Authorization": f"Bot {config.KAGI_API_KEY}",
                    "Content-Type": "application/json",
                },
                params=params,
                json=json,
            )
            response.raise_for_status()
            return response
    except httpx.HTTPError as e:
        # McpError expects an ErrorData payload rather than separate code/message arguments
        raise McpError(
            types.ErrorData(code=types.INTERNAL_ERROR, message=f"Kagi API error: {e}")
        )


async def ask_fastgpt(query: str) -> str:
    response = await _call_kagi(method="POST", url="fastgpt", json={"query": query})
    data = response.json()
    return data["data"]["output"]


async def enrich_web(query: str) -> str:
    response = await _call_kagi(
        method="GET",
        url="enrich/web",
        params={"q": query},
    )
    return response.text


async def enrich_news(query: str) -> str:
    response = await _call_kagi(
        method="GET",
        url="enrich/news",
        params={"q": query},
    )
    return response.text
```
--------------------------------------------------------------------------------
/src/kagi_mcp/server.py:
--------------------------------------------------------------------------------
```python
import logging

import mcp.server.stdio
import mcp.types as types
from mcp.server import NotificationOptions, Server
from mcp.server.models import InitializationOptions

from kagi_mcp.config import Config
from kagi_mcp.kagi import ask_fastgpt, enrich_web, enrich_news

config = Config()

logging.basicConfig(level=config.LOG_LEVEL)
logger = logging.getLogger("kagi-mcp")

server = Server("kagi-mcp")


@server.list_tools()
async def handle_list_tools() -> list[types.Tool]:
    pattern = r"^\s*(\b\w+\b\s*){1,3}$"
    return [
        types.Tool(
            name="ask_fastgpt",
            description="Ask fastgpt to search web and give an answer with references",
            inputSchema={
                "type": "object",
                "properties": {
                    "query": {"type": "string"},
                },
                "required": ["query"],
            },
        ),
        types.Tool(
            name="enrich_web",
            description="Enrich context with web content focused on general, non-commercial web content.",
            inputSchema={
                "type": "object",
                "properties": {
                    "query": {"type": "string", "pattern": pattern},
                },
                "required": ["query"],
            },
        ),
        types.Tool(
            name="enrich_news",
            description="Enrich context with web content focused on non-commercial news and discussions.",
            inputSchema={
                "type": "object",
                "properties": {
                    "query": {"type": "string", "pattern": pattern},
                },
                "required": ["query"],
            },
        ),
    ]


@server.call_tool()
async def handle_call_tool(
    name: str,
    arguments: dict,
) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
    tools = {
        "ask_fastgpt": ask_fastgpt,
        "enrich_web": enrich_web,
        "enrich_news": enrich_news,
    }
    if name not in tools:
        raise ValueError(f"Unknown tool: {name}")
    if not arguments:
        raise ValueError("Missing arguments")
    query = arguments.get("query")
    if not query:
        raise ValueError("Missing query")

    tool_function = tools[name]
    result = await tool_function(query)
    return [
        types.TextContent(
            type="text",
            text=result,
        )
    ]


async def main():
    async with mcp.server.stdio.stdio_server() as (read_stream, write_stream):
        await server.run(
            read_stream,
            write_stream,
            InitializationOptions(
                server_name="kagi-mcp",
                server_version="0.1.0",
                capabilities=server.get_capabilities(
                    notification_options=NotificationOptions(),
                    experimental_capabilities={},
                ),
            ),
        )
```
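If the SDK's `@server.list_tools()` and `@server.call_tool()` decorators return the wrapped functions unchanged (as in current versions of the low-level server), the handlers can also be exercised directly for a quick local smoke test. This is a hypothetical sketch; the `ask_fastgpt` call hits the live Kagi API, so `KAGI_API_KEY` must be set.
```python
import asyncio

from kagi_mcp.server import handle_list_tools, handle_call_tool


async def smoke_test():
    # Inspect the registered tools.
    tools = await handle_list_tools()
    print([tool.name for tool in tools])

    # Calls the live Kagi API; requires a valid KAGI_API_KEY.
    result = await handle_call_tool("ask_fastgpt", {"query": "what is MCP?"})
    print(result[0].text)


if __name__ == "__main__":
    asyncio.run(smoke_test())
```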