# qiniu-mcp-server — packed repository snapshot
# Directory Structure

```
├── .env.example
├── .github
│   └── workflows
│       ├── ruff.yml
│       └── workflow.yml
├── .gitignore
├── .python-version
├── CHANGELOG.md
├── chatmcp.yaml
├── Dockerfile
├── docs
│   └── images
│       └── logo.png
├── LICENSE
├── pyproject.toml
├── README.md
├── src
│   └── mcp_server
│       ├── __init__.py
│       ├── application.py
│       ├── config
│       │   ├── __init__.py
│       │   └── config.py
│       ├── consts
│       │   ├── __init__.py
│       │   └── consts.py
│       ├── core
│       │   ├── __init__.py
│       │   ├── cdn
│       │   │   ├── __init__.py
│       │   │   ├── cdn.py
│       │   │   └── tools.py
│       │   ├── media_processing
│       │   │   ├── __init__.py
│       │   │   ├── processing.py
│       │   │   ├── tools.py
│       │   │   └── utils.py
│       │   ├── storage
│       │   │   ├── __init__.py
│       │   │   ├── resource.py
│       │   │   ├── storage.py
│       │   │   └── tools.py
│       │   └── version
│       │       ├── __init__.py
│       │       ├── tools.py
│       │       └── version.py
│       ├── README.md
│       ├── resource
│       │   ├── __init__.py
│       │   └── resource.py
│       ├── server.py
│       └── tools
│           ├── __init__.py
│           └── tools.py
└── uv.lock
```

# Files

--------------------------------------------------------------------------------
/.python-version:
--------------------------------------------------------------------------------

```
1 | 3.12
2 | 
```

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------

```
 1 | # Python-generated files
 2 | __pycache__/
 3 | *.py[oc]
 4 | build/
 5 | dist/
 6 | wheels/
 7 | *.egg-info
 8 | 
 9 | # Virtual environments
10 | .venv
11 | .env
12 | .env.dora
13 | .env.kodo
14 | 
15 | src/mcp_server/test.py
```

--------------------------------------------------------------------------------
/.env.example:
--------------------------------------------------------------------------------

```
1 | # S3/Kodo 认证信息
2 | QINIU_ACCESS_KEY=your_access_key
3 | QINIU_SECRET_KEY=your_secret_key
4 | QINIU_REGION_NAME=your_region
5 | QINIU_ENDPOINT_URL=endpoint_url # eg:https://s3.your_region.qiniucs.com
6 | QINIU_BUCKETS=bucket1,bucket2,bucket3
7 | 
```

--------------------------------------------------------------------------------
/.github/workflows/workflow.yml:
--------------------------------------------------------------------------------

```yaml
1 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/config/__init__.py:
--------------------------------------------------------------------------------

```python
1 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/consts/__init__.py:
--------------------------------------------------------------------------------

```python
1 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/resource/__init__.py:
--------------------------------------------------------------------------------

```python
1 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/tools/__init__.py:
--------------------------------------------------------------------------------

```python
1 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/core/version/version.py:
--------------------------------------------------------------------------------

```python
1 | 
# Version string reported by the `version` tool.
# NOTE(review): keep in sync with `project.version` in pyproject.toml (both
# are currently "1.2.3"); there is no single source of truth.
VERSION = '1.2.3'
```

--------------------------------------------------------------------------------
/src/mcp_server/consts/consts.py:
--------------------------------------------------------------------------------

```python
# Name of the shared logger used by every mcp_server module.
LOGGER_NAME = "qiniu-mcp"
```

--------------------------------------------------------------------------------
/src/mcp_server/core/version/__init__.py:
--------------------------------------------------------------------------------

```python
from .tools import register_tools
from ...config import config


def load(cfg: config.Config):
    """Register the version tools.

    `cfg` is unused here but kept so every core module exposes the same
    load(cfg) signature.
    """
    register_tools()


__all__ = ["load"]
10 | 
```

--------------------------------------------------------------------------------
/.github/workflows/ruff.yml:
--------------------------------------------------------------------------------

```yaml
1 | name: Ruff
2 | on: [push, pull_request]
3 | jobs:
4 |   ruff:
5 |     runs-on: ubuntu-latest
6 |     steps:
7 |       - uses: actions/checkout@v4
8 |       - uses: astral-sh/ruff-action@v3
9 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/core/cdn/__init__.py:
--------------------------------------------------------------------------------

```python
from .tools import register_tools
from ...config import config
from .cdn import CDNService


def load(cfg: config.Config):
    """Build the CDN service from shared credentials and register its tools."""
    cdn = CDNService(cfg)
    register_tools(cdn)


__all__ = [
    "load",
]
14 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/core/media_processing/__init__.py:
--------------------------------------------------------------------------------

```python
from . import processing
from .tools import register_tools
from ...config import config


def load(cfg: config.Config):
    """Create the media-processing client and register its tools.

    Unlike other core modules, register_tools here receives the config as
    well as the client.
    """
    cli = processing.MediaProcessingService(cfg)
    register_tools(cfg, cli)


__all__ = [
    "load",
]
14 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/__init__.py:
--------------------------------------------------------------------------------

```python
"""Package entry point: configures logging and re-exports the CLI `main`."""
import logging

from .consts import consts
from .server import main

# Root logging defaults to ERROR; the MCP set_logging_level request can
# adjust the package logger at runtime (see application.py).
logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger(consts.LOGGER_NAME)
# NOTE(review): emitted at INFO while the effective level is ERROR, so this
# message is normally invisible.
logger.info("Initializing MCP server package")

__all__ = ["main"]
12 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/core/storage/__init__.py:
--------------------------------------------------------------------------------

```python
 1 | from .storage import StorageService
 2 | from .tools import register_tools
 3 | from .resource import register_resource_provider
 4 | from ...config import config
 5 | 
 6 | 
 7 | def load(cfg: config.Config):
 8 |     storage = StorageService(cfg)
 9 |     register_tools(storage)
10 |     register_resource_provider(storage)
11 | 
12 | 
13 | __all__ = ["load"]
14 | 
```

--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------

```toml
 1 | [project]
 2 | name = "qiniu-mcp-server"
 3 | version = "1.2.3"
 4 | description = "A MCP server project of Qiniu."
 5 | requires-python = ">=3.12"
 6 | authors = [
 7 |     { name = "Qiniu", email = "[email protected]" },
 8 | ]
 9 | keywords = ["qiniu", "mcp", "llm"]
10 | dependencies = [
11 |     "aioboto3>=13.2.0",
12 |     "fastjsonschema>=2.21.1",
13 |     "httpx>=0.28.1",
14 |     "mcp[cli]>=1.0.0",
15 |     "openai>=1.66.3",
16 |     "pip>=25.0.1",
17 |     "python-dotenv>=1.0.1",
18 |     "qiniu>=7.16.0",
19 | ]
20 | 
21 | [build-system]
22 | requires = [ "hatchling",]
23 | build-backend = "hatchling.build"
24 | 
25 | [project.scripts]
26 | qiniu-mcp-server = "mcp_server:main"
27 | 
28 | [tool.hatch.build.targets.wheel]
29 | packages = ["src/mcp_server"]
```

--------------------------------------------------------------------------------
/src/mcp_server/core/version/tools.py:
--------------------------------------------------------------------------------

```python
 1 | 
 2 | from mcp import types
 3 | 
 4 | from . import version
 5 | from ...tools import tools
 6 | 
 7 | 
class _ToolImpl:
    """Holds the implementations behind the version tool set."""

    def __init__(self):
        pass

    @tools.tool_meta(
        types.Tool(
            name="version",
            description="qiniu mcp server version info.",
            inputSchema={
                "type": "object",
                "required": [],
            }
        )
    )
    def version(self, **kwargs) -> list[types.TextContent]:
        # The version is a static string; simply echo it back as text.
        return [types.TextContent(type="text", text=version.VERSION)]
24 | 
def register_tools():
    """Instantiate the tool implementation and add its tools to the registry."""
    impl = _ToolImpl()
    tools.auto_register_tools([impl.version])
```

--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------

```dockerfile
 1 | FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv
 2 | 
 3 | WORKDIR /app
 4 | 
 5 | ENV UV_COMPILE_BYTECODE=1
 6 | ENV UV_LINK_MODE=copy
 7 | 
 8 | RUN --mount=type=cache,target=/root/.cache/uv \
 9 |     --mount=type=bind,source=uv.lock,target=uv.lock \
10 |     --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
11 |     uv sync --frozen --no-install-project --no-dev --no-editable
12 | 
13 | ADD . /app
14 | RUN --mount=type=cache,target=/root/.cache/uv \
15 |     uv sync --frozen --no-dev --no-editable
16 | 
17 | FROM python:3.12-slim-bookworm
18 | 
19 | WORKDIR /app
20 | 
21 | #COPY --from=uv /root/.local /root/.local
22 | COPY --from=uv --chown=app:app /app/.venv /app/.venv
23 | 
24 | # Place executables in the environment at the front of the path
25 | ENV PATH="/app/.venv/bin:$PATH"
26 | 
27 | # when running the container, add --db-path and a bind mount to the host's db file
28 | ENTRYPOINT ["qiniu-mcp-server"]
```

--------------------------------------------------------------------------------
/chatmcp.yaml:
--------------------------------------------------------------------------------

```yaml
 1 | params:
 2 |   type: object
 3 |   properties:
 4 |     QINIU_ACCESS_KEY:
 5 |       type: string
 6 |       description: The access key for your Qiniu account.
 7 |     QINIU_SECRET_KEY:
 8 |       type: string
 9 |       description: The secret key for your Qiniu account.
10 |     QINIU_REGION_NAME:
11 |       type: string
12 |       description: The region name for your config of Qiniu buckets.
13 |     QINIU_ENDPOINT_URL:
14 |       type: string
15 |       description: The endpoint URL for your config of Qiniu buckets. eg:https://s3.your_region.qiniucs.com.
16 |     QINIU_BUCKETS:
17 |       type: string
18 |       description: The buckets of Qiniu, If there are multiple extra items, separate them with commas. eg:bucket1,bucket2.
19 |   required:
20 |     - QINIU_ACCESS_KEY
21 |     - QINIU_SECRET_KEY
22 |     - QINIU_REGION_NAME
23 |     - QINIU_ENDPOINT_URL
24 |     - QINIU_BUCKETS
25 | 
26 | uvx:
27 |   command:
28 |     | uvx qiniu-mcp-server
29 |   config:
30 |     | {
31 |         "mcpServers": {
32 |           "qiniu-mcp-server": {
33 |             "command": "uvx",
34 |             "args": [
35 |               "qiniu-mcp-server"
36 |             ],
37 |             "env": {
38 |               "QINIU_ACCESS_KEY": "YOUR QINIU ACCESS KEY",
39 |               "QINIU_SECRET_KEY": "YOUR QINIU SECRET KEY",
40 |               "QINIU_REGION_NAME": "YOUR QINIU REGION NAME",
41 |               "QINIU_ENDPOINT_URL": "YOUR QINIU ENDPOINT URL",
42 |               "QINIU_BUCKETS": "YOUR QINIU BUCKETS"
42 |             }
43 |           }
44 |         }
45 |       }
```

--------------------------------------------------------------------------------
/src/mcp_server/application.py:
--------------------------------------------------------------------------------

```python
 1 | import logging
 2 | from contextlib import aclosing
 3 | 
 4 | import mcp.types as types
 5 | from mcp.types import EmptyResult
 6 | 
 7 | from mcp import LoggingLevel
 8 | from mcp.server.lowlevel import Server
 9 | from mcp.types import Tool, AnyUrl
10 | 
11 | from . import core
12 | from .consts import consts
13 | from .resource import resource
14 | from .tools import tools
15 | 
16 | 
logger = logging.getLogger(consts.LOGGER_NAME)

# NOTE(review): core.load() presumably invokes each core module's load()
# (storage, cdn, media_processing, version) so tools and resource providers
# are registered before the Server below answers requests — confirm in
# core/__init__.py, which is not shown here.
core.load()
server = Server("qiniu-mcp-server")
21 | 
22 | 
@server.set_logging_level()
async def set_logging_level(level: LoggingLevel) -> EmptyResult:
    """Handle the MCP set-level request by adjusting the package logger.

    BUG FIX: the original called `logger.setLevel(level.lower())`, but
    Python's Logger.setLevel only accepts upper-case names ("DEBUG", ...)
    or ints, so every request raised ValueError. MCP also defines levels
    Python lacks (notice/alert/emergency), so map them explicitly.
    """
    level_map = {
        "debug": logging.DEBUG,
        "info": logging.INFO,
        "notice": logging.INFO,
        "warning": logging.WARNING,
        "error": logging.ERROR,
        "critical": logging.CRITICAL,
        "alert": logging.CRITICAL,
        "emergency": logging.CRITICAL,
    }
    logger.setLevel(level_map.get(level.lower(), logging.INFO))
    await server.request_context.session.send_log_message(
        level="warning", data=f"Log level set to {level}", logger=consts.LOGGER_NAME
    )
    return EmptyResult()
30 | 
31 | 
@server.list_resources()
async def list_resources(**kwargs) -> list[types.Resource]:
    """Collect resources from every registered provider into one list."""
    collected = []
    async with aclosing(resource.list_resources(**kwargs)) as items:
        async for item in items:
            collected.append(item)
    return collected
39 | 
40 | 
@server.read_resource()
async def read_resource(uri: AnyUrl) -> str:
    """Read resource contents; the registry dispatches on the URI scheme."""
    return await resource.read_resource(uri)
44 | 
45 | 
@server.list_tools()
async def handle_list_tools() -> list[Tool]:
    """Return metadata for every registered tool (raises if none registered)."""
    return tools.all_tools()
49 | 
50 | 
@server.call_tool()
async def call_tool(name: str, arguments: dict):
    """Validate and dispatch a tool invocation via the global tool registry."""
    return await tools.call_tool(name, arguments)
54 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/core/media_processing/processing.py:
--------------------------------------------------------------------------------

```python
 1 | import qiniu
 2 | 
 3 | from ...config import config
 4 | 
 5 | 
class MediaProcessingService:
    """Wraps Qiniu Dora persistent-processing (pfop) operations."""

    def __init__(self, cfg: config.Config):
        self.cfg = cfg
        self.auth = qiniu.Auth(cfg.access_key, cfg.secret_key)

    def execute_fop(
        self,
        bucket: str,
        key: str,
        fops: str = None,
        persistent_type: int = None,
        workflow_template_id: str = None,
        pipeline: str = None,
        notify_url: str = None,
    ) -> dict:
        """
        Submit a persistent-processing (pfop) task for an object.

        :param bucket: bucket containing the source object
        :param key: key of the object to process
        :param fops: processing command string
        :param persistent_type: persistence type flag
        :param workflow_template_id: id of a preconfigured workflow template
        :param pipeline: dedicated processing queue name
        :param notify_url: callback URL notified when processing finishes
        :return: result dict; the task id is stored under the
            "persistentId" key
        """

        persistent_fop = qiniu.PersistentFop(
            auth=self.auth, bucket=bucket, pipeline=pipeline, notify_url=notify_url
        )
        # NOTE(review): `info` (the HTTP response detail) is discarded, so
        # API failures surface only as a None/partial `result` — confirm
        # callers can cope with that.
        result, info = persistent_fop.execute(
            key=key,
            fops=fops,
            persistent_type=persistent_type,
            workflow_template_id=workflow_template_id,
        )
        return result

    def get_fop_status(self, persistent_id: str) -> dict:
        """
        Query the status of a persistent-processing task.

        :param persistent_id: task id returned by execute_fop
        :return: status dict; see
            https://developer.qiniu.com/dora/1294/persistent-processing-status-query-prefop
        """
        persistent_fop = qiniu.PersistentFop(auth=self.auth, bucket="")
        result, info = persistent_fop.get_status(persistent_id=persistent_id)
        return result
55 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/resource/resource.py:
--------------------------------------------------------------------------------

```python
 1 | import logging
 2 | from abc import abstractmethod
 3 | from typing import Dict, AsyncGenerator, Iterable
 4 | 
 5 | from mcp import types
 6 | from mcp.server.lowlevel.helper_types import ReadResourceContents
 7 | 
 8 | from ..consts import consts
 9 | 
10 | logger = logging.getLogger(consts.LOGGER_NAME)
11 | 
12 | ResourceContents = str | bytes | Iterable[ReadResourceContents]
13 | 
class ResourceProvider:
    """Base class for scheme-specific resource providers (e.g. "s3").

    Subclasses implement listing and reading; the module-level registry
    below dispatches on the URI scheme.
    """

    def __init__(self, scheme: str):
        # URI scheme this provider serves; used as the registry key.
        self.scheme = scheme

    @abstractmethod
    async def list_resources(self, **kwargs) -> list[types.Resource]:
        """Return every resource this provider exposes."""
        pass

    @abstractmethod
    async def read_resource(self, uri: types.AnyUrl, **kwargs) -> ResourceContents:
        """Return the contents of the resource identified by `uri`."""
        pass
25 | 
26 | 
27 | _all_resource_providers: Dict[str, ResourceProvider] = {}
28 | 
29 | 
async def list_resources(**kwargs) -> AsyncGenerator[types.Resource, None]:
    """Yield resources from every registered provider, in registration order.

    With no providers registered the generator simply finishes empty.
    """
    for provider in _all_resource_providers.values():
        for item in await provider.list_resources(**kwargs):
            yield item
39 | 
40 | 
async def read_resource(uri: types.AnyUrl, **kwargs) -> ResourceContents:
    """Read a resource by dispatching on `uri.scheme`.

    :return: the provider's contents, or "" when no providers are registered
    :raises ValueError: when the scheme has no registered provider

    BUG FIX: the original looked up the provider with .get() and called it
    unconditionally, so an unknown scheme crashed with AttributeError on
    None; raise a descriptive error instead.
    """
    if len(_all_resource_providers) == 0:
        return ""

    provider = _all_resource_providers.get(uri.scheme)
    if provider is None:
        raise ValueError(f"No resource provider registered for scheme: {uri.scheme}")
    return await provider.read_resource(uri=uri, **kwargs)
47 | 
48 | 
def register_resource_provider(provider: ResourceProvider):
    """Register a resource provider; duplicate schemes are rejected."""
    name = provider.scheme
    if name in _all_resource_providers:
        raise ValueError(f"Resource Provider {name} already registered")
    _all_resource_providers[name] = provider
55 | 
56 | 
57 | __all__ = [
58 |     "ResourceContents",
59 |     "ResourceProvider",
60 |     "list_resources",
61 |     "read_resource",
62 |     "register_resource_provider",
63 | ]
64 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/server.py:
--------------------------------------------------------------------------------

```python
 1 | import asyncio
 2 | import logging
 3 | 
 4 | import anyio
 5 | import click
 6 | 
 7 | from . import application
 8 | from .consts import consts
 9 | 
10 | logger = logging.getLogger(consts.LOGGER_NAME)
11 | logger.info("Starting MCP server")
12 | 
# NOTE(review): SAMPLE_RESOURCES is not referenced anywhere in this module —
# it looks like leftover scaffolding from the MCP example server; confirm
# and remove.
SAMPLE_RESOURCES = {
    "greeting": "Hello! This is a MCP Server for Qiniu.",
    "help": "This server provides a few resources and tools for Qiniu.",
    "about": "This is the MCP server implementation.",
}
18 | 
19 | 
@click.command()
@click.option("--port", default=8000, help="Port to listen on for SSE")
@click.option(
    "--transport",
    type=click.Choice(["stdio", "sse"]),
    default="stdio",
    help="Transport type",
)
def main(port: int, transport: str) -> int:
    """CLI entry point: run the MCP server over stdio (default) or SSE.

    :param port: TCP port for the SSE transport (ignored for stdio)
    :param transport: "stdio" or "sse"
    :return: process exit code (always 0)
    """
    app = application.server

    if transport == "sse":
        # Deferred imports: the stdio path must not require the
        # starlette/uvicorn stack.
        from mcp.server.sse import SseServerTransport
        from starlette.applications import Starlette
        from starlette.routing import Mount, Route

        sse = SseServerTransport("/messages/")

        async def handle_sse(request):
            # NOTE(review): `request._send` is a private Starlette attribute;
            # this mirrors the upstream MCP SSE example but may break on
            # Starlette upgrades.
            async with sse.connect_sse(
                request.scope, request.receive, request._send
            ) as streams:
                await app.run(
                    streams[0], streams[1], app.create_initialization_options()
                )

        starlette_app = Starlette(
            debug=True,
            routes=[
                Route("/sse", endpoint=handle_sse),
                Mount("/messages/", app=sse.handle_post_message),
            ],
        )

        import uvicorn

        uvicorn.run(starlette_app, host="0.0.0.0", port=port)
    else:
        from mcp.server.stdio import stdio_server

        async def arun():
            # Speak the MCP protocol over this process's stdin/stdout pair.
            async with stdio_server() as streams:
                await app.run(
                    streams[0], streams[1], app.create_initialization_options()
                )

        anyio.run(arun)

    return 0
69 | 
70 | 
if __name__ == "__main__":
    # BUG FIX: `main` is a synchronous click command, not a coroutine, so
    # `asyncio.run(main())` was wrong — click runs (and normally exits) before
    # asyncio.run sees anything, and a plain return value would make
    # asyncio.run raise. Each transport manages its own event loop internally,
    # so a direct call is correct.
    main()
73 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/config/config.py:
--------------------------------------------------------------------------------

```python
 1 | import logging
 2 | import os
 3 | from typing import List
 4 | from attr import dataclass
 5 | from dotenv import load_dotenv
 6 | 
 7 | from ..consts import consts
 8 | 
 9 | _CONFIG_ENV_KEY_ACCESS_KEY = "QINIU_ACCESS_KEY"
10 | _CONFIG_ENV_KEY_SECRET_KEY = "QINIU_SECRET_KEY"
11 | _CONFIG_ENV_KEY_ENDPOINT_URL = "QINIU_ENDPOINT_URL"
12 | _CONFIG_ENV_KEY_REGION_NAME = "QINIU_REGION_NAME"
13 | _CONFIG_ENV_KEY_BUCKETS = "QINIU_BUCKETS"
14 | 
15 | logger = logging.getLogger(consts.LOGGER_NAME)
16 | 
17 | # Load environment variables at package initialization
18 | load_dotenv()
19 | 
20 | 
21 | @dataclass
22 | class Config:
23 |     access_key: str
24 |     secret_key: str
25 |     endpoint_url: str
26 |     region_name: str
27 |     buckets: List[str]
28 | 
29 | 
def load_config() -> Config:
    """Build a Config from environment variables (after load_dotenv).

    Missing or empty values are replaced with "YOUR_QINIU_*" placeholders so
    the server can still start; requests will then fail with an obviously
    bogus credential rather than None.

    :return: populated Config instance
    """
    config = Config(
        access_key=os.getenv(_CONFIG_ENV_KEY_ACCESS_KEY),
        secret_key=os.getenv(_CONFIG_ENV_KEY_SECRET_KEY),
        endpoint_url=os.getenv(_CONFIG_ENV_KEY_ENDPOINT_URL),
        region_name=os.getenv(_CONFIG_ENV_KEY_REGION_NAME),
        buckets=_get_configured_buckets_from_env(),
    )

    # BUG FIX: the original tested `len(config.access_key) == 0` in all four
    # branches (copy-paste error), so an empty secret_key/endpoint_url/
    # region_name was never replaced by its placeholder. `not x` already
    # covers both None and "".
    if not config.access_key:
        config.access_key = "YOUR_QINIU_ACCESS_KEY"
    if not config.secret_key:
        config.secret_key = "YOUR_QINIU_SECRET_KEY"
    if not config.endpoint_url:
        config.endpoint_url = "YOUR_QINIU_ENDPOINT_URL"
    if not config.region_name:
        config.region_name = "YOUR_QINIU_REGION_NAME"

    # The secret key is deliberately never logged.
    logger.info(f"Configured   access_key: {config.access_key}")
    logger.info(f"Configured endpoint_url: {config.endpoint_url}")
    logger.info(f"Configured  region_name: {config.region_name}")
    logger.info(f"Configured      buckets: {config.buckets}")
    return config
53 | 
54 | 
def _get_configured_buckets_from_env() -> List[str]:
    """Parse the comma-separated bucket list from the environment.

    Returns an empty list when the variable is unset or empty.
    """
    raw = os.getenv(_CONFIG_ENV_KEY_BUCKETS)
    if not raw:
        return []
    return [name.strip() for name in raw.split(",")]
62 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/core/cdn/cdn.py:
--------------------------------------------------------------------------------

```python
 1 | import logging
 2 | 
 3 | from qiniu import CdnManager, Auth
 4 | from qiniu.http import ResponseInfo
 5 | from typing import List, Optional, Dict
 6 | from pydantic import BaseModel
 7 | from dataclasses import dataclass
 8 | 
 9 | from ...consts import consts
10 | from ...config import config
11 | 
12 | logger = logging.getLogger(consts.LOGGER_NAME)
13 | 
14 | 
class PrefetchUrlsResult(BaseModel):
    """Deserialized response body of the CDN prefetch API.

    BUG FIX: the original stacked `@dataclass` on a pydantic `BaseModel`;
    the dataclass-generated `__init__`/`__eq__` clobber pydantic's and break
    `model_validate`, so the decorator is removed.
    """

    code: Optional[int] = None
    error: Optional[str] = None
    requestId: Optional[str] = None
    invalidUrls: Optional[List[str]] = None
    quotaDay: Optional[int] = None
    surplusDay: Optional[int] = None
23 | 
24 | 
class RefreshResult(BaseModel):
    """Deserialized response body of the CDN refresh API.

    BUG FIX: `@dataclass` removed — it must not be combined with pydantic's
    `BaseModel` (its generated methods override pydantic's).
    """

    code: Optional[int] = None
    error: Optional[str] = None
    requestId: Optional[str] = None
    taskIds: Optional[Dict[str, str]] = None
    invalidUrls: Optional[List[str]] = None
    invalidDirs: Optional[List[str]] = None
    urlQuotaDay: Optional[int] = None
    urlSurplusDay: Optional[int] = None
    dirQuotaDay: Optional[int] = None
    dirSurplusDay: Optional[int] = None
37 | 
38 | 
def _raise_if_resp_error(resp: ResponseInfo):
    """Raise RuntimeError unless the Qiniu response reports success."""
    if not resp.ok():
        raise RuntimeError(f"qiniu response error: {str(resp)}")
43 | 
44 | 
class CDNService:
    """Thin wrapper over qiniu's CdnManager for prefetch/refresh calls."""

    def __init__(self, cfg: config.Config):
        auth = Auth(access_key=cfg.access_key, secret_key=cfg.secret_key)
        self._cdn_manager = CdnManager(auth)

    def prefetch_urls(self, urls: Optional[List[str]] = None) -> PrefetchUrlsResult:
        """Ask the CDN to pre-fetch `urls`.

        :raises ValueError: when `urls` is None or empty

        BUG FIX: the original used a mutable default (`urls=[]`), which is
        shared across calls; `None` is the sentinel now. Behavior for
        callers passing nothing is unchanged (still ValueError).
        """
        if not urls:
            raise ValueError("urls is empty")
        info, resp = self._cdn_manager.prefetch_urls(urls)
        _raise_if_resp_error(resp)
        return PrefetchUrlsResult.model_validate(info)

    def refresh(
        self,
        urls: Optional[List[str]] = None,
        dirs: Optional[List[str]] = None,
    ) -> RefreshResult:
        """Refresh CDN caches for `urls` and/or `dirs` (at least one required).

        BUG FIX: mutable default arguments replaced with None sentinels.
        """
        if not urls and not dirs:
            raise ValueError("urls and dirs cannot be empty")
        info, resp = self._cdn_manager.refresh_urls_and_dirs(urls or [], dirs or [])
        _raise_if_resp_error(resp)
        return RefreshResult.model_validate(info)
63 | 
64 | 
65 | __all__ = [
66 |     "PrefetchUrlsResult",
67 |     "RefreshResult",
68 |     "CDNService",
69 | ]
70 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/tools/tools.py:
--------------------------------------------------------------------------------

```python
  1 | import functools
  2 | import inspect
  3 | import asyncio
  4 | import logging
  5 | import fastjsonschema
  6 | 
  7 | from typing import List, Dict, Callable, Optional, Union, Awaitable
  8 | from dataclasses import dataclass
  9 | from mcp import types
 10 | 
 11 | from .. import consts
 12 | 
 13 | logger = logging.getLogger(consts.LOGGER_NAME)
 14 | 
# Content types a tool may return to the MCP client.
ToolResult = list[types.TextContent | types.ImageContent | types.EmbeddedResource]
# Synchronous and asynchronous tool-callable signatures.
ToolFunc = Callable[..., ToolResult]
AsyncToolFunc = Callable[..., Awaitable[ToolResult]]


@dataclass
class _ToolEntry:
    # A registered tool: MCP metadata, exactly one of func/async_func set,
    # and a compiled JSON-schema validator for its input arguments.
    meta: types.Tool
    func: Optional[ToolFunc]
    async_func: Optional[AsyncToolFunc]
    input_validator: Optional[Callable[..., None]]


# Global tool registry, keyed by tool name.
_all_tools: Dict[str, _ToolEntry] = {}
 30 | 
 31 | 
 32 | def all_tools() -> List[types.Tool]:
 33 |     """获取所有工具"""
 34 |     if not _all_tools:
 35 |         raise ValueError("No tools registered")
 36 |     return list(map(lambda x: x.meta, _all_tools.values()))
 37 | 
 38 | 
 39 | def register_tool(
 40 |         meta: types.Tool,
 41 |         func: Union[ToolFunc, AsyncToolFunc],
 42 | ) -> None:
 43 |     """注册工具,禁止重复名称"""
 44 |     name = meta.name
 45 |     if name in _all_tools:
 46 |         raise ValueError(f"Tool {name} already registered")
 47 | 
 48 |     # 判断是否为异步函数
 49 |     if inspect.iscoroutinefunction(func):
 50 |         async_func = func
 51 |         func = None
 52 |     else:
 53 |         async_func = None
 54 |     entry = _ToolEntry(
 55 |         meta=meta,
 56 |         func=func,
 57 |         async_func=async_func,
 58 |         input_validator=fastjsonschema.compile(meta.inputSchema),
 59 |     )
 60 |     _all_tools[name] = entry
 61 | 
 62 | 
 63 | def tool_meta(meta: types.Tool):
 64 |     def _add_metadata(**kwargs):
 65 |         def decorator(func):
 66 |             if inspect.iscoroutinefunction(func):
 67 | 
 68 |                 @functools.wraps(func)
 69 |                 async def async_wrapper(*args, **kwargs):
 70 |                     return await func(*args, **kwargs)
 71 | 
 72 |                 wrapper = async_wrapper
 73 |             else:
 74 | 
 75 |                 @functools.wraps(func)
 76 |                 def sync_wrapper(*args, **kwargs):
 77 |                     return func(*args, **kwargs)
 78 | 
 79 |                 wrapper = sync_wrapper
 80 |             for key, value in kwargs.items():
 81 |                 setattr(wrapper, key, value)
 82 |             return wrapper
 83 | 
 84 |         return decorator
 85 | 
 86 |     return _add_metadata(tool_meta=meta)
 87 | 
 88 | 
 89 | def auto_register_tools(func_list: list[Union[ToolFunc, AsyncToolFunc]]):
 90 |     """尝试自动注册带有 tool_meta 的工具"""
 91 |     for func in func_list:
 92 |         if hasattr(func, "tool_meta"):
 93 |             meta = getattr(func, "tool_meta")
 94 |             register_tool(meta=meta, func=func)
 95 |         else:
 96 |             raise ValueError("func must have tool_meta attribute")
 97 | 
 98 | 
 99 | async def call_tool(name: str, arguments: dict) -> ToolResult:
100 |     """执行工具并处理异常"""
101 | 
102 |     # 工具存在性校验
103 |     if (tool_entry := _all_tools.get(name)) is None:
104 |         raise ValueError(f"Tool {name} not found")
105 | 
106 |     # 工具输入参数校验
107 |     # 把 None 移除否则校验不过
108 |     arguments = {k: v for k, v in arguments.items() if v is not None}
109 |     try:
110 |         tool_entry.input_validator(arguments)
111 |     except fastjsonschema.JsonSchemaException as e:
112 |         raise ValueError(f"Invalid arguments for tool {name}: {e}")
113 | 
114 |     try:
115 |         if tool_entry.async_func is not None:
116 |             # 异步函数直接执行
117 |             result = await tool_entry.async_func(**arguments)
118 |             return result
119 |         elif tool_entry.func is not None:
120 |             # 同步函数需要到线程池中转化为异步函数执行
121 |             loop = asyncio.get_event_loop()
122 |             result = await loop.run_in_executor(
123 |                 executor=None,  # 使用全局线程池
124 |                 func=lambda: tool_entry.func(**arguments),
125 |             )
126 |             return result
127 |         else:
128 |             raise ValueError(f"Unexpected tool entry: {tool_entry}")
129 |     except Exception as e:
130 |         raise RuntimeError(f"Tool {name} execution error: {str(e)}") from e
131 | 
132 | 
133 | # 明确导出接口
134 | __all__ = [
135 |     "all_tools",
136 |     "register_tool",
137 |     "call_tool",
138 |     "tool_meta",
139 |     "auto_register_tools",
140 | ]
141 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/core/storage/resource.py:
--------------------------------------------------------------------------------

```python
  1 | import asyncio
  2 | import logging
  3 | import base64
  4 | 
  5 | from mcp import types
  6 | from urllib.parse import unquote
  7 | 
  8 | from mcp.server.lowlevel.helper_types import ReadResourceContents
  9 | 
 10 | from .storage import StorageService
 11 | from ...consts import consts
 12 | from ...resource import resource
 13 | from ...resource.resource import ResourceContents
 14 | 
 15 | logger = logging.getLogger(consts.LOGGER_NAME)
 16 | 
 17 | 
class _ResourceProvider(resource.ResourceProvider):
    """Exposes objects in configured S3-compatible (Qiniu) buckets as MCP
    resources under the ``s3://`` URI scheme.
    """

    def __init__(self, storage: StorageService):
        # "s3" is the URI scheme this provider handles.
        super().__init__("s3")
        self.storage = storage

    async def list_resources(
        self, prefix: str = "", max_keys: int = 20, **kwargs
    ) -> list[types.Resource]:
        """
        List S3 buckets and their contents as resources with pagination
        Args:
            prefix: Prefix listing after this bucket name
            max_keys: Returns the maximum number of keys (up to 100), default 20
        """
        resources = []
        logger.debug("Starting to list resources")
        logger.debug(f"Configured buckets: {self.storage.config.buckets}")

        try:
            # Get limited number of buckets
            buckets = await self.storage.list_buckets(prefix)

            # limit concurrent operations
            async def process_bucket(bucket):
                bucket_name = bucket["Name"]
                logger.debug(f"Processing bucket: {bucket_name}")

                try:
                    # List objects in the bucket with a reasonable limit
                    objects = await self.storage.list_objects(
                        bucket_name, max_keys=max_keys
                    )

                    for obj in objects:
                        # Skip "directory" placeholder keys (keys ending in "/").
                        if "Key" in obj and not obj["Key"].endswith("/"):
                            object_key = obj["Key"]
                            # MIME type is derived from the extension only; every
                            # image extension is reported as image/png regardless
                            # of the actual image format.
                            if self.storage.is_markdown_file(object_key):
                                mime_type = "text/markdown"
                            elif self.storage.is_image_file(object_key):
                                mime_type = "image/png"
                            else:
                                mime_type = "text/plain"

                            resource_entry = types.Resource(
                                uri=f"s3://{bucket_name}/{object_key}",
                                name=object_key,
                                mimeType=mime_type,
                                description=str(obj),
                            )
                            resources.append(resource_entry)
                            logger.debug(f"Added resource: {resource_entry.uri}")

                except Exception as e:
                    # A failure in one bucket must not abort listing the others.
                    logger.error(
                        f"Error listing objects in bucket {bucket_name}: {str(e)}"
                    )

            # Use semaphore to limit concurrent bucket processing
            semaphore = asyncio.Semaphore(3)  # Limit concurrent bucket processing

            async def process_bucket_with_semaphore(bucket):
                async with semaphore:
                    await process_bucket(bucket)

            # Process buckets concurrently
            await asyncio.gather(
                *[process_bucket_with_semaphore(bucket) for bucket in buckets]
            )

        except Exception as e:
            logger.error(f"Error listing buckets: {str(e)}")
            raise

        logger.info(f"Returning {len(resources)} resources")
        return resources

    async def read_resource(self, uri: types.AnyUrl, **kwargs) -> ResourceContents:
        """
        Read content from an S3 resource and return structured response

        Returns:
            Dict containing 'contents' list with uri, mimeType, and text for each resource
        """
        # NOTE(review): the body actually returns a list[ReadResourceContents];
        # the declared ResourceContents annotation is presumably an alias for
        # that — confirm against resource.resource.
        uri_str = str(uri)
        logger.debug(f"Reading resource: {uri_str}")

        if not uri_str.startswith("s3://"):
            raise ValueError("Invalid S3 URI")

        # Parse the S3 URI
        path = uri_str[5:]  # Remove "s3://"
        path = unquote(path)  # Decode URL-encoded characters
        parts = path.split("/", 1)

        if len(parts) < 2:
            raise ValueError("Invalid S3 URI format")

        bucket = parts[0]
        key = parts[1]

        response = await self.storage.get_object(bucket, key)
        file_content = response["Body"]

        content_type = response.get("ContentType", "application/octet-stream")
        # Images are base64-encoded; other content is returned as-is.
        if content_type.startswith("image/"):
            file_content = base64.b64encode(file_content).decode("utf-8")

        return [ReadResourceContents(mime_type=content_type, content=file_content)]
127 | 
128 | 
def register_resource_provider(storage: StorageService):
    """Create an S3 resource provider backed by *storage* and register it globally."""
    resource.register_resource_provider(_ResourceProvider(storage))
132 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/core/cdn/tools.py:
--------------------------------------------------------------------------------

```python
  1 | from .cdn import CDNService
  2 | from ...consts import consts
  3 | from ...tools import tools
  4 | import logging
  5 | from mcp import types
  6 | from typing import Optional, List
  7 | 
  8 | logger = logging.getLogger(consts.LOGGER_NAME)
  9 | 
 10 | 
 11 | def _build_base_list(
 12 |     code: Optional[int],
 13 |     error: Optional[str],
 14 |     request_id: Optional[str],
 15 | ) -> List[str]:
 16 |     rets = []
 17 |     if code:
 18 |         rets.append(f"Status Code: {code}")
 19 |     if error:
 20 |         rets.append(f"Message: {error}")
 21 |     if request_id:
 22 |         rets.append(f"RequestID: {request_id}")
 23 |     return rets
 24 | 
 25 | 
 26 | class _ToolImpl:
 27 |     def __init__(self, cdn: CDNService):
 28 |         self._cdn = cdn
 29 | 
 30 |     @tools.tool_meta(
 31 |         types.Tool(
 32 |             name="cdn_prefetch_urls",
 33 |             description="Newly added resources are proactively retrieved by the CDN and stored on its cache nodes in advance. Users simply submit the resource URLs, and the CDN automatically triggers the prefetch process.",
 34 |             inputSchema={
 35 |                 "type": "object",
 36 |                 "additionalProperties": False,
 37 |                 "properties": {
 38 |                     "urls": {
 39 |                         "type": "array",
 40 |                         "description": "List of individual URLs to prefetch (max 60 items). Must be full URLs with protocol, e.g. 'http://example.com/file.zip'",
 41 |                         "items": {
 42 |                             "type": "string",
 43 |                             "format": "uri",
 44 |                             "pattern": "^https?://",
 45 |                             "examples": [
 46 |                                 "https://cdn.example.com/images/photo.jpg",
 47 |                                 "http://static.example.com/downloads/app.exe",
 48 |                             ],
 49 |                         },
 50 |                         "maxItems": 60,
 51 |                         "minItems": 1,
 52 |                     }
 53 |                 },
 54 |                 "required": ["urls"],
 55 |             },
 56 |         )
 57 |     )
 58 |     def prefetch_urls(self, **kwargs) -> list[types.TextContent]:
 59 |         ret = self._cdn.prefetch_urls(**kwargs)
 60 | 
 61 |         rets = _build_base_list(ret.code, ret.error, ret.requestId)
 62 |         if ret.invalidUrls:
 63 |             rets.append(f"Invalid URLs: {ret.invalidUrls}")
 64 |         if ret.code // 100 == 2:
 65 |             if ret.quotaDay is not None:
 66 |                 rets.append(f"Today's prefetch quota: {ret.quotaDay}")
 67 |             if ret.surplusDay is not None:
 68 |                 rets.append(f"Today's remaining quota: {ret.surplusDay}")
 69 | 
 70 |         return [
 71 |             types.TextContent(
 72 |                 type="text",
 73 |                 text="\n".join(rets),
 74 |             )
 75 |         ]
 76 | 
 77 |     @tools.tool_meta(
 78 |         types.Tool(
 79 |             name="cdn_refresh",
 80 |             description="This function marks resources cached on CDN nodes as expired. When users access these resources again, the CDN nodes will fetch the latest version from the origin server and store them anew.",
 81 |             inputSchema={
 82 |                 "type": "object",
 83 |                 "additionalProperties": False,  # 不允许出现未定义的属性
 84 |                 "properties": {
 85 |                     "urls": {
 86 |                         "type": "array",
 87 |                         "items": {
 88 |                             "type": "string",
 89 |                             "format": "uri",
 90 |                             "pattern": "^https?://",  # 匹配http://或https://开头的URL
 91 |                             "examples": ["http://bar.foo.com/index.html"],
 92 |                         },
 93 |                         "maxItems": 60,
 94 |                         "description": "List of exact URLs to refresh (max 60 items). Must be full URLs with protocol, e.g. 'http://example.com/path/page.html'",
 95 |                     },
 96 |                     "dirs": {
 97 |                         "type": "array",
 98 |                         "items": {
 99 |                             "type": "string",
100 |                             "pattern": "^https?://.*/(\\*|$)",  # 匹配以http://或https://开头的URL,并以/或者以/*结尾的字符串
101 |                             "examples": [
102 |                                 "http://bar.foo.com/dir/",
103 |                                 "http://bar.foo.com/images/*",
104 |                             ],
105 |                         },
106 |                         "maxItems": 10,
107 |                         "description": "List of directory patterns to refresh (max 10 items). Must end with '/' or '/*' to indicate directory scope",
108 |                     },
109 |                 }
110 |             },
111 |         )
112 |     )
113 |     def refresh(self, **kwargs) -> list[types.TextContent]:
114 |         ret = self._cdn.refresh(**kwargs)
115 |         rets = _build_base_list(ret.code, ret.error, ret.requestId)
116 |         if ret.taskIds is not None:
117 |             # 这个可能暂时用不到
118 |             pass
119 |         if ret.invalidUrls:
120 |             rets.append(f"Invalid URLs list: {ret.invalidUrls}")
121 |         if ret.invalidDirs:
122 |             rets.append(f"Invalid dirs: {ret.invalidDirs}")
123 | 
124 |         if ret.code // 100 == 2:
125 |             if ret.urlQuotaDay is not None:
126 |                 rets.append(f"Today's URL refresh quota: {ret.urlQuotaDay}")
127 |             if ret.urlSurplusDay is not None:
128 |                 rets.append(f"Today's remaining URL refresh quota: {ret.urlSurplusDay}")
129 |             if ret.dirQuotaDay is not None:
130 |                 rets.append(f"Today's directory refresh quota: {ret.dirQuotaDay}")
131 |             if ret.dirSurplusDay is not None:
132 |                 rets.append(
133 |                     f"Today's remaining directory refresh quota: {ret.dirSurplusDay}"
134 |                 )
135 |         return [
136 |             types.TextContent(
137 |                 type="text",
138 |                 text="\n".join(rets),
139 |             )
140 |         ]
141 | 
142 | 
def register_tools(cdn: CDNService):
    """Instantiate the CDN tool implementation and register its tools."""
    impl = _ToolImpl(cdn)
    tools.auto_register_tools([impl.refresh, impl.prefetch_urls])
151 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/core/storage/storage.py:
--------------------------------------------------------------------------------

```python
  1 | import aioboto3
  2 | import logging
  3 | import qiniu
  4 | 
  5 | from typing import List, Dict, Any, Optional
  6 | from botocore.config import Config as S3Config
  7 | 
  8 | from ...config import config
  9 | from ...consts import consts
 10 | 
 11 | logger = logging.getLogger(consts.LOGGER_NAME)
 12 | 
 13 | 
 14 | class StorageService:
 15 |     def __init__(self, cfg: config.Config = None):
 16 |         # Configure boto3 with retries and timeouts
 17 |         self.s3_config = S3Config(
 18 |             retries=dict(max_attempts=2, mode="adaptive"),
 19 |             connect_timeout=30,
 20 |             read_timeout=60,
 21 |             max_pool_connections=50,
 22 |         )
 23 |         self.config = cfg
 24 |         self.s3_session = aioboto3.Session()
 25 |         self.auth = qiniu.Auth(cfg.access_key, cfg.secret_key)
 26 |         self.bucket_manager = qiniu.BucketManager(self.auth, preferred_scheme="https")
 27 | 
 28 |     def get_object_url(
 29 |             self, bucket: str, key: str, disable_ssl: bool = False, expires: int = 3600
 30 |     ) -> list[dict[str:Any]]:
 31 |         # 获取下载域名
 32 |         domains_getter = getattr(self.bucket_manager, "_BucketManager__uc_do_with_retrier")
 33 |         domains_list, domain_response = domains_getter('/v3/domains?tbl={0}'.format(bucket))
 34 |         if domain_response.status_code != 200:
 35 |             raise Exception(
 36 |                 f"get bucket domain error:{domain_response.exception} reqId:{domain_response.req_id}"
 37 |             )
 38 | 
 39 |         if not domains_list or len(domains_list) == 0:
 40 |             raise Exception(
 41 |                 f"get bucket domain error:domains_list is empty reqId:{domain_response.req_id}"
 42 |             )
 43 | 
 44 |         http_schema = "https" if not disable_ssl else "http"
 45 |         object_public_urls = []
 46 |         for domain in domains_list:
 47 |             # 被冻结
 48 |             freeze_types = domain.get("freeze_types")
 49 |             if freeze_types is not None:
 50 |                 continue
 51 | 
 52 |             domain_url = domain.get("domain")
 53 |             if domain_url is None:
 54 |                 continue
 55 | 
 56 |             object_public_urls.append({
 57 |                 "object_url": f"{http_schema}://{domain_url}/{key}",
 58 |                 "domain_type": "cdn" if domain.get("domaintype") is None or domain.get("domaintype") == 0 else "origin"
 59 |             })
 60 | 
 61 |         object_urls = []
 62 |         bucket_info, bucket_info_response = self.bucket_manager.bucket_info(bucket)
 63 |         if domain_response.status_code != 200:
 64 |             raise Exception(
 65 |                 f"get bucket domain error:{bucket_info_response.exception} reqId:{bucket_info_response.req_id}"
 66 |             )
 67 |         if bucket_info["private"] != 0:
 68 |             for url_info in object_public_urls:
 69 |                 public_url = url_info.get("object_url")
 70 |                 if public_url is None:
 71 |                     continue
 72 |                 url_info["object_url"] = self.auth.private_download_url(public_url, expires=expires)
 73 |                 object_urls.append(url_info)
 74 |         else:
 75 |             for url_info in object_public_urls:
 76 |                 object_urls.append(url_info)
 77 |         return object_urls
 78 | 
 79 |     async def list_buckets(self, prefix: Optional[str] = None) -> List[dict]:
 80 |         if not self.config.buckets or len(self.config.buckets) == 0:
 81 |             return []
 82 | 
 83 |         max_buckets = 50
 84 | 
 85 |         async with self.s3_session.client(
 86 |                 "s3",
 87 |                 aws_access_key_id=self.config.access_key,
 88 |                 aws_secret_access_key=self.config.secret_key,
 89 |                 endpoint_url=self.config.endpoint_url,
 90 |                 region_name=self.config.region_name,
 91 |         ) as s3:
 92 |             # If buckets are configured, only return those
 93 |             response = await s3.list_buckets()
 94 |             all_buckets = response.get("Buckets", [])
 95 | 
 96 |             configured_bucket_list = [
 97 |                 bucket
 98 |                 for bucket in all_buckets
 99 |                 if bucket["Name"] in self.config.buckets
100 |             ]
101 | 
102 |             if prefix:
103 |                 configured_bucket_list = [
104 |                     b for b in configured_bucket_list if b["Name"] > prefix
105 |                 ]
106 | 
107 |             return configured_bucket_list[:max_buckets]
108 | 
109 |     async def list_objects(
110 |             self, bucket: str, prefix: str = "", max_keys: int = 20, start_after: str = ""
111 |     ) -> List[dict]:
112 |         if self.config.buckets and bucket not in self.config.buckets:
113 |             logger.warning(f"Bucket {bucket} not in configured bucket list")
114 |             return []
115 | 
116 |         if isinstance(max_keys, str):
117 |             max_keys = int(max_keys)
118 | 
119 |         if max_keys > 100:
120 |             max_keys = 100
121 | 
122 |         async with self.s3_session.client(
123 |                 "s3",
124 |                 aws_access_key_id=self.config.access_key,
125 |                 aws_secret_access_key=self.config.secret_key,
126 |                 endpoint_url=self.config.endpoint_url,
127 |                 region_name=self.config.region_name,
128 |         ) as s3:
129 |             response = await s3.list_objects_v2(
130 |                 Bucket=bucket,
131 |                 Prefix=prefix,
132 |                 MaxKeys=max_keys,
133 |                 StartAfter=start_after,
134 |             )
135 |             return response.get("Contents", [])
136 | 
137 |     async def get_object(self, bucket: str, key: str) -> Dict[str, Any]:
138 |         if self.config.buckets and bucket not in self.config.buckets:
139 |             logger.warning(f"Bucket {bucket} not in configured bucket list")
140 |             return {}
141 | 
142 |         async with self.s3_session.client(
143 |                 "s3",
144 |                 aws_access_key_id=self.config.access_key,
145 |                 aws_secret_access_key=self.config.secret_key,
146 |                 endpoint_url=self.config.endpoint_url,
147 |                 region_name=self.config.region_name,
148 |                 config=self.s3_config,
149 |         ) as s3:
150 |             # Get the object and its stream
151 |             response = await s3.get_object(Bucket=bucket, Key=key)
152 |             stream = response["Body"]
153 | 
154 |             # Read the entire stream in chunks
155 |             chunks = []
156 |             async for chunk in stream:
157 |                 chunks.append(chunk)
158 | 
159 |             # Replace the stream with the complete data
160 |             response["Body"] = b"".join(chunks)
161 |             return response
162 | 
163 |     def upload_text_data(self, bucket: str, key: str, data: str, overwrite: bool = False) -> list[dict[str:Any]]:
164 |         policy = {
165 |             "insertOnly": 1,
166 |         }
167 | 
168 |         if overwrite:
169 |             policy["insertOnly"] = 0
170 |             policy["scope"] = f"{bucket}:{key}"
171 | 
172 |         token = self.auth.upload_token(bucket=bucket, key=key, policy=policy)
173 |         ret, info = qiniu.put_data(up_token=token, key=key, data=bytes(data, encoding="utf-8"))
174 |         if info.status_code != 200:
175 |             raise Exception(f"Failed to upload object: {info}")
176 | 
177 |         return self.get_object_url(bucket, key)
178 | 
179 |     def upload_local_file(self, bucket: str, key: str, file_path: str, overwrite: bool = False) -> list[dict[str:Any]]:
180 |         policy = {
181 |             "insertOnly": 1,
182 |         }
183 | 
184 |         if overwrite:
185 |             policy["insertOnly"] = 0
186 |             policy["scope"] = f"{bucket}:{key}"
187 | 
188 |         token = self.auth.upload_token(bucket=bucket, key=key, policy=policy)
189 |         ret, info = qiniu.put_file(up_token=token, key=key, file_path=file_path)
190 |         if info.status_code != 200:
191 |             raise Exception(f"Failed to upload object: {info}")
192 | 
193 |         return self.get_object_url(bucket, key)
194 | 
195 |     def fetch_object(self, bucket: str, key: str, url: str):
196 |         ret, info = self.bucket_manager.fetch(url, bucket, key=key)
197 |         if info.status_code != 200:
198 |             raise Exception(f"Failed to fetch object: {info}")
199 | 
200 |         return self.get_object_url(bucket, key)
201 | 
202 |     def is_text_file(self, key: str) -> bool:
203 |         text_extensions = {
204 |             ".ini",
205 |             ".conf",
206 |             ".py",
207 |             ".js",
208 |             ".xml",
209 |             ".yml",
210 |             ".properties",
211 |             ".txt",
212 |             ".log",
213 |             ".json",
214 |             ".yaml",
215 |             ".md",
216 |             ".csv",
217 |             ".html",
218 |             ".css",
219 |             ".sh",
220 |             ".bash",
221 |             ".cfg",
222 |         }
223 |         return any(key.lower().endswith(ext) for ext in text_extensions)
224 | 
225 | 
226 |     def is_image_file(self, key: str) -> bool:
227 |         """Determine if a file is text-based by its extension"""
228 |         text_extensions = {
229 |             ".gif",
230 |             ".png",
231 |             ".jpg",
232 |             ".bmp",
233 |             ".jpeg",
234 |             ".tiff",
235 |             ".webp",
236 |             ".svg",
237 |         }
238 |         return any(key.lower().endswith(ext) for ext in text_extensions)
239 | 
240 | 
241 |     def is_markdown_file(self, key: str) -> bool:
242 |         text_extensions = {
243 |             ".md",
244 |         }
245 |         return any(key.lower().endswith(ext) for ext in text_extensions)
246 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/core/storage/tools.py:
--------------------------------------------------------------------------------

```python
  1 | import logging
  2 | import base64
  3 | 
  4 | from mcp import types
  5 | from mcp.types import ImageContent, TextContent
  6 | 
  7 | from .storage import StorageService
  8 | from ...consts import consts
  9 | from ...tools import tools
 10 | 
 11 | logger = logging.getLogger(consts.LOGGER_NAME)
 12 | 
 13 | _BUCKET_DESC = "Qiniu Cloud Storage bucket Name"
 14 | 
 15 | class _ToolImpl:
 16 |     def __init__(self, storage: StorageService):
 17 |         self.storage = storage
 18 | 
 19 |     @tools.tool_meta(
 20 |         types.Tool(
 21 |             name="list_buckets",
 22 |             description="Return the Bucket you configured based on the conditions.",
 23 |             inputSchema={
 24 |                 "type": "object",
 25 |                 "properties": {
 26 |                     "prefix": {
 27 |                         "type": "string",
 28 |                         "description": "Bucket prefix. The listed Buckets will be filtered based on this prefix, and only those matching the prefix will be output.",
 29 |                     },
 30 |                 },
 31 |                 "required": [],
 32 |             },
 33 |         )
 34 |     )
 35 |     async def list_buckets(self, **kwargs) -> list[types.TextContent]:
 36 |         buckets = await self.storage.list_buckets(**kwargs)
 37 |         return [types.TextContent(type="text", text=str(buckets))]
 38 | 
 39 |     @tools.tool_meta(
 40 |         types.Tool(
 41 |             name="list_objects",
 42 |             description="List objects in Qiniu Cloud, list a part each time, you can set start_after to continue listing, when the number of listed objects is less than max_keys, it means that all files are listed. start_after can be the key of the last file in the previous listing.",
 43 |             inputSchema={
 44 |                 "type": "object",
 45 |                 "properties": {
 46 |                     "bucket": {
 47 |                         "type": "string",
 48 |                         "description": _BUCKET_DESC,
 49 |                     },
 50 |                     "max_keys": {
 51 |                         "type": "integer",
 52 |                         "description": "Sets the max number of keys returned, default: 20",
 53 |                     },
 54 |                     "prefix": {
 55 |                         "type": "string",
 56 |                         "description": "Specify the prefix of the operation response key. Only keys that meet this prefix will be listed.",
 57 |                     },
 58 |                     "start_after": {
 59 |                         "type": "string",
 60 |                         "description": "start_after is where you want Qiniu Cloud to start listing from. Qiniu Cloud starts listing after this specified key. start_after can be any key in the bucket.",
 61 |                     },
 62 |                 },
 63 |                 "required": ["bucket"],
 64 |             },
 65 |         )
 66 |     )
 67 |     async def list_objects(self, **kwargs) -> list[types.TextContent]:
 68 |         objects = await self.storage.list_objects(**kwargs)
 69 |         return [types.TextContent(type="text", text=str(objects))]
 70 | 
 71 |     @tools.tool_meta(
 72 |         types.Tool(
 73 |             name="get_object",
 74 |             description="Get an object contents from Qiniu Cloud bucket. In the GetObject request, specify the full key name for the object.",
 75 |             inputSchema={
 76 |                 "type": "object",
 77 |                 "properties": {
 78 |                     "bucket": {
 79 |                         "type": "string",
 80 |                         "description": _BUCKET_DESC,
 81 |                     },
 82 |                     "key": {
 83 |                         "type": "string",
 84 |                         "description": "Key of the object to get.",
 85 |                     },
 86 |                 },
 87 |                 "required": ["bucket", "key"],
 88 |             },
 89 |         )
 90 |     )
 91 |     async def get_object(self, **kwargs) -> list[ImageContent] | list[TextContent]:
 92 |         response = await self.storage.get_object(**kwargs)
 93 |         file_content = response["Body"]
 94 |         content_type = response.get("ContentType", "application/octet-stream")
 95 | 
 96 |         # 根据内容类型返回不同的响应
 97 |         if content_type.startswith("image/"):
 98 |             base64_data = base64.b64encode(file_content).decode("utf-8")
 99 |             return [
100 |                 types.ImageContent(
101 |                     type="image", data=base64_data, mimeType=content_type
102 |                 )
103 |             ]
104 | 
105 |         if isinstance(file_content, bytes):
106 |             text_content = file_content.decode("utf-8")
107 |         else:
108 |             text_content = str(file_content)
109 |         return [types.TextContent(type="text", text=text_content)]
110 | 
111 |     @tools.tool_meta(
112 |         types.Tool(
113 |             name="upload_text_data",
114 |             description="Upload text data to Qiniu bucket.",
115 |             inputSchema={
116 |                 "type": "object",
117 |                 "properties": {
118 |                     "bucket": {
119 |                         "type": "string",
120 |                         "description": _BUCKET_DESC,
121 |                     },
122 |                     "key": {
123 |                         "type": "string",
124 |                         "description": "The key under which a file is saved in Qiniu Cloud Storage serves as the unique identifier for the file within that space, typically using the filename.",
125 |                     },
126 |                     "data": {
127 |                         "type": "string",
128 |                         "description": "The data to upload.",
129 |                     },
130 |                     "overwrite": {
131 |                         "type": "boolean",
132 |                         "description": "Whether to overwrite the existing object if it already exists.",
133 |                     },
134 |                 },
135 |                 "required": ["bucket", "key", "data"],
136 |             }
137 |         )
138 |     )
139 |     def upload_text_data(self, **kwargs) -> list[types.TextContent]:
140 |         urls = self.storage.upload_text_data(**kwargs)
141 |         return [types.TextContent(type="text", text=str(urls))]
142 | 
143 |     @tools.tool_meta(
144 |         types.Tool(
145 |             name="upload_local_file",
146 |             description="Upload a local file to Qiniu bucket.",
147 |             inputSchema={
148 |                 "type": "object",
149 |                 "properties": {
150 |                     "bucket": {
151 |                         "type": "string",
152 |                         "description": _BUCKET_DESC,
153 |                     },
154 |                     "key": {
155 |                         "type": "string",
156 |                         "description": "The key under which a file is saved in Qiniu Cloud Storage serves as the unique identifier for the file within that space, typically using the filename.",
157 |                     },
158 |                     "file_path": {
159 |                         "type": "string",
160 |                         "description": "The file path of file to upload.",
161 |                     },
162 |                     "overwrite": {
163 |                         "type": "boolean",
164 |                         "description": "Whether to overwrite the existing object if it already exists.",
165 |                     },
166 |                 },
167 |                 "required": ["bucket", "key", "file_path"],
168 |             }
169 |         )
170 |     )
171 |     def upload_local_file(self, **kwargs) -> list[types.TextContent]:
172 |         urls = self.storage.upload_local_file(**kwargs)
173 |         return [types.TextContent(type="text", text=str(urls))]
174 | 
175 |     @tools.tool_meta(
176 |         types.Tool(
177 |             name="fetch_object",
178 |             description="Fetch a http object to Qiniu bucket.",
179 |             inputSchema={
180 |                 "type": "object",
181 |                 "properties": {
182 |                     "bucket": {
183 |                         "type": "string",
184 |                         "description": _BUCKET_DESC,
185 |                     },
186 |                     "key": {
187 |                         "type": "string",
188 |                         "description": "The key under which a file is saved in Qiniu Cloud Storage serves as the unique identifier for the file within that space, typically using the filename.",
189 |                     },
190 |                     "url": {
191 |                         "type": "string",
192 |                         "description": "The URL of the object to fetch.",
193 |                     },
194 |                 },
195 |                 "required": ["bucket", "key", "url"],
196 |             }
197 |         )
198 |     )
199 |     def fetch_object(self, **kwargs) -> list[types.TextContent]:
200 |         urls = self.storage.fetch_object(**kwargs)
201 |         return [types.TextContent(type="text", text=str(urls))]
202 | 
203 |     @tools.tool_meta(
204 |         types.Tool(
205 |             name="get_object_url",
206 |             description="Get the file download URL, and note that the Bucket where the file is located must be bound to a domain name. If using Qiniu Cloud test domain, HTTPS access will not be available, and users need to make adjustments for this themselves.",
207 |             inputSchema={
208 |                 "type": "object",
209 |                 "properties": {
210 |                     "bucket": {
211 |                         "type": "string",
212 |                         "description": _BUCKET_DESC,
213 |                     },
214 |                     "key": {
215 |                         "type": "string",
216 |                         "description": "Key of the object to get.",
217 |                     },
218 |                     "disable_ssl": {
219 |                         "type": "boolean",
220 |                         "description": "Whether to disable SSL. By default, it is not disabled (HTTP protocol is used). If disabled, the HTTP protocol will be used.",
221 |                     },
222 |                     "expires": {
223 |                         "type": "integer",
224 |                         "description": "Token expiration time (in seconds) for download links. When the bucket is private, a signed Token is required to access file objects. Public buckets do not require Token signing.",
225 |                     },
226 |                 },
227 |                 "required": ["bucket", "key"],
228 |             },
229 |         )
230 |     )
231 |     def get_object_url(self, **kwargs) -> list[types.TextContent]:
232 |         urls = self.storage.get_object_url(**kwargs)
233 |         return [types.TextContent(type="text", text=str(urls))]
234 | 
235 | 
def register_tools(storage: StorageService):
    """Register all storage tools with the global tool registry.

    Includes ``fetch_object``: the tool is fully defined with metadata in
    this module but was previously never registered, which made it
    unreachable from the MCP server.
    """
    tool_impl = _ToolImpl(storage)
    tools.auto_register_tools(
        [
            tool_impl.list_buckets,
            tool_impl.list_objects,
            tool_impl.get_object,
            tool_impl.upload_text_data,
            tool_impl.upload_local_file,
            tool_impl.fetch_object,  # was defined but missing from registration
            tool_impl.get_object_url,
        ]
    )
248 | 
```

--------------------------------------------------------------------------------
/src/mcp_server/core/media_processing/tools.py:
--------------------------------------------------------------------------------

```python
  1 | import logging
  2 | 
  3 | import qiniu
  4 | from mcp import types
  5 | 
  6 | from . import utils
  7 | from .processing import MediaProcessingService
  8 | from ...config import config
  9 | from ...consts import consts
 10 | from ...tools import tools
 11 | 
 12 | logger = logging.getLogger(consts.LOGGER_NAME)
 13 | 
 14 | _OBJECT_URL_DESC = "The URL of the image. This can be a URL obtained via the GetObjectURL tool or a URL generated by other Fop tools. Length Constraints: Minimum length of 1."
 15 | 
 16 | 
 17 | class _ToolImpl:
 18 |     def __init__(self, cfg: config.Config, cli: MediaProcessingService):
 19 |         self.auth = qiniu.Auth(cfg.access_key, cfg.secret_key)
 20 |         self.client = cli
 21 | 
 22 |     @tools.tool_meta(
 23 |         types.Tool(
 24 |             name="image_scale_by_percent",
 25 |             description="""Image scaling tool that resizes images based on a percentage and returns information about the scaled image.
 26 |             The information includes the object_url of the scaled image, which users can directly use for HTTP GET requests to retrieve the image content or open in a browser to view the file.
 27 |             The image must be stored in a Qiniu Cloud Bucket.
 28 |             Supported original image formats: psd, jpeg, png, gif, webp, tiff, bmp, avif, heic. Image width and height cannot exceed 30,000 pixels, and total pixels cannot exceed 150 million.
 29 |             """,
 30 |             inputSchema={
 31 |                 "type": "object",
 32 |                 "properties": {
 33 |                     "object_url": {
 34 |                         "type": "string", 
 35 |                         "description": _OBJECT_URL_DESC
 36 |                     },
 37 |                     "percent": {
 38 |                         "type": "integer",
 39 |                         "description": "Scaling percentage, range [1,999]. For example: 90 means the image width and height are reduced to 90% of the original; 200 means the width and height are enlarged to 200% of the original.",
 40 |                         "minimum": 1,
 41 |                         "maximum": 999
 42 |                     },
 43 |                 },
 44 |                 "required": ["object_url", "percent"],
 45 |             },
 46 |         )
 47 |     )
 48 |     def image_scale_by_percent(
 49 |             self, **kwargs
 50 |     ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
 51 |         object_url = kwargs.get("object_url", "")
 52 |         percent = kwargs.get("percent", "")
 53 |         if object_url is None or len(object_url) == 0:
 54 |             return [types.TextContent(type="text", text="object_url is required")]
 55 | 
 56 |         percent_int = int(percent)
 57 |         if percent_int < 1 or percent_int > 999:
 58 |             return [
 59 |                 types.TextContent(type="text", text="percent must be between 1 and 999")
 60 |             ]
 61 | 
 62 |         func = f"imageMogr2/thumbnail/!{percent}p"
 63 |         object_url = utils.url_add_processing_func(auth=self.auth, url=object_url, func=func)
 64 |         return [
 65 |             types.TextContent(
 66 |                 type="text",
 67 |                 text=str(
 68 |                     {
 69 |                         "object_url": object_url,
 70 |                     }
 71 |                 ),
 72 |             )
 73 |         ]
 74 | 
 75 |     @tools.tool_meta(
 76 |         types.Tool(
 77 |             name="image_scale_by_size",
 78 |             description="""Image scaling tool that resizes images based on a specified width or height and returns information about the scaled image.
 79 |             The information includes the object_url of the scaled image, which users can directly use for HTTP GET requests to retrieve the image content or open in a browser to view the file.
 80 |             The image must be stored in a Qiniu Cloud Bucket.
 81 |             Supported original image formats: psd, jpeg, png, gif, webp, tiff, bmp, avif, heic. Image width and height cannot exceed 30,000 pixels, and total pixels cannot exceed 150 million.
 82 |             """,
 83 |             inputSchema={
 84 |                 "type": "object",
 85 |                 "properties": {
 86 |                     "object_url": {
 87 |                         "type": "string", 
 88 |                         "description": _OBJECT_URL_DESC
 89 |                     },
 90 |                     "width": {
 91 |                         "type": "integer",
 92 |                         "description": "Specifies the width for image scaling. The image will be scaled to the specified width, and the height will be adjusted proportionally.",
 93 |                         "minimum": 1
 94 |                     },
 95 |                     "height": {
 96 |                         "type": "integer",
 97 |                         "description": "Specifies the height for image scaling. The image will be scaled to the specified height, and the width will be adjusted proportionally.",
 98 |                         "minimum": 1
 99 |                     },
100 |                 },
101 |                 "required": ["object_url"]
102 |             },
103 |         )
104 |     )
105 |     def image_scale_by_size(
106 |             self, **kwargs
107 |     ) -> list[types.TextContent]:
108 |         object_url = kwargs.get("object_url", "")
109 |         width = kwargs.get("width", "")
110 |         height = kwargs.get("height", "")
111 |         if object_url is None or len(object_url) == 0:
112 |             return [types.TextContent(type="text", text="object_url is required")]
113 | 
114 |         func = f"{width}x{height}"
115 |         if len(func) == 1:
116 |             return [
117 |                 types.TextContent(
118 |                     type="text", text="At least one width or height must be set"
119 |                 )
120 |             ]
121 | 
122 |         func = f"imageMogr2/thumbnail/{func}"
123 |         object_url = utils.url_add_processing_func(auth=self.auth, url=object_url, func=func)
124 |         return [
125 |             types.TextContent(
126 |                 type="text",
127 |                 text=str(
128 |                     {
129 |                         "object_url": object_url,
130 |                     }
131 |                 ),
132 |             )
133 |         ]
134 | 
135 |     @tools.tool_meta(
136 |         types.Tool(
137 |             name="image_round_corner",
138 |             description="""Image rounded corner tool that processes images based on width, height, and corner radius, returning information about the processed image.
139 |             If only radius_x or radius_y is set, the other parameter will be assigned the same value, meaning horizontal and vertical parameters will be identical.
140 |             The information includes the object_url of the processed image, which users can directly use for HTTP GET requests to retrieve the image content or open in a browser to view the file.
141 |             The image must be stored in a Qiniu Cloud Bucket.
142 |             Supported original image formats: psd, jpeg, png, gif, webp, tiff, bmp, avif, heic. Image width and height cannot exceed 30,000 pixels, and total pixels cannot exceed 150 million.
143 |             Corner radius supports pixels and percentages, but cannot be negative. Pixels are represented by numbers, e.g., 200 means 200px; percentages use !xp, e.g., !25p means 25%.""",
144 |             inputSchema={
145 |                 "type": "object",
146 |                 "properties": {
147 |                     "object_url": {
148 |                         "type": "string",
149 |                         "description": _OBJECT_URL_DESC
150 |                     },
151 |                     "radius_x": {
152 |                         "type": "string",
153 |                         "description": "Parameter for horizontal corner size. Can use: pixel values (e.g., 200 for 200px) or percentages (e.g., !25p for 25%), all non-negative values."
154 |                     },
155 |                     "radius_y": {
156 |                         "type": "string",
157 |                         "description": "Parameter for vertical corner size. Can use: pixel values (e.g., 200 for 200px) or percentages (e.g., !25p for 25%), all non-negative values."
158 |                     },
159 |                 },
160 |                 "required": ["object_url"],
161 |             }
162 |         )
163 |     )
164 |     def image_round_corner(self, **kwargs) -> list[types.TextContent]:
165 |         object_url = kwargs.get("object_url", "")
166 |         radius_x = kwargs.get("radius_x", "")
167 |         radius_y = kwargs.get("radius_y", "")
168 |         if object_url is None or len(object_url) == 0:
169 |             return [
170 |                 types.TextContent(
171 |                     type="text",
172 |                     text="object_url is required"
173 |                 )
174 |             ]
175 | 
176 |         if (radius_x is None or len(radius_x) == 0) and (radius_y is None or len(radius_y) == 0) is None:
177 |             return [
178 |                 types.TextContent(
179 |                     type="text",
180 |                     text="At least one of radius_x or radius_y must be set"
181 |                 )
182 |             ]
183 | 
184 |         if radius_x is None or len(radius_x) == 0:
185 |             radius_x = radius_y
186 |         elif radius_y is None or len(radius_y) == 0:
187 |             radius_y = radius_x
188 | 
189 |         func = f"roundPic/radiusx/{radius_x}/radiusy/{radius_y}"
190 |         object_url = utils.url_add_processing_func(auth=self.auth, url=object_url, func=func)
191 |         return [
192 |             types.TextContent(
193 |                 type="text",
194 |                 text=str({
195 |                     "object_url": object_url,
196 |                 })
197 |             )
198 |         ]
199 | 
200 |     @tools.tool_meta(
201 |         types.Tool(
202 |             name="image_info",
203 |             description="Retrieves basic image information, including image format, size, and color model.",
204 |             inputSchema={
205 |                 "type": "object",
206 |                 "properties": {
207 |                     "object_url": {
208 |                         "type": "string",
209 |                         "description": _OBJECT_URL_DESC
210 |                     },
211 |                 },
212 |                 "required": ["object_url"],
213 |             },
214 |         )
215 |     )
216 |     def image_info(self, **kwargs) -> list[types.TextContent]:
217 |         object_url = kwargs.get("object_url", "")
218 |         if object_url is None or len(object_url) == 0:
219 |             return [
220 |                 types.TextContent(
221 |                     type="text",
222 |                     text="object_url is required"
223 |                 )
224 |             ]
225 | 
226 |         func = "imageInfo"
227 |         object_url = utils.url_add_processing_func(auth=self.auth, url=object_url, func=func)
228 |         return [
229 |             types.TextContent(
230 |                 type="text",
231 |                 text=str({
232 |                     "object_url": object_url,
233 |                 })
234 |             )
235 |         ]
236 | 
237 |     @tools.tool_meta(
238 |         types.Tool(
239 |             name="get_fop_status",
240 |             description="Retrieves the execution status of a Fop operation.",
241 |             inputSchema={
242 |                 "type": "object",
243 |                 "properties": {
244 |                     "persistent_id": {
245 |                         "type": "string",
246 |                         "description": "Operation ID returned from executing a Fop operation",
247 |                     },
248 |                 },
249 |                 "required": ["persistent_id"],
250 |             },
251 |         )
252 |     )
253 |     def get_fop_status(self, **kwargs) -> list[types.TextContent]:
254 |         status = self.client.get_fop_status(**kwargs)
255 |         return [types.TextContent(type="text", text=str(status))]
256 | 
257 | 
def register_tools(cfg: config.Config, cli: MediaProcessingService):
    """Wire the image-processing tools into the shared tool registry.

    Only the synchronous URL-transform tools are exposed here; the
    registration set is unchanged.
    """
    impl = _ToolImpl(cfg, cli)
    registered = [
        impl.image_scale_by_percent,
        impl.image_scale_by_size,
        impl.image_round_corner,
        impl.image_info,
    ]
    tools.auto_register_tools(registered)
268 | 
```