# darpai/darp_engine
This is page 2 of 2. Use http://codebase.md/darpai/darp_engine?lines=true&page={x} to view the full context.

# Directory Structure

```
├── .gitignore
├── .gitlab-ci.yml
├── .pre-commit-config.yaml
├── alembic
│   ├── env.py
│   ├── README
│   ├── script.py.mako
│   └── versions
│       ├── 2025_03_24_0945-ffe7a88d5b15_initial_migration.py
│       ├── 2025_04_09_1117-e25328dbfe80_set_logo_to_none.py
│       ├── 2025_04_22_0854-1c6edbdb4642_add_status_to_servers.py
│       ├── 2025_04_25_1158-49e1ab420276_host_type.py
│       ├── 2025_04_30_1049-387bed3ce6ae_server_title.py
│       ├── 2025_05_12_1404-735603f5e4d5_creator_id.py
│       ├── 2025_05_13_0634-3b8b8a5b99ad_deployment_fields.py
│       ├── 2025_05_13_1326-4b751cb703e2_timestamps.py
│       ├── 2025_05_15_1414-7a3913a3bb4c_logs.py
│       ├── 2025_05_27_0757-7e8c31ddb2db_json_logs.py
│       ├── 2025_05_28_0632-f2298e6acd39_base_image.py
│       ├── 2025_05_30_1231-4b5c0d56e1ca_build_instructions.py
│       ├── 2025_05_31_1156-c98f44cea12f_add_transport_protocol_column_to_servers.py
│       ├── 2025_06_07_1322-aa9a14a698c5_jsonb_logs.py
│       └── 2025_06_17_1353-667874dd7dee_added_alias.py
├── alembic.ini
├── common
│   ├── __init__.py
│   ├── llm
│   │   ├── __init__.py
│   │   └── client.py
│   └── notifications
│       ├── __init__.py
│       ├── client.py
│       ├── enums.py
│       └── exceptions.py
├── deploy
│   ├── docker-version
│   ├── push-and-run
│   ├── ssh-agent
│   └── ssh-passphrase
├── docker-compose-debug.yaml
├── docker-compose.yaml
├── healthchecker
│   ├── __init__.py
│   ├── Dockerfile
│   ├── scripts
│   │   ├── worker-start-debug.sh
│   │   └── worker-start.sh
│   └── src
│       ├── __init__.py
│       ├── __main__.py
│       └── checker.py
├── LICENSE
├── mcp_server
│   ├── __init__.py
│   ├── Dockerfile
│   ├── requirements.txt
│   ├── scripts
│   │   ├── __init__.py
│   │   ├── mcp_health_check.py
│   │   ├── start-debug.sh
│   │   └── start.sh
│   ├── server.json
│   └── src
│       ├── __init__.py
│       ├── decorators.py
│       ├── llm
│       │   ├── __init__.py
│       │   ├── prompts.py
│       │   └── tool_manager.py
│       ├── logger.py
│       ├── main.py
│       ├── registry_client.py
│       ├── schemas.py
│       ├── settings.py
│       └── tools.py
├── pyproject.toml
├── README.md
├── registry
│   ├── __init__.py
│   ├── Dockerfile
│   ├── requirements.txt
│   ├── scripts
│   │   ├── requirements
│   │   ├── start-debug.sh
│   │   └── start.sh
│   └── src
│       ├── __init__.py
│       ├── base_schema.py
│       ├── database
│       │   ├── __init__.py
│       │   ├── models
│       │   │   ├── __init__.py
│       │   │   ├── base.py
│       │   │   ├── log.py
│       │   │   ├── mixins
│       │   │   │   ├── __init__.py
│       │   │   │   ├── has_created_at.py
│       │   │   │   ├── has_server_id.py
│       │   │   │   └── has_updated_at.py
│       │   │   ├── server.py
│       │   │   └── tool.py
│       │   └── session.py
│       ├── deployments
│       │   ├── __init__.py
│       │   ├── logs_repository.py
│       │   ├── router.py
│       │   ├── schemas.py
│       │   └── service.py
│       ├── errors.py
│       ├── logger.py
│       ├── main.py
│       ├── producer.py
│       ├── search
│       │   ├── __init__.py
│       │   ├── prompts.py
│       │   ├── schemas.py
│       │   └── service.py
│       ├── servers
│       │   ├── __init__.py
│       │   ├── repository.py
│       │   ├── router.py
│       │   ├── schemas.py
│       │   └── service.py
│       ├── settings.py
│       ├── types.py
│       └── validator
│           ├── __init__.py
│           ├── constants.py
│           ├── prompts.py
│           ├── schemas.py
│           ├── service.py
│           └── ssl.py
├── scripts
│   ├── darp-add.py
│   ├── darp-router.py
│   └── darp-search.py
└── worker
    ├── __init__.py
    ├── Dockerfile
    ├── requirements.txt
    ├── scripts
    │   ├── worker-start-debug.sh
    │   └── worker-start.sh
    └── src
        ├── __init__.py
        ├── constants.py
        ├── consumer.py
        ├── deployment_service.py
        ├── docker_service.py
        ├── dockerfile_generators
        │   ├── __init__.py
        │   ├── base.py
        │   ├── factory.py
        │   ├── python
        │   │   ├── __init__.py
        │   │   └── generic.py
        │   ├── typescript
        │   │   ├── __init__.py
        │   │   └── generic.py
        │   └── user_provided.py
        ├── errors.py
        ├── main.py
        ├── schemas.py
        └── user_logger.py
```
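
The tree shows four deployable components (the registry API, MCP server, healthchecker, and deploy worker) sharing the `common/` package, with database migrations managed by Alembic (`alembic/`, `alembic.ini`, and timestamped revision files under `alembic/versions`). For illustration only, migrations can also be driven from Python through Alembic's programmatic API; the sketch below is not part of the repository and assumes it runs from the repository root and that `alembic/env.py` resolves the database connection from the same `POSTGRES_*` environment variables used in `docker-compose.yaml` (an assumption, not verified here).

```python
# Hypothetical helper, not part of this repository: apply all pending
# Alembic revisions using Alembic's programmatic API.
from alembic import command
from alembic.config import Config


def upgrade_to_head(config_path: str = "alembic.ini") -> None:
    # Load the project's Alembic configuration (revisions live in alembic/versions).
    cfg = Config(config_path)
    # Upgrade to the latest revision, e.g. 2025_06_17_1353-667874dd7dee_added_alias.py.
    command.upgrade(cfg, "head")


if __name__ == "__main__":
    upgrade_to_head()
```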

# Files

--------------------------------------------------------------------------------
/docker-compose.yaml:
--------------------------------------------------------------------------------

```yaml
services:

  registry:
    build:
      context: .
      dockerfile: registry/Dockerfile
    environment:
      UVICORN_PORT: ${UVICORN_PORT:-80}
      POSTGRES_HOST: ${POSTGRES_HOST:-registry_postgres}
      POSTGRES_USER: ${POSTGRES_USER:-change_me}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-change_me}
      POSTGRES_DB: ${POSTGRES_DB:-registry}
      POSTGRES_PORT: ${POSTGRES_PORT:-}
      OPENAI_API_KEY: ${OPENAI_API_KEY}
      OPENAI_BASE_URL: ${OPENAI_BASE_URL:-}
      LLM_PROXY: ${LLM_PROXY:-}
      LLM_MODEL: ${LLM_MODEL:-}
      HEALTHCHECK_RUNNING_INTERVAL: ${HEALTHCHECK_RUNNING_INTERVAL:-}
      DEEP_RESEARCH_SERVER_NAME: ${DEEP_RESEARCH_SERVER_NAME:-}
      TOOL_RETRY_COUNT: ${TOOL_RETRY_COUNT:-}
      TOOL_WAIT_INTERVAL: ${TOOL_WAIT_INTERVAL:-}
    image: ${REGISTRY_IMAGE:-registry}:${CI_COMMIT_BRANCH:-local}
    container_name: registry
    pull_policy: always
    restart: always
    depends_on:
      registry_postgres:
        condition: service_healthy
        restart: true
    profiles:
      - main
    volumes:
      - /var/lib/hipasus/registry/api/logs:/workspace/logs
    healthcheck:
      test: curl -f http://localhost:${UVICORN_PORT:-80}/healthcheck
      interval: 15s
      timeout: 5s
      retries: 3
      start_period: 15s
    command: start.sh

  registry_mcp_server:
    build:
      context: .
      dockerfile: mcp_server/Dockerfile
    environment:
      - MCP_PORT
      - LOG_LEVEL
      - REGISTRY_URL
      - OPENAI_API_KEY
      - OPENAI_BASE_URL
      - LLM_PROXY
      - LLM_MODEL
    image: ${MCP_REGISTRY_IMAGE:-registry_mcp}:${CI_COMMIT_BRANCH:-local}
    container_name: registry_mcp_server
    pull_policy: always
    restart: always
    depends_on:
      registry:
        condition: service_healthy
        restart: true
    healthcheck:
      test: python3 -m mcp_server.scripts.mcp_health_check
      interval: 15s
      timeout: 5s
      retries: 3
      start_period: 15s
    profiles:
      - main
    volumes:
      - /var/lib/hipasus/registry/mcp_server/logs:/workspace/logs
    command: start.sh

  registry_deploy_worker:
    container_name: registry_deploy_worker
    image: ${WORKER_IMAGE_NAME:-registry_worker}:${CI_COMMIT_BRANCH:-local}
    build:
      context: .
      dockerfile: worker/Dockerfile
    pull_policy: always
    environment:
      LOG_LEVEL: ${LOG_LEVEL}
      POSTGRES_HOST: ${POSTGRES_HOST:-registry_postgres}
      POSTGRES_USER: ${POSTGRES_USER:-change_me}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-change_me}
      POSTGRES_DB: ${POSTGRES_DB:-registry}
      POSTGRES_PORT: ${POSTGRES_PORT:-5432}
      OPENAI_API_KEY: ${OPENAI_API_KEY}
      OPENAI_BASE_URL: ${OPENAI_BASE_URL:-}
      LLM_PROXY: ${LLM_PROXY:-}
      LLM_MODEL: ${LLM_MODEL:-}
      DOCKERHUB_LOGIN: ${DOCKERHUB_LOGIN}
      DOCKERHUB_PASSWORD: ${DOCKERHUB_PASSWORD}
      DEPLOYMENT_NETWORK: ${DOCKER_NETWORK:-portal_network}
      HEALTHCHECK_RUNNING_INTERVAL: ${HEALTHCHECK_RUNNING_INTERVAL:-}
      DEEP_RESEARCH_SERVER_NAME: ${DEEP_RESEARCH_SERVER_NAME:-}
      TOOL_RETRY_COUNT: ${TOOL_RETRY_COUNT:-}
      TOOL_WAIT_INTERVAL: ${TOOL_WAIT_INTERVAL:-}
    profiles:
      - main
    depends_on:
      registry_postgres:
        condition: service_healthy
        restart: true
      portal_kafka:
        condition: service_healthy
        restart: true
    volumes:
      - /var/lib/hipasus/registry/worker/logs:/workspace/logs
      - /var/run/docker.sock:/var/run/docker.sock
    restart: always
    command: worker-start.sh

  portal_zookeeper:
    image: confluentinc/cp-zookeeper:7.3.2
    container_name: portal_zookeeper
    ports:
      - "2181:2181"
    profiles:
      - main
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_SERVER_ID: 1
      ZOOKEEPER_SERVERS: portal_zookeeper:2888:3888
    healthcheck:
      test: nc -z localhost 2181 || exit -1
      interval: 30s
      timeout: 20s
      retries: 5
    restart: always


  portal_kafka:
    image: confluentinc/cp-kafka:7.3.2
    container_name: portal_kafka
    profiles:
      - main
    environment:
      KAFKA_ADVERTISED_LISTENERS: INTERNAL://portal_kafka:19092,DOCKER://127.0.0.1:29092
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT,DOCKER:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL
      KAFKA_ZOOKEEPER_CONNECT: "portal_zookeeper:2181"
      KAFKA_BROKER_ID: 1
      KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
      KAFKA_JMX_PORT: 9999
      KAFKA_JMX_HOSTNAME: ${DOCKER_HOST_IP:-127.0.0.1}
      KAFKA_AUTHORIZER_CLASS_NAME: kafka.security.authorizer.AclAuthorizer
      KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "true"
    healthcheck:
      test: kafka-cluster cluster-id --bootstrap-server 127.0.0.1:29092 || exit 1
      interval: 30s
      timeout: 20s
      retries: 5
    depends_on:
      portal_zookeeper:
        condition: service_healthy
        restart: true
    restart: always

  registry_postgres:
    container_name: registry_postgres
    image: postgres:17
    environment:
      POSTGRES_USER: ${POSTGRES_USER:-change_me}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-change_me}
      POSTGRES_DB: ${POSTGRES_DB:-registry}
    volumes:
      - /var/lib/hipasus/registry/pg_data:/var/lib/postgresql/data
    command: postgres -c 'max_connections=500'
    profiles:
      - main
    healthcheck:
      test: pg_isready -U ${POSTGRES_USER:-change_me} -d ${POSTGRES_DB:-registry}
      interval: 7s
      timeout: 5s
      retries: 5
      start_period: 5s
    restart: always

  registry_healthchecker:
    container_name: registry_healthchecker
    image: ${HEALTHCHECKER_IMAGE_NAME:-registry_healthchecker}:${CI_COMMIT_BRANCH:-local}
    build:
      context: .
      dockerfile: healthchecker/Dockerfile
    pull_policy: always
    environment:
      LOG_LEVEL: ${LOG_LEVEL}
      POSTGRES_HOST: ${POSTGRES_HOST:-registry_postgres}
      POSTGRES_USER: ${POSTGRES_USER:-change_me}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-change_me}
      POSTGRES_DB: ${POSTGRES_DB:-registry}
      POSTGRES_PORT: ${POSTGRES_PORT:-5432}
      OPENAI_API_KEY: ${OPENAI_API_KEY}
      OPENAI_BASE_URL: ${OPENAI_BASE_URL:-}
      LLM_PROXY: ${LLM_PROXY:-}
      LLM_MODEL: ${LLM_MODEL:-}
      DOCKERHUB_LOGIN: ${DOCKERHUB_LOGIN}
      DOCKERHUB_PASSWORD: ${DOCKERHUB_PASSWORD}
      DEPLOYMENT_NETWORK: ${DOCKER_NETWORK:-portal_network}
      HEALTHCHECK_RUNNING_INTERVAL: ${HEALTHCHECK_RUNNING_INTERVAL:-}
      DEEP_RESEARCH_SERVER_NAME: ${DEEP_RESEARCH_SERVER_NAME:-}
      TOOL_RETRY_COUNT: ${TOOL_RETRY_COUNT:-}
      TOOL_WAIT_INTERVAL: ${TOOL_WAIT_INTERVAL:-}
    profiles:
      - main
    depends_on:
      registry:
        condition: service_healthy
        restart: true
    restart: always
    command: worker-start.sh

networks:
  default:
    name: ${DOCKER_NETWORK:-portal_network}
    external: true
```
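
Each service's `healthcheck` feeds into `depends_on` with `condition: service_healthy`, so `registry_mcp_server` and `registry_healthchecker` start only after the registry answers `GET /healthcheck`, and the deploy worker waits for both Postgres and Kafka. The registry check itself is a plain `curl -f` against `/healthcheck`; the sketch below reproduces that probe in Python purely as an illustration. The URL is an assumption (the check runs inside the container against `UVICORN_PORT`, which defaults to 80, and the compose file publishes no ports for the registry), so adjust host and port for your environment.

```python
# Illustrative probe mirroring the registry healthcheck in docker-compose.yaml.
# The default URL is an assumption; replace host/port as needed.
import sys
import urllib.request


def check_registry(url: str = "http://localhost:80/healthcheck") -> bool:
    try:
        with urllib.request.urlopen(url, timeout=5) as response:
            # curl -f fails on non-success HTTP statuses; treat 2xx/3xx as healthy.
            return 200 <= response.status < 400
    except Exception:
        # Connection errors and HTTP errors both count as unhealthy.
        return False


if __name__ == "__main__":
    sys.exit(0 if check_registry() else 1)
```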

--------------------------------------------------------------------------------
/registry/src/servers/service.py:
--------------------------------------------------------------------------------

```python
import re
from contextlib import _AsyncGeneratorContextManager  # noqa
from typing import Self

from fastapi import Depends
from mcp import ClientSession
from mcp.client.sse import sse_client
from mcp.client.streamable_http import streamablehttp_client
from pydantic import ValidationError
from sqlalchemy import Select

from ..settings import settings
from .repository import ServerRepository
from .schemas import MCP
from .schemas import MCPJson
from .schemas import ServerCreate
from .schemas import ServerRead
from .schemas import ServerUpdate
from .schemas import Tool
from .schemas import tool_name_regex
from registry.src.database import Server
from registry.src.errors import InvalidData
from registry.src.errors import InvalidServerNameError
from registry.src.errors import NotAllowedError
from registry.src.errors import RemoteServerError
from registry.src.errors import ServerAlreadyExistsError
from registry.src.errors import ServersNotFoundError
from registry.src.logger import logger
from registry.src.types import ServerStatus
from registry.src.types import ServerTransportProtocol

BASE_SERVER_NAME = "base_darp_server#REPLACEME"
ID_REGEX_PATTERN = re.compile(rf"^[a-zA-Z_][a-zA-Z0-9_]*|{BASE_SERVER_NAME}$")


class ServerService:
    def __init__(self, repo: ServerRepository) -> None:
        self.repo = repo

    async def create(self, data: ServerCreate) -> Server:
        self._assure_server_name_is_valid_id(data.data.name)
        await self._assure_server_not_exists(name=data.data.name, url=data.data.url)

        server_transport_protocol = await self.detect_transport_protocol(
            server_url=data.data.url
        )

        logger.info(
            "Detected server's transport protocol %s", server_transport_protocol
        )

        tools = await self.get_tools(
            server_url=data.data.url,
            server_name=data.data.name,
            transport=server_transport_protocol,
        )
        return await self.repo.create_server(
            data.data,
            tools,
            creator_id=data.current_user_id,
            transport_protocol=server_transport_protocol,
        )

    @classmethod
    async def get_tools(
        cls,
        server_url: str,
        server_name: str,
        transport: ServerTransportProtocol | None = None,
    ) -> list[Tool]:

        if transport is None:
            transport = await cls.detect_transport_protocol(server_url)

        try:
            return await cls._fetch_tools(
                server_url=server_url, server_name=server_name, transport=transport
            )
        except Exception as e:
            if isinstance(e, InvalidData):
                raise
            logger.warning(
                f"Error while getting tools from server {server_url}",
                exc_info=e,
            )
            raise InvalidData(
                "Server is unhealthy or URL does not lead to a valid MCP server",
                url=server_url,
            )

    async def delete_server(self, id: int, current_user_id: str) -> None:
        server = await self.get_server_by_id(id=id)
        if server.creator_id != current_user_id:
            raise NotAllowedError("Only server creator can delete it")
        await self.repo.delete_server(id=id)

    async def get_all_servers(
        self,
        status: ServerStatus | None = None,
        search_query: str | None = None,
        creator_id: str | None = None,
    ) -> Select:
        return await self.repo.get_all_servers(
            search_query=search_query, status=status, creator_id=creator_id
        )

    async def get_server_by_id(self, id: int) -> Server:
        await self._assure_server_found(id)
        return await self.repo.get_server(id=id)

    async def get_servers_by_ids(self, ids: list[int]) -> list[Server]:
        servers = await self.repo.get_servers_by_ids(ids=ids)
        if len(ids) != len(servers):
            retrieved_server_ids = {server.id for server in servers}
            missing_server_ids = set(ids) - retrieved_server_ids
            raise ServersNotFoundError(ids=list(missing_server_ids))
        return servers

    async def update_server(self, id: int, data: ServerUpdate) -> Server:
        server = await self.get_server_by_id(id=id)
        transport_protocol = None
        if server.creator_id != data.current_user_id:
            raise NotAllowedError("Only server creator can update it")
        if data.data.name or data.data.url:
            await self._assure_server_not_exists(
                name=data.data.name, url=data.data.url, ignore_id=id
            )
        updated_server_url = data.data.url or server.url
        if updated_server_url is not None:
            transport_protocol = await self.detect_transport_protocol(
                server_url=updated_server_url
            )
            tools = await self.get_tools(
                server_url=updated_server_url,
                server_name=data.data.name or server.name,
                transport=transport_protocol,
            )
        else:
            tools = []

        return await self.repo.update_server(
            id,
            data.data,
            tools,
            transport_protocol=transport_protocol,
        )

    async def _assure_server_found(self, id: int) -> None:
        if not await self.repo.find_servers(id=id):
            raise ServersNotFoundError([id])

    def _assure_server_name_is_valid_id(self, name: str) -> None:
        if not re.match(ID_REGEX_PATTERN, name):
            raise InvalidServerNameError(name=name)

    async def _assure_server_not_exists(
        self,
        id: int | None = None,
        name: str | None = None,
        url: str | None = None,
        ignore_id: int | None = None,
    ) -> None:
        if servers := await self.repo.find_servers(id, name, url, ignore_id):
            dict_servers = [
                ServerRead.model_validate(server).model_dump() for server in servers
            ]
            raise ServerAlreadyExistsError(dict_servers)

    async def get_search_servers(self) -> list[Server]:
        servers_query = await self.repo.get_all_servers()
        servers_query = servers_query.where(Server.url != None)  # noqa
        servers = (await self.repo.session.execute(servers_query)).scalars().all()
        return list(servers)

    async def get_servers_by_urls(self, server_urls: list[str]) -> list[Server]:
        servers = await self.repo.get_servers_by_urls(urls=server_urls)
        if len(server_urls) != len(servers):
            retrieved_server_urls = {server.url for server in servers}
            missing_server_urls = set(server_urls) - retrieved_server_urls
            logger.warning(
                f"One or more server urls are incorrect {missing_server_urls=}"
            )
        return servers

    async def get_mcp_json(self) -> MCPJson:
        servers = await self.repo.get_all_servers()
        servers = (await self.repo.session.execute(servers)).scalars().all()
        return MCPJson(servers=[MCP.model_validate(server) for server in servers])

    async def get_deep_research(self) -> list[Server]:
        servers = await self.repo.find_servers(name=settings.deep_research_server_name)
        if len(servers) == 0:
            raise RemoteServerError(
                f"{settings.deep_research_server_name} MCP server does not exist in registry"
            )
        assert len(servers) == 1, "Invalid state. Multiple deepresearch servers found"
        if servers[0].status != ServerStatus.active:
            raise RemoteServerError(
                f"{settings.deep_research_server_name} MCP server is down."
            )
        return servers

    @classmethod
    def get_new_instance(
        cls, repo: ServerRepository = Depends(ServerRepository.get_new_instance)
    ) -> Self:
        return cls(repo=repo)

    @classmethod
    async def _fetch_tools(
        cls, server_url: str, server_name: str, transport: ServerTransportProtocol
    ) -> list[Tool]:
        client_ctx = cls._get_client_context(server_url, transport)

        async with client_ctx as (read, write, *_):
            async with ClientSession(read, write) as session:
                await session.initialize()
                tools_response = await session.list_tools()
        try:
            tools = [
                Tool(
                    **tool.model_dump(exclude_none=True),
                    alias=f"{tool.name}__{server_name}",
                )
                for tool in tools_response.tools
            ]
        except ValidationError:
            raise InvalidData(
                message=f"Invalid tool names. {{tool_name}}__{{server_name}} must fit {tool_name_regex}"
            )
        return tools

    @classmethod
    def _get_client_context(
        cls,
        server_url: str,
        transport_protocol: ServerTransportProtocol,
    ) -> _AsyncGeneratorContextManager:
        if transport_protocol == ServerTransportProtocol.STREAMABLE_HTTP:
            client_ctx = streamablehttp_client(server_url)
        elif transport_protocol == ServerTransportProtocol.SSE:
            client_ctx = sse_client(server_url)
        else:
            raise RuntimeError(
                "Unsupported transport protocol: %s", transport_protocol.name
            )

        return client_ctx

    @classmethod
    async def detect_transport_protocol(
        cls,
        server_url: str,
    ) -> ServerTransportProtocol:
        for protocol in (
            ServerTransportProtocol.STREAMABLE_HTTP,
            ServerTransportProtocol.SSE,
        ):
            if await cls._is_transport_supported(server_url, protocol):
                return protocol

        raise InvalidData(
            "Can't detect server's transport protocol, maybe server is unhealthy?",
            url=server_url,
        )

    @classmethod
    async def _is_transport_supported(
        cls,
        server_url: str,
        protocol: ServerTransportProtocol,
    ) -> bool:
        try:
            client_ctx = cls._get_client_context(server_url, protocol)
            async with client_ctx as (read, write, *_):
                return await cls._can_initialize_session(read, write)

        except Exception as e:
            logger.error(
                "Failed to create %s client",
                protocol.name,
                exc_info=e,
            )
            return False

    @staticmethod
    async def _can_initialize_session(read, write) -> bool:
        try:
            async with ClientSession(read, write) as session:
                await session.initialize()
            return True
        except Exception:
            return False
```
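
Because `detect_transport_protocol`, `get_tools`, and `_fetch_tools` are classmethods, tool discovery can be exercised without constructing a `ServerRepository`. The following is a minimal sketch, not part of the repository, assuming the `registry` package is importable (for example inside the registry container) and that an MCP server is reachable at the hypothetical URL shown.

```python
# Illustrative driver for ServerService's transport detection and tool listing.
import asyncio

from registry.src.servers.service import ServerService


async def main() -> None:
    # Hypothetical MCP endpoint; replace with a registered server's URL.
    server_url = "http://example-mcp:8000/sse"

    # Tries streamable HTTP first, then SSE, raising InvalidData if neither responds.
    transport = await ServerService.detect_transport_protocol(server_url)

    # Lists the server's tools and builds "<tool>__<server>" aliases.
    tools = await ServerService.get_tools(
        server_url=server_url,
        server_name="example",
        transport=transport,
    )
    for tool in tools:
        print(tool.alias)


if __name__ == "__main__":
    asyncio.run(main())
```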