This is page 4 of 8. Use http://codebase.md/cyanheads/atlas-mcp-server?lines=true&page={x} to view the full context.
# Directory Structure
```
├── .clinerules
├── .dockerignore
├── .env.example
├── .github
│ ├── FUNDING.yml
│ └── workflows
│ └── publish.yml
├── .gitignore
├── .ncurc.json
├── .repomixignore
├── automated-tests
│ └── AGENT_TEST_05282025.md
├── CHANGELOG.md
├── CLAUDE.md
├── docker-compose.yml
├── docs
│ └── tree.md
├── examples
│ ├── backup-example
│ │ ├── knowledges.json
│ │ ├── projects.json
│ │ ├── relationships.json
│ │ └── tasks.json
│ ├── deep-research-example
│ │ ├── covington_community_grant_research.md
│ │ └── full-export.json
│ ├── README.md
│ └── webui-example.png
├── LICENSE
├── mcp.json
├── package-lock.json
├── package.json
├── README.md
├── repomix.config.json
├── scripts
│ ├── clean.ts
│ ├── fetch-openapi-spec.ts
│ ├── make-executable.ts
│ └── tree.ts
├── smithery.yaml
├── src
│ ├── config
│ │ └── index.ts
│ ├── index.ts
│ ├── mcp
│ │ ├── resources
│ │ │ ├── index.ts
│ │ │ ├── knowledge
│ │ │ │ └── knowledgeResources.ts
│ │ │ ├── projects
│ │ │ │ └── projectResources.ts
│ │ │ ├── tasks
│ │ │ │ └── taskResources.ts
│ │ │ └── types.ts
│ │ ├── server.ts
│ │ ├── tools
│ │ │ ├── atlas_database_clean
│ │ │ │ ├── cleanDatabase.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── responseFormat.ts
│ │ │ │ └── types.ts
│ │ │ ├── atlas_deep_research
│ │ │ │ ├── deepResearch.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── responseFormat.ts
│ │ │ │ └── types.ts
│ │ │ ├── atlas_knowledge_add
│ │ │ │ ├── addKnowledge.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── responseFormat.ts
│ │ │ │ └── types.ts
│ │ │ ├── atlas_knowledge_delete
│ │ │ │ ├── deleteKnowledge.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── responseFormat.ts
│ │ │ │ └── types.ts
│ │ │ ├── atlas_knowledge_list
│ │ │ │ ├── index.ts
│ │ │ │ ├── listKnowledge.ts
│ │ │ │ ├── responseFormat.ts
│ │ │ │ └── types.ts
│ │ │ ├── atlas_project_create
│ │ │ │ ├── createProject.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── responseFormat.ts
│ │ │ │ └── types.ts
│ │ │ ├── atlas_project_delete
│ │ │ │ ├── deleteProject.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── responseFormat.ts
│ │ │ │ └── types.ts
│ │ │ ├── atlas_project_list
│ │ │ │ ├── index.ts
│ │ │ │ ├── listProjects.ts
│ │ │ │ ├── responseFormat.ts
│ │ │ │ └── types.ts
│ │ │ ├── atlas_project_update
│ │ │ │ ├── index.ts
│ │ │ │ ├── responseFormat.ts
│ │ │ │ ├── types.ts
│ │ │ │ └── updateProject.ts
│ │ │ ├── atlas_task_create
│ │ │ │ ├── createTask.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── responseFormat.ts
│ │ │ │ └── types.ts
│ │ │ ├── atlas_task_delete
│ │ │ │ ├── deleteTask.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── responseFormat.ts
│ │ │ │ └── types.ts
│ │ │ ├── atlas_task_list
│ │ │ │ ├── index.ts
│ │ │ │ ├── listTasks.ts
│ │ │ │ ├── responseFormat.ts
│ │ │ │ └── types.ts
│ │ │ ├── atlas_task_update
│ │ │ │ ├── index.ts
│ │ │ │ ├── responseFormat.ts
│ │ │ │ ├── types.ts
│ │ │ │ └── updateTask.ts
│ │ │ └── atlas_unified_search
│ │ │ ├── index.ts
│ │ │ ├── responseFormat.ts
│ │ │ ├── types.ts
│ │ │ └── unifiedSearch.ts
│ │ └── transports
│ │ ├── authentication
│ │ │ └── authMiddleware.ts
│ │ ├── httpTransport.ts
│ │ └── stdioTransport.ts
│ ├── services
│ │ └── neo4j
│ │ ├── backupRestoreService
│ │ │ ├── backupRestoreTypes.ts
│ │ │ ├── backupUtils.ts
│ │ │ ├── exportLogic.ts
│ │ │ ├── importLogic.ts
│ │ │ ├── index.ts
│ │ │ └── scripts
│ │ │ ├── db-backup.ts
│ │ │ └── db-import.ts
│ │ ├── driver.ts
│ │ ├── events.ts
│ │ ├── helpers.ts
│ │ ├── index.ts
│ │ ├── knowledgeService.ts
│ │ ├── projectService.ts
│ │ ├── searchService
│ │ │ ├── fullTextSearchLogic.ts
│ │ │ ├── index.ts
│ │ │ ├── searchTypes.ts
│ │ │ └── unifiedSearchLogic.ts
│ │ ├── taskService.ts
│ │ ├── types.ts
│ │ └── utils.ts
│ ├── types
│ │ ├── errors.ts
│ │ ├── mcp.ts
│ │ └── tool.ts
│ ├── utils
│ │ ├── index.ts
│ │ ├── internal
│ │ │ ├── errorHandler.ts
│ │ │ ├── index.ts
│ │ │ ├── logger.ts
│ │ │ └── requestContext.ts
│ │ ├── metrics
│ │ │ ├── index.ts
│ │ │ └── tokenCounter.ts
│ │ ├── parsing
│ │ │ ├── dateParser.ts
│ │ │ ├── index.ts
│ │ │ └── jsonParser.ts
│ │ └── security
│ │ ├── idGenerator.ts
│ │ ├── index.ts
│ │ ├── rateLimiter.ts
│ │ └── sanitization.ts
│ └── webui
│ ├── index.html
│ ├── logic
│ │ ├── api-service.js
│ │ ├── app-state.js
│ │ ├── config.js
│ │ ├── dom-elements.js
│ │ ├── main.js
│ │ └── ui-service.js
│ └── styling
│ ├── base.css
│ ├── components.css
│ ├── layout.css
│ └── theme.css
├── tsconfig.json
├── tsconfig.typedoc.json
└── typedoc.json
```
# Files
--------------------------------------------------------------------------------
/src/mcp/transports/authentication/authMiddleware.ts:
--------------------------------------------------------------------------------
```typescript
1 | /**
2 | * @fileoverview MCP Authentication Middleware for Bearer Token Validation (JWT).
3 | *
4 | * This middleware validates JSON Web Tokens (JWT) passed via the 'Authorization' header
5 | * using the 'Bearer' scheme (e.g., "Authorization: Bearer <your_token>").
6 | * It verifies the token's signature and expiration using the secret key defined
7 | * in the configuration (`config.mcpAuthSecretKey`).
8 | *
9 | * If the token is valid, an object conforming to the MCP SDK's `AuthInfo` type
10 | * (expected to contain `token`, `clientId`, and `scopes`) is attached to `req.auth`.
11 | * If the token is missing, invalid, or expired, it sends an HTTP 401 Unauthorized response.
12 | *
13 | * @see {@link https://github.com/modelcontextprotocol/modelcontextprotocol/blob/main/docs/specification/2025-03-26/basic/authorization.mdx | MCP Authorization Specification}
14 | * @module src/mcp-server/transports/authentication/authMiddleware
15 | */
16 |
17 | import { AuthInfo } from "@modelcontextprotocol/sdk/server/auth/types.js"; // Import from SDK
18 | import { NextFunction, Request, Response } from "express";
19 | import jwt from "jsonwebtoken";
20 | import { config, environment } from "../../../config/index.js";
21 | import { logger, requestContextService } from "../../../utils/index.js";
22 |
23 | // Extend the Express Request interface to include the optional 'auth' property
24 | // using the imported AuthInfo type from the SDK.
25 | declare global {
26 | // eslint-disable-next-line @typescript-eslint/no-namespace
27 | namespace Express {
28 | interface Request {
29 | /** Authentication information derived from the JWT, conforming to MCP SDK's AuthInfo. */
30 | auth?: AuthInfo;
31 | }
32 | }
33 | }
34 |
35 | // Startup Validation: Validate secret key presence on module load.
36 | if (environment === "production" && !config.mcpAuthSecretKey) {
37 | logger.fatal(
38 | "CRITICAL: MCP_AUTH_SECRET_KEY is not set in production environment. Authentication cannot proceed securely.",
39 | );
40 | throw new Error(
41 | "MCP_AUTH_SECRET_KEY must be set in production environment for JWT authentication.",
42 | );
43 | } else if (!config.mcpAuthSecretKey) {
44 | logger.warning(
45 | "MCP_AUTH_SECRET_KEY is not set. Authentication middleware will bypass checks (DEVELOPMENT ONLY). This is insecure for production.",
46 | );
47 | }
48 |
49 | /**
50 | * Express middleware for verifying JWT Bearer token authentication.
51 | */
52 | export function mcpAuthMiddleware(
53 | req: Request,
54 | res: Response,
55 | next: NextFunction,
56 | ): void {
57 | const context = requestContextService.createRequestContext({
58 | operation: "mcpAuthMiddleware",
59 | method: req.method,
60 | path: req.path,
61 | });
62 | logger.debug(
63 | "Running MCP Authentication Middleware (Bearer Token Validation)...",
64 | context,
65 | );
66 |
67 | // Development Mode Bypass
68 | if (!config.mcpAuthSecretKey) {
69 | if (environment !== "production") {
70 | logger.warning(
71 | "Bypassing JWT authentication: MCP_AUTH_SECRET_KEY is not set (DEVELOPMENT ONLY).",
72 | context,
73 | );
74 | // Populate req.auth strictly according to SDK's AuthInfo
75 | req.auth = {
76 | token: "dev-mode-placeholder-token",
77 | clientId: "dev-client-id",
78 | scopes: ["dev-scope"],
79 | };
80 | // Log dev mode details separately, not attaching to req.auth if not part of AuthInfo
81 | logger.debug("Dev mode auth object created.", {
82 | ...context,
83 | authDetails: req.auth,
84 | });
85 | return next();
86 | } else {
87 | logger.error(
88 | "FATAL: MCP_AUTH_SECRET_KEY is missing in production. Cannot bypass auth.",
89 | context,
90 | );
91 | res.status(500).json({
92 | error: "Server configuration error: Authentication key missing.",
93 | });
94 | return;
95 | }
96 | }
97 |
98 | const authHeader = req.headers.authorization;
99 | if (!authHeader || !authHeader.startsWith("Bearer ")) {
100 | logger.warning(
101 | "Authentication failed: Missing or malformed Authorization header (Bearer scheme required).",
102 | context,
103 | );
104 | res.status(401).json({
105 | error: "Unauthorized: Missing or invalid authentication token format.",
106 | });
107 | return;
108 | }
109 |
110 | const tokenParts = authHeader.split(" ");
111 | if (tokenParts.length !== 2 || tokenParts[0] !== "Bearer" || !tokenParts[1]) {
112 | logger.warning("Authentication failed: Malformed Bearer token.", context);
113 | res
114 | .status(401)
115 | .json({ error: "Unauthorized: Malformed authentication token." });
116 | return;
117 | }
118 | const rawToken = tokenParts[1];
119 |
120 | try {
121 | const decoded = jwt.verify(rawToken, config.mcpAuthSecretKey);
122 |
123 | if (typeof decoded === "string") {
124 | logger.warning(
125 | "Authentication failed: JWT decoded to a string, expected an object payload.",
126 | context,
127 | );
128 | res
129 | .status(401)
130 | .json({ error: "Unauthorized: Invalid token payload format." });
131 | return;
132 | }
133 |
134 | // Extract and validate fields for SDK's AuthInfo
135 | const clientIdFromToken =
136 | typeof decoded.cid === "string"
137 | ? decoded.cid
138 | : typeof decoded.client_id === "string"
139 | ? decoded.client_id
140 | : undefined;
141 | if (!clientIdFromToken) {
142 | logger.warning(
143 | "Authentication failed: JWT 'cid' or 'client_id' claim is missing or not a string.",
144 | { ...context, jwtPayloadKeys: Object.keys(decoded) },
145 | );
146 | res.status(401).json({
147 | error: "Unauthorized: Invalid token, missing client identifier.",
148 | });
149 | return;
150 | }
151 |
152 | let scopesFromToken: string[];
153 | if (
154 | Array.isArray(decoded.scp) &&
155 | decoded.scp.every((s) => typeof s === "string")
156 | ) {
157 | scopesFromToken = decoded.scp as string[];
158 | } else if (
159 | typeof decoded.scope === "string" &&
160 | decoded.scope.trim() !== ""
161 | ) {
162 | scopesFromToken = decoded.scope.split(" ").filter((s) => s);
163 | if (scopesFromToken.length === 0 && decoded.scope.trim() !== "") {
164 | // handles case " " -> [""]
165 | scopesFromToken = [decoded.scope.trim()];
166 | } else if (scopesFromToken.length === 0 && decoded.scope.trim() === "") {
167 | // If scope is an empty string, treat as no scopes.
168 | // This will now lead to an error if scopes are considered mandatory.
169 | logger.debug(
170 | "JWT 'scope' claim was an empty string, resulting in empty scopes array.",
171 | context,
172 | );
173 | }
174 | } else {
175 | // If scopes are strictly mandatory and not found or invalid format
176 | logger.warning(
177 | "Authentication failed: JWT 'scp' or 'scope' claim is missing, not an array of strings, or not a valid space-separated string.",
178 | { ...context, jwtPayloadKeys: Object.keys(decoded) },
179 | );
180 | res.status(401).json({
181 | error: "Unauthorized: Invalid token, missing or invalid scopes.",
182 | });
183 | return;
184 | }
185 |
186 | // If, after parsing, scopesFromToken is empty and scopes are considered mandatory for any operation.
187 | // This check assumes that all valid tokens must have at least one scope.
188 | // If some tokens are legitimately allowed to have no scopes for certain operations,
189 | // this check might need to be adjusted or handled downstream.
190 | if (scopesFromToken.length === 0) {
191 | logger.warning(
192 | "Authentication failed: Token resulted in an empty scope array, and scopes are required.",
193 | { ...context, jwtPayloadKeys: Object.keys(decoded) },
194 | );
195 | res.status(401).json({
196 | error: "Unauthorized: Token must contain valid, non-empty scopes.",
197 | });
198 | return;
199 | }
200 |
201 | // Construct req.auth with only the properties defined in SDK's AuthInfo
202 | // All other claims from 'decoded' are not part of req.auth for type safety.
203 | req.auth = {
204 | token: rawToken,
205 | clientId: clientIdFromToken,
206 | scopes: scopesFromToken,
207 | };
208 |
209 | // Log separately if other JWT claims like 'sub' (sessionId) are needed for app logic
210 | const subClaimForLogging =
211 | typeof decoded.sub === "string" ? decoded.sub : undefined;
212 | logger.debug("JWT verified successfully. AuthInfo attached to request.", {
213 | ...context,
214 | mcpSessionIdContext: subClaimForLogging,
215 | clientId: req.auth.clientId,
216 | scopes: req.auth.scopes,
217 | });
218 | next();
219 | } catch (error: unknown) {
220 | let errorMessage = "Invalid token";
221 | if (error instanceof jwt.TokenExpiredError) {
222 | errorMessage = "Token expired";
223 | logger.warning("Authentication failed: Token expired.", {
224 | ...context,
225 | expiredAt: error.expiredAt,
226 | });
227 | } else if (error instanceof jwt.JsonWebTokenError) {
228 | errorMessage = `Invalid token: ${error.message}`;
229 | logger.warning(`Authentication failed: ${errorMessage}`, { ...context });
230 | } else if (error instanceof Error) {
231 | errorMessage = `Verification error: ${error.message}`;
232 | logger.error(
233 | "Authentication failed: Unexpected error during token verification.",
234 | { ...context, error: error.message },
235 | );
236 | } else {
237 | errorMessage = "Unknown verification error";
238 | logger.error(
239 | "Authentication failed: Unexpected non-error exception during token verification.",
240 | { ...context, error },
241 | );
242 | }
243 | res.status(401).json({ error: `Unauthorized: ${errorMessage}.` });
244 | }
245 | }
246 |
```
--------------------------------------------------------------------------------
/examples/backup-example/relationships.json:
--------------------------------------------------------------------------------
```json
1 | [
2 | {
3 | "startNodeId": "know_8edd2c00340b4959b5dd7bd493484a78",
4 | "endNodeId": "cite_575e1e35306748cf92c76825c093da8c",
5 | "type": "CITES",
6 | "properties": {}
7 | },
8 | {
9 | "startNodeId": "know_8edd2c00340b4959b5dd7bd493484a78",
10 | "endNodeId": "cite_595e7750df1e473289e4dd2439df5671",
11 | "type": "CITES",
12 | "properties": {}
13 | },
14 | {
15 | "startNodeId": "know_8edd2c00340b4959b5dd7bd493484a78",
16 | "endNodeId": "cite_119039866696470493f9547fe5cd072e",
17 | "type": "CITES",
18 | "properties": {}
19 | },
20 | {
21 | "startNodeId": "know_8edd2c00340b4959b5dd7bd493484a78",
22 | "endNodeId": "cite_c52fc132a7714274bf81ab6108b81700",
23 | "type": "CITES",
24 | "properties": {}
25 | },
26 | {
27 | "startNodeId": "know_ea8b3222e36d4e26b73a0d865312413f",
28 | "endNodeId": "cite_877e56d5c98f48e99776b81c8820a171",
29 | "type": "CITES",
30 | "properties": {}
31 | },
32 | {
33 | "startNodeId": "know_ea8b3222e36d4e26b73a0d865312413f",
34 | "endNodeId": "cite_9886a57991f542738b0b239e86f0cb5b",
35 | "type": "CITES",
36 | "properties": {}
37 | },
38 | {
39 | "startNodeId": "know_ea8b3222e36d4e26b73a0d865312413f",
40 | "endNodeId": "cite_1d64e2512f8c455e9ba902f1d1c7e60d",
41 | "type": "CITES",
42 | "properties": {}
43 | },
44 | {
45 | "startNodeId": "task_5d3304ef2c9c4d6b9288fea38ed6ba84",
46 | "endNodeId": "task_ac11f7a5f83c4f339cae63de850ecda6",
47 | "type": "DEPENDS_ON",
48 | "properties": {
49 | "id": "tdep_fdfd015829824c42b3085605550ebff0",
50 | "createdAt": "2025-03-26T18:40:55.603Z"
51 | }
52 | },
53 | {
54 | "startNodeId": "task_4a224bcddf5246afaa732834d84b4b73",
55 | "endNodeId": "task_4da902b4e18a46c4b5b5d1043c7d0665",
56 | "type": "DEPENDS_ON",
57 | "properties": {
58 | "id": "tdep_b40da62436794468b489040802ba8492",
59 | "createdAt": "2025-03-26T18:40:19.614Z"
60 | }
61 | },
62 | {
63 | "startNodeId": "task_3f5dd265abf04cc3983550a7b2a5f7fd",
64 | "endNodeId": "task_ac11f7a5f83c4f339cae63de850ecda6",
65 | "type": "DEPENDS_ON",
66 | "properties": {
67 | "id": "tdep_e707d3e3d21c4de89a7a5880a29864b3",
68 | "createdAt": "2025-03-26T18:40:46.252Z"
69 | }
70 | },
71 | {
72 | "startNodeId": "task_fba80d348f274e908de4ee988df754cc",
73 | "endNodeId": "task_ac11f7a5f83c4f339cae63de850ecda6",
74 | "type": "DEPENDS_ON",
75 | "properties": {
76 | "id": "tdep_6a0183a939a5404dbbfa57c042b1a72c",
77 | "createdAt": "2025-03-26T18:40:29.392Z"
78 | }
79 | },
80 | {
81 | "startNodeId": "task_0fa6a009306b41108f5292475754c33a",
82 | "endNodeId": "task_ac11f7a5f83c4f339cae63de850ecda6",
83 | "type": "DEPENDS_ON",
84 | "properties": {
85 | "id": "tdep_ff36bf29d5144c86af9e0e10a1a93d01",
86 | "createdAt": "2025-03-26T18:40:37.820Z"
87 | }
88 | },
89 | {
90 | "startNodeId": "task_30dc34e190cf4c6690e8e4002269b8d5",
91 | "endNodeId": "task_ac11f7a5f83c4f339cae63de850ecda6",
92 | "type": "DEPENDS_ON",
93 | "properties": {
94 | "id": "tdep_7fdd83efc94446eb9511cd13c0d2f8f3",
95 | "createdAt": "2025-03-26T18:41:03.780Z"
96 | }
97 | },
98 | {
99 | "startNodeId": "know_1d9b88ea4f2f448c926382e7d899a27c",
100 | "endNodeId": "cite_0fdaff7b58d044d4a9a4ec8445c1f334",
101 | "type": "CITES",
102 | "properties": {}
103 | },
104 | {
105 | "startNodeId": "know_1d9b88ea4f2f448c926382e7d899a27c",
106 | "endNodeId": "cite_74b89ff0621743babbb520d6b3728c2c",
107 | "type": "CITES",
108 | "properties": {}
109 | },
110 | {
111 | "startNodeId": "know_1d9b88ea4f2f448c926382e7d899a27c",
112 | "endNodeId": "cite_67c17ede43d54a6d9126395922136c6d",
113 | "type": "CITES",
114 | "properties": {}
115 | },
116 | {
117 | "startNodeId": "portfolio-main",
118 | "endNodeId": "know_5894c5e7cd674206b82470c4d46265e7",
119 | "type": "CONTAINS_KNOWLEDGE",
120 | "properties": {}
121 | },
122 | {
123 | "startNodeId": "portfolio-main",
124 | "endNodeId": "know_1d9b88ea4f2f448c926382e7d899a27c",
125 | "type": "CONTAINS_KNOWLEDGE",
126 | "properties": {}
127 | },
128 | {
129 | "startNodeId": "portfolio-main",
130 | "endNodeId": "know_8edd2c00340b4959b5dd7bd493484a78",
131 | "type": "CONTAINS_KNOWLEDGE",
132 | "properties": {}
133 | },
134 | {
135 | "startNodeId": "portfolio-main",
136 | "endNodeId": "know_ea8b3222e36d4e26b73a0d865312413f",
137 | "type": "CONTAINS_KNOWLEDGE",
138 | "properties": {}
139 | },
140 | {
141 | "startNodeId": "portfolio-main",
142 | "endNodeId": "know_24f1739b78e1430e90a89cfe6c208d63",
143 | "type": "CONTAINS_KNOWLEDGE",
144 | "properties": {}
145 | },
146 | {
147 | "startNodeId": "portfolio-main",
148 | "endNodeId": "know_b3881c98f99c41a0992686aee015ad57",
149 | "type": "CONTAINS_KNOWLEDGE",
150 | "properties": {}
151 | },
152 | {
153 | "startNodeId": "portfolio-main",
154 | "endNodeId": "know_111b5c3bbc3d416883aab006ac1b8f2c",
155 | "type": "CONTAINS_KNOWLEDGE",
156 | "properties": {}
157 | },
158 | {
159 | "startNodeId": "portfolio-main",
160 | "endNodeId": "know_c9ebf9d01d0841b28fbdfc508a3754ef",
161 | "type": "CONTAINS_KNOWLEDGE",
162 | "properties": {}
163 | },
164 | {
165 | "startNodeId": "portfolio-main",
166 | "endNodeId": "task_3e955c8424fa48dbb6e9bcc27c0bed92",
167 | "type": "CONTAINS_TASK",
168 | "properties": {}
169 | },
170 | {
171 | "startNodeId": "portfolio-main",
172 | "endNodeId": "task_30dc34e190cf4c6690e8e4002269b8d5",
173 | "type": "CONTAINS_TASK",
174 | "properties": {}
175 | },
176 | {
177 | "startNodeId": "portfolio-main",
178 | "endNodeId": "task_5d3304ef2c9c4d6b9288fea38ed6ba84",
179 | "type": "CONTAINS_TASK",
180 | "properties": {}
181 | },
182 | {
183 | "startNodeId": "portfolio-main",
184 | "endNodeId": "task_3f5dd265abf04cc3983550a7b2a5f7fd",
185 | "type": "CONTAINS_TASK",
186 | "properties": {}
187 | },
188 | {
189 | "startNodeId": "portfolio-main",
190 | "endNodeId": "task_0fa6a009306b41108f5292475754c33a",
191 | "type": "CONTAINS_TASK",
192 | "properties": {}
193 | },
194 | {
195 | "startNodeId": "portfolio-main",
196 | "endNodeId": "task_fba80d348f274e908de4ee988df754cc",
197 | "type": "CONTAINS_TASK",
198 | "properties": {}
199 | },
200 | {
201 | "startNodeId": "portfolio-main",
202 | "endNodeId": "task_4a224bcddf5246afaa732834d84b4b73",
203 | "type": "CONTAINS_TASK",
204 | "properties": {}
205 | },
206 | {
207 | "startNodeId": "portfolio-main",
208 | "endNodeId": "task_4da902b4e18a46c4b5b5d1043c7d0665",
209 | "type": "CONTAINS_TASK",
210 | "properties": {}
211 | },
212 | {
213 | "startNodeId": "portfolio-main",
214 | "endNodeId": "task_ac11f7a5f83c4f339cae63de850ecda6",
215 | "type": "CONTAINS_TASK",
216 | "properties": {}
217 | },
218 | {
219 | "startNodeId": "task_4da902b4e18a46c4b5b5d1043c7d0665",
220 | "endNodeId": "task_ac11f7a5f83c4f339cae63de850ecda6",
221 | "type": "DEPENDS_ON",
222 | "properties": {
223 | "id": "tdep_421a11c847ca4882b230bc1642b58787",
224 | "createdAt": "2025-03-26T18:40:12.040Z"
225 | }
226 | },
227 | {
228 | "startNodeId": "task_3e955c8424fa48dbb6e9bcc27c0bed92",
229 | "endNodeId": "task_30dc34e190cf4c6690e8e4002269b8d5",
230 | "type": "DEPENDS_ON",
231 | "properties": {
232 | "id": "tdep_aef200e791fe4bbc8b91c08ea85d7a28",
233 | "createdAt": "2025-03-26T18:41:18.406Z"
234 | }
235 | },
236 | {
237 | "startNodeId": "task_3e955c8424fa48dbb6e9bcc27c0bed92",
238 | "endNodeId": "task_5d3304ef2c9c4d6b9288fea38ed6ba84",
239 | "type": "DEPENDS_ON",
240 | "properties": {
241 | "id": "tdep_b92de5768b50443ebbebf21d2f430ef4",
242 | "createdAt": "2025-03-26T18:41:18.396Z"
243 | }
244 | },
245 | {
246 | "startNodeId": "task_3e955c8424fa48dbb6e9bcc27c0bed92",
247 | "endNodeId": "task_3f5dd265abf04cc3983550a7b2a5f7fd",
248 | "type": "DEPENDS_ON",
249 | "properties": {
250 | "id": "tdep_f8aaf167d5d647b9b55deb3238e1c43f",
251 | "createdAt": "2025-03-26T18:41:18.386Z"
252 | }
253 | },
254 | {
255 | "startNodeId": "task_3e955c8424fa48dbb6e9bcc27c0bed92",
256 | "endNodeId": "task_0fa6a009306b41108f5292475754c33a",
257 | "type": "DEPENDS_ON",
258 | "properties": {
259 | "id": "tdep_96305fbfb61c4865bb86dcc8aa465397",
260 | "createdAt": "2025-03-26T18:41:18.371Z"
261 | }
262 | },
263 | {
264 | "startNodeId": "task_3e955c8424fa48dbb6e9bcc27c0bed92",
265 | "endNodeId": "task_fba80d348f274e908de4ee988df754cc",
266 | "type": "DEPENDS_ON",
267 | "properties": {
268 | "id": "tdep_de6d7c885c8445578b1a3bfa45659ecf",
269 | "createdAt": "2025-03-26T18:41:18.362Z"
270 | }
271 | },
272 | {
273 | "startNodeId": "task_3e955c8424fa48dbb6e9bcc27c0bed92",
274 | "endNodeId": "task_4a224bcddf5246afaa732834d84b4b73",
275 | "type": "DEPENDS_ON",
276 | "properties": {
277 | "id": "tdep_f547fbd917ca4f3284094281bed0cf39",
278 | "createdAt": "2025-03-26T18:41:18.352Z"
279 | }
280 | },
281 | {
282 | "startNodeId": "task_3e955c8424fa48dbb6e9bcc27c0bed92",
283 | "endNodeId": "task_4da902b4e18a46c4b5b5d1043c7d0665",
284 | "type": "DEPENDS_ON",
285 | "properties": {
286 | "id": "tdep_422dbc91eaca4a3c9fc4f52f18893914",
287 | "createdAt": "2025-03-26T18:41:18.342Z"
288 | }
289 | },
290 | {
291 | "startNodeId": "know_b3881c98f99c41a0992686aee015ad57",
292 | "endNodeId": "cite_a77ccf2317c84c15b572ee8bc565c7f9",
293 | "type": "CITES",
294 | "properties": {}
295 | },
296 | {
297 | "startNodeId": "know_b3881c98f99c41a0992686aee015ad57",
298 | "endNodeId": "cite_a8e6328e26cf45aeaf058671f49e23ed",
299 | "type": "CITES",
300 | "properties": {}
301 | },
302 | {
303 | "startNodeId": "know_24f1739b78e1430e90a89cfe6c208d63",
304 | "endNodeId": "cite_8cb216010e734c5ab9c897e289989b8c",
305 | "type": "CITES",
306 | "properties": {}
307 | },
308 | {
309 | "startNodeId": "know_24f1739b78e1430e90a89cfe6c208d63",
310 | "endNodeId": "cite_137a7d753f044d06868b5758fa22433e",
311 | "type": "CITES",
312 | "properties": {}
313 | },
314 | {
315 | "startNodeId": "know_24f1739b78e1430e90a89cfe6c208d63",
316 | "endNodeId": "cite_bcf2dd0751404fd4b8aceba3d6d14871",
317 | "type": "CITES",
318 | "properties": {}
319 | }
320 | ]
321 |
```
--------------------------------------------------------------------------------
/src/mcp/server.ts:
--------------------------------------------------------------------------------
```typescript
1 | /**
2 | * Main entry point for the MCP (Model Context Protocol) server.
3 | * This file orchestrates the server's lifecycle:
4 | * 1. Initializes the core McpServer instance with its identity and capabilities.
5 | * 2. Registers available resources and tools, making them discoverable and usable by clients.
6 | * 3. Selects and starts the appropriate communication transport (stdio or Streamable HTTP)
7 | * based on configuration.
8 | * 4. Handles top-level error management during startup.
9 | *
10 | * MCP Specification References:
11 | * - Lifecycle: https://github.com/modelcontextprotocol/modelcontextprotocol/blob/main/docs/specification/2025-03-26/basic/lifecycle.mdx
12 | * - Overview (Capabilities): https://github.com/modelcontextprotocol/modelcontextprotocol/blob/main/docs/specification/2025-03-26/basic/index.mdx
13 | * - Transports: https://github.com/modelcontextprotocol/modelcontextprotocol/blob/main/docs/specification/2025-03-26/basic/transports.mdx
14 | */
15 |
16 | import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
17 | import http from "http";
18 | // Import validated configuration and environment details.
19 | import { config, environment } from "../config/index.js";
20 | // Import core utilities: ErrorHandler, logger, requestContextService.
21 | import { initializeNeo4jSchema } from "../services/neo4j/index.js"; // Corrected path
22 | import { ErrorHandler, logger, requestContextService } from "../utils/index.js"; // Corrected path
23 |
24 | // Import tool registrations
25 | import { registerAtlasDatabaseCleanTool } from "./tools/atlas_database_clean/index.js";
26 | import { registerAtlasDeepResearchTool } from "./tools/atlas_deep_research/index.js";
27 | import { registerAtlasKnowledgeAddTool } from "./tools/atlas_knowledge_add/index.js";
28 | import { registerAtlasKnowledgeDeleteTool } from "./tools/atlas_knowledge_delete/index.js";
29 | import { registerAtlasKnowledgeListTool } from "./tools/atlas_knowledge_list/index.js";
30 | import { registerAtlasProjectCreateTool } from "./tools/atlas_project_create/index.js";
31 | import { registerAtlasProjectDeleteTool } from "./tools/atlas_project_delete/index.js";
32 | import { registerAtlasProjectListTool } from "./tools/atlas_project_list/index.js";
33 | import { registerAtlasProjectUpdateTool } from "./tools/atlas_project_update/index.js";
34 | import { registerAtlasTaskCreateTool } from "./tools/atlas_task_create/index.js";
35 | import { registerAtlasTaskDeleteTool } from "./tools/atlas_task_delete/index.js";
36 | import { registerAtlasTaskListTool } from "./tools/atlas_task_list/index.js";
37 | import { registerAtlasTaskUpdateTool } from "./tools/atlas_task_update/index.js";
38 | import { registerAtlasUnifiedSearchTool } from "./tools/atlas_unified_search/index.js";
39 |
40 | // Import resource registrations
41 | import { registerMcpResources } from "./resources/index.js"; // Adjusted path
42 |
43 | // Import transport setup functions.
44 | import { startHttpTransport } from "./transports/httpTransport.js";
45 | import { connectStdioTransport } from "./transports/stdioTransport.js";
46 |
47 | /**
48 | * Creates and configures a new instance of the McpServer.
49 | *
50 | * This function is central to defining the server's identity and functionality
51 | * as presented to connecting clients during the MCP initialization phase.
52 | */
53 | async function createMcpServerInstance(): Promise<McpServer> {
54 | const context = requestContextService.createRequestContext({
55 | operation: "createMcpServerInstance",
56 | });
57 | logger.info("Initializing MCP server instance for ATLAS MCP Server", context);
58 |
59 | // Configure the request context service (used for correlating logs/errors).
60 | requestContextService.configure({
61 | appName: config.mcpServerName,
62 | appVersion: config.mcpServerVersion,
63 | environment,
64 | });
65 |
66 | // Initialize Neo4j database and services
67 | logger.info("Initializing Neo4j schema...", context);
68 | await initializeNeo4jSchema();
69 | logger.info("Neo4j schema initialized successfully", context);
70 |
71 | // Instantiate the core McpServer using the SDK.
72 | logger.debug("Instantiating McpServer with capabilities", {
73 | ...context,
74 | serverInfo: {
75 | name: config.mcpServerName,
76 | version: config.mcpServerVersion,
77 | },
78 | capabilities: {
79 | logging: {}, // Indicates support for logging control and notifications
80 | resources: { listChanged: true }, // Supports dynamic resource lists
81 | tools: {
82 | listChanged: true, // Supports dynamic tool lists
83 | requestContext: true, // Enable request context for all tools
84 | rateLimit: {
85 | // Default rate limit settings for tools
86 | windowMs: config.security.rateLimitWindowMs || 60 * 1000, // Use config or default
87 | maxRequests: config.security.rateLimitMaxRequests || 100, // Use config or default
88 | },
89 | permissions: {
90 | // Permissions requirements for tools
91 | required: config.security.authRequired,
92 | },
93 | },
94 | },
95 | });
96 |
97 | const server = new McpServer(
98 | { name: config.mcpServerName, version: config.mcpServerVersion },
99 | {
100 | capabilities: {
101 | logging: {},
102 | resources: { listChanged: true },
103 | tools: {
104 | listChanged: true,
105 | requestContext: true,
106 | rateLimit: {
107 | windowMs: config.security.rateLimitWindowMs || 60 * 1000,
108 | maxRequests: config.security.rateLimitMaxRequests || 100,
109 | },
110 | permissions: {
111 | required: config.security.authRequired,
112 | },
113 | },
114 | },
115 | },
116 | );
117 |
118 | try {
119 | logger.debug("Registering ATLAS resources and tools...", context);
120 | // Register Atlas resources
121 | await registerMcpResources(server);
122 |
123 | // Register Atlas tools
124 | await registerAtlasProjectCreateTool(server);
125 | await registerAtlasProjectListTool(server);
126 | await registerAtlasProjectUpdateTool(server);
127 | await registerAtlasProjectDeleteTool(server);
128 | await registerAtlasTaskCreateTool(server);
129 | await registerAtlasTaskDeleteTool(server);
130 | await registerAtlasTaskListTool(server);
131 | await registerAtlasTaskUpdateTool(server);
132 | await registerAtlasDatabaseCleanTool(server);
133 | await registerAtlasKnowledgeAddTool(server);
134 | await registerAtlasKnowledgeDeleteTool(server);
135 | await registerAtlasKnowledgeListTool(server);
136 | await registerAtlasUnifiedSearchTool(server);
137 | await registerAtlasDeepResearchTool(server);
138 |
139 | logger.info("ATLAS Resources and tools registered successfully", context);
140 | } catch (err) {
141 | logger.error("Failed to register ATLAS resources/tools", {
142 | ...context,
143 | error: err instanceof Error ? err.message : String(err),
144 | stack: err instanceof Error ? err.stack : undefined,
145 | });
146 | throw err;
147 | }
148 |
149 | return server;
150 | }
151 |
152 | /**
153 | * Selects, sets up, and starts the appropriate MCP transport layer based on configuration.
154 | */
155 | async function startTransport(): Promise<McpServer | http.Server | void> {
156 | const transportType = config.mcpTransportType;
157 | const parentContext = requestContextService.createRequestContext({
158 | operation: "startTransport",
159 | transport: transportType,
160 | });
161 | logger.info(
162 | `Starting transport for ATLAS MCP Server: ${transportType}`,
163 | parentContext,
164 | );
165 |
166 | if (transportType === "http") {
167 | logger.debug(
168 | "Delegating to startHttpTransport for ATLAS MCP Server...",
169 | parentContext,
170 | );
171 | const httpServerInstance = await startHttpTransport(createMcpServerInstance, parentContext);
172 | return httpServerInstance;
173 | }
174 |
175 | if (transportType === "stdio") {
176 | logger.debug(
177 | "Creating single McpServer instance for stdio transport (ATLAS MCP Server)...",
178 | parentContext,
179 | );
180 | const server = await createMcpServerInstance();
181 | logger.debug(
182 | "Delegating to connectStdioTransport for ATLAS MCP Server...",
183 | parentContext,
184 | );
185 | await connectStdioTransport(server, parentContext);
186 | return server;
187 | }
188 |
189 | logger.fatal(
190 | `Unsupported transport type configured for ATLAS MCP Server: ${transportType}`,
191 | parentContext,
192 | );
193 | throw new Error(
194 | `Unsupported transport type: ${transportType}. Must be 'stdio' or 'http'.`,
195 | );
196 | }
197 |
198 | /**
199 | * Main application entry point. Initializes and starts the MCP server.
200 | */
201 | export async function initializeAndStartServer(): Promise<void | McpServer | http.Server> {
202 | const context = requestContextService.createRequestContext({
203 | operation: "initializeAndStartServer",
204 | });
205 | logger.info("ATLAS MCP Server initialization sequence started.", context);
206 | try {
207 | const result = await startTransport();
208 | logger.info(
209 | "ATLAS MCP Server initialization sequence completed successfully.",
210 | context,
211 | );
212 | return result;
213 | } catch (err) {
214 | logger.fatal("Critical error during ATLAS MCP server initialization.", {
215 | ...context,
216 | error: err instanceof Error ? err.message : String(err),
217 | stack: err instanceof Error ? err.stack : undefined,
218 | });
219 | ErrorHandler.handleError(err, {
220 | operation: "initializeAndStartServer", // More specific operation
221 | context: context, // Pass the existing context
222 | critical: true, // This is a critical failure
223 | });
224 | logger.info(
225 | "Exiting process due to critical initialization error (ATLAS MCP Server).",
226 | context,
227 | );
228 | process.exit(1);
229 | }
230 | }
231 |
```
--------------------------------------------------------------------------------
/src/mcp/tools/atlas_project_create/index.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
2 | import { z } from "zod";
3 | import { ProjectStatus, createProjectStatusEnum } from "../../../types/mcp.js";
4 | import {
5 | createToolExample,
6 | createToolMetadata,
7 | registerTool,
8 | } from "../../../types/tool.js";
9 | import { atlasCreateProject } from "./createProject.js";
10 | import { AtlasProjectCreateSchemaShape } from "./types.js";
11 |
/**
 * Registers the `atlas_project_create` MCP tool on the provided server.
 *
 * Input is validated against `AtlasProjectCreateSchemaShape`, which supports two
 * modes: "single" (one project from top-level fields) and "bulk" (a `projects`
 * array). The actual creation logic lives in `atlasCreateProject`; this module
 * only wires up registration metadata (examples, permission, return schema,
 * rate limit).
 */
export const registerAtlasProjectCreateTool = (server: McpServer) => {
  registerTool(
    server,
    "atlas_project_create",
    "Creates and initializes new projects within Atlas task management with comprehensive metadata, dependencies, and integration with the knowledge management system",
    AtlasProjectCreateSchemaShape,
    atlasCreateProject,
    createToolMetadata({
      examples: [
        // Example 1: single-project creation (mode: "single").
        createToolExample(
          {
            mode: "single",
            name: "Microservice Architecture Migration",
            description:
              "Refactor monolithic application into scalable microservices architecture with distributed data stores and API gateway",
            status: "active",
            urls: [
              {
                title: "MCP Server Repository",
                url: "https://github.com/cyanheads/atlas-mcp-server.git",
              },
              {
                title: "Technical Spec",
                url: "file:///Users/username/project_name/docs/atlas-reference.md",
              },
            ],
            completionRequirements:
              "All critical services migrated with 100% test coverage, performance metrics meeting SLAs, and zero regressions in core functionality",
            outputFormat:
              "Containerized services with CI/CD pipelines, comprehensive API documentation, and migration runbook",
            taskType: "integration",
          },
          `{
  "id": "proj_ms_migration",
  "name": "Microservice Architecture Migration",
  "description": "Refactor monolithic application into scalable microservices architecture with distributed data stores and API gateway",
  "status": "active",
  "urls": [{"title": "MCP Server Repository", "url": "https://github.com/cyanheads/atlas-mcp-server.git"}, {"title": "Technical Spec", "url": "file:///Users/username/project_name/docs/atlas-reference.md"}],
  "completionRequirements": "All critical services migrated with 100% test coverage, performance metrics meeting SLAs, and zero regressions in core functionality",
  "outputFormat": "Containerized services with CI/CD pipelines, comprehensive API documentation, and migration runbook",
  "taskType": "integration",
  "createdAt": "2025-03-23T10:11:24.123Z",
  "updatedAt": "2025-03-23T10:11:24.123Z"
}`,
          "Initialize a high-complexity engineering project with detailed technical specifications and success criteria",
        ),
        // Example 2: bulk creation (mode: "bulk"); response reports created
        // projects plus a per-item errors array.
        createToolExample(
          {
            mode: "bulk",
            projects: [
              {
                name: "GraphQL API Implementation",
                description:
                  "Design and implement GraphQL API layer to replace existing REST endpoints with optimized query capabilities",
                completionRequirements:
                  "API supports all current use cases with n+1 query optimization, proper error handling, and 95% test coverage",
                outputFormat:
                  "TypeScript-based GraphQL schema with resolvers, documentation, and integration tests",
                taskType: "generation",
              },
              {
                name: "Performance Optimization Suite",
                description:
                  "Identify and resolve frontend rendering bottlenecks in React application through profiling and optimization techniques",
                status: "pending",
                completionRequirements:
                  "Core React components meet Web Vitals thresholds with 50% reduction in LCP and TTI metrics",
                outputFormat:
                  "Optimized component library, performance test suite, and technical recommendation document",
                taskType: "analysis",
              },
            ],
          },
          `{
  "success": true,
  "message": "Successfully created 2 projects",
  "created": [
    {
      "id": "proj_graphql",
      "name": "GraphQL API Implementation",
      "description": "Design and implement GraphQL API layer to replace existing REST endpoints with optimized query capabilities",
      "status": "active",
      "urls": [],
      "completionRequirements": "API supports all current use cases with n+1 query optimization, proper error handling, and 95% test coverage",
      "outputFormat": "TypeScript-based GraphQL schema with resolvers, documentation, and integration tests",
      "taskType": "generation",
      "createdAt": "2025-03-23T10:11:24.123Z",
      "updatedAt": "2025-03-23T10:11:24.123Z"
    },
    {
      "id": "proj_perf",
      "name": "Performance Optimization Suite",
      "description": "Identify and resolve frontend rendering bottlenecks in React application through profiling and optimization techniques",
      "status": "pending",
      "urls": [],
      "completionRequirements": "Core React components meet Web Vitals thresholds with 50% reduction in LCP and TTI metrics",
      "outputFormat": "Optimized component library, performance test suite, and technical recommendation document",
      "taskType": "analysis",
      "createdAt": "2025-03-23T10:11:24.456Z",
      "updatedAt": "2025-03-23T10:11:24.456Z"
    }
  ],
  "errors": []
}`,
          "Batch-initialize multiple specialized engineering projects with distinct technical requirements",
        ),
      ],
      requiredPermission: "project:create",
      // Return schema is a union: a bare project object (single mode) or a
      // bulk result envelope with created projects and per-item errors.
      returnSchema: z.union([
        // Single project response
        z.object({
          id: z.string().describe("Project ID"),
          name: z.string().describe("Project name"),
          description: z.string().describe("Project description"),
          status: createProjectStatusEnum().describe("Project status"),
          urls: z
            .array(
              z.object({
                title: z.string(),
                url: z.string(),
              }),
            )
            .describe("Reference materials"),
          completionRequirements: z.string().describe("Completion criteria"),
          outputFormat: z.string().describe("Deliverable format"),
          taskType: z.string().describe("Project classification"),
          createdAt: z.string().describe("Creation timestamp"),
          updatedAt: z.string().describe("Last update timestamp"),
        }),
        // Bulk creation response
        z.object({
          success: z.boolean().describe("Operation success status"),
          message: z.string().describe("Result message"),
          created: z
            .array(
              z.object({
                id: z.string().describe("Project ID"),
                name: z.string().describe("Project name"),
                description: z.string().describe("Project description"),
                status: createProjectStatusEnum().describe("Project status"),
                urls: z
                  .array(
                    z.object({
                      title: z.string(),
                      url: z.string(),
                    }),
                  )
                  .describe("Reference materials"),
                completionRequirements: z
                  .string()
                  .describe("Completion criteria"),
                outputFormat: z.string().describe("Deliverable format"),
                taskType: z.string().describe("Project classification"),
                createdAt: z.string().describe("Creation timestamp"),
                updatedAt: z.string().describe("Last update timestamp"),
              }),
            )
            .describe("Created projects"),
          errors: z
            .array(
              z.object({
                index: z.number().describe("Index in the projects array"),
                project: z.any().describe("Original project data"),
                error: z
                  .object({
                    code: z.string().describe("Error code"),
                    message: z.string().describe("Error message"),
                    details: z
                      .any()
                      .optional()
                      .describe("Additional error details"),
                  })
                  .describe("Error information"),
              }),
            )
            .describe("Creation errors"),
        }),
      ]),
      rateLimit: {
        windowMs: 60 * 1000, // 1 minute
        maxRequests: 10, // 10 requests per minute (either single or bulk)
      },
    }),
  );
};
197 |
```
--------------------------------------------------------------------------------
/src/webui/logic/api-service.js:
--------------------------------------------------------------------------------
```javascript
1 | /**
2 | * @fileoverview Manages all interactions with the Neo4j backend.
3 | * @module src/webui/logic/api-service
4 | */
5 |
6 | import { config } from "./config.js";
7 | import { dom } from "./dom-elements.js"; // Though not directly used, good for consistency if needed later
8 | import { state, utils } from "./app-state.js";
9 | import { uiHelpers } from "./ui-service.js";
10 | import { renderHelpers } from "./ui-service.js"; // For rendering after fetching
11 |
12 | /**
13 | * Neo4j API interaction service.
14 | * @type {object}
15 | */
export const api = {
  /**
   * Connects to the Neo4j database and verifies connectivity.
   * Initializes `state.driver`.
   * @returns {Promise<boolean>} True if connection is successful, false otherwise.
   */
  connect: async () => {
    uiHelpers.clearError();
    uiHelpers.updateNeo4jStatus("Connecting...", "var(--warning-color)");
    try {
      // `neo4j` is a browser global loaded from a CDN script tag.
      if (typeof neo4j === "undefined") {
        throw new Error(
          "Neo4j driver not loaded. Check CDN link in index.html.",
        );
      }
      state.driver = neo4j.driver(
        config.NEO4J_URI,
        neo4j.auth.basic(config.NEO4J_USER, config.NEO4J_PASSWORD),
      );
      await state.driver.verifyConnectivity();
      uiHelpers.updateNeo4jStatus("Connected", "var(--success-color)");
      console.log("Successfully connected to Neo4j.");
      return true;
    } catch (error) {
      console.error("Neo4j Connection Error:", error);
      uiHelpers.showError(
        `Neo4j Connection Error: ${error.message}. Check console and credentials.`,
        true,
      );
      if (dom.projectSelect) {
        dom.projectSelect.innerHTML =
          '<option value="">Neo4j Connection Error</option>';
      }
      return false;
    }
  },

  /**
   * Runs a Cypher query against the Neo4j database.
   * Neo4j integers are unwrapped to native numbers; node values (and arrays
   * of nodes) are flattened to plain property objects.
   * @param {string} query - The Cypher query to execute.
   * @param {object} [params={}] - Parameters for the query.
   * @returns {Promise<Array<object>>} A promise that resolves to an array of records.
   * @throws {Error} If not connected to Neo4j or if query fails.
   */
  runQuery: async (query, params = {}) => {
    if (!state.driver) {
      uiHelpers.showError("Not connected to Neo4j.", true);
      throw new Error("Not connected to Neo4j.");
    }
    // Converts a Neo4j node's `properties` map into a plain object,
    // unwrapping Neo4j integer values into native JS numbers.
    const toPlainProps = (properties) => {
      const props = {};
      Object.keys(properties).forEach((propKey) => {
        const propValue = properties[propKey];
        props[propKey] = neo4j.isInt(propValue)
          ? propValue.toNumber()
          : propValue;
      });
      return props;
    };
    const session = state.driver.session();
    try {
      const result = await session.run(query, params);
      return result.records.map((record) => {
        const obj = {};
        record.keys.forEach((key) => {
          const value = record.get(key);
          if (neo4j.isInt(value)) {
            obj[key] = value.toNumber();
          } else if (value && typeof value === "object" && value.properties) {
            // Single Node
            obj[key] = toPlainProps(value.properties);
          } else if (
            Array.isArray(value) &&
            value.every(
              (item) => item && typeof item === "object" && item.properties,
            )
          ) {
            // Array of Nodes
            obj[key] = value.map((item) => toPlainProps(item.properties));
          } else {
            obj[key] = value;
          }
        });
        return obj;
      });
    } finally {
      await session.close();
    }
  },

  /**
   * Fetches all projects and populates the project selection dropdown.
   * Restores the last selected project from localStorage when possible,
   * otherwise auto-selects the first project and loads its details.
   */
  fetchProjects: async () => {
    if (dom.projectSelect)
      uiHelpers.showLoading(dom.projectSelect, "Loading projects...");
    uiHelpers.setDisplay(dom.projectDetailsContainer, false);
    uiHelpers.setDisplay(dom.tasksContainer, false);
    uiHelpers.setDisplay(dom.knowledgeContainer, false);
    uiHelpers.clearError();

    if (!state.driver) {
      const connected = await api.connect();
      if (!connected) return;
    }

    try {
      const projectsData = await api.runQuery(
        "MATCH (p:Project) RETURN p.id as id, p.name as name ORDER BY p.name",
      );
      if (dom.projectSelect) {
        dom.projectSelect.innerHTML =
          '<option value="">-- Select a Project --</option>';
        let autoSelectedProjectId = null;
        if (projectsData && projectsData.length > 0) {
          projectsData.forEach((project) => {
            const option = document.createElement("option");
            option.value = project.id;
            option.textContent = utils.escapeHtml(project.name);
            dom.projectSelect.appendChild(option);
          });

          const lastSelectedProjectId = localStorage.getItem(
            "lastSelectedProjectId",
          );
          const projectIds = projectsData.map((p) => p.id);

          if (
            lastSelectedProjectId &&
            projectIds.includes(lastSelectedProjectId)
          ) {
            dom.projectSelect.value = lastSelectedProjectId;
            autoSelectedProjectId = lastSelectedProjectId;
          } else if (projectIds.length > 0) {
            dom.projectSelect.value = projectIds[0];
            autoSelectedProjectId = projectIds[0];
          }
        } else {
          dom.projectSelect.innerHTML =
            '<option value="">No projects found</option>';
        }

        if (autoSelectedProjectId) {
          // Automatically fetch details for the selected project
          api.fetchProjectDetails(autoSelectedProjectId);
        }
      }
    } catch (error) {
      console.error("Failed to fetch projects:", error);
      if (dom.projectSelect) {
        dom.projectSelect.innerHTML =
          '<option value="">Error loading projects</option>';
      }
      uiHelpers.showError(`Error loading projects: ${error.message}`);
    }
  },

  /**
   * Fetches details for a specific project, including its tasks and knowledge items.
   * Updates the application state and renders the fetched data.
   * @param {string} projectId - The ID of the project to fetch.
   */
  fetchProjectDetails: async (projectId) => {
    state.currentProjectId = projectId;
    if (!projectId) {
      uiHelpers.setDisplay(dom.projectDetailsContainer, false);
      uiHelpers.setDisplay(dom.tasksContainer, false);
      uiHelpers.setDisplay(dom.knowledgeContainer, false);
      return;
    }

    if (dom.detailsContent)
      uiHelpers.showLoading(dom.detailsContent, "Loading project details...");
    if (dom.tasksContent)
      uiHelpers.showLoading(dom.tasksContent, "Loading tasks...");
    if (dom.knowledgeContent)
      uiHelpers.showLoading(dom.knowledgeContent, "Loading knowledge items...");
    uiHelpers.setDisplay(dom.projectDetailsContainer, true);
    uiHelpers.setDisplay(dom.tasksContainer, true);
    uiHelpers.setDisplay(dom.knowledgeContainer, true);

    // Reset the task-flow view to the default list view on every navigation.
    state.showingTaskFlow = false;
    uiHelpers.setDisplay(dom.taskFlowContainer, false);
    uiHelpers.updateToggleButton(
      dom.taskFlowToggle,
      false,
      "View Task List",
      "View Task Flow",
    );
    uiHelpers.clearError();

    try {
      const projectResult = await api.runQuery(
        "MATCH (p:Project {id: $projectId}) RETURN p",
        { projectId },
      );
      state.currentProject =
        projectResult.length > 0 ? projectResult[0].p : null;
      if (dom.detailsContent)
        renderHelpers.projectDetails(state.currentProject, dom.detailsContent);

      const tasksQuery = `
        MATCH (proj:Project {id: $projectId})-[:CONTAINS_TASK]->(task:Task)
        OPTIONAL MATCH (task)-[:DEPENDS_ON]->(dependency:Task)
        RETURN task, collect(dependency.id) as dependencyIds
        ORDER BY task.title
      `;
      const tasksResult = await api.runQuery(tasksQuery, {
        projectId,
      });
      state.currentTasks = tasksResult.map((r) => ({
        ...r.task,
        dependencyIds: r.dependencyIds || [],
      }));
      if (dom.tasksContent)
        renderHelpers.tasks(
          state.currentTasks,
          dom.tasksContent,
          state.tasksViewMode,
        );

      const knowledgeResult = await api.runQuery(
        "MATCH (p:Project {id: $projectId})-[:CONTAINS_KNOWLEDGE]->(k:Knowledge) RETURN k ORDER BY k.createdAt DESC",
        { projectId },
      );
      state.currentKnowledgeItems = knowledgeResult.map((r) => r.k);
      if (dom.knowledgeContent)
        renderHelpers.knowledgeItems(
          state.currentKnowledgeItems,
          dom.knowledgeContent,
          state.knowledgeViewMode,
        );
    } catch (error) {
      console.error(`Failed to fetch details for project ${projectId}:`, error);
      uiHelpers.showError(`Error loading project data: ${error.message}`);
      if (dom.detailsContent)
        dom.detailsContent.innerHTML = `<p class="error">Error loading project details.</p>`;
      if (dom.tasksContent)
        dom.tasksContent.innerHTML = `<p class="error">Error loading tasks.</p>`;
      if (dom.knowledgeContent)
        dom.knowledgeContent.innerHTML = `<p class="error">Error loading knowledge items.</p>`;
    }
  },
};
266 |
```
--------------------------------------------------------------------------------
/src/services/neo4j/driver.ts:
--------------------------------------------------------------------------------
```typescript
1 | import neo4j, { Driver, ManagedTransaction, Session } from "neo4j-driver";
2 | import { config } from "../../config/index.js";
3 | import { logger, requestContextService } from "../../utils/index.js"; // Updated import path
4 | import { exportDatabase } from "./index.js"; // Import the export function for backup trigger
5 | import { databaseEvents, DatabaseEventType } from "./events.js";
6 |
7 | /**
8 | * Neo4j connection management singleton
9 | * Responsible for creating and managing the Neo4j driver connection
10 | */
11 | class Neo4jDriver {
12 | private static instance: Neo4jDriver;
13 | private driver: Driver | null = null;
14 | private connectionPromise: Promise<Driver> | null = null;
15 | private transactionCounter: number = 0;
16 |
17 | private constructor() {}
18 |
19 | /**
20 | * Get the Neo4jDriver singleton instance
21 | */
22 | public static getInstance(): Neo4jDriver {
23 | if (!Neo4jDriver.instance) {
24 | Neo4jDriver.instance = new Neo4jDriver();
25 | }
26 | return Neo4jDriver.instance;
27 | }
28 |
29 | /**
30 | * Initialize the Neo4j driver connection
31 | * @returns Promise that resolves to the Neo4j driver
32 | */
33 | private async initDriver(): Promise<Driver> {
34 | if (this.driver) {
35 | return this.driver;
36 | }
37 |
38 | try {
39 | const { neo4jUri, neo4jUser, neo4jPassword } = config;
40 |
41 | if (!neo4jUri || !neo4jUser || !neo4jPassword) {
42 | throw new Error("Neo4j connection details are not properly configured");
43 | }
44 | const reqContext = requestContextService.createRequestContext({
45 | operation: "Neo4jDriver.initDriver",
46 | });
47 |
48 | logger.info("Initializing Neo4j driver connection", reqContext);
49 |
50 | this.driver = neo4j.driver(
51 | neo4jUri,
52 | neo4j.auth.basic(neo4jUser, neo4jPassword),
53 | {
54 | maxConnectionLifetime: 3 * 60 * 60 * 1000, // 3 hours
55 | maxConnectionPoolSize: 50,
56 | connectionAcquisitionTimeout: 2 * 60 * 1000, // 2 minutes
57 | disableLosslessIntegers: true, // Recommended for JS compatibility
58 | },
59 | );
60 |
61 | // Verify connection
62 | await this.driver.verifyConnectivity();
63 |
64 | logger.info(
65 | "Neo4j driver connection established successfully",
66 | reqContext,
67 | );
68 | return this.driver;
69 | } catch (error) {
70 | const errorMessage =
71 | error instanceof Error ? error.message : String(error);
72 | // reqContext might not be defined if error occurs before its creation, so create one if needed
73 | const errorContext = requestContextService.createRequestContext({
74 | operation: "Neo4jDriver.initDriver.error",
75 | });
76 | logger.error("Failed to initialize Neo4j driver", error as Error, {
77 | ...errorContext,
78 | detail: errorMessage,
79 | });
80 | throw new Error(`Failed to initialize Neo4j connection: ${errorMessage}`);
81 | }
82 | }
83 |
84 | /**
85 | * Get the Neo4j driver instance, initializing it if necessary
86 | * @returns Promise that resolves to the Neo4j driver
87 | */
88 | public async getDriver(): Promise<Driver> {
89 | if (!this.connectionPromise) {
90 | this.connectionPromise = this.initDriver();
91 | }
92 | return this.connectionPromise;
93 | }
94 |
95 | /**
96 | * Create a new Neo4j session
97 | * @param database Optional database name
98 | * @returns Promise that resolves to a new Neo4j session
99 | */
100 | public async getSession(database?: string): Promise<Session> {
101 | const driver = await this.getDriver();
102 | // Use the default database configured for the driver instance
103 | // Neo4j Community Edition typically uses 'neo4j' or potentially 'system'
104 | // Passing undefined lets the driver use its default.
105 | return driver.session({
106 | database: database || undefined,
107 | defaultAccessMode: neo4j.session.WRITE,
108 | });
109 | }
110 |
111 | /**
112 | * Execute a query with a transaction
113 | * @param cypher Cypher query to execute
114 | * @param params Parameters for the query
115 | * @param database Optional database name
116 | * @returns Promise that resolves to the query result records
117 | */
118 | public async executeQuery<T = any>(
119 | cypher: string,
120 | params: Record<string, any> = {},
121 | database?: string,
122 | ): Promise<T[]> {
123 | const session = await this.getSession(database);
124 |
125 | try {
126 | const result = await session.executeWrite(
127 | async (tx: ManagedTransaction) => {
128 | const queryResult = await tx.run(cypher, params);
129 | return queryResult.records;
130 | },
131 | );
132 |
133 | // Publish write operation event
134 | // Publish write operation event
135 | this.publishWriteOperation({ query: cypher, params });
136 |
137 | // Removed: Trigger background backup after successful write
138 | // this.triggerBackgroundBackup(); // This was inefficient
139 |
140 | return result as unknown as T[];
141 | } catch (error) {
142 | const errorMessage =
143 | error instanceof Error ? error.message : String(error);
144 | const errorContext = requestContextService.createRequestContext({
145 | operation: "Neo4jDriver.executeQuery",
146 | query: cypher,
147 | // params: params // Consider sanitizing or summarizing params
148 | });
149 | logger.error("Error executing Neo4j query", error as Error, {
150 | ...errorContext,
151 | detail: errorMessage,
152 | });
153 |
154 | // Publish error event
155 | databaseEvents.publish(DatabaseEventType.ERROR, {
156 | timestamp: new Date().toISOString(),
157 | operation: "executeQuery",
158 | error: errorMessage,
159 | query: cypher,
160 | });
161 |
162 | throw error; // Re-throw the original error
163 | } finally {
164 | await session.close();
165 | }
166 | }
167 |
168 | /**
169 | * Execute a read-only query
170 | * @param cypher Cypher query to execute
171 | * @param params Parameters for the query
172 | * @param database Optional database name
173 | * @returns Promise that resolves to the query result records
174 | */
175 | public async executeReadQuery<T = any>(
176 | cypher: string,
177 | params: Record<string, any> = {},
178 | database?: string,
179 | ): Promise<T[]> {
180 | const session = await this.getSession(database);
181 |
182 | try {
183 | const result = await session.executeRead(
184 | async (tx: ManagedTransaction) => {
185 | const queryResult = await tx.run(cypher, params);
186 | return queryResult.records;
187 | },
188 | );
189 |
190 | // Publish read operation event
191 | databaseEvents.publish(DatabaseEventType.READ_OPERATION, {
192 | timestamp: new Date().toISOString(),
193 | query: cypher,
194 | });
195 |
196 | return result as unknown as T[];
197 | } catch (error) {
198 | const errorMessage =
199 | error instanceof Error ? error.message : String(error);
200 | const errorContext = requestContextService.createRequestContext({
201 | operation: "Neo4jDriver.executeReadQuery",
202 | query: cypher,
203 | // params: params // Consider sanitizing or summarizing params
204 | });
205 | logger.error("Error executing Neo4j read query", error as Error, {
206 | ...errorContext,
207 | detail: errorMessage,
208 | });
209 |
210 | // Publish error event
211 | databaseEvents.publish(DatabaseEventType.ERROR, {
212 | timestamp: new Date().toISOString(),
213 | operation: "executeReadQuery",
214 | error: errorMessage,
215 | query: cypher,
216 | });
217 |
218 | throw error; // Re-throw the original error
219 | } finally {
220 | await session.close();
221 | }
222 | }
223 |
224 | /**
225 | * Publish a database write operation event
226 | * @param operation Details about the operation
227 | * @private
228 | */
229 | private publishWriteOperation(operation: {
230 | query: string;
231 | params?: Record<string, any>;
232 | }): void {
233 | this.transactionCounter++;
234 | databaseEvents.publish(DatabaseEventType.WRITE_OPERATION, {
235 | timestamp: new Date().toISOString(),
236 | transactionId: this.transactionCounter,
237 | operation,
238 | });
239 | }
240 |
241 | /**
242 | * Triggers a database backup in the background, including rotation logic.
243 | * Logs errors but does not throw to avoid interrupting the main flow.
244 | * @private
245 | */
246 | private triggerBackgroundBackup(): void {
247 | const reqContext = requestContextService.createRequestContext({
248 | operation: "Neo4jDriver.triggerBackgroundBackup",
249 | });
250 | logger.debug(
251 | "Triggering background database backup with rotation...",
252 | reqContext,
253 | );
254 | // Run backup in the background without awaiting it
255 | exportDatabase()
256 | .then((backupPath) => {
257 | logger.info(`Background database backup successful: ${backupPath}`, {
258 | ...reqContext,
259 | backupPath,
260 | });
261 | })
262 | .catch((error) => {
263 | const errorMessage =
264 | error instanceof Error ? error.message : String(error);
265 | logger.error("Background database backup failed:", error as Error, {
266 | ...reqContext,
267 | detail: errorMessage,
268 | });
269 | // Consider adding more robust error handling/notification if needed
270 | });
271 | }
272 |
273 | /**
274 | * Close the Neo4j driver connection
275 | */
276 | public async close(): Promise<void> {
277 | const reqContext = requestContextService.createRequestContext({
278 | operation: "Neo4jDriver.close",
279 | });
280 | if (this.driver) {
281 | try {
282 | await this.driver.close();
283 | this.driver = null;
284 | this.connectionPromise = null;
285 | logger.info("Neo4j driver connection closed", reqContext);
286 | } catch (error) {
287 | const errorMessage =
288 | error instanceof Error ? error.message : String(error);
289 | logger.error("Error closing Neo4j driver connection", error as Error, {
290 | ...reqContext,
291 | detail: errorMessage,
292 | });
293 | throw error; // Re-throw the error to propagate it
294 | }
295 | }
296 | }
297 | }
298 |
299 | // Export the singleton instance
300 | export const neo4jDriver = Neo4jDriver.getInstance();
301 |
```
--------------------------------------------------------------------------------
/src/utils/security/idGenerator.ts:
--------------------------------------------------------------------------------
```typescript
1 | /**
2 | * @fileoverview Provides a utility class `IdGenerator` for creating customizable, prefixed unique identifiers,
3 | * and a standalone `generateUUID` function for generating standard UUIDs.
4 | * The `IdGenerator` supports entity-specific prefixes, custom character sets, and lengths.
5 | *
6 | * Note: Logging has been removed from this module to prevent circular dependencies
7 | * with the `requestContextService`, which itself uses `generateUUID` from this module.
8 | * This was causing `ReferenceError: Cannot access 'generateUUID' before initialization`
9 | * during application startup.
10 | * @module src/utils/security/idGenerator
11 | */
12 | import { randomUUID as cryptoRandomUUID, randomBytes } from "crypto";
13 | import { BaseErrorCode, McpError } from "../../types/errors.js";
14 | // Removed: import { logger, requestContextService } from "../index.js";
15 |
16 | /**
17 | * Defines the structure for configuring entity prefixes.
18 | * Keys are entity type names (e.g., "project", "task"), and values are their corresponding ID prefixes (e.g., "PROJ", "TASK").
19 | */
20 | export interface EntityPrefixConfig {
21 | [key: string]: string;
22 | }
23 |
24 | /**
25 | * Defines options for customizing ID generation.
26 | */
27 | export interface IdGenerationOptions {
28 | length?: number;
29 | separator?: string;
30 | charset?: string;
31 | }
32 |
33 | /**
34 | * A generic ID Generator class for creating and managing unique, prefixed identifiers.
35 | * Allows defining custom prefixes, generating random strings, and validating/normalizing IDs.
36 | */
37 | export class IdGenerator {
38 | /**
39 | * Default character set for the random part of the ID.
40 | * @private
41 | */
42 | private static DEFAULT_CHARSET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
43 | /**
44 | * Default separator character between prefix and random part.
45 | * @private
46 | */
47 | private static DEFAULT_SEPARATOR = "_";
48 | /**
49 | * Default length for the random part of the ID.
50 | * @private
51 | */
52 | private static DEFAULT_LENGTH = 6;
53 |
54 | /**
55 | * Stores the mapping of entity types to their prefixes.
56 | * @private
57 | */
58 | private entityPrefixes: EntityPrefixConfig = {};
59 | /**
60 | * Stores a reverse mapping from prefixes (case-insensitive) to entity types.
61 | * @private
62 | */
63 | private prefixToEntityType: Record<string, string> = {};
64 |
65 | /**
66 | * Constructs an `IdGenerator` instance.
67 | * @param entityPrefixes - An initial map of entity types to their prefixes.
68 | */
69 | constructor(entityPrefixes: EntityPrefixConfig = {}) {
70 | // Logging removed to prevent circular dependency with requestContextService.
71 | this.setEntityPrefixes(entityPrefixes);
72 | }
73 |
74 | /**
75 | * Sets or updates the entity prefix configuration and rebuilds the internal reverse lookup map.
76 | * @param entityPrefixes - A map where keys are entity type names and values are their desired ID prefixes.
77 | */
78 | public setEntityPrefixes(entityPrefixes: EntityPrefixConfig): void {
79 | // Logging removed.
80 | this.entityPrefixes = { ...entityPrefixes };
81 |
82 | this.prefixToEntityType = Object.entries(this.entityPrefixes).reduce(
83 | (acc, [type, prefix]) => {
84 | acc[prefix.toLowerCase()] = type; // Store lowercase for case-insensitive lookup
85 | return acc;
86 | },
87 | {} as Record<string, string>,
88 | );
89 | }
90 |
91 | /**
92 | * Retrieves a copy of the current entity prefix configuration.
93 | * @returns The current entity prefix configuration.
94 | */
95 | public getEntityPrefixes(): EntityPrefixConfig {
96 | return { ...this.entityPrefixes };
97 | }
98 |
99 | /**
100 | * Generates a cryptographically secure random string.
101 | * @param length - The desired length of the random string. Defaults to `IdGenerator.DEFAULT_LENGTH`.
102 | * @param charset - The character set to use. Defaults to `IdGenerator.DEFAULT_CHARSET`.
103 | * @returns The generated random string.
104 | */
105 | public generateRandomString(
106 | length: number = IdGenerator.DEFAULT_LENGTH,
107 | charset: string = IdGenerator.DEFAULT_CHARSET,
108 | ): string {
109 | const bytes = randomBytes(length);
110 | let result = "";
111 | for (let i = 0; i < length; i++) {
112 | result += charset[bytes[i] % charset.length];
113 | }
114 | return result;
115 | }
116 |
117 | /**
118 | * Generates a unique ID, optionally prepended with a prefix.
119 | * @param prefix - An optional prefix for the ID.
120 | * @param options - Optional parameters for ID generation (length, separator, charset).
121 | * @returns A unique identifier string.
122 | */
123 | public generate(prefix?: string, options: IdGenerationOptions = {}): string {
124 | // Logging removed.
125 | const {
126 | length = IdGenerator.DEFAULT_LENGTH,
127 | separator = IdGenerator.DEFAULT_SEPARATOR,
128 | charset = IdGenerator.DEFAULT_CHARSET,
129 | } = options;
130 |
131 | const randomPart = this.generateRandomString(length, charset);
132 | const generatedId = prefix
133 | ? `${prefix}${separator}${randomPart}`
134 | : randomPart;
135 | return generatedId;
136 | }
137 |
138 | /**
139 | * Generates a unique ID for a specified entity type, using its configured prefix.
140 | * @param entityType - The type of entity (must be registered).
141 | * @param options - Optional parameters for ID generation.
142 | * @returns A unique identifier string for the entity (e.g., "PROJ_A6B3J0").
143 | * @throws {McpError} If the `entityType` is not registered.
144 | */
145 | public generateForEntity(
146 | entityType: string,
147 | options: IdGenerationOptions = {},
148 | ): string {
149 | const prefix = this.entityPrefixes[entityType];
150 | if (!prefix) {
151 | throw new McpError(
152 | BaseErrorCode.VALIDATION_ERROR,
153 | `Unknown entity type: ${entityType}. No prefix registered.`,
154 | );
155 | }
156 | return this.generate(prefix, options);
157 | }
158 |
159 | /**
160 | * Validates if an ID conforms to the expected format for a specific entity type.
161 | * @param id - The ID string to validate.
162 | * @param entityType - The expected entity type of the ID.
163 | * @param options - Optional parameters used during generation for validation consistency.
164 | * @returns `true` if the ID is valid, `false` otherwise.
165 | */
166 | public isValid(
167 | id: string,
168 | entityType: string,
169 | options: IdGenerationOptions = {},
170 | ): boolean {
171 | const prefix = this.entityPrefixes[entityType];
172 | const {
173 | length = IdGenerator.DEFAULT_LENGTH,
174 | separator = IdGenerator.DEFAULT_SEPARATOR,
175 | } = options;
176 |
177 | if (!prefix) {
178 | return false;
179 | }
180 | // Assumes default charset characters (uppercase letters and digits) for regex.
181 | const pattern = new RegExp(
182 | `^${this.escapeRegex(prefix)}${this.escapeRegex(separator)}[A-Z0-9]{${length}}$`,
183 | );
184 | return pattern.test(id);
185 | }
186 |
187 | /**
188 | * Escapes special characters in a string for use in a regular expression.
189 | * @param str - The string to escape.
190 | * @returns The escaped string.
191 | * @private
192 | */
193 | private escapeRegex(str: string): string {
194 | return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
195 | }
196 |
197 | /**
198 | * Strips the prefix and separator from an ID string.
199 | * @param id - The ID string (e.g., "PROJ_A6B3J0").
200 | * @param separator - The separator used in the ID. Defaults to `IdGenerator.DEFAULT_SEPARATOR`.
201 | * @returns The ID part without the prefix, or the original ID if separator not found.
202 | */
203 | public stripPrefix(
204 | id: string,
205 | separator: string = IdGenerator.DEFAULT_SEPARATOR,
206 | ): string {
207 | const parts = id.split(separator);
208 | return parts.length > 1 ? parts.slice(1).join(separator) : id; // Handle separators in random part
209 | }
210 |
211 | /**
212 | * Determines the entity type from an ID string by its prefix (case-insensitive).
213 | * @param id - The ID string (e.g., "PROJ_A6B3J0").
214 | * @param separator - The separator used in the ID. Defaults to `IdGenerator.DEFAULT_SEPARATOR`.
215 | * @returns The determined entity type.
216 | * @throws {McpError} If ID format is invalid or prefix is unknown.
217 | */
218 | public getEntityType(
219 | id: string,
220 | separator: string = IdGenerator.DEFAULT_SEPARATOR,
221 | ): string {
222 | const parts = id.split(separator);
223 | if (parts.length < 2 || !parts[0]) {
224 | throw new McpError(
225 | BaseErrorCode.VALIDATION_ERROR,
226 | `Invalid ID format: ${id}. Expected format like: PREFIX${separator}RANDOMLPART`,
227 | );
228 | }
229 |
230 | const prefix = parts[0];
231 | const entityType = this.prefixToEntityType[prefix.toLowerCase()];
232 |
233 | if (!entityType) {
234 | throw new McpError(
235 | BaseErrorCode.VALIDATION_ERROR,
236 | `Unknown entity type for prefix: ${prefix}`,
237 | );
238 | }
239 | return entityType;
240 | }
241 |
242 | /**
243 | * Normalizes an entity ID to ensure the prefix matches the registered case
244 | * and the random part is uppercase.
245 | * @param id - The ID to normalize (e.g., "proj_a6b3j0").
246 | * @param separator - The separator used in the ID. Defaults to `IdGenerator.DEFAULT_SEPARATOR`.
247 | * @returns The normalized ID (e.g., "PROJ_A6B3J0").
248 | * @throws {McpError} If the entity type cannot be determined from the ID.
249 | */
250 | public normalize(
251 | id: string,
252 | separator: string = IdGenerator.DEFAULT_SEPARATOR,
253 | ): string {
254 | const entityType = this.getEntityType(id, separator);
255 | const registeredPrefix = this.entityPrefixes[entityType];
256 | const idParts = id.split(separator);
257 | const randomPart = idParts.slice(1).join(separator);
258 |
259 | return `${registeredPrefix}${separator}${randomPart.toUpperCase()}`;
260 | }
261 | }
262 |
/**
 * Default singleton instance of the `IdGenerator`.
 * Created with no registered prefixes; call
 * `idGenerator.setEntityPrefixes({...})` during application startup to
 * configure the entity-type-to-prefix mapping.
 */
export const idGenerator = new IdGenerator();
268 |
269 | /**
270 | * Generates a standard Version 4 UUID (Universally Unique Identifier).
271 | * Uses the Node.js `crypto` module. This function is independent of the IdGenerator instance
272 | * to prevent circular dependencies when used by other utilities like requestContextService.
273 | * @returns A new UUID string.
274 | */
275 | export const generateUUID = (): string => {
276 | return cryptoRandomUUID();
277 | };
278 |
```
--------------------------------------------------------------------------------
/src/mcp/tools/atlas_task_update/index.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
2 | import { z } from "zod";
3 | import { PriorityLevel, TaskStatus } from "../../../types/mcp.js";
4 | import {
5 | createToolExample,
6 | createToolMetadata,
7 | registerTool,
8 | } from "../../../types/tool.js";
9 | import { atlasUpdateTask } from "./updateTask.js";
10 | import { AtlasTaskUpdateSchemaShape } from "./types.js";
11 |
12 | export const registerAtlasTaskUpdateTool = (server: McpServer) => {
13 | registerTool(
14 | server,
15 | "atlas_task_update",
16 | "Updates existing task(s) in the system with support for both individual task modifications and efficient batch updates across multiple tasks",
17 | AtlasTaskUpdateSchemaShape,
18 | atlasUpdateTask,
19 | createToolMetadata({
20 | examples: [
21 | createToolExample(
22 | {
23 | mode: "single",
24 | id: "task_api_gateway",
25 | updates: {
26 | status: "in_progress",
27 | description:
28 | "Enhanced API Gateway design with additional focus on OAuth 2.0 integration and microservice security boundaries",
29 | priority: "critical",
30 | },
31 | },
32 | `{
33 | "id": "task_api_gateway",
34 | "projectId": "proj_ms_migration",
35 | "title": "Design API Gateway Architecture",
36 | "description": "Enhanced API Gateway design with additional focus on OAuth 2.0 integration and microservice security boundaries",
37 | "priority": "critical",
38 | "status": "in_progress",
39 | "assignedTo": null,
40 | "urls": [],
41 | "tags": ["architecture", "api", "gateway", "security"],
42 | "completionRequirements": "Complete architecture diagram with data flow, scaling strategy, and disaster recovery considerations. Implementation specifications must include authentication flow and rate limiting algorithms",
43 | "outputFormat": "Architecture diagram (PDF), Technical specifications document (Markdown), Implementation roadmap",
44 | "taskType": "research",
45 | "createdAt": "2025-03-23T10:11:24.123Z",
46 | "updatedAt": "2025-03-23T10:14:51.456Z"
47 | }`,
48 | "Update task priority and add security details to an existing architecture design task",
49 | ),
50 | createToolExample(
51 | {
52 | mode: "bulk",
53 | tasks: [
54 | {
55 | id: "task_graphql_schema",
56 | updates: {
57 | status: "in_progress",
58 | assignedTo: "user_developer1",
59 | tags: ["graphql", "schema", "foundation", "priority"],
60 | },
61 | },
62 | {
63 | id: "task_auth",
64 | updates: {
65 | priority: "high",
66 | description:
67 | "Implement JWT-based authentication with refresh token rotation and resource-based authorization for GraphQL resolvers",
68 | },
69 | },
70 | ],
71 | },
72 | `{
73 | "success": true,
74 | "message": "Successfully updated 2 tasks",
75 | "updated": [
76 | {
77 | "id": "task_graphql_schema",
78 | "projectId": "proj_graphql",
79 | "title": "Set up GraphQL schema and resolver structure",
80 | "description": "Create the foundation for our GraphQL API by defining the base schema structure, resolver patterns, and integration with existing data sources",
81 | "priority": "high",
82 | "status": "in_progress",
83 | "assignedTo": "user_developer1",
84 | "urls": [],
85 | "tags": ["graphql", "schema", "foundation", "priority"],
86 | "completionRequirements": "Working schema structure with type definitions for core entities. Base resolver pattern implemented with at least one full query path to the database.",
87 | "outputFormat": "TypeScript code implementing the schema and resolvers with documentation",
88 | "taskType": "generation",
89 | "createdAt": "2025-03-23T10:11:24.123Z",
90 | "updatedAt": "2025-03-23T10:14:51.456Z"
91 | },
92 | {
93 | "id": "task_auth",
94 | "projectId": "proj_graphql",
95 | "title": "Implement authentication and authorization",
96 | "description": "Implement JWT-based authentication with refresh token rotation and resource-based authorization for GraphQL resolvers",
97 | "priority": "high",
98 | "status": "backlog",
99 | "assignedTo": null,
100 | "urls": [],
101 | "tags": ["auth", "security", "graphql"],
102 | "completionRequirements": "Authentication middleware and directive implemented. All resolvers protected with appropriate permission checks.",
103 | "outputFormat": "TypeScript code with tests demonstrating security controls",
104 | "taskType": "generation",
105 | "createdAt": "2025-03-23T10:11:24.456Z",
106 | "updatedAt": "2025-03-23T10:14:51.789Z"
107 | }
108 | ],
109 | "errors": []
110 | }`,
111 | "Assign a task to a developer and update the priority of a related dependency task",
112 | ),
113 | ],
114 | requiredPermission: "task:update",
115 | returnSchema: z.union([
116 | // Single task response
117 | z.object({
118 | id: z.string().describe("Task ID"),
119 | projectId: z.string().describe("Parent project ID"),
120 | title: z.string().describe("Task title"),
121 | description: z.string().describe("Task description"),
122 | priority: z
123 | .enum([
124 | PriorityLevel.LOW,
125 | PriorityLevel.MEDIUM,
126 | PriorityLevel.HIGH,
127 | PriorityLevel.CRITICAL,
128 | ])
129 | .describe("Importance level"),
130 | status: z
131 | .enum([
132 | TaskStatus.BACKLOG,
133 | TaskStatus.TODO,
134 | TaskStatus.IN_PROGRESS,
135 | TaskStatus.COMPLETED,
136 | ])
137 | .describe("Task status"),
138 | assignedTo: z
139 | .string()
140 | .nullable()
141 | .describe("ID of entity responsible for completion"),
142 | urls: z
143 | .array(
144 | z.object({
145 | title: z.string(),
146 | url: z.string(),
147 | }),
148 | )
149 | .describe("Reference materials"),
150 | tags: z.array(z.string()).describe("Organizational labels"),
151 | completionRequirements: z.string().describe("Completion criteria"),
152 | outputFormat: z.string().describe("Deliverable format"),
153 | taskType: z.string().describe("Task classification"),
154 | createdAt: z.string().describe("Creation timestamp"),
155 | updatedAt: z.string().describe("Last update timestamp"),
156 | }),
157 | // Bulk update response
158 | z.object({
159 | success: z.boolean().describe("Operation success status"),
160 | message: z.string().describe("Result message"),
161 | updated: z
162 | .array(
163 | z.object({
164 | id: z.string().describe("Task ID"),
165 | projectId: z.string().describe("Parent project ID"),
166 | title: z.string().describe("Task title"),
167 | description: z.string().describe("Task description"),
168 | priority: z
169 | .enum([
170 | PriorityLevel.LOW,
171 | PriorityLevel.MEDIUM,
172 | PriorityLevel.HIGH,
173 | PriorityLevel.CRITICAL,
174 | ])
175 | .describe("Importance level"),
176 | status: z
177 | .enum([
178 | TaskStatus.BACKLOG,
179 | TaskStatus.TODO,
180 | TaskStatus.IN_PROGRESS,
181 | TaskStatus.COMPLETED,
182 | ])
183 | .describe("Task status"),
184 | assignedTo: z
185 | .string()
186 | .nullable()
187 | .describe("ID of entity responsible for completion"),
188 | urls: z
189 | .array(
190 | z.object({
191 | title: z.string(),
192 | url: z.string(),
193 | }),
194 | )
195 | .describe("Reference materials"),
196 | tags: z.array(z.string()).describe("Organizational labels"),
197 | completionRequirements: z
198 | .string()
199 | .describe("Completion criteria"),
200 | outputFormat: z.string().describe("Deliverable format"),
201 | taskType: z.string().describe("Task classification"),
202 | createdAt: z.string().describe("Creation timestamp"),
203 | updatedAt: z.string().describe("Last update timestamp"),
204 | }),
205 | )
206 | .describe("Updated tasks"),
207 | errors: z
208 | .array(
209 | z.object({
210 | index: z.number().describe("Index in the tasks array"),
211 | task: z.any().describe("Original task update data"),
212 | error: z
213 | .object({
214 | code: z.string().describe("Error code"),
215 | message: z.string().describe("Error message"),
216 | details: z
217 | .any()
218 | .optional()
219 | .describe("Additional error details"),
220 | })
221 | .describe("Error information"),
222 | }),
223 | )
224 | .describe("Update errors"),
225 | }),
226 | ]),
227 | rateLimit: {
228 | windowMs: 60 * 1000, // 1 minute
229 | maxRequests: 15, // 15 requests per minute (either single or bulk)
230 | },
231 | }),
232 | );
233 | };
234 |
```
--------------------------------------------------------------------------------
/src/mcp/tools/atlas_task_create/createTask.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { TaskService } from "../../../services/neo4j/taskService.js";
2 | import { ProjectService } from "../../../services/neo4j/projectService.js";
3 | import {
4 | BaseErrorCode,
5 | McpError,
6 | ProjectErrorCode,
7 | } from "../../../types/errors.js";
8 | import { ResponseFormat, createToolResponse } from "../../../types/mcp.js";
9 | import { logger, requestContextService } from "../../../utils/index.js"; // Import requestContextService
10 | import { ToolContext } from "../../../types/tool.js";
11 | import { AtlasTaskCreateInput, AtlasTaskCreateSchema } from "./types.js";
12 | import { formatTaskCreateResponse } from "./responseFormat.js";
13 |
/**
 * MCP tool handler for `atlas_task_create`.
 *
 * Validates the raw tool input against `AtlasTaskCreateSchema`, then creates
 * either a single task or (in "bulk" mode) a sequence of tasks. For every
 * task it first verifies the parent project exists, creates the task via
 * `TaskService`, and then wires up any declared dependency relationships
 * (dependency failures are logged as warnings, not fatal). The response is
 * raw JSON or a formatted payload depending on `responseFormat`.
 *
 * @param input - Unvalidated tool input; parsed with `AtlasTaskCreateSchema`.
 * @param context - Tool invocation context; its `requestContext` (when
 *   present) is reused so log entries correlate with the originating request.
 * @throws {McpError} On validation failure, missing parent project,
 *   duplicate task title, or any unexpected internal error.
 */
export const atlasCreateTask = async (input: unknown, context: ToolContext) => {
  let validatedInput: AtlasTaskCreateInput | undefined;
  const reqContext =
    context.requestContext ??
    requestContextService.createRequestContext({ toolName: "atlasCreateTask" });

  try {
    // Parse and validate input against schema
    validatedInput = AtlasTaskCreateSchema.parse(input);

    // Handle single vs bulk task creation based on mode
    if (validatedInput.mode === "bulk") {
      // Execute bulk creation operation
      logger.info("Initializing multiple tasks", {
        ...reqContext,
        count: validatedInput.tasks.length,
      });

      // `success` starts optimistic and is flipped to false on the first
      // per-task error; `message` is rewritten below if any errors occurred.
      const results = {
        success: true,
        message: `Successfully created ${validatedInput.tasks.length} tasks`,
        created: [] as any[],
        errors: [] as any[],
      };

      // Process each task sequentially to maintain consistency
      for (let i = 0; i < validatedInput.tasks.length; i++) {
        const taskData = validatedInput.tasks[i];
        try {
          // Verify project exists before creating task
          const projectExists = await ProjectService.getProjectById(
            taskData.projectId,
          );
          if (!projectExists) {
            throw new McpError(
              ProjectErrorCode.PROJECT_NOT_FOUND,
              `Project with ID ${taskData.projectId} not found`,
              { projectId: taskData.projectId },
            );
          }

          const createdTask = await TaskService.createTask({
            projectId: taskData.projectId,
            title: taskData.title,
            description: taskData.description,
            priority: taskData.priority || "medium",
            status: taskData.status || "todo",
            assignedTo: taskData.assignedTo,
            urls: taskData.urls || [],
            tags: taskData.tags || [],
            completionRequirements: taskData.completionRequirements,
            outputFormat: taskData.outputFormat,
            taskType: taskData.taskType,
            id: taskData.id, // Use client-provided ID if available
          });

          results.created.push(createdTask);

          // Create dependency relationships if specified.
          // A failed dependency link does NOT fail the task creation; it is
          // logged as a warning and processing continues.
          if (taskData.dependencies && taskData.dependencies.length > 0) {
            for (const dependencyId of taskData.dependencies) {
              try {
                await TaskService.addTaskDependency(
                  createdTask.id,
                  dependencyId,
                );
              } catch (error) {
                const depErrorContext =
                  requestContextService.createRequestContext({
                    ...reqContext,
                    originalErrorMessage:
                      error instanceof Error ? error.message : String(error),
                    originalErrorStack:
                      error instanceof Error ? error.stack : undefined,
                    taskId: createdTask.id,
                    dependencyIdAttempted: dependencyId,
                  });
                logger.warning(
                  `Failed to create dependency for task ${createdTask.id} to ${dependencyId}`,
                  depErrorContext,
                );
              }
            }
          }
        } catch (error) {
          // Record the failure with the original index and payload so the
          // caller can correlate errors back to its input array.
          results.success = false;
          results.errors.push({
            index: i,
            task: taskData,
            error: {
              code:
                error instanceof McpError
                  ? error.code
                  : BaseErrorCode.INTERNAL_ERROR,
              message: error instanceof Error ? error.message : "Unknown error",
              details: error instanceof McpError ? error.details : undefined,
            },
          });
        }
      }

      if (results.errors.length > 0) {
        results.message = `Created ${results.created.length} of ${validatedInput.tasks.length} tasks with ${results.errors.length} errors`;
      }

      logger.info("Bulk task initialization completed", {
        ...reqContext,
        successCount: results.created.length,
        errorCount: results.errors.length,
        taskIds: results.created.map((t) => t.id),
      });

      // Conditionally format response.
      // The service returns the assignee as `assignedToUserId`; the tool's
      // public contract exposes it as `assignedTo`.
      // NOTE(review): `|| undefined` also maps an empty-string ID to
      // undefined — confirm that is intended.
      if (validatedInput.responseFormat === ResponseFormat.JSON) {
        const mappedCreatedTasks = results.created.map((t) => {
          const { assignedToUserId, ...restOfTask } = t; // t will have assignedToUserId from service
          return { ...restOfTask, assignedTo: assignedToUserId || undefined };
        });
        const responsePayload = {
          ...results,
          created: mappedCreatedTasks,
        };
        return createToolResponse(JSON.stringify(responsePayload, null, 2));
      } else {
        // Assuming formatTaskCreateResponse can handle the raw 'results' or we map similarly
        const mappedCreatedTasksForFormatting = results.created.map((t) => {
          const { assignedToUserId, ...restOfTask } = t;
          return { ...restOfTask, assignedTo: assignedToUserId || undefined };
        });
        const formattedResponsePayload = {
          ...results,
          created: mappedCreatedTasksForFormatting,
        };
        return formatTaskCreateResponse(formattedResponsePayload);
      }
    } else {
      // Process single task creation
      const {
        mode,
        id,
        projectId,
        title,
        description,
        priority,
        status,
        assignedTo,
        urls,
        tags,
        completionRequirements,
        dependencies,
        outputFormat,
        taskType,
      } = validatedInput;

      logger.info("Initializing new task", {
        ...reqContext,
        title,
        projectId,
      });

      // Verify project exists
      const projectExists = await ProjectService.getProjectById(projectId);
      if (!projectExists) {
        throw new McpError(
          ProjectErrorCode.PROJECT_NOT_FOUND,
          `Project with ID ${projectId} not found`,
          { projectId },
        );
      }

      const task = await TaskService.createTask({
        id, // Use client-provided ID if available
        projectId,
        title,
        description,
        priority: priority || "medium",
        status: status || "todo",
        assignedTo,
        urls: urls || [],
        tags: tags || [],
        completionRequirements,
        outputFormat,
        taskType,
      });

      // Create dependency relationships if specified.
      // As in bulk mode, dependency failures are warnings, not errors.
      if (dependencies && dependencies.length > 0) {
        for (const dependencyId of dependencies) {
          try {
            await TaskService.addTaskDependency(task.id, dependencyId);
          } catch (error) {
            const depErrorContext = requestContextService.createRequestContext({
              ...reqContext,
              originalErrorMessage:
                error instanceof Error ? error.message : String(error),
              originalErrorStack:
                error instanceof Error ? error.stack : undefined,
              taskId: task.id,
              dependencyIdAttempted: dependencyId,
            });
            logger.warning(
              `Failed to create dependency for task ${task.id} to ${dependencyId}`,
              depErrorContext,
            );
          }
        }
      }

      logger.info("Task initialized successfully", {
        ...reqContext,
        taskId: task.id,
        projectId,
      });

      // Conditionally format response (same assignedToUserId -> assignedTo
      // mapping as the bulk branch above).
      if (validatedInput.responseFormat === ResponseFormat.JSON) {
        const { assignedToUserId, ...restOfTask } = task; // task from service has assignedToUserId
        const responsePayload = {
          ...restOfTask,
          assignedTo: assignedToUserId || undefined,
        };
        return createToolResponse(JSON.stringify(responsePayload, null, 2));
      } else {
        const { assignedToUserId, ...restOfTask } = task;
        const formattedResponsePayload = {
          ...restOfTask,
          assignedTo: assignedToUserId || undefined,
        };
        return formatTaskCreateResponse(formattedResponsePayload);
      }
    }
  } catch (error) {
    // Handle specific error cases: McpErrors are already well-formed, so
    // rethrow them untouched.
    if (error instanceof McpError) {
      throw error;
    }

    logger.error("Failed to initialize task(s)", error as Error, {
      ...reqContext,
      inputReceived: validatedInput ?? input,
    });

    // Handle duplicate name error specifically.
    // NOTE(review): substring matching on the message is brittle — this
    // relies on the underlying service surfacing "duplicate" in
    // duplicate-title errors; verify against TaskService.
    if (error instanceof Error && error.message.includes("duplicate")) {
      throw new McpError(
        ProjectErrorCode.DUPLICATE_NAME,
        `A task with this title already exists in the project`,
        {
          title:
            validatedInput?.mode === "single"
              ? validatedInput?.title
              : validatedInput?.tasks?.[0]?.title,
          projectId:
            validatedInput?.mode === "single"
              ? validatedInput?.projectId
              : validatedInput?.tasks?.[0]?.projectId,
        },
      );
    }

    // Convert other errors to McpError
    throw new McpError(
      BaseErrorCode.INTERNAL_ERROR,
      `Error creating task(s): ${error instanceof Error ? error.message : "Unknown error"}`,
    );
  }
};
281 |
```
--------------------------------------------------------------------------------
/scripts/tree.ts:
--------------------------------------------------------------------------------
```typescript
1 | #!/usr/bin/env node
2 |
3 | /**
4 | * @fileoverview Generates a visual tree representation of the project's directory structure.
5 | * @module scripts/tree
6 | * Respects .gitignore patterns and common exclusions (e.g., node_modules).
7 | * Saves the tree to a markdown file (default: docs/tree.md).
8 | * Supports custom output path and depth limitation.
9 | * Ensures all file operations are within the project root for security.
10 | *
11 | * @example
12 | * // Generate tree with default settings:
13 | * // npm run tree
14 | *
15 | * @example
16 | * // Specify custom output path and depth:
17 | * // ts-node --esm scripts/tree.ts ./documentation/structure.md --depth=3
18 | */
19 |
20 | import fs from "fs/promises";
21 | import ignore from "ignore"; // Import the 'ignore' library
22 | import path from "path";
23 |
24 | // Get the type of the instance returned by ignore()
25 | type Ignore = ReturnType<typeof ignore>;
26 |
// All paths below are resolved relative to the invocation directory; the
// security checks later in the script keep reads/writes inside this root.
const projectRoot = process.cwd();
let outputPathArg = "docs/tree.md"; // Default output path
let maxDepthArg = Infinity; // Unlimited depth unless --depth=<n> is given

const args = process.argv.slice(2);
if (args.includes("--help")) {
  console.log(`
Generate Tree - Project directory structure visualization tool

Usage:
  ts-node --esm scripts/tree.ts [output-path] [--depth=<number>] [--help]

Options:
  output-path Custom file path for the tree output (relative to project root, default: docs/tree.md)
  --depth=<number> Maximum directory depth to display (default: unlimited)
  --help Show this help message
`);
  process.exit(0);
}

// Parse the positional output path and the --depth=<n> flag; any other
// --flags are silently ignored. An invalid depth falls back to unlimited.
args.forEach((arg) => {
  if (arg.startsWith("--depth=")) {
    const depthValue = parseInt(arg.split("=")[1], 10);
    if (!isNaN(depthValue) && depthValue >= 0) {
      maxDepthArg = depthValue;
    } else {
      console.warn(`Invalid depth value: "${arg}". Using unlimited depth.`);
    }
  } else if (!arg.startsWith("--")) {
    outputPathArg = arg;
  }
});

// Entries always excluded from the tree, applied in addition to any
// patterns found in the project's .gitignore.
const DEFAULT_IGNORE_PATTERNS: string[] = [
  ".git",
  "node_modules",
  ".DS_Store",
  "dist",
  "build",
  "logs",
];
68 |
69 | /**
70 | * Loads and parses patterns from the .gitignore file at the project root,
71 | * and combines them with default ignore patterns.
72 | * @returns A promise resolving to an Ignore instance from the 'ignore' library.
73 | */
74 | async function loadIgnoreHandler(): Promise<Ignore> {
75 | const ig = ignore();
76 | ig.add(DEFAULT_IGNORE_PATTERNS); // Add default patterns first
77 |
78 | const gitignorePath = path.join(projectRoot, ".gitignore");
79 | try {
80 | // Security: Ensure we read only from within the project root
81 | if (!path.resolve(gitignorePath).startsWith(projectRoot + path.sep)) {
82 | console.warn(
83 | "Warning: Attempted to read .gitignore outside project root. Using default ignore patterns only.",
84 | );
85 | return ig;
86 | }
87 | const gitignoreContent = await fs.readFile(gitignorePath, "utf-8");
88 | ig.add(gitignoreContent); // Add patterns from .gitignore file
89 | } catch (error: any) {
90 | if (error.code === "ENOENT") {
91 | console.warn(
92 | "Info: No .gitignore file found at project root. Using default ignore patterns only.",
93 | );
94 | } else {
95 | console.error(`Error reading .gitignore: ${error.message}`);
96 | }
97 | }
98 | return ig;
99 | }
100 |
101 | /**
102 | * Checks if a given path should be ignored.
103 | * @param entryPath - The absolute path to the file or directory entry.
104 | * @param ig - An Ignore instance from the 'ignore' library.
105 | * @returns True if the path should be ignored, false otherwise.
106 | */
107 | function isIgnored(entryPath: string, ig: Ignore): boolean {
108 | const relativePath = path.relative(projectRoot, entryPath);
109 | // The 'ignore' library expects POSIX-style paths (with /) even on Windows
110 | const posixRelativePath = relativePath.split(path.sep).join(path.posix.sep);
111 | return ig.ignores(posixRelativePath);
112 | }
113 |
114 | /**
115 | * Recursively generates a string representation of the directory tree.
116 | * @param dir - The absolute path of the directory to traverse.
117 | * @param ig - An Ignore instance.
118 | * @param prefix - String prefix for formatting the tree lines.
119 | * @param currentDepth - Current depth of traversal.
120 | * @returns A promise resolving to the tree string.
121 | */
122 | async function generateTree(
123 | dir: string,
124 | ig: Ignore,
125 | prefix = "",
126 | currentDepth = 0,
127 | ): Promise<string> {
128 | const resolvedDir = path.resolve(dir);
129 | if (
130 | !resolvedDir.startsWith(projectRoot + path.sep) &&
131 | resolvedDir !== projectRoot
132 | ) {
133 | console.warn(
134 | `Security: Skipping directory outside project root: ${resolvedDir}`,
135 | );
136 | return "";
137 | }
138 |
139 | if (currentDepth > maxDepthArg) {
140 | return "";
141 | }
142 |
143 | let entries;
144 | try {
145 | entries = await fs.readdir(resolvedDir, { withFileTypes: true });
146 | } catch (error: any) {
147 | console.error(`Error reading directory ${resolvedDir}: ${error.message}`);
148 | return "";
149 | }
150 |
151 | let output = "";
152 | const filteredEntries = entries
153 | .filter((entry) => !isIgnored(path.join(resolvedDir, entry.name), ig))
154 | .sort((a, b) => {
155 | if (a.isDirectory() && !b.isDirectory()) return -1;
156 | if (!a.isDirectory() && b.isDirectory()) return 1;
157 | return a.name.localeCompare(b.name);
158 | });
159 |
160 | for (let i = 0; i < filteredEntries.length; i++) {
161 | const entry = filteredEntries[i];
162 | const isLastEntry = i === filteredEntries.length - 1;
163 | const connector = isLastEntry ? "└── " : "├── ";
164 | const newPrefix = prefix + (isLastEntry ? " " : "│ ");
165 |
166 | output += prefix + connector + entry.name + "\n";
167 |
168 | if (entry.isDirectory()) {
169 | output += await generateTree(
170 | path.join(resolvedDir, entry.name),
171 | ig,
172 | newPrefix,
173 | currentDepth + 1,
174 | );
175 | }
176 | }
177 | return output;
178 | }
179 |
/**
 * Main function to orchestrate loading ignore patterns, generating the tree,
 * and writing it to the specified output file.
 *
 * Behavior notes:
 * - Refuses to write outside the project root (both the output file and its
 *   parent directory are validated) and exits with code 1 on violation.
 * - Skips the write entirely when the freshly generated tree matches the tree
 *   block already present in the output file, so unchanged runs do not touch
 *   the file (avoids noisy diffs and mtime churn).
 * - Exits with code 1 on any unexpected error.
 */
const writeTreeToFile = async (): Promise<void> => {
  try {
    const projectName = path.basename(projectRoot);
    const ignoreHandler = await loadIgnoreHandler(); // Get the Ignore instance
    const resolvedOutputFile = path.resolve(projectRoot, outputPathArg);

    // Security Validation for Output Path
    // The file itself must live strictly inside the project root.
    if (!resolvedOutputFile.startsWith(projectRoot + path.sep)) {
      console.error(
        `Error: Output path "${outputPathArg}" resolves outside the project directory: ${resolvedOutputFile}. Aborting.`,
      );
      process.exit(1);
    }
    // The containing directory may be the project root itself, or any
    // directory inside it.
    const resolvedOutputDir = path.dirname(resolvedOutputFile);
    if (
      !resolvedOutputDir.startsWith(projectRoot + path.sep) &&
      resolvedOutputDir !== projectRoot
    ) {
      console.error(
        `Error: Output directory "${resolvedOutputDir}" is outside the project directory. Aborting.`,
      );
      process.exit(1);
    }

    console.log(`Generating directory tree for project: ${projectName}`);
    console.log(`Output will be saved to: ${resolvedOutputFile}`);
    if (maxDepthArg !== Infinity) {
      console.log(`Maximum depth set to: ${maxDepthArg}`);
    }

    const newGeneratedTreeContent = await generateTree(
      projectRoot,
      ignoreHandler,
      "",
      0,
    ); // Pass the Ignore instance

    // Try to extract the previously generated tree from the existing output
    // file so we can avoid rewriting an identical result.
    let existingRawTreeContent: string | null = null;
    try {
      const currentFileContent = await fs.readFile(resolvedOutputFile, "utf-8");

      // Escape projectName for use in regex
      const escapedProjectName = projectName.replace(
        /[.*+?^${}()|[\]\\]/g,
        "\\$&",
      );

      // Regex to find the tree block:
      // Matches ``` (optional language specifier) \n
      // then projectName \n
      // then captures the content (non-greedy)
      // until it finds \n``` at the end of a line in the code block
      const treeBlockRegex = new RegExp(
        `^\\s*\`\`\`(?:[^\\n]*)\\n${escapedProjectName}\\n([\\s\\S]*?)\\n\`\`\`\\s*$`,
        "m",
      );

      const match = currentFileContent.match(treeBlockRegex);
      if (match && typeof match[1] === "string") {
        existingRawTreeContent = match[1];
      }
    } catch (error: any) {
      if (error.code !== "ENOENT") {
        // ENOENT (file not found) is expected if the file hasn't been created yet.
        console.warn(
          `Warning: Could not read existing output file ("${resolvedOutputFile}") for comparison: ${error.message}`,
        );
      }
      // If file doesn't exist or is unreadable, existingRawTreeContent remains null,
      // which will trigger a write operation.
    }

    // Normalize line endings for comparison (Git might change LF to CRLF on Windows)
    const normalize = (str: string | null) =>
      str?.replace(/\r\n/g, "\n") ?? null;

    if (
      normalize(existingRawTreeContent) === normalize(newGeneratedTreeContent)
    ) {
      console.log(
        `Directory structure is unchanged. Output file not updated: ${resolvedOutputFile}`,
      );
    } else {
      // Content has changed, or file is new/unreadable; proceed to write.
      // Ensure the output directory exists. fs.mkdir with recursive:true will create it if it doesn't exist,
      // and will not throw an error if it already exists.
      await fs.mkdir(resolvedOutputDir, { recursive: true });

      // ISO timestamp with the 'T' separator and fractional seconds stripped,
      // e.g. "2024-01-31 12:34:56".
      const timestamp = new Date()
        .toISOString()
        .replace(/T/, " ")
        .replace(/\..+/, "");
      const fileHeader = `# ${projectName} - Directory Structure\n\nGenerated on: ${timestamp}\n`;
      const depthInfo =
        maxDepthArg !== Infinity
          ? `\n_Depth limited to ${maxDepthArg} levels_\n\n`
          : "\n";
      // Use the newly generated tree content for the output
      const treeBlock = `\`\`\`\n${projectName}\n${newGeneratedTreeContent}\`\`\`\n`;
      const fileFooter = `\n_Note: This tree excludes files and directories matched by .gitignore and default patterns._\n`;
      const finalContent = fileHeader + depthInfo + treeBlock + fileFooter;

      await fs.writeFile(resolvedOutputFile, finalContent);
      console.log(
        `Successfully generated and updated tree structure in: ${resolvedOutputFile}`,
      );
    }
  } catch (error) {
    console.error(
      `Error generating tree: ${error instanceof Error ? error.message : String(error)}`,
    );
    process.exit(1);
  }
};

// Script entry point: run immediately on load.
writeTreeToFile();
300 |
```
--------------------------------------------------------------------------------
/src/services/neo4j/searchService/fullTextSearchLogic.ts:
--------------------------------------------------------------------------------
```typescript
1 | /**
2 | * @fileoverview Implements the full-text search logic for Neo4j entities.
3 | * @module src/services/neo4j/searchService/fullTextSearchLogic
4 | */
5 |
6 | import { Session } from "neo4j-driver";
7 | import { logger, requestContextService } from "../../../utils/index.js";
8 | import { neo4jDriver } from "../driver.js";
9 | import {
10 | NodeLabels,
11 | PaginatedResult,
12 | RelationshipTypes,
13 | SearchOptions,
14 | } from "../types.js";
15 | import { Neo4jUtils } from "../utils.js";
16 | import { SearchResultItem } from "./searchTypes.js";
17 |
18 | /**
19 | * Perform a full-text search across multiple entity types.
20 | * @param searchValue The string to search for.
21 | * @param options Search options, excluding those not relevant to full-text search.
22 | * @returns Paginated search results.
23 | */
24 | export async function _fullTextSearch(
25 | searchValue: string,
26 | options: Omit<
27 | SearchOptions,
28 | "value" | "fuzzy" | "caseInsensitive" | "property" | "assignedToUserId"
29 | > = {},
30 | ): Promise<PaginatedResult<SearchResultItem>> {
31 | const reqContext_fullText = requestContextService.createRequestContext({
32 | operation: "SearchService._fullTextSearch", // Updated operation name
33 | searchValue,
34 | searchOptions: options,
35 | });
36 | try {
37 | const rawEntityTypes = options.entityTypes;
38 | const taskType = options.taskType;
39 | const page = options.page || 1;
40 | const limit = options.limit || 20;
41 |
42 | const defaultEntityTypesList = ["project", "task", "knowledge"];
43 | const typesToUse =
44 | rawEntityTypes &&
45 | Array.isArray(rawEntityTypes) &&
46 | rawEntityTypes.length > 0
47 | ? rawEntityTypes
48 | : defaultEntityTypesList;
49 |
50 | if (!searchValue || searchValue.trim() === "") {
51 | throw new Error("Search value cannot be empty");
52 | }
53 |
54 | const targetLabels = typesToUse.map((l) => l.toLowerCase());
55 |
56 | const searchResults: SearchResultItem[] = [];
57 |
58 | if (targetLabels.includes("project")) {
59 | let projectSession: Session | null = null;
60 | try {
61 | projectSession = await neo4jDriver.getSession();
62 | const query = `
63 | CALL db.index.fulltext.queryNodes("project_fulltext", $searchValue)
64 | YIELD node AS p, score
65 | ${taskType ? "WHERE p.taskType = $taskType" : ""}
66 | RETURN
67 | p.id AS id, 'project' AS type, p.taskType AS entityType,
68 | p.name AS title, p.description AS description,
69 | 'full-text' AS matchedProperty,
70 | CASE
71 | WHEN score > 2 THEN p.name
72 | WHEN size(toString(p.description)) > 100 THEN left(toString(p.description), 100) + '...'
73 | ELSE toString(p.description)
74 | END AS matchedValue,
75 | p.createdAt AS createdAt, p.updatedAt AS updatedAt,
76 | p.id as projectId,
77 | p.name as projectName,
78 | score * 2 AS adjustedScore
79 | `;
80 | await projectSession.executeRead(async (tx) => {
81 | const result = await tx.run(query, {
82 | searchValue,
83 | ...(taskType && { taskType }),
84 | });
85 | const items = result.records.map((record) => {
86 | const data = record.toObject();
87 | const scoreValue = data.adjustedScore;
88 | const score = typeof scoreValue === "number" ? scoreValue : 5;
89 | return {
90 | ...data,
91 | score,
92 | description:
93 | typeof data.description === "string"
94 | ? data.description
95 | : undefined,
96 | entityType: data.entityType || undefined,
97 | createdAt: data.createdAt || undefined,
98 | updatedAt: data.updatedAt || undefined,
99 | projectId: data.projectId || undefined,
100 | projectName: data.projectName || undefined,
101 | } as SearchResultItem;
102 | });
103 | searchResults.push(...items);
104 | });
105 | } catch (err) {
106 | logger.error(
107 | "Error during project full-text search query",
108 | err as Error,
109 | {
110 | ...reqContext_fullText,
111 | targetLabel: "project",
112 | detail: (err as Error).message,
113 | },
114 | );
115 | } finally {
116 | if (projectSession) await projectSession.close();
117 | }
118 | }
119 |
120 | if (targetLabels.includes("task")) {
121 | let taskSession: Session | null = null;
122 | try {
123 | taskSession = await neo4jDriver.getSession();
124 | const query = `
125 | CALL db.index.fulltext.queryNodes("task_fulltext", $searchValue)
126 | YIELD node AS t, score
127 | ${taskType ? "WHERE t.taskType = $taskType" : ""}
128 | MATCH (p:${NodeLabels.Project} {id: t.projectId})
129 | RETURN
130 | t.id AS id, 'task' AS type, t.taskType AS entityType,
131 | t.title AS title, t.description AS description,
132 | 'full-text' AS matchedProperty,
133 | CASE
134 | WHEN score > 2 THEN t.title
135 | WHEN size(toString(t.description)) > 100 THEN left(toString(t.description), 100) + '...'
136 | ELSE toString(t.description)
137 | END AS matchedValue,
138 | t.createdAt AS createdAt, t.updatedAt AS updatedAt,
139 | t.projectId AS projectId, p.name AS projectName,
140 | score * 1.5 AS adjustedScore
141 | `;
142 | await taskSession.executeRead(async (tx) => {
143 | const result = await tx.run(query, {
144 | searchValue,
145 | ...(taskType && { taskType }),
146 | });
147 | const items = result.records.map((record) => {
148 | const data = record.toObject();
149 | const scoreValue = data.adjustedScore;
150 | const score = typeof scoreValue === "number" ? scoreValue : 5;
151 | return {
152 | ...data,
153 | score,
154 | description:
155 | typeof data.description === "string"
156 | ? data.description
157 | : undefined,
158 | entityType: data.entityType || undefined,
159 | createdAt: data.createdAt || undefined,
160 | updatedAt: data.updatedAt || undefined,
161 | projectId: data.projectId || undefined,
162 | projectName: data.projectName || undefined,
163 | } as SearchResultItem;
164 | });
165 | searchResults.push(...items);
166 | });
167 | } catch (err) {
168 | logger.error("Error during task full-text search query", err as Error, {
169 | ...reqContext_fullText,
170 | targetLabel: "task",
171 | detail: (err as Error).message,
172 | });
173 | } finally {
174 | if (taskSession) await taskSession.close();
175 | }
176 | }
177 |
178 | if (targetLabels.includes("knowledge")) {
179 | let knowledgeSession: Session | null = null;
180 | try {
181 | knowledgeSession = await neo4jDriver.getSession();
182 | const query = `
183 | CALL db.index.fulltext.queryNodes("knowledge_fulltext", $searchValue)
184 | YIELD node AS k, score
185 | MATCH (p:${NodeLabels.Project} {id: k.projectId})
186 | OPTIONAL MATCH (k)-[:${RelationshipTypes.BELONGS_TO_DOMAIN}]->(d:${NodeLabels.Domain})
187 | RETURN
188 | k.id AS id, 'knowledge' AS type, d.name AS entityType,
189 | CASE
190 | WHEN k.text IS NULL THEN 'Untitled Knowledge'
191 | WHEN size(toString(k.text)) <= 50 THEN toString(k.text)
192 | ELSE substring(toString(k.text), 0, 50) + '...'
193 | END AS title,
194 | k.text AS description,
195 | 'text' AS matchedProperty,
196 | CASE
197 | WHEN size(toString(k.text)) > 100 THEN left(toString(k.text), 100) + '...'
198 | ELSE toString(k.text)
199 | END AS matchedValue,
200 | k.createdAt AS createdAt, k.updatedAt AS updatedAt,
201 | k.projectId AS projectId, p.name AS projectName,
202 | score AS adjustedScore
203 | `;
204 | await knowledgeSession.executeRead(async (tx) => {
205 | const result = await tx.run(query, { searchValue });
206 | const items = result.records.map((record) => {
207 | const data = record.toObject();
208 | const scoreValue = data.adjustedScore;
209 | const score = typeof scoreValue === "number" ? scoreValue : 5;
210 | return {
211 | ...data,
212 | score,
213 | description:
214 | typeof data.description === "string"
215 | ? data.description
216 | : undefined,
217 | entityType: data.entityType || undefined,
218 | createdAt: data.createdAt || undefined,
219 | updatedAt: data.updatedAt || undefined,
220 | projectId: data.projectId || undefined,
221 | projectName: data.projectName || undefined,
222 | } as SearchResultItem;
223 | });
224 | searchResults.push(...items);
225 | });
226 | } catch (err) {
227 | logger.error(
228 | "Error during knowledge full-text search query",
229 | err as Error,
230 | {
231 | ...reqContext_fullText,
232 | targetLabel: "knowledge",
233 | detail: (err as Error).message,
234 | },
235 | );
236 | } finally {
237 | if (knowledgeSession) await knowledgeSession.close();
238 | }
239 | }
240 |
241 | searchResults.sort((a, b) => {
242 | if (b.score !== a.score) return b.score - a.score;
243 | const dateA = a.updatedAt || a.createdAt || "1970-01-01T00:00:00.000Z";
244 | const dateB = b.updatedAt || b.createdAt || "1970-01-01T00:00:00.000Z";
245 | return new Date(dateB).getTime() - new Date(dateA).getTime();
246 | });
247 |
248 | return Neo4jUtils.paginateResults(searchResults, { page, limit });
249 | } catch (error) {
250 | const errorMessage = error instanceof Error ? error.message : String(error);
251 | logger.error("Error performing full-text search", error as Error, {
252 | ...reqContext_fullText,
253 | detail: errorMessage,
254 | });
255 | if (errorMessage.includes("Unable to find index")) {
256 | logger.warning(
257 | "Full-text index might not be configured correctly or supported in this Neo4j version.",
258 | { ...reqContext_fullText, detail: "Index not found warning" },
259 | );
260 | throw new Error(
261 | `Full-text search failed: Index not found or query error. (${errorMessage})`,
262 | );
263 | }
264 | throw error;
265 | }
266 | }
267 |
```
--------------------------------------------------------------------------------
/src/services/neo4j/helpers.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { randomUUID } from "crypto";
2 | import neo4j from "neo4j-driver"; // Import the neo4j driver
3 | import { NodeLabels } from "./types.js"; // Import NodeLabels
4 | import { Neo4jUtils } from "./utils.js"; // Import Neo4jUtils
5 |
6 | /**
7 | * Helper functions for the Neo4j service
8 | */
9 |
10 | /**
11 | * Generate a unique ID string
12 | * @returns A unique string ID (without hyphens)
13 | */
14 | export function generateId(): string {
15 | return randomUUID().replace(/-/g, "");
16 | }
17 |
18 | /**
19 | * Escapes a relationship type string for safe use in Cypher queries.
20 | * It wraps the type in backticks and escapes any existing backticks within the type string.
21 | * @param type The relationship type string to escape.
22 | * @returns The escaped relationship type string.
23 | */
24 | export const escapeRelationshipType = (type: string): string => {
25 | // Backtick the type name and escape any backticks within the name itself.
26 | return `\`${type.replace(/`/g, "``")}\``;
27 | };
28 |
29 | /**
30 | * Generate a timestamped ID with an optional prefix
31 | * @param prefix Optional prefix for the ID
32 | * @returns A unique ID with timestamp and random component
33 | */
34 | export function generateTimestampedId(prefix?: string): string {
35 | const timestamp = Date.now().toString(36);
36 | const random = Math.random().toString(36).substring(2, 10);
37 | return prefix ? `${prefix}_${timestamp}${random}` : `${timestamp}${random}`;
38 | }
39 |
40 | // Removed unused toNeo4jParams function
41 |
42 | /**
43 | * Build a Neo4j update query dynamically based on provided fields
44 | * @param nodeLabel Neo4j node label
45 | * @param identifier Node identifier in the query (e.g., 'n')
46 | * @param updates Updates to apply
47 | * @returns Object with setClauses and params
48 | */
49 | export function buildUpdateQuery(
50 | nodeLabel: string, // Keep nodeLabel for potential future use or context
51 | identifier: string,
52 | updates: Record<string, any>,
53 | ): { setClauses: string[]; params: Record<string, any> } {
54 | const params: Record<string, any> = {};
55 | const setClauses: string[] = [];
56 |
57 | // Add update timestamp automatically
58 | const now = new Date().toISOString();
59 | params.updatedAt = now;
60 | setClauses.push(`${identifier}.updatedAt = $updatedAt`);
61 |
62 | // Add update clauses for each provided field in the updates object
63 | for (const [key, value] of Object.entries(updates)) {
64 | // Ensure we don't try to overwrite the id or createdAt
65 | if (key !== "id" && key !== "createdAt" && value !== undefined) {
66 | params[key] = value;
67 | setClauses.push(`${identifier}.${key} = $${key}`);
68 | }
69 | }
70 |
71 | return { setClauses, params };
72 | }
73 |
/**
 * Filter options accepted by buildListQuery. All fields are optional; each
 * one present contributes a WHERE condition (or a bound parameter for the
 * caller-supplied MATCH clauses).
 */
interface ListQueryFilterOptions {
  projectId?: string; // Always required for Task/Knowledge, handled in MATCH; '*' acts as a wildcard (no filter)
  status?: string | string[]; // Single status or any-of list
  priority?: string | string[]; // Single priority or any-of list
  assignedTo?: string; // Requires specific MATCH clause handling; only the parameter is bound here
  taskType?: string; // Exact match on the node's taskType property
  tags?: string[]; // Membership test built via Neo4jUtils.generateArrayInListQuery
  domain?: string; // Requires specific MATCH clause handling
  search?: string; // Requires specific WHERE clause handling (e.g., regex or full-text); Knowledge-only in buildListQuery
  // Add other potential filters here
}

/**
 * Pagination and sorting options used in buildListQuery.
 */
interface ListQueryPaginationOptions {
  sortBy?: string; // Property name to sort on; defaults to 'createdAt'
  sortDirection?: "asc" | "desc"; // Defaults to 'desc'
  page?: number; // 1-based page number; clamped to >= 1
  limit?: number; // Page size; clamped to 1..100
}

/**
 * Result of buildListQuery: a count query and a data query that share one
 * parameter object.
 */
interface ListQueryResult {
  countQuery: string; // Returns a single row with a 'total' column
  dataQuery: string; // Returns the paginated rows per returnProperties
  params: Record<string, any>; // Parameters for both queries (includes skip/limit)
}
107 |
108 | /**
109 | * Builds dynamic Cypher queries for listing entities with filtering, sorting, and pagination.
110 | *
111 | * @param label The primary node label (e.g., NodeLabels.Task, NodeLabels.Knowledge)
112 | * @param returnProperties An array of properties or expressions to return for the data query (e.g., ['t.id as id', 'u.name as userName'])
113 | * @param filters Filter options based on ListQueryFilterOptions
114 | * @param pagination Pagination and sorting options based on ListQueryPaginationOptions
115 | * @param nodeAlias Alias for the primary node in the query (default: 'n')
116 | * @param additionalMatchClauses Optional string containing additional MATCH or OPTIONAL MATCH clauses (e.g., for relationships like assigned user or domain)
117 | * @returns ListQueryResult containing the count query, data query, and parameters
118 | */
119 | export function buildListQuery(
120 | label: NodeLabels,
121 | returnProperties: string[],
122 | filters: ListQueryFilterOptions,
123 | pagination: ListQueryPaginationOptions,
124 | nodeAlias: string = "n",
125 | additionalMatchClauses: string = "",
126 | ): ListQueryResult {
127 | const params: Record<string, any> = {};
128 | let conditions: string[] = [];
129 |
130 | // --- Base MATCH Clause ---
131 | // projectId is handled directly in the MATCH for Task and Knowledge
132 | let projectIdFilter = "";
133 | // Only add projectId filter if it's provided and not the wildcard '*'
134 | if (filters.projectId && filters.projectId !== "*") {
135 | projectIdFilter = `{projectId: $projectId}`;
136 | params.projectId = filters.projectId;
137 | }
138 | let baseMatch = `MATCH (${nodeAlias}:${label} ${projectIdFilter})`;
139 |
140 | // --- Additional MATCH Clauses (Relationships) ---
141 | // Add user-provided MATCH/OPTIONAL MATCH clauses
142 | const fullMatchClause = `${baseMatch}\n${additionalMatchClauses}`;
143 |
144 | // --- WHERE Clause Conditions ---
145 | // Add assignedTo to params if it's part of the filters and used in additionalMatchClauses
146 | if (filters.assignedTo) {
147 | params.assignedTo = filters.assignedTo;
148 | }
149 |
150 | // Status filter
151 | if (filters.status) {
152 | if (Array.isArray(filters.status) && filters.status.length > 0) {
153 | params.statusList = filters.status;
154 | conditions.push(`${nodeAlias}.status IN $statusList`);
155 | } else if (typeof filters.status === "string") {
156 | params.status = filters.status;
157 | conditions.push(`${nodeAlias}.status = $status`);
158 | }
159 | }
160 | // Priority filter (assuming it applies to the primary node)
161 | if (filters.priority) {
162 | if (Array.isArray(filters.priority) && filters.priority.length > 0) {
163 | params.priorityList = filters.priority;
164 | conditions.push(`${nodeAlias}.priority IN $priorityList`);
165 | } else if (typeof filters.priority === "string") {
166 | params.priority = filters.priority;
167 | conditions.push(`${nodeAlias}.priority = $priority`);
168 | }
169 | }
170 | // TaskType filter (assuming it applies to the primary node)
171 | if (filters.taskType) {
172 | params.taskType = filters.taskType;
173 | conditions.push(`${nodeAlias}.taskType = $taskType`);
174 | }
175 | // Tags filter (using helper)
176 | if (filters.tags && filters.tags.length > 0) {
177 | // Ensure Neo4jUtils is accessible or import it if helpers.ts is separate
178 | // Assuming Neo4jUtils is available in scope or imported
179 | const tagQuery = Neo4jUtils.generateArrayInListQuery(
180 | nodeAlias,
181 | "tags",
182 | "tagsList",
183 | filters.tags,
184 | );
185 | if (tagQuery.cypher) {
186 | conditions.push(tagQuery.cypher);
187 | Object.assign(params, tagQuery.params);
188 | }
189 | }
190 | // Text search filter (Knowledge specific, using regex for now)
191 | if (label === NodeLabels.Knowledge && filters.search) {
192 | // Use case-insensitive regex
193 | params.search = `(?i).*${filters.search.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")}.*`;
194 | conditions.push(`${nodeAlias}.text =~ $search`);
195 | // TODO: Consider switching to full-text index search for performance:
196 | // conditions.push(`apoc.index.search('${NodeLabels.Knowledge}_fulltext', $search) YIELD node as ${nodeAlias}`);
197 | // This would require changing the MATCH structure significantly.
198 | }
199 | // Domain filter is handled via additionalMatchClauses typically
200 |
201 | const whereClause =
202 | conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
203 |
204 | // --- Sorting ---
205 | const sortField = pagination.sortBy || "createdAt"; // Default sort field
206 | const sortDirection = pagination.sortDirection || "desc"; // Default sort direction
207 | const orderByClause = `ORDER BY ${nodeAlias}.${sortField} ${sortDirection.toUpperCase()}`;
208 |
209 | // --- Pagination ---
210 | const page = Math.max(pagination.page || 1, 1);
211 | const limit = Math.min(Math.max(pagination.limit || 20, 1), 100);
212 | const skip = (page - 1) * limit;
213 | // Use neo4j.int() to ensure skip and limit are treated as integers
214 | params.skip = neo4j.int(skip);
215 | params.limit = neo4j.int(limit);
216 | const paginationClause = `SKIP $skip LIMIT $limit`;
217 |
218 | // --- Count Query ---
219 | const countQuery = `
220 | ${fullMatchClause}
221 | ${whereClause}
222 | RETURN count(DISTINCT ${nodeAlias}) as total
223 | `;
224 |
225 | // --- Data Query ---
226 | // Use WITH clause to pass distinct nodes after filtering before collecting relationships
227 | // This is crucial if additionalMatchClauses involve OPTIONAL MATCH that could multiply rows
228 | const dataQuery = `
229 | ${fullMatchClause}
230 | ${whereClause}
231 | WITH DISTINCT ${nodeAlias} ${additionalMatchClauses ? ", " + additionalMatchClauses.split(" ")[1] : ""} // Pass distinct primary node and potentially relationship aliases
232 | ${orderByClause} // Order before skip/limit
233 | ${paginationClause}
234 | // Re-apply OPTIONAL MATCHes if needed after pagination to get related data for the paginated set
235 | ${additionalMatchClauses} // Re-apply OPTIONAL MATCH here if needed for RETURN
236 | RETURN ${returnProperties.join(",\n ")}
237 | `;
238 |
239 | // Refined Data Query structure (alternative): Apply OPTIONAL MATCH *after* pagination
240 | // This can be more efficient if relationship data is only needed for the final page results.
241 | const dataQueryAlternative = `
242 | ${baseMatch} // Only match the primary node initially
243 | ${whereClause} // Apply filters on the primary node
244 | WITH ${nodeAlias}
245 | ${orderByClause}
246 | ${paginationClause}
247 | // Now apply OPTIONAL MATCHes for related data for the paginated nodes
248 | ${additionalMatchClauses}
249 | RETURN ${returnProperties.join(",\n ")}
250 | `;
251 | // Choosing dataQueryAlternative as it's generally more performant for pagination
252 |
253 | // Remove skip/limit from count params
254 | const countParams = { ...params };
255 | delete countParams.skip;
256 | delete countParams.limit;
257 |
258 | return {
259 | countQuery: countQuery,
260 | dataQuery: dataQueryAlternative, // Use the alternative query
261 | params: params, // Return params including skip/limit for the data query
262 | };
263 | }
264 |
```
--------------------------------------------------------------------------------
/src/mcp/resources/knowledge/knowledgeResources.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
2 | import { KnowledgeService } from "../../../services/neo4j/knowledgeService.js";
3 | import { ProjectService } from "../../../services/neo4j/projectService.js";
4 | import { KnowledgeFilterOptions } from "../../../services/neo4j/types.js";
5 | import {
6 | toKnowledgeResource,
7 | ResourceTemplates,
8 | ResourceURIs,
9 | } from "../types.js";
10 | import { logger, requestContextService } from "../../../utils/index.js"; // Import requestContextService
11 | import {
12 | BaseErrorCode,
13 | McpError,
14 | ProjectErrorCode,
15 | } from "../../../types/errors.js";
16 |
17 | /**
18 | * Register Knowledge Resources
19 | *
20 | * This function registers resource endpoints for the Knowledge entity
21 | * - GET atlas://knowledge - List all knowledge items
22 | * - GET atlas://knowledge/{knowledgeId} - Get specific knowledge item by ID
23 | * - GET atlas://projects/{projectId}/knowledge - List knowledge items for a specific project
24 | *
25 | * @param server The MCP server instance
26 | */
27 | export function registerKnowledgeResources(server: McpServer) {
28 | // List all knowledge
29 | server.resource(
30 | "knowledge-list",
31 | ResourceURIs.KNOWLEDGE,
32 | {
33 | name: "All Knowledge",
34 | description:
35 | "List of all knowledge items in the Atlas platform with pagination and filtering support",
36 | mimeType: "application/json",
37 | },
38 | async (uri) => {
39 | const reqContext = requestContextService.createRequestContext({
40 | operation: "listAllKnowledge",
41 | resourceUri: uri.href,
42 | });
43 | try {
44 | logger.info("Listing all knowledge items", {
45 | ...reqContext,
46 | uri: uri.href,
47 | });
48 |
49 | // Parse query parameters
50 | const queryParams = new URLSearchParams(uri.search);
51 | // Default project ID required by knowledge service
52 | const projectId = queryParams.get("projectId") || "*";
53 |
54 | const filters: KnowledgeFilterOptions = {
55 | projectId,
56 | };
57 |
58 | // Parse domain parameter
59 | const domain = queryParams.get("domain");
60 | if (domain) {
61 | filters.domain = String(domain);
62 | }
63 |
64 | // Parse tags parameter
65 | const tags = queryParams.get("tags");
66 | if (tags) {
67 | // Split comma-separated tags
68 | filters.tags = String(tags)
69 | .split(",")
70 | .map((tag) => tag.trim());
71 | }
72 |
73 | // Parse search parameter
74 | const search = queryParams.get("search");
75 | if (search) {
76 | filters.search = String(search);
77 | }
78 |
79 | // Parse pagination parameters
80 | const page = queryParams.has("page")
81 | ? parseInt(queryParams.get("page") || "1", 10)
82 | : 1;
83 |
84 | const limit = queryParams.has("limit")
85 | ? parseInt(queryParams.get("limit") || "20", 10)
86 | : 20;
87 |
88 | // Add pagination to filters
89 | filters.page = page;
90 | filters.limit = limit;
91 |
92 | // Query the database
93 | const result = await KnowledgeService.getKnowledge(filters);
94 |
95 | // Map Neo4j knowledge items to resource objects
96 | const knowledgeResources = result.data.map((item) =>
97 | toKnowledgeResource(item),
98 | );
99 |
100 | return {
101 | contents: [
102 | {
103 | uri: uri.href,
104 | mimeType: "application/json",
105 | text: JSON.stringify(
106 | {
107 | knowledge: knowledgeResources,
108 | pagination: {
109 | total: result.total,
110 | page: result.page,
111 | limit: result.limit,
112 | totalPages: result.totalPages,
113 | },
114 | },
115 | null,
116 | 2,
117 | ),
118 | },
119 | ],
120 | };
121 | } catch (error) {
122 | logger.error("Error listing knowledge items", error as Error, {
123 | ...reqContext,
124 | // error is now part of the Error object passed to logger
125 | uri: uri.href,
126 | });
127 |
128 | throw new McpError(
129 | BaseErrorCode.INTERNAL_ERROR,
130 | `Failed to list knowledge items: ${error instanceof Error ? error.message : String(error)}`,
131 | );
132 | }
133 | },
134 | );
135 |
136 | // Get knowledge by ID
137 | server.resource(
138 | "knowledge-by-id",
139 | ResourceTemplates.KNOWLEDGE,
140 | {
141 | name: "Knowledge by ID",
142 | description: "Retrieves a single knowledge item by its unique identifier",
143 | mimeType: "application/json",
144 | },
145 | async (uri, params) => {
146 | const reqContext = requestContextService.createRequestContext({
147 | operation: "getKnowledgeById",
148 | resourceUri: uri.href,
149 | knowledgeIdParam: params.knowledgeId,
150 | });
151 | try {
152 | const knowledgeId = params.knowledgeId as string;
153 |
154 | logger.info("Fetching knowledge by ID", {
155 | ...reqContext,
156 | knowledgeId, // Already in reqContext but can be explicit for clarity
157 | uri: uri.href, // Already in reqContext but can be explicit
158 | });
159 |
160 | if (!knowledgeId) {
161 | throw new McpError(
162 | BaseErrorCode.VALIDATION_ERROR,
163 | "Knowledge ID is required",
164 | );
165 | }
166 |
167 | // Query the database
168 | const knowledge = await KnowledgeService.getKnowledgeById(knowledgeId);
169 |
170 | if (!knowledge) {
171 | throw new McpError(
172 | BaseErrorCode.NOT_FOUND,
173 | `Knowledge item with ID ${knowledgeId} not found`,
174 | { knowledgeId },
175 | );
176 | }
177 |
178 | // Convert to resource object
179 | const knowledgeResource = toKnowledgeResource(knowledge);
180 |
181 | return {
182 | contents: [
183 | {
184 | uri: uri.href,
185 | mimeType: "application/json",
186 | text: JSON.stringify(knowledgeResource, null, 2),
187 | },
188 | ],
189 | };
190 | } catch (error) {
191 | // Handle specific error cases
192 | if (error instanceof McpError) {
193 | throw error;
194 | }
195 |
196 | logger.error("Error fetching knowledge by ID", error as Error, {
197 | ...reqContext,
198 | // error is now part of the Error object passed to logger
199 | parameters: params,
200 | });
201 |
202 | throw new McpError(
203 | BaseErrorCode.INTERNAL_ERROR,
204 | `Failed to fetch knowledge: ${error instanceof Error ? error.message : String(error)}`,
205 | );
206 | }
207 | },
208 | );
209 |
210 | // List knowledge by project
211 | server.resource(
212 | "knowledge-by-project",
213 | ResourceTemplates.KNOWLEDGE_BY_PROJECT,
214 | {
215 | name: "Knowledge by Project",
216 | description:
217 | "Retrieves all knowledge items belonging to a specific project",
218 | mimeType: "application/json",
219 | },
220 | async (uri, params) => {
221 | const reqContext = requestContextService.createRequestContext({
222 | operation: "listKnowledgeByProject",
223 | resourceUri: uri.href,
224 | projectIdParam: params.projectId,
225 | });
226 | try {
227 | const projectId = params.projectId as string;
228 |
229 | logger.info("Listing knowledge for project", {
230 | ...reqContext,
231 | projectId, // Already in reqContext but can be explicit
232 | uri: uri.href, // Already in reqContext
233 | });
234 |
235 | if (!projectId) {
236 | throw new McpError(
237 | BaseErrorCode.VALIDATION_ERROR,
238 | "Project ID is required",
239 | );
240 | }
241 |
242 | // Verify the project exists
243 | const project = await ProjectService.getProjectById(projectId);
244 | if (!project) {
245 | throw new McpError(
246 | ProjectErrorCode.PROJECT_NOT_FOUND,
247 | `Project with ID ${projectId} not found`,
248 | { projectId },
249 | );
250 | }
251 |
252 | // Parse query parameters
253 | const queryParams = new URLSearchParams(uri.search);
254 | const filters: KnowledgeFilterOptions = {
255 | projectId,
256 | };
257 |
258 | // Parse domain parameter
259 | const domain = queryParams.get("domain");
260 | if (domain) {
261 | filters.domain = String(domain);
262 | }
263 |
264 | // Parse tags parameter
265 | const tags = queryParams.get("tags");
266 | if (tags) {
267 | // Split comma-separated tags
268 | filters.tags = String(tags)
269 | .split(",")
270 | .map((tag) => tag.trim());
271 | }
272 |
273 | // Parse search parameter
274 | const search = queryParams.get("search");
275 | if (search) {
276 | filters.search = String(search);
277 | }
278 |
279 | // Parse pagination parameters
280 | const page = queryParams.has("page")
281 | ? parseInt(queryParams.get("page") || "1", 10)
282 | : 1;
283 |
284 | const limit = queryParams.has("limit")
285 | ? parseInt(queryParams.get("limit") || "20", 10)
286 | : 20;
287 |
288 | // Add pagination to filters
289 | filters.page = page;
290 | filters.limit = limit;
291 |
292 | // Query the database
293 | const result = await KnowledgeService.getKnowledge(filters);
294 |
295 | // Map Neo4j knowledge items to resource objects
296 | const knowledgeResources = result.data.map((item) =>
297 | toKnowledgeResource(item),
298 | );
299 |
300 | return {
301 | contents: [
302 | {
303 | uri: uri.href,
304 | mimeType: "application/json",
305 | text: JSON.stringify(
306 | {
307 | projectId,
308 | projectName: project.name,
309 | knowledge: knowledgeResources,
310 | pagination: {
311 | total: result.total,
312 | page: result.page,
313 | limit: result.limit,
314 | totalPages: result.totalPages,
315 | },
316 | },
317 | null,
318 | 2,
319 | ),
320 | },
321 | ],
322 | };
323 | } catch (error) {
324 | // Handle specific error cases
325 | if (error instanceof McpError) {
326 | throw error;
327 | }
328 |
329 | logger.error("Error listing knowledge for project", error as Error, {
330 | ...reqContext,
331 | // error is now part of the Error object passed to logger
332 | parameters: params,
333 | });
334 |
335 | throw new McpError(
336 | BaseErrorCode.INTERNAL_ERROR,
337 | `Failed to list knowledge for project: ${error instanceof Error ? error.message : String(error)}`,
338 | );
339 | }
340 | },
341 | );
342 | }
343 |
```
--------------------------------------------------------------------------------
/src/mcp/tools/atlas_task_create/index.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
2 | import { z } from "zod";
3 | import { PriorityLevel, TaskStatus } from "../../../types/mcp.js";
4 | import {
5 | createToolExample,
6 | createToolMetadata,
7 | registerTool,
8 | } from "../../../types/tool.js";
9 | import { atlasCreateTask } from "./createTask.js";
10 | import { AtlasTaskCreateSchemaShape } from "./types.js";
11 |
/**
 * Registers the `atlas_task_create` tool with the given MCP server.
 *
 * The tool supports two modes (see `AtlasTaskCreateSchemaShape`):
 * - "single": creates one task under an existing project
 * - "bulk": creates several tasks in one call, reporting per-item errors
 *
 * Registration wires up the Zod input schema, worked examples shown to
 * clients, the expected return schema for both modes, a required
 * permission, and a per-client rate limit.
 *
 * @param server - The MCP server instance to register the tool on.
 */
export const registerAtlasTaskCreateTool = (server: McpServer) => {
  registerTool(
    server,
    "atlas_task_create",
    "Creates a new task or multiple tasks in the system with detailed specifications, categorization, and dependency tracking",
    AtlasTaskCreateSchemaShape,
    atlasCreateTask,
    createToolMetadata({
      examples: [
        // Example 1: single-mode creation of one research task
        createToolExample(
          {
            mode: "single",
            projectId: "proj_ms_migration",
            title: "Design API Gateway Architecture",
            description:
              "Create a detailed architecture diagram and specifications for the API gateway that will route requests to appropriate microservices, handle authentication, and implement rate limiting",
            priority: "high",
            status: "todo",
            tags: ["architecture", "api", "gateway"],
            completionRequirements:
              "Complete architecture diagram with data flow, scaling strategy, and disaster recovery considerations. Implementation specifications must include authentication flow and rate limiting algorithms",
            outputFormat:
              "Architecture diagram (PDF), Technical specifications document (Markdown), Implementation roadmap",
            taskType: "research",
          },
          `{
  "id": "task_api_gateway",
  "projectId": "proj_ms_migration",
  "title": "Design API Gateway Architecture",
  "description": "Create a detailed architecture diagram and specifications for the API gateway that will route requests to appropriate microservices, handle authentication, and implement rate limiting",
  "priority": "high",
  "status": "todo",
  "assignedTo": null,
  "urls": [],
  "tags": ["architecture", "api", "gateway"],
  "completionRequirements": "Complete architecture diagram with data flow, scaling strategy, and disaster recovery considerations. Implementation specifications must include authentication flow and rate limiting algorithms",
  "outputFormat": "Architecture diagram (PDF), Technical specifications document (Markdown), Implementation roadmap",
  "taskType": "research",
  "createdAt": "2025-03-23T10:11:24.123Z",
  "updatedAt": "2025-03-23T10:11:24.123Z"
}`,
          "Create a high-priority research task with specific completion criteria under an existing project",
        ),
        // Example 2: bulk-mode creation of multiple related tasks
        createToolExample(
          {
            mode: "bulk",
            tasks: [
              {
                projectId: "proj_graphql",
                title: "Set up GraphQL schema and resolver structure",
                description:
                  "Create the foundation for our GraphQL API by defining the base schema structure, resolver patterns, and integration with existing data sources",
                priority: "high",
                tags: ["graphql", "schema", "foundation"],
                completionRequirements:
                  "Working schema structure with type definitions for core entities. Base resolver pattern implemented with at least one full query path to the database.",
                outputFormat:
                  "TypeScript code implementing the schema and resolvers with documentation",
                taskType: "generation",
              },
              {
                projectId: "proj_graphql",
                title: "Implement authentication and authorization",
                description:
                  "Add authentication and authorization to the GraphQL API using JWT tokens and directive-based permission controls",
                status: "backlog",
                tags: ["auth", "security", "graphql"],
                completionRequirements:
                  "Authentication middleware and directive implemented. All resolvers protected with appropriate permission checks.",
                outputFormat:
                  "TypeScript code with tests demonstrating security controls",
                taskType: "generation",
              },
            ],
          },
          `{
  "success": true,
  "message": "Successfully created 2 tasks",
  "created": [
    {
      "id": "task_graphql_schema",
      "projectId": "proj_graphql",
      "title": "Set up GraphQL schema and resolver structure",
      "description": "Create the foundation for our GraphQL API by defining the base schema structure, resolver patterns, and integration with existing data sources",
      "priority": "high",
      "status": "todo",
      "assignedTo": null,
      "urls": [],
      "tags": ["graphql", "schema", "foundation"],
      "completionRequirements": "Working schema structure with type definitions for core entities. Base resolver pattern implemented with at least one full query path to the database.",
      "outputFormat": "TypeScript code implementing the schema and resolvers with documentation",
      "taskType": "generation",
      "createdAt": "2025-03-23T10:11:24.123Z",
      "updatedAt": "2025-03-23T10:11:24.123Z"
    },
    {
      "id": "task_auth",
      "projectId": "proj_graphql",
      "title": "Implement authentication and authorization",
      "description": "Add authentication and authorization to the GraphQL API using JWT tokens and directive-based permission controls",
      "priority": "medium",
      "status": "backlog",
      "assignedTo": null,
      "urls": [],
      "tags": ["auth", "security", "graphql"],
      "completionRequirements": "Authentication middleware and directive implemented. All resolvers protected with appropriate permission checks.",
      "outputFormat": "TypeScript code with tests demonstrating security controls",
      "taskType": "generation",
      "createdAt": "2025-03-23T10:11:24.456Z",
      "updatedAt": "2025-03-23T10:11:24.456Z"
    }
  ],
  "errors": []
}`,
          "Batch-initialize multiple specialized tasks with clear dependencies and technical requirements",
        ),
      ],
      requiredPermission: "task:create",
      // The tool may return either shape depending on the "mode" input.
      returnSchema: z.union([
        // Single task response
        z.object({
          id: z.string().describe("Task ID"),
          projectId: z.string().describe("Parent project ID"),
          title: z.string().describe("Task title"),
          description: z.string().describe("Task description"),
          priority: z
            .enum([
              PriorityLevel.LOW,
              PriorityLevel.MEDIUM,
              PriorityLevel.HIGH,
              PriorityLevel.CRITICAL,
            ])
            .describe("Importance level"),
          status: z
            .enum([
              TaskStatus.BACKLOG,
              TaskStatus.TODO,
              TaskStatus.IN_PROGRESS,
              TaskStatus.COMPLETED,
            ])
            .describe("Task status"),
          assignedTo: z
            .string()
            .nullable()
            .describe("ID of entity responsible for completion"),
          urls: z
            .array(
              z.object({
                title: z.string(),
                url: z.string(),
              }),
            )
            .describe("Reference materials"),
          tags: z.array(z.string()).describe("Organizational labels"),
          completionRequirements: z.string().describe("Completion criteria"),
          outputFormat: z.string().describe("Deliverable format"),
          taskType: z.string().describe("Task classification"),
          createdAt: z.string().describe("Creation timestamp"),
          updatedAt: z.string().describe("Last update timestamp"),
        }),
        // Bulk creation response: successes in "created", failures in "errors"
        z.object({
          success: z.boolean().describe("Operation success status"),
          message: z.string().describe("Result message"),
          created: z
            .array(
              z.object({
                id: z.string().describe("Task ID"),
                projectId: z.string().describe("Parent project ID"),
                title: z.string().describe("Task title"),
                description: z.string().describe("Task description"),
                priority: z
                  .enum([
                    PriorityLevel.LOW,
                    PriorityLevel.MEDIUM,
                    PriorityLevel.HIGH,
                    PriorityLevel.CRITICAL,
                  ])
                  .describe("Importance level"),
                status: z
                  .enum([
                    TaskStatus.BACKLOG,
                    TaskStatus.TODO,
                    TaskStatus.IN_PROGRESS,
                    TaskStatus.COMPLETED,
                  ])
                  .describe("Task status"),
                assignedTo: z
                  .string()
                  .nullable()
                  .describe("ID of entity responsible for completion"),
                urls: z
                  .array(
                    z.object({
                      title: z.string(),
                      url: z.string(),
                    }),
                  )
                  .describe("Reference materials"),
                tags: z.array(z.string()).describe("Organizational labels"),
                completionRequirements: z
                  .string()
                  .describe("Completion criteria"),
                outputFormat: z.string().describe("Deliverable format"),
                taskType: z.string().describe("Task classification"),
                createdAt: z.string().describe("Creation timestamp"),
                updatedAt: z.string().describe("Last update timestamp"),
              }),
            )
            .describe("Created tasks"),
          errors: z
            .array(
              z.object({
                index: z.number().describe("Index in the tasks array"),
                task: z.any().describe("Original task data"),
                error: z
                  .object({
                    code: z.string().describe("Error code"),
                    message: z.string().describe("Error message"),
                    details: z
                      .any()
                      .optional()
                      .describe("Additional error details"),
                  })
                  .describe("Error information"),
              }),
            )
            .describe("Creation errors"),
        }),
      ]),
      rateLimit: {
        windowMs: 60 * 1000, // 1 minute
        maxRequests: 15, // 15 requests per minute (either single or bulk)
      },
    }),
  );
};
249 |
```
--------------------------------------------------------------------------------
/src/mcp/tools/atlas_deep_research/types.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { z } from "zod";
2 | import {
3 | createKnowledgeDomainEnum,
4 | createResponseFormatEnum,
5 | ResponseFormat,
6 | } from "../../../types/mcp.js";
7 |
/**
 * Zod schema defining the structure for a single sub-topic provided as input
 * to the deep research tool.
 *
 * Note: `initialStatus` carries a parse-time default of "todo", so the
 * parsed output always has a status even when the caller omits the field.
 */
export const DeepResearchSubTopicSchema = z.object({
  /** A focused, well-defined sub-topic or precise question to investigate. */
  question: z
    .string()
    .min(1)
    .describe(
      "A focused, well-defined sub-topic or precise question to investigate. Effective research requires clear, bounded inquiries rather than overly broad topics.",
    ),
  /** Concise, targeted search queries or specific keywords relevant to this sub-topic. */
  initialSearchQueries: z
    .array(z.string())
    .optional()
    .describe(
      "Concise, targeted search queries or specific keywords relevant to this sub-topic. Effective deep research relies on precise, focused queries rather than broad terms.",
    ),
  /** Optional client-provided ID for the knowledge node representing this sub-topic. */
  nodeId: z
    .string()
    .optional()
    .describe(
      "Optional client-provided ID for this sub-topic knowledge node. Useful for maintaining consistent cross-referencing across research efforts.",
    ),
  /** Strategic priority level for the task created for this sub-topic. */
  priority: z
    .enum(["low", "medium", "high", "critical"])
    .optional()
    .describe(
      "Strategic priority level for the task created for this sub-topic. Helps organize the research workflow by importance and urgency.",
    ),
  /** Optional assignee ID for the task created for this sub-topic. */
  assignedTo: z
    .string()
    .optional()
    .describe(
      "Optional assignee ID for the task created for this sub-topic. Enables clear ownership and accountability for specific research areas.",
    ),
  /** Workflow status for the task created for this sub-topic. Defaults to "todo" at parse time. */
  initialStatus: z
    .enum(["backlog", "todo", "in-progress", "completed"])
    .optional()
    .default("todo")
    .describe(
      "Workflow status for the task created for this sub-topic (default: todo). Facilitates research progression tracking across multiple inquiry areas.",
    ),
});

/**
 * TypeScript type inferred from `DeepResearchSubTopicSchema`. Represents a single sub-topic input.
 */
export type DeepResearchSubTopic = z.infer<typeof DeepResearchSubTopicSchema>;
62 |
/**
 * Defines the shape of the input parameters for the `atlas_deep_research` tool.
 * This structure is used to build the final Zod schema.
 *
 * Declared `as const` so the field keys keep their literal types when the
 * shape is passed to `z.object(...)` below.
 */
export const AtlasDeepResearchSchemaShape = {
  /** Organizational parent project ID for contextualizing this research within broader objectives. */
  projectId: z
    .string()
    .describe(
      "Organizational parent project ID for contextualizing this research within broader objectives (required). Essential for proper knowledge graph relationships.",
    ),
  /** The primary, overarching topic or central question driving this research. */
  researchTopic: z
    .string()
    .min(1)
    .describe(
      "The primary, overarching topic or central question driving this deep research initiative (required). Should be substantive yet focused enough to yield actionable insights.",
    ),
  /** Clearly articulated objective or specific outcome this research aims to achieve. */
  researchGoal: z
    .string()
    .min(1)
    .describe(
      "Clearly articulated objective or specific outcome this research aims to achieve (required). Defines what successful research completion looks like.",
    ),
  /** Strategic boundary definition clarifying research inclusions and exclusions. */
  scopeDefinition: z
    .string()
    .optional()
    .describe(
      "Strategic boundary definition clarifying research inclusions and exclusions. Prevents scope creep and maintains research focus on high-value areas.",
    ),
  /** Structured decomposition of the main topic into discrete, manageable sub-questions or investigation areas. At least one sub-topic is required. */
  subTopics: z
    .array(DeepResearchSubTopicSchema)
    .min(1)
    .describe(
      "Structured decomposition of the main topic into discrete, manageable sub-questions or investigation areas. Effective research requires breaking complex topics into component inquiries.",
    ),
  /**
   * Knowledge domain classification for the overall research topic.
   * `.or(z.string())` deliberately accepts free-form domains beyond the
   * predefined enum values.
   */
  researchDomain: createKnowledgeDomainEnum()
    .or(z.string())
    .optional()
    .describe(
      "Knowledge domain classification for the overall research topic (e.g., 'technical', 'business', 'scientific'). Enables better categorization and retrieval within the knowledge management system.",
    ),
  /** Semantic categorization tags for improved searchability and relationship identification. */
  initialTags: z
    .array(z.string())
    .optional()
    .describe(
      "Semantic categorization tags for improved searchability and relationship identification. Facilitates connecting this research to related knowledge areas.",
    ),
  /** Unique identifier for the main research plan knowledge node. */
  planNodeId: z
    .string()
    .optional()
    .describe(
      "Unique identifier for the main research plan knowledge node. Enables programmatic reference to this research plan in future operations.",
    ),
  /** Output format specification for the tool response. Defaults to the human-readable format at parse time. */
  responseFormat: createResponseFormatEnum()
    .optional()
    .default(ResponseFormat.FORMATTED)
    .describe(
      "Output format specification for the tool response. Controls whether the response is human-readable ('formatted') or machine-processable ('json').",
    ),
  /** Task generation control flag for research operationalization. Defaults to true at parse time. */
  createTasks: z
    .boolean()
    .optional()
    .default(true)
    .describe(
      "Task generation control flag for research operationalization (default: true). When enabled, creates trackable tasks for each sub-topic to facilitate systematic investigation.",
    ),
} as const;

/**
 * The complete Zod schema for validating the input arguments of the `atlas_deep_research` tool.
 */
export const AtlasDeepResearchInputSchema = z.object(
  AtlasDeepResearchSchemaShape,
);

/**
 * TypeScript type inferred from `AtlasDeepResearchInputSchema`. Represents the validated input object.
 */
export type AtlasDeepResearchInput = z.infer<
  typeof AtlasDeepResearchInputSchema
>;
152 |
/**
 * Zod schema defining the structure for representing a created sub-topic knowledge node
 * in the tool's output.
 *
 * Mirrors `DeepResearchSubTopicSchema` on the input side, with `nodeId`
 * now required (the node has been created) and an optional `taskId`
 * present only when task generation was enabled.
 */
export const DeepResearchSubTopicNodeResultSchema = z.object({
  /** The formulated sub-topic question representing a discrete research inquiry. */
  question: z
    .string()
    .describe(
      "The formulated sub-topic question representing a discrete research inquiry. Forms the foundation for focused knowledge gathering.",
    ),
  /** Unique identifier for the knowledge node containing insights related to this sub-topic. */
  nodeId: z
    .string()
    .describe(
      "Unique identifier for the knowledge node containing insights related to this sub-topic. Essential for cross-referencing and knowledge relationship mapping.",
    ),
  /** Reference to the actionable task entity created to investigate this sub-topic, if any. */
  taskId: z
    .string()
    .optional()
    .describe(
      "Reference to the actionable task entity created to investigate this sub-topic, if applicable. Links knowledge goals with operational workflow.",
    ),
  /** Precision-targeted search queries used to initiate investigation of this sub-topic. */
  initialSearchQueries: z
    .array(z.string())
    .optional()
    .describe(
      "Precision-targeted search queries used to initiate investigation of this sub-topic. Effective deep research begins with carefully crafted, specific queries.",
    ),
});

/**
 * TypeScript type inferred from `DeepResearchSubTopicNodeResultSchema`. Represents a single sub-topic node result.
 */
export type DeepResearchSubTopicNodeResult = z.infer<
  typeof DeepResearchSubTopicNodeResultSchema
>;
192 |
/**
 * Interface defining the expected output structure returned by the core `deepResearch` function.
 *
 * @remarks
 * This is the internal (plain TypeScript) counterpart of
 * `AtlasDeepResearchOutputSchema`; keep the two in sync when adding fields.
 */
export interface DeepResearchResult {
  /** Execution status indicator for the overall research plan creation operation. */
  success: boolean;
  /** Comprehensive summary of the research plan creation outcome with relevant details. */
  message: string;
  /** Unique reference identifier for the root knowledge node containing the complete research plan. */
  planNodeId: string;
  /** Semantic categorization markers applied to the root research plan for improved discoverability. */
  initialTags?: string[];
  /** Structured collection of created knowledge nodes and associated tasks representing discrete research areas. */
  subTopicNodes: DeepResearchSubTopicNodeResult[];
  /** Operational workflow status indicating whether actionable tasks were created for research execution. */
  tasksCreated: boolean;
}
210 |
/**
 * Zod schema defining the structure of the output returned by the `atlas_deep_research` tool handler.
 * This is used for potential validation or type checking of the final tool response content.
 *
 * @remarks
 * Mirrors the `DeepResearchResult` interface above; keep both in sync.
 */
export const AtlasDeepResearchOutputSchema = z.object({
  /** Status indicator reflecting whether the research plan creation completed successfully. */
  success: z
    .boolean()
    .describe(
      "Status indicator reflecting whether the research plan creation completed successfully. Critical for error handling and flow control.",
    ),
  /** Informative summary describing the research plan creation outcome with actionable details. */
  message: z
    .string()
    .describe(
      "Informative summary describing the research plan creation outcome with actionable details. Provides context for next steps.",
    ),
  /** Unique reference ID for the core knowledge node containing the comprehensive research plan. */
  planNodeId: z
    .string()
    .describe(
      "Unique reference ID for the core knowledge node containing the comprehensive research plan. Essential for future references to this research initiative.",
    ),
  /** Semantic classification markers applied to the research plan for improved categorical organization. */
  initialTags: z
    .array(z.string())
    .optional()
    .describe(
      "Semantic classification markers applied to the research plan for improved categorical organization. Facilitates knowledge discovery and relationship mapping.",
    ),
  /** Structured collection of generated knowledge nodes and workflow tasks for each research sub-area. */
  subTopicNodes: z
    .array(DeepResearchSubTopicNodeResultSchema)
    .describe(
      "Structured collection of generated knowledge nodes and workflow tasks for each research sub-area. Provides the complete map of the created research knowledge structure.",
    ),
  /** Task creation status indicating whether operational workflow items were generated. */
  tasksCreated: z
    .boolean()
    .describe(
      "Task creation status indicating whether operational workflow items were generated. Confirms proper integration with the task management system.",
    ),
});

/**
 * TypeScript type inferred from `AtlasDeepResearchOutputSchema`. Represents the structured output of the tool.
 */
export type AtlasDeepResearchOutput = z.infer<
  typeof AtlasDeepResearchOutputSchema
>;
261 |
```
--------------------------------------------------------------------------------
/src/mcp/tools/atlas_deep_research/deepResearch.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { nanoid } from "nanoid";
2 | import { KnowledgeService } from "../../../services/neo4j/knowledgeService.js";
3 | import { ProjectService } from "../../../services/neo4j/projectService.js";
4 | import { TaskService } from "../../../services/neo4j/taskService.js"; // Import TaskService
5 | import { BaseErrorCode, McpError } from "../../../types/errors.js";
6 | import { logger, requestContextService } from "../../../utils/index.js"; // Import requestContextService
7 | import { sanitization } from "../../../utils/security/sanitization.js";
8 | import {
9 | AtlasDeepResearchInput,
10 | DeepResearchResult,
11 | DeepResearchSubTopicNodeResult,
12 | } from "./types.js";
13 |
14 | /**
15 | * Generates a unique ID suitable for knowledge nodes using nanoid.
16 | * Includes a prefix for better identification (e.g., 'plan', 'sub').
17 | *
18 | * @param prefix - The prefix to use for the ID (defaults to 'knw').
19 | * @returns A unique ID string (e.g., 'plan_aBcDeFgHiJkL').
20 | */
21 | function generateKnowledgeId(prefix: string = "knw"): string {
22 | return `${prefix}_${nanoid(12)}`; // Using 12 characters for increased uniqueness
23 | }
24 |
25 | /**
26 | * Core implementation logic for the `atlas_deep_research` tool.
27 | * This function orchestrates the creation of a hierarchical knowledge structure
28 | * in Neo4j to represent a research plan based on the provided input.
29 | * It creates a root node for the overall plan and child nodes for each sub-topic.
30 | *
31 | * @param input - The validated input object conforming to `AtlasDeepResearchInput`.
32 | * @returns A promise resolving to a `DeepResearchResult` object containing details
33 | * about the created nodes/tasks and the operation's success status.
34 | * @throws {McpError} If the project ID is invalid, or if any database operation fails.
35 | */
36 | export async function deepResearch(
37 | input: AtlasDeepResearchInput,
38 | ): Promise<DeepResearchResult> {
39 | const reqContext = requestContextService.createRequestContext({
40 | operation: "deepResearch",
41 | projectId: input.projectId,
42 | researchTopic: input.researchTopic,
43 | });
44 | logger.info(
45 | `Initiating deep research plan creation for project ID: ${input.projectId}, Topic: "${input.researchTopic}"`,
46 | reqContext,
47 | );
48 |
49 | try {
50 | // 1. Validate Project Existence
51 | // Ensure the specified project exists before proceeding.
52 | const project = await ProjectService.getProjectById(input.projectId);
53 | if (!project) {
54 | throw new McpError(
55 | BaseErrorCode.NOT_FOUND,
56 | `Project with ID "${input.projectId}" not found. Cannot create research plan.`,
57 | );
58 | }
59 | logger.debug(
60 | `Project validation successful for ID: ${input.projectId}`,
61 | reqContext,
62 | );
63 |
64 | // 2. Prepare Root Research Plan Node Data
65 | const planNodeId = input.planNodeId || generateKnowledgeId("plan");
66 | const rootTextParts: string[] = [
67 | `Research Plan: ${sanitization.sanitizeString(input.researchTopic)}`,
68 | `Goal: ${sanitization.sanitizeString(input.researchGoal)}`,
69 | ];
70 | if (input.scopeDefinition) {
71 | rootTextParts.push(
72 | `Scope: ${sanitization.sanitizeString(input.scopeDefinition)}`,
73 | );
74 | }
75 | const rootText = rootTextParts.join("\n\n"); // Combine parts into the main text content
76 |
77 | // Define tags for the root node
78 | const rootTags = [
79 | "research-plan",
80 | "research-root",
81 | "status:active", // Initialize the plan as active
82 | `topic:${sanitization
83 | .sanitizeString(input.researchTopic)
84 | .toLowerCase()
85 | .replace(/\s+/g, "-") // Convert topic to a URL-friendly tag format
86 | .slice(0, 50)}`, // Limit tag length
87 | ...(input.initialTags || []), // Include any user-provided initial tags
88 | ];
89 |
90 | // 3. Create Root Research Plan Node and link to Project
91 | // Assuming KnowledgeService.addKnowledge handles linking if projectId is provided,
92 | // or we might need a specific method like addKnowledgeAndLinkToProject.
93 | // For now, assume addKnowledge creates the node and links it via projectId.
94 | // A more robust approach might involve explicit relationship creation.
95 | logger.debug(
96 | `Attempting to create root research plan node with ID: ${planNodeId}`,
97 | { ...reqContext, planNodeId },
98 | );
99 | await KnowledgeService.addKnowledge({
100 | id: planNodeId,
101 | projectId: input.projectId,
102 | text: rootText,
103 | domain: input.researchDomain || "research",
104 | tags: rootTags,
105 | citations: [],
106 | });
107 | // If explicit linking is needed:
108 | // await KnowledgeService.linkKnowledgeToProject(planNodeId, input.projectId, 'CONTAINS_PLAN');
109 | logger.info(
110 | `Root research plan node ${planNodeId} created and associated with project.`,
111 | { ...reqContext, planNodeId },
112 | );
113 |
114 | // 4. Create Knowledge Nodes and Optional Tasks for Each Sub-Topic
115 | const createdSubTopicNodes: DeepResearchSubTopicNodeResult[] = [];
116 | const tasksToCreate = input.createTasks ?? true; // Default to true if not specified
117 | logger.debug(
118 | `Processing ${input.subTopics.length} sub-topics to create knowledge nodes ${
119 | tasksToCreate ? "and tasks" : ""
120 | }.`,
121 | {
122 | ...reqContext,
123 | subTopicCount: input.subTopics.length,
124 | willCreateTasks: tasksToCreate,
125 | },
126 | );
127 |
128 | for (const subTopic of input.subTopics) {
129 | const subTopicNodeId = subTopic.nodeId || generateKnowledgeId("sub");
130 | let createdTaskId: string | undefined = undefined;
131 |
132 | // Sanitize search queries before joining
133 | const searchQueriesString = (subTopic.initialSearchQueries || [])
134 | .map((kw) => sanitization.sanitizeString(kw))
135 | .join(", ");
136 | // Construct the text content for the sub-topic node
137 | const subTopicText = `Research Question: ${sanitization.sanitizeString(
138 | subTopic.question,
139 | )}\n\nInitial Search Queries: ${searchQueriesString || "None provided"}`;
140 |
141 | // Define tags for the sub-topic node
142 | const subTopicTags = [
143 | "research-subtopic",
144 | "status:pending", // Initialize sub-topics as pending
145 | // `parent-plan:${planNodeId}`, // Replaced by relationship if implemented
146 | ...(subTopic.initialSearchQueries?.map(
147 | (kw: string) =>
148 | `search-query:${sanitization
149 | .sanitizeString(kw) // Create tags for each search query
150 | .toLowerCase()
151 | .replace(/\s+/g, "-")
152 | .slice(0, 50)}`,
153 | ) || []),
154 | ];
155 |
156 | logger.debug(
157 | `Attempting to create sub-topic node with ID: ${subTopicNodeId} for question: "${subTopic.question}"`,
158 | { ...reqContext, subTopicNodeId, question: subTopic.question },
159 | );
160 | // Create the sub-topic knowledge node and link it to the parent plan node
161 | // Assuming addKnowledge links to project, now link to parent knowledge node
162 | await KnowledgeService.addKnowledge({
163 | id: subTopicNodeId,
164 | projectId: input.projectId, // Associate with the same project
165 | text: subTopicText,
166 | domain: input.researchDomain || "research", // Inherit domain from the root plan
167 | tags: subTopicTags,
168 | citations: [], // Sub-topics also start with no citations
169 | });
170 | // Explicitly link sub-topic to parent plan node
171 | await KnowledgeService.linkKnowledgeToKnowledge(
172 | subTopicNodeId,
173 | planNodeId,
174 | "IS_SUBTOPIC_OF", // Relationship type from child to parent
175 | );
176 | logger.info(
177 | `Sub-topic node ${subTopicNodeId} created and linked to plan ${planNodeId}.`,
178 | { ...reqContext, subTopicNodeId, parentPlanNodeId: planNodeId },
179 | );
180 |
181 | // Create Task if requested
182 | if (tasksToCreate) {
183 | logger.debug(`Creating task for sub-topic node ${subTopicNodeId}`, {
184 | ...reqContext,
185 | subTopicNodeId,
186 | });
187 | const taskTitle = `Research: ${sanitization.sanitizeString(
188 | subTopic.question,
189 | )}`;
190 | const taskDescription = `Investigate the research question: "${sanitization.sanitizeString(
191 | subTopic.question,
192 | )}"\n\nInitial Search Queries: ${
193 | searchQueriesString || "None provided"
194 | }\n\nAssociated Knowledge Node: ${subTopicNodeId}`;
195 |
196 | // Use TaskService to create the task and link it to the project
197 | const taskResult = await TaskService.createTask({
198 | projectId: input.projectId,
199 | title: taskTitle.slice(0, 150), // Ensure title length constraint
200 | description: taskDescription,
201 | priority: subTopic.priority || "medium",
202 | status: subTopic.initialStatus || "todo",
203 | assignedTo: subTopic.assignedTo,
204 | completionRequirements: `Gather relevant information and synthesize findings related to the research question. Update associated knowledge node ${subTopicNodeId}.`,
205 | outputFormat:
206 | "Update to knowledge node, potentially new linked knowledge items.",
207 | taskType: "research", // Specific task type
208 | // tags: [`research-task`, `plan:${planNodeId}`], // Optional tags for the task
209 | });
210 |
211 | createdTaskId = taskResult.id;
212 | logger.info(
213 | `Task ${createdTaskId} created for sub-topic ${subTopicNodeId}.`,
214 | { ...reqContext, createdTaskId, subTopicNodeId },
215 | );
216 |
217 | // Link Task to the Sub-Topic Knowledge Node
218 | await TaskService.linkTaskToKnowledge(
219 | createdTaskId,
220 | subTopicNodeId,
221 | "ADDRESSES", // Relationship: Task ADDRESSES Knowledge Node
222 | );
223 | logger.debug(
224 | `Linked task ${createdTaskId} to knowledge node ${subTopicNodeId} with ADDRESSES relationship.`,
225 | { ...reqContext, createdTaskId, subTopicNodeId },
226 | );
227 | }
228 |
229 | // Record the details of the created sub-topic node and task
230 | createdSubTopicNodes.push({
231 | question: subTopic.question,
232 | nodeId: subTopicNodeId,
233 | taskId: createdTaskId, // Include task ID if created
234 | initialSearchQueries: subTopic.initialSearchQueries || [],
235 | });
236 | }
237 |
238 | // 5. Assemble and Return the Result
239 | const taskMessage = tasksToCreate
240 | ? `and ${createdSubTopicNodes.length} associated tasks`
241 | : "";
242 | const successMessage = `Successfully created deep research plan "${input.researchTopic}" with root research plan node ${planNodeId}, ${createdSubTopicNodes.length} sub-topic nodes ${taskMessage}.`;
243 | logger.info(successMessage, {
244 | ...reqContext,
245 | planNodeId,
246 | subTopicNodeCount: createdSubTopicNodes.length,
247 | tasksCreatedCount: tasksToCreate ? createdSubTopicNodes.length : 0,
248 | });
249 |
250 | return {
251 | success: true,
252 | message: successMessage,
253 | planNodeId: planNodeId,
254 | initialTags: input.initialTags || [], // Return the initial tags applied to the root
255 | subTopicNodes: createdSubTopicNodes, // Return details of created sub-topic nodes and tasks
256 | tasksCreated: tasksToCreate, // Indicate if tasks were created
257 | };
258 | } catch (error) {
259 | // Log the error with context
260 | const errorContextDetails = {
261 | // errorMessage is part of the error object passed to logger.error
262 | // stack is part of the error object passed to logger.error
263 | projectId: input.projectId, // Already in reqContext
264 | researchTopic: input.researchTopic, // Already in reqContext
265 | };
266 | logger.error(
267 | "Error occurred during deep research plan creation",
268 | error as Error,
269 | { ...reqContext, ...errorContextDetails },
270 | );
271 |
272 | // Re-throw McpError instances directly
273 | if (error instanceof McpError) {
274 | throw error;
275 | }
276 | // Wrap unexpected errors in a generic McpError
277 | throw new McpError(
278 | BaseErrorCode.INTERNAL_ERROR,
279 | `Failed to create deep research plan (Project: ${input.projectId}, Topic: "${input.researchTopic}"): ${
280 | error instanceof Error ? error.message : String(error)
281 | }`,
282 | );
283 | }
284 | }
285 |
```