This is page 3 of 3. Use http://codebase.md/thealchemist6/codecompass-mcp?lines=true&page={x} to view the full context.
# Directory Structure
```
├── .dockerignore
├── .env.example
├── .gitignore
├── config
│ ├── .eslintrc.json
│ ├── .prettierignore
│ ├── .prettierrc
│ ├── README.md
│ └── tsconfig.dev.json
├── CONTRIBUTING.md
├── docker
│ ├── docker-compose.dev.yml
│ ├── docker-compose.yml
│ ├── Dockerfile.dev
│ └── README.md
├── Dockerfile
├── docs
│ ├── API.md
│ ├── DOCKER.md
│ ├── legacy-tools
│ │ ├── chat.ts
│ │ ├── extract.ts
│ │ ├── files.ts
│ │ ├── README.md
│ │ ├── refactor.ts
│ │ ├── repository.ts
│ │ ├── template.ts
│ │ └── transform.ts
│ ├── MONITORING.md
│ ├── README.md
│ └── SETUP.md
├── examples
│ ├── basic-usage.js
│ └── basic-usage.md
├── LICENSE
├── package-lock.json
├── package.json
├── README.md
├── scripts
│ ├── docker-build.sh
│ ├── docker-logs.sh
│ ├── docker-run.sh
│ ├── monitor.js
│ └── start-mcp.sh
├── src
│ ├── index.ts
│ ├── services
│ │ ├── github.ts
│ │ ├── openai.ts
│ │ └── refactor.ts
│ ├── tools
│ │ └── consolidated.ts
│ ├── types
│ │ ├── index.ts
│ │ └── responses.ts
│ └── utils
│ ├── config.ts
│ ├── file-processor.ts
│ ├── logger.ts
│ ├── monitoring.ts
│ ├── security.ts
│ └── validation.ts
├── tests
│ └── verify-installation.sh
└── tsconfig.json
```
# Files
--------------------------------------------------------------------------------
/src/tools/consolidated.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { Tool } from '@modelcontextprotocol/sdk/types.js';
2 |
3 | /**
4 | * Streamlined tool definitions for CodeCompass MCP Server
5 | * Rationalized to 11 atomic, composable tools with clear boundaries
6 | * Each tool does one thing well with no overlapping responsibilities
7 | */
8 |
9 | export const consolidatedTools: Tool[] = [
10 | // Core Data Tools (6 tools)
11 | {
12 | name: 'get_repository_info',
13 | description: '📊 Get basic repository metadata, statistics, and key information. Atomic tool focused purely on repository-level data without file content analysis.',
14 | inputSchema: {
15 | type: 'object',
16 | properties: {
17 | url: {
18 | type: 'string',
19 | description: 'GitHub repository URL (e.g., https://github.com/owner/repo)',
20 | },
21 | options: {
22 | type: 'object',
23 | properties: {
24 | include_stats: {
25 | type: 'boolean',
26 | description: 'Include repository statistics (stars, forks, etc.)',
27 | default: true,
28 | },
29 | include_languages: {
30 | type: 'boolean',
31 | description: 'Include language breakdown',
32 | default: true,
33 | },
34 | include_topics: {
35 | type: 'boolean',
36 | description: 'Include repository topics and tags',
37 | default: true,
38 | },
39 | },
40 | },
41 | },
42 | required: ['url'],
43 | },
44 | },
45 |
46 | {
47 | name: 'get_file_tree',
48 | description: '🌳 Get complete directory structure and file listing with filtering options. Focused on file system structure without content analysis.',
49 | inputSchema: {
50 | type: 'object',
51 | properties: {
52 | url: {
53 | type: 'string',
54 | description: 'GitHub repository URL',
55 | },
56 | options: {
57 | type: 'object',
58 | properties: {
59 | max_depth: {
60 | type: 'number',
61 | description: 'Maximum directory depth to traverse',
62 | default: 10,
63 | },
64 | include_hidden: {
65 | type: 'boolean',
66 | description: 'Include hidden files and directories',
67 | default: false,
68 | },
69 | file_extensions: {
70 | type: 'array',
71 | items: { type: 'string' },
72 | description: 'Filter by file extensions (e.g., [".js", ".ts"])',
73 | },
74 | exclude_paths: {
75 | type: 'array',
76 | items: { type: 'string' },
77 | description: 'Paths to exclude from listing',
78 | default: ['node_modules', 'dist', 'build', '.git'],
79 | },
80 | include_file_info: {
81 | type: 'boolean',
82 | description: 'Include file metadata (size, modified date)',
83 | default: true,
84 | },
85 | },
86 | },
87 | },
88 | required: ['url'],
89 | },
90 | },
91 |
92 | {
93 | name: 'search_repository',
94 | description: 'Search for patterns, text, functions, or classes across the entire repository with advanced filtering options.',
95 | inputSchema: {
96 | type: 'object',
97 | properties: {
98 | url: {
99 | type: 'string',
100 | description: 'GitHub repository URL',
101 | },
102 | query: {
103 | type: 'string',
104 | description: 'Search query (supports regex patterns)',
105 | },
106 | search_type: {
107 | type: 'string',
108 | enum: ['text', 'regex', 'function', 'class', 'variable', 'import'],
109 | description: 'Type of search to perform',
110 | default: 'text',
111 | },
112 | options: {
113 | type: 'object',
114 | properties: {
115 | case_sensitive: {
116 | type: 'boolean',
117 | description: 'Case sensitive search',
118 | default: false,
119 | },
120 | file_extensions: {
121 | type: 'array',
122 | items: { type: 'string' },
123 | description: 'File extensions to search in',
124 | },
125 | exclude_paths: {
126 | type: 'array',
127 | items: { type: 'string' },
128 | description: 'Paths to exclude from search',
129 | default: ['node_modules', 'dist', 'build', '.git'],
130 | },
131 | max_results: {
132 | type: 'number',
133 | description: 'Maximum number of results',
134 | default: 100,
135 | },
136 | include_context: {
137 | type: 'boolean',
138 | description: 'Include surrounding code context',
139 | default: true,
140 | },
141 | },
142 | },
143 | },
144 | required: ['url', 'query'],
145 | },
146 | },
147 |
148 | {
149 | name: 'get_file_content',
150 | description: '📁 Retrieve content of specific files with smart truncation and batch processing capabilities.\n\n⚠️ FEATURES:\n• Batch processing with concurrent file retrieval\n• Automatic file validation and security checks\n• Rich metadata extraction (file type, language, size, line count)\n• Configurable processing limits and error handling\n• Support for multiple file formats with type detection',
151 | inputSchema: {
152 | type: 'object',
153 | properties: {
154 | url: {
155 | type: 'string',
156 | description: 'GitHub repository URL',
157 | },
158 | file_paths: {
159 | type: 'array',
160 | items: { type: 'string' },
161 | description: 'Paths to files to retrieve (supports batch processing)',
162 | },
163 | options: {
164 | type: 'object',
165 | properties: {
166 | max_size: {
167 | type: 'number',
168 | description: 'Maximum file size in bytes',
169 | default: 100000,
170 | },
171 | include_metadata: {
172 | type: 'boolean',
173 | description: 'Include file metadata (size, modified date, etc.)',
174 | default: false,
175 | },
176 | truncate_large_files: {
177 | type: 'boolean',
178 | description: 'Truncate files larger than max_size',
179 | default: true,
180 | },
181 | max_concurrent: {
182 | type: 'number',
183 | description: 'Maximum concurrent file processing',
184 | default: 5,
185 | minimum: 1,
186 | maximum: 20,
187 | },
188 | continue_on_error: {
189 | type: 'boolean',
190 | description: 'Continue processing other files if one fails',
191 | default: true,
192 | },
193 | file_extensions: {
194 | type: 'array',
195 | items: { type: 'string' },
196 | description: 'Only process files with these extensions (e.g., [".js", ".ts"])',
197 | },
198 | exclude_patterns: {
199 | type: 'array',
200 | items: { type: 'string' },
201 | description: 'Exclude files matching these regex patterns',
202 | },
203 | format: {
204 | type: 'string',
205 | enum: ['raw', 'parsed', 'summary'],
206 | description: 'Format for file content',
207 | default: 'raw',
208 | },
209 | },
210 | },
211 | },
212 | required: ['url', 'file_paths'],
213 | },
214 | },
215 |
216 | {
217 | name: 'analyze_codebase',
218 | description: '🔬 Comprehensive codebase analysis combining structure, architecture, and metrics. Provides unified view of code organization, design patterns, complexity, and quality indicators.',
219 | inputSchema: {
220 | type: 'object',
221 | properties: {
222 | url: {
223 | type: 'string',
224 | description: 'GitHub repository URL',
225 | },
226 | file_paths: {
227 | type: 'array',
228 | items: { type: 'string' },
229 | description: 'Specific files to analyze (optional - analyzes all code files if not specified)',
230 | },
231 | analysis_types: {
232 | type: 'array',
233 | items: {
234 | type: 'string',
235 | enum: ['structure', 'architecture', 'metrics', 'patterns', 'complexity'],
236 | },
237 | description: 'Types of analysis to perform',
238 | default: ['structure', 'architecture', 'metrics'],
239 | },
240 | options: {
241 | type: 'object',
242 | properties: {
243 | include_functions: {
244 | type: 'boolean',
245 | description: 'Include function analysis',
246 | default: true,
247 | },
248 | include_classes: {
249 | type: 'boolean',
250 | description: 'Include class analysis',
251 | default: true,
252 | },
253 | include_imports: {
254 | type: 'boolean',
255 | description: 'Include import/dependency analysis',
256 | default: true,
257 | },
258 | include_complexity: {
259 | type: 'boolean',
260 | description: 'Include complexity metrics',
261 | default: true,
262 | },
263 | include_patterns: {
264 | type: 'boolean',
265 | description: 'Include design pattern detection',
266 | default: true,
267 | },
268 | include_components: {
269 | type: 'boolean',
270 | description: 'Include reusable component identification',
271 | default: false,
272 | },
273 | languages: {
274 | type: 'array',
275 | items: { type: 'string' },
276 | description: 'Programming languages to analyze',
277 | },
278 | confidence_threshold: {
279 | type: 'number',
280 | description: 'Minimum confidence score for pattern detection',
281 | default: 0.7,
282 | },
283 | },
284 | },
285 | },
286 | required: ['url'],
287 | },
288 | },
289 |
290 | {
291 | name: 'analyze_dependencies',
292 | description: '📦 Comprehensive dependency analysis including external packages, internal dependencies, security vulnerabilities, and version conflicts.',
293 | inputSchema: {
294 | type: 'object',
295 | properties: {
296 | url: {
297 | type: 'string',
298 | description: 'GitHub repository URL',
299 | },
300 | options: {
301 | type: 'object',
302 | properties: {
303 | include_dev_dependencies: {
304 | type: 'boolean',
305 | description: 'Include development dependencies',
306 | default: true,
307 | },
308 | include_security_scan: {
309 | type: 'boolean',
310 | description: 'Include security vulnerability scanning',
311 | default: true,
312 | },
313 | include_version_analysis: {
314 | type: 'boolean',
315 | description: 'Include version conflict analysis',
316 | default: true,
317 | },
318 | check_outdated: {
319 | type: 'boolean',
320 | description: 'Check for outdated packages',
321 | default: true,
322 | },
323 | },
324 | },
325 | },
326 | required: ['url'],
327 | },
328 | },
329 |
330 | // AI-Enhanced Tools (3 tools)
331 | {
332 | name: 'review_code',
333 | description: '🔍 Comprehensive code review combining AI insights with rule-based validation. Provides intelligent analysis, security scanning, and actionable recommendations.',
334 | inputSchema: {
335 | type: 'object',
336 | properties: {
337 | url: {
338 | type: 'string',
339 | description: 'GitHub repository URL',
340 | },
341 | file_paths: {
342 | type: 'array',
343 | items: { type: 'string' },
344 | description: 'Specific files to review (optional - reviews key files if not specified)',
345 | },
346 | review_mode: {
347 | type: 'string',
348 | enum: ['ai', 'rules', 'combined'],
349 | description: 'Review approach: AI-powered, rule-based, or combined',
350 | default: 'combined',
351 | },
352 | review_focus: {
353 | type: 'array',
354 | items: {
355 | type: 'string',
356 | enum: ['security', 'performance', 'maintainability', 'best-practices', 'bugs', 'accessibility'],
357 | },
358 | description: 'Areas to focus the review on',
359 | default: ['security', 'performance', 'maintainability'],
360 | },
361 | options: {
362 | type: 'object',
363 | properties: {
364 | ai_model: {
365 | type: 'string',
366 | description: 'AI model to use for analysis (OpenRouter models). Use "auto" for intelligent model selection',
367 | default: 'auto',
368 | },
369 | severity_threshold: {
370 | type: 'string',
371 | enum: ['low', 'medium', 'high', 'critical'],
372 | description: 'Minimum severity level to report',
373 | default: 'medium',
374 | },
375 | include_fixes: {
376 | type: 'boolean',
377 | description: 'Include suggested fixes',
378 | default: true,
379 | },
380 | include_examples: {
381 | type: 'boolean',
382 | description: 'Include code examples in suggestions',
383 | default: true,
384 | },
385 | language_specific: {
386 | type: 'boolean',
387 | description: 'Include language-specific best practices',
388 | default: true,
389 | },
390 | framework_specific: {
391 | type: 'boolean',
392 | description: 'Include framework-specific checks',
393 | default: true,
394 | },
395 | },
396 | },
397 | },
398 | required: ['url'],
399 | },
400 | },
401 |
402 | {
403 | name: 'explain_code',
404 | description: '📚 AI-powered code explanation generating human-readable documentation, tutorials, and architectural insights. Transforms technical analysis into accessible explanations.',
405 | inputSchema: {
406 | type: 'object',
407 | properties: {
408 | url: {
409 | type: 'string',
410 | description: 'GitHub repository URL',
411 | },
412 | file_paths: {
413 | type: 'array',
414 | items: { type: 'string' },
415 | description: 'Specific files to explain (optional - explains key files if not specified)',
416 | },
417 | explanation_type: {
418 | type: 'string',
419 | enum: ['overview', 'detailed', 'architecture', 'tutorial', 'integration'],
420 | description: 'Type of explanation to generate',
421 | default: 'overview',
422 | },
423 | options: {
424 | type: 'object',
425 | properties: {
426 | ai_model: {
427 | type: 'string',
428 | description: 'AI model to use for explanation (OpenRouter models). Use "auto" for intelligent model selection',
429 | default: 'auto',
430 | },
431 | target_audience: {
432 | type: 'string',
433 | enum: ['beginner', 'intermediate', 'advanced'],
434 | description: 'Target audience for explanation',
435 | default: 'intermediate',
436 | },
437 | include_examples: {
438 | type: 'boolean',
439 | description: 'Include code examples in explanations',
440 | default: true,
441 | },
442 | include_diagrams: {
443 | type: 'boolean',
444 | description: 'Include ASCII diagrams where helpful',
445 | default: true,
446 | },
447 | focus_on_patterns: {
448 | type: 'boolean',
449 | description: 'Focus on design patterns and architecture',
450 | default: true,
451 | },
452 | },
453 | },
454 | },
455 | required: ['url'],
456 | },
457 | },
458 |
459 | {
460 | name: 'suggest_improvements',
461 | description: '💡 AI-powered improvement suggestions providing strategic refactoring recommendations, modernization plans, and architectural enhancements.',
462 | inputSchema: {
463 | type: 'object',
464 | properties: {
465 | url: {
466 | type: 'string',
467 | description: 'GitHub repository URL',
468 | },
469 | file_paths: {
470 | type: 'array',
471 | items: { type: 'string' },
472 | description: 'Specific files to analyze for improvements (optional - analyzes key files if not specified)',
473 | },
474 | improvement_goals: {
475 | type: 'array',
476 | items: {
477 | type: 'string',
478 | enum: ['modernize', 'performance', 'maintainability', 'security', 'readability', 'testability'],
479 | },
480 | description: 'Goals for improvement suggestions',
481 | default: ['modernize', 'maintainability'],
482 | },
483 | target_framework: {
484 | type: 'string',
485 | description: 'Target framework for improvement suggestions',
486 | },
487 | options: {
488 | type: 'object',
489 | properties: {
490 | ai_model: {
491 | type: 'string',
492 | description: 'AI model to use for suggestions (OpenRouter models). Use "auto" for intelligent model selection',
493 | default: 'auto',
494 | },
495 | include_code_examples: {
496 | type: 'boolean',
497 | description: 'Include before/after code examples',
498 | default: true,
499 | },
500 | priority_level: {
501 | type: 'string',
502 | enum: ['low', 'medium', 'high'],
503 | description: 'Minimum priority level for suggestions',
504 | default: 'medium',
505 | },
506 | estimate_effort: {
507 | type: 'boolean',
508 | description: 'Include effort estimates for improvement tasks',
509 | default: true,
510 | },
511 | },
512 | },
513 | },
514 | required: ['url'],
515 | },
516 | },
517 |
518 | // Transformation Tools (1 tool)
519 | {
520 | name: 'transform_code',
521 | description: '🔧 Apply code transformations including syntax changes, structural reorganization, framework migration, and modernization. Combines syntax-level and structural changes.',
522 | inputSchema: {
523 | type: 'object',
524 | properties: {
525 | code: {
526 | type: 'string',
527 | description: 'Source code to transform',
528 | },
529 | transformations: {
530 | type: 'array',
531 | items: {
532 | type: 'object',
533 | properties: {
534 | type: {
535 | type: 'string',
536 | enum: ['naming', 'modernize', 'framework', 'performance', 'security', 'structure', 'migration'],
537 | description: 'Type of transformation',
538 | },
539 | options: {
540 | type: 'object',
541 | description: 'Transformation-specific options',
542 | },
543 | },
544 | required: ['type'],
545 | },
546 | description: 'List of transformations to apply',
547 | },
548 | language: {
549 | type: 'string',
550 | description: 'Programming language of the code',
551 | },
552 | target_language: {
553 | type: 'string',
554 | description: 'Target language (for language conversion)',
555 | },
556 | target_framework: {
557 | type: 'string',
558 | description: 'Target framework (for framework migration)',
559 | },
560 | options: {
561 | type: 'object',
562 | properties: {
563 | preserve_comments: {
564 | type: 'boolean',
565 | description: 'Preserve code comments',
566 | default: true,
567 | },
568 | preserve_logic: {
569 | type: 'boolean',
570 | description: 'Preserve business logic during transformation',
571 | default: true,
572 | },
573 | update_imports: {
574 | type: 'boolean',
575 | description: 'Update import paths automatically',
576 | default: true,
577 | },
578 | include_instructions: {
579 | type: 'boolean',
580 | description: 'Include transformation instructions',
581 | default: true,
582 | },
583 | validate_syntax: {
584 | type: 'boolean',
585 | description: 'Validate syntax after transformation',
586 | default: true,
587 | },
588 | },
589 | },
590 | },
591 | required: ['code', 'transformations', 'language'],
592 | },
593 | },
594 |
595 |
596 | // Utility Tools (1 tool)
597 | {
598 | name: 'health_check',
599 | description: '🏥 System Health Check - Monitor server health, performance, and operational metrics. Provides comprehensive monitoring dashboard with real-time insights.',
600 | inputSchema: {
601 | type: 'object',
602 | properties: {
603 | checks: {
604 | type: 'array',
605 | items: {
606 | type: 'string',
607 | enum: ['api-limits', 'system-health', 'monitoring', 'dependencies', 'configuration'],
608 | },
609 | description: 'Types of health checks to perform',
610 | default: ['api-limits', 'system-health', 'monitoring'],
611 | },
612 | options: {
613 | type: 'object',
614 | properties: {
615 | include_metrics: {
616 | type: 'boolean',
617 | description: 'Include comprehensive system metrics in response',
618 | default: false,
619 | },
620 | include_insights: {
621 | type: 'boolean',
622 | description: 'Include performance insights and recommendations',
623 | default: false,
624 | },
625 | include_logs: {
626 | type: 'boolean',
627 | description: 'Include recent log entries',
628 | default: false,
629 | },
630 | include_diagnostics: {
631 | type: 'boolean',
632 | description: 'Include diagnostic information',
633 | default: false,
634 | },
635 | },
636 | },
637 | },
638 | required: [],
639 | },
640 | },
641 | ];
```
--------------------------------------------------------------------------------
/docs/API.md:
--------------------------------------------------------------------------------
```markdown
1 | # CodeCompass MCP API Reference
2 |
3 | This document provides comprehensive documentation for all 18 tools available in the CodeCompass MCP server.
4 |
5 | ## Response Format
6 |
7 | All tools return responses in this standardized format:
8 |
9 | ```json
10 | {
11 | "success": true,
12 | "data": { /* tool-specific data */ },
13 | "metadata": {
14 | "processing_time": 1234,
15 | "rate_limit_remaining": 4999,
16 | "cache_hit": false,
17 | "truncated": false, // Present when response is truncated
18 | "truncationReason": "Response size exceeded maximum token limit",
19 | "maxTokens": 25000,
20 | "estimatedTokens": 18500,
21 | "suggestion": "Use max_response_tokens and max_file_content_length options to control response size"
22 | },
23 | "error": { // Only present if success: false
24 | "code": "ERROR_CODE",
25 | "message": "Error description",
26 | "details": { /* additional error info */ }
27 | }
28 | }
29 | ```
30 |
31 | ### Response Size Management
32 |
33 | The server offers two approaches to handle large repository responses:
34 |
35 | #### 1. **Truncation Mode (Default)**
36 | Automatically truncates responses when they exceed token limits:
37 | - **Default limit**: 25,000 tokens (approximately 100KB of JSON)
38 | - **Token estimation**: 1 token ≈ 4 characters in JSON format
39 | - **Truncation order**: File contents → File tree depth → Metadata
40 |
41 | #### 2. **Chunking Mode (Recommended for Large Repos)**
42 | Splits large responses into multiple manageable chunks:
43 | - **No data loss**: Access all repository content across multiple requests
44 | - **Chunk sizes**: "small" (~10k tokens), "medium" (~20k tokens), "large" (~40k tokens)
45 | - **Pagination**: Use `chunk_index` to navigate through chunks
46 |
47 | **Control options**:
48 | - `max_response_tokens`: Set custom token limit (1,000 - 100,000)
49 | - `max_file_content_length`: Limit individual file content length (100 - 10,000 chars)
50 | - `chunk_mode`: Enable chunked responses (true/false)
51 | - `chunk_index`: Specify which chunk to retrieve (0-based)
52 | - `chunk_size`: Choose chunk size ("small", "medium", "large")
53 |
54 | When truncation occurs, the response includes truncation metadata. When chunking is enabled, the response includes chunk navigation information.
55 |
56 | ## Core Data Tools
57 |
58 | ### 1. `fetch_repository_data`
59 |
60 | **Description**: Comprehensive repository analysis and metadata retrieval.
61 |
62 | **Parameters**:
63 | ```json
64 | {
65 | "url": "https://github.com/owner/repo",
66 | "options": {
67 | "include_structure": true,
68 | "include_dependencies": true,
69 | "include_key_files": true,
70 | "max_files": 50,
71 | "file_extensions": [".js", ".ts"],
72 | "max_response_tokens": 25000,
73 | "max_file_content_length": 1000
74 | }
75 | }
76 | ```
77 |
78 | **Option Details**:
79 | - `include_structure` (boolean): Include file tree structure (default: true)
80 | - `include_dependencies` (boolean): Include dependency analysis (default: true)
81 | - `include_key_files` (boolean): Include key file contents (README, package.json, etc.) (default: true)
82 | - `max_files` (number): Maximum number of files to analyze (default: 50)
83 | - `file_extensions` (array): File extensions to focus on (e.g., [".js", ".ts"])
84 | - `max_response_tokens` (number): Maximum response size in tokens (default: 25000, range: 1000-100000)
85 | - `max_file_content_length` (number): Maximum content length per file (default: 1000, range: 100-10000)
86 | - `chunk_mode` (boolean): Enable chunked responses for large repositories (default: false)
87 | - `chunk_index` (number): Chunk index to retrieve (0-based, use with chunk_mode) (default: 0)
88 | - `chunk_size` (string): Chunk size - "small", "medium", or "large" (default: "medium")
89 |
90 | **Response**:
91 | ```json
92 | {
93 | "success": true,
94 | "data": {
95 | "info": {
96 | "name": "repository-name",
97 | "description": "Repository description",
98 | "language": "JavaScript",
99 | "owner": "owner-name",
100 | "stars": 1234,
101 | "forks": 567,
102 | "created_at": "2024-01-01T00:00:00Z"
103 | },
104 | "structure": {
105 | "fileCount": 150,
106 | "lineCount": 10000,
107 | "fileTree": { /* file structure */ },
108 | "keyFiles": { /* key file contents */ }
109 | },
110 | "dependencies": { /* dependency analysis */ },
111 | "architecture": { /* architecture analysis */ }
112 | }
113 | }
114 | ```
115 |
116 | ### 2. `search_repository`
117 |
118 | **Description**: Advanced search within repositories with filtering and context.
119 |
120 | **Parameters**:
121 | ```json
122 | {
123 | "url": "https://github.com/owner/repo",
124 | "query": "search pattern",
125 | "search_type": "text|regex|function|class|variable|import",
126 | "options": {
127 | "case_sensitive": false,
128 | "file_extensions": [".js", ".ts"],
129 | "exclude_paths": ["node_modules", "dist"],
130 | "max_results": 100,
131 | "include_context": true
132 | }
133 | }
134 | ```
135 |
136 | **Response**:
137 | ```json
138 | {
139 | "success": true,
140 | "data": {
141 | "results": [
142 | {
143 | "file": "path/to/file.js",
144 | "line": 42,
145 | "match": "matched code",
146 | "context": "surrounding code context"
147 | }
148 | ],
149 | "total_matches": 15,
150 | "files_searched": 100
151 | }
152 | }
153 | ```
154 |
155 | ### 3. `get_file_content`
156 |
157 | **Description**: Advanced batch file retrieval with smart truncation, security validation, and rich metadata extraction.
158 |
159 | **Parameters**:
160 | ```json
161 | {
162 | "url": "https://github.com/owner/repo",
163 | "file_paths": ["README.md", "package.json", "src/index.js"],
164 | "options": {
165 | "max_size": 100000,
166 | "include_metadata": true,
167 | "truncate_large_files": true,
168 | "format": "raw|parsed|summary"
169 | }
170 | }
171 | ```
172 |
173 | **Response**:
174 | ```json
175 | {
176 | "success": true,
177 | "data": {
178 | "README.md": {
179 | "content": "file content",
180 | "size": 1234,
181 | "truncated": false
182 | },
183 | "package.json": {
184 | "content": "{ \"name\": \"example\" }",
185 | "size": 567,
186 | "truncated": false
187 | }
188 | }
189 | }
190 | ```
191 |
192 | ### 4. `analyze_code_structure`
193 |
194 | **Description**: Technical code structure analysis with complexity metrics.
195 |
196 | **Parameters**:
197 | ```json
198 | {
199 | "url": "https://github.com/owner/repo",
200 | "file_paths": ["src/index.js"], // optional
201 | "options": {
202 | "include_functions": true,
203 | "include_classes": true,
204 | "include_imports": true,
205 | "include_complexity": true,
206 | "languages": ["javascript", "typescript"]
207 | }
208 | }
209 | ```
210 |
211 | **Response**:
212 | ```json
213 | {
214 | "success": true,
215 | "data": {
216 | "functions": [
217 | {
218 | "name": "functionName",
219 | "file": "src/index.js",
220 | "line": 10,
221 | "complexity": 5,
222 | "parameters": ["param1", "param2"]
223 | }
224 | ],
225 | "classes": [
226 | {
227 | "name": "ClassName",
228 | "file": "src/class.js",
229 | "line": 1,
230 | "methods": ["method1", "method2"]
231 | }
232 | ],
233 | "imports": [
234 | {
235 | "module": "react",
236 | "type": "default",
237 | "file": "src/component.js"
238 | }
239 | ],
240 | "complexity": {
241 | "cyclomatic": 25,
242 | "cognitive": 30,
243 | "overall": "medium"
244 | }
245 | }
246 | }
247 | ```
248 |
249 | ### 5. `analyze_dependencies`
250 |
251 | **Description**: Dependency analysis and security scanning.
252 |
253 | **Parameters**:
254 | ```json
255 | {
256 | "url": "https://github.com/owner/repo",
257 | "options": {
258 | "include_dev_dependencies": true,
259 | "include_security_scan": true,
260 | "include_version_analysis": true,
261 | "check_outdated": true
262 | }
263 | }
264 | ```
265 |
266 | **Response**:
267 | ```json
268 | {
269 | "success": true,
270 | "data": {
271 | "dependencies": {
272 | "production": [
273 | {
274 | "name": "react",
275 | "version": "^18.0.0",
276 | "type": "runtime"
277 | }
278 | ],
279 | "development": [
280 | {
281 | "name": "jest",
282 | "version": "^29.0.0",
283 | "type": "testing"
284 | }
285 | ]
286 | },
287 | "security": {
288 | "vulnerabilities": [
289 | {
290 | "package": "package-name",
291 | "severity": "high",
292 | "description": "Vulnerability description"
293 | }
294 | ]
295 | },
296 | "outdated": [
297 | {
298 | "package": "package-name",
299 | "current": "1.0.0",
300 | "latest": "2.0.0"
301 | }
302 | ]
303 | }
304 | }
305 | ```
306 |
307 | ### 6. `calculate_metrics`
308 |
309 | **Description**: Quantitative code quality metrics and technical debt analysis.
310 |
311 | **Parameters**:
312 | ```json
313 | {
314 | "url": "https://github.com/owner/repo",
315 | "options": {
316 | "metrics": ["complexity", "maintainability", "duplication", "security"],
317 | "include_file_level": true,
318 | "include_trend_analysis": false
319 | }
320 | }
321 | ```
322 |
323 | **Response**:
324 | ```json
325 | {
326 | "success": true,
327 | "data": {
328 | "overall": {
329 | "complexity": 6.5,
330 | "maintainability": 75,
331 | "duplication": 15,
332 | "security": 85,
333 | "technical_debt": "medium"
334 | },
335 | "file_level": {
336 | "src/index.js": {
337 | "complexity": 8.2,
338 | "maintainability": 70,
339 | "lines": 150
340 | }
341 | },
342 | "recommendations": [
343 | "Reduce complexity in src/index.js",
344 | "Address code duplication in utils/"
345 | ]
346 | }
347 | }
348 | ```
349 |
350 | ## Code Transformation Tools
351 |
352 | ### 7. `transform_code`
353 |
354 | **Description**: Syntax modernization and language conversion.
355 |
356 | **Parameters**:
357 | ```json
358 | {
359 | "code": "var x = 5; function test() { return x + 1; }",
360 | "transformations": [
361 | {
362 | "type": "modernize|framework|performance|security",
363 | "options": { /* transformation-specific options */ }
364 | }
365 | ],
366 | "language": "javascript",
367 | "target_language": "typescript", // optional
368 | "options": {
369 | "preserve_comments": true,
370 | "include_instructions": true,
371 | "validate_syntax": true
372 | }
373 | }
374 | ```
375 |
376 | **Response**:
377 | ```json
378 | {
379 | "success": true,
380 | "data": {
381 | "transformed_code": "const x = 5; const test = () => x + 1;",
382 | "transformations_applied": [
383 | {
384 | "type": "modernize",
385 | "description": "Converted var to const, function to arrow function"
386 | }
387 | ],
388 | "syntax_valid": true,
389 | "instructions": "Code has been modernized to use ES6+ features"
390 | }
391 | }
392 | ```
393 |
394 | ### 8. `extract_components`
395 |
396 | **Description**: Component extraction with reusability scoring.
397 |
398 | **Parameters**:
399 | ```json
400 | {
401 | "url": "https://github.com/owner/repo",
402 | "extraction_types": ["components", "functions", "utilities", "hooks", "types"],
403 | "options": {
404 | "min_reusability_score": 60,
405 | "include_dependencies": true,
406 | "include_examples": true,
407 | "framework": "react"
408 | }
409 | }
410 | ```
411 |
412 | **Response**:
413 | ```json
414 | {
415 | "success": true,
416 | "data": {
417 | "components": [
418 | {
419 | "name": "Button",
420 | "file": "src/components/Button.js",
421 | "reusability_score": 85,
422 | "dependencies": ["react"],
423 | "props": ["onClick", "children", "variant"],
424 | "example_usage": "<Button onClick={handleClick}>Click me</Button>"
425 | }
426 | ],
427 | "functions": [
428 | {
429 | "name": "formatDate",
430 | "file": "src/utils/date.js",
431 | "reusability_score": 90,
432 | "parameters": ["date", "format"],
433 | "return_type": "string"
434 | }
435 | ]
436 | }
437 | }
438 | ```
439 |
440 | ### 9. `adapt_code_structure`
441 |
442 | **Description**: Framework migration and architectural restructuring.
443 |
444 | **Parameters**:
445 | ```json
446 | {
447 | "url": "https://github.com/owner/repo",
448 | "target_structure": {
449 | "framework": "react|vue|angular|express",
450 | "pattern": "mvc|mvvm|clean",
451 | "folder_structure": { /* custom structure */ }
452 | },
453 | "options": {
454 | "preserve_logic": true,
455 | "update_imports": true,
456 | "generate_config": true
457 | }
458 | }
459 | ```
460 |
461 | **Response**:
462 | ```json
463 | {
464 | "success": true,
465 | "data": {
466 | "migration_plan": {
467 | "steps": [
468 | {
469 | "action": "move",
470 | "from": "src/components/",
471 | "to": "components/",
472 | "reason": "Framework convention"
473 | }
474 | ],
475 | "estimated_effort": "medium",
476 | "breaking_changes": ["Import paths", "Config structure"]
477 | },
478 | "file_mappings": {
479 | "src/index.js": "src/main.js",
480 | "src/App.js": "src/App.vue"
481 | },
482 | "config_files": {
483 | "package.json": "{ \"scripts\": { \"dev\": \"vite\" } }",
484 | "vite.config.js": "export default { /* config */ }"
485 | }
486 | }
487 | }
488 | ```
489 |
490 | ### 10. `generate_project_template`
491 |
492 | **Description**: Template generation from analysis.
493 |
494 | **Parameters**:
495 | ```json
496 | {
497 | "url": "https://github.com/owner/repo",
498 | "template_type": "starter|library|microservice|fullstack|component-library",
499 | "options": {
500 | "project_name": "my-new-project",
501 | "framework": "react",
502 | "language": "typescript",
503 | "include_tests": true,
504 | "include_docs": true,
505 | "include_ci": true,
506 | "package_manager": "npm|yarn|pnpm|bun"
507 | }
508 | }
509 | ```
510 |
511 | **Response**:
512 | ```json
513 | {
514 | "success": true,
515 | "data": {
516 | "template": {
517 | "name": "my-new-project",
518 | "structure": {
519 | "src/": "Source code directory",
520 | "tests/": "Test files",
521 | "docs/": "Documentation"
522 | },
523 | "files": {
524 | "package.json": "{ \"name\": \"my-new-project\" }",
525 | "README.md": "# My New Project",
526 | "tsconfig.json": "{ \"compilerOptions\": {} }"
527 | }
528 | },
529 | "setup_instructions": [
530 | "npm install",
531 | "npm run build",
532 | "npm test"
533 | ]
534 | }
535 | }
536 | ```
537 |
538 | ## Analysis Tools
539 |
540 | ### 11. `analyze_architecture`
541 |
542 | **Description**: Design patterns, layering, and scalability analysis.
543 |
544 | **Parameters**:
545 | ```json
546 | {
547 | "url": "https://github.com/owner/repo",
548 | "options": {
549 | "pattern_types": ["mvc", "mvvm", "clean", "hexagonal"],
550 | "include_frameworks": true,
551 | "include_conventions": true,
552 | "confidence_threshold": 0.7
553 | }
554 | }
555 | ```
556 |
557 | **Response**:
558 | ```json
559 | {
560 | "success": true,
561 | "data": {
562 | "patterns": [
563 | {
564 | "type": "mvc",
565 | "confidence": 0.85,
566 | "evidence": ["Controllers found", "Models directory", "View components"],
567 | "implementation": "Well-structured MVC with clear separation"
568 | }
569 | ],
570 | "frameworks": [
571 | {
572 | "name": "React",
573 | "version": "18.x",
574 | "usage": "Frontend framework"
575 | }
576 | ],
577 | "scalability": {
578 | "score": 75,
579 | "strengths": ["Modular architecture", "Good separation of concerns"],
580 | "concerns": ["Monolithic structure", "Tight coupling in utils"]
581 | }
582 | }
583 | }
584 | ```
585 |
586 | ### 12. `compare_implementations`
587 |
588 | **Description**: Multi-repository comparison and benchmarking.
589 |
590 | **Parameters**:
591 | ```json
592 | {
593 | "implementations": [
594 | {
595 | "name": "Implementation A",
596 | "url": "https://github.com/owner/repo-a",
597 | "focus_areas": ["performance", "security"]
598 | },
599 | {
600 | "name": "Implementation B",
601 | "url": "https://github.com/owner/repo-b"
602 | }
603 | ],
604 | "comparison_criteria": ["performance", "maintainability", "security", "complexity"],
605 | "options": {
606 | "include_metrics": true,
607 | "include_recommendations": true
608 | }
609 | }
610 | ```
611 |
612 | **Response**:
613 | ```json
614 | {
615 | "success": true,
616 | "data": {
617 | "comparison": {
618 | "performance": {
619 | "Implementation A": 85,
620 | "Implementation B": 70,
621 | "winner": "Implementation A"
622 | },
623 | "maintainability": {
624 | "Implementation A": 75,
625 | "Implementation B": 90,
626 | "winner": "Implementation B"
627 | }
628 | },
629 | "recommendations": [
630 | "Implementation A has better performance optimizations",
631 | "Implementation B follows better coding practices"
632 | ],
633 | "summary": "Implementation A is better for performance-critical applications"
634 | }
635 | }
636 | ```
637 |
638 | ### 13. `validate_code_quality`
639 |
640 | **Description**: Rule-based validation and standards compliance.
641 |
642 | **Parameters**:
643 | ```json
644 | {
645 | "url": "https://github.com/owner/repo",
646 | "validation_types": ["security", "performance", "best-practices", "accessibility"],
647 | "options": {
648 | "severity_level": "low|medium|high|critical",
649 | "include_fixes": true,
650 | "framework_specific": true
651 | }
652 | }
653 | ```
654 |
655 | **Response**:
656 | ```json
657 | {
658 | "success": true,
659 | "data": {
660 | "validation_results": {
661 | "security": {
662 | "score": 85,
663 | "issues": [
664 | {
665 | "type": "XSS vulnerability",
666 | "severity": "high",
667 | "file": "src/component.js",
668 | "line": 42,
669 | "fix": "Use proper sanitization"
670 | }
671 | ]
672 | },
673 | "performance": {
674 | "score": 70,
675 | "issues": [
676 | {
677 | "type": "Large bundle size",
678 | "severity": "medium",
679 | "suggestion": "Implement code splitting"
680 | }
681 | ]
682 | }
683 | },
684 | "overall_score": 78,
685 | "certification": "Good"
686 | }
687 | }
688 | ```
689 |
690 | ## Utility Tools
691 |
692 | ### 14. `batch_process`
693 |
694 | **Description**: Parallel execution of multiple operations.
695 |
696 | **Parameters**:
697 | ```json
698 | {
699 | "operations": [
700 | {
701 | "id": "op1",
702 | "tool": "health_check",
703 | "params": {},
704 | "priority": 5
705 | },
706 | {
707 | "id": "op2",
708 | "tool": "fetch_repository_data",
709 | "params": {
710 | "url": "https://github.com/owner/repo"
711 | },
712 | "priority": 8
713 | }
714 | ],
715 | "options": {
716 | "max_concurrent": 3,
717 | "fail_fast": false,
718 | "include_progress": true
719 | }
720 | }
721 | ```
722 |
723 | **Response**:
724 | ```json
725 | {
726 | "success": true,
727 | "data": {
728 | "operations": [
729 | {
730 | "type": "health_check",
731 | "id": "op1"
732 | }
733 | ],
734 | "results": [
735 | {
736 | "id": "op1",
737 | "success": true,
738 | "data": { /* operation result */ },
739 | "processingTime": 1200
740 | }
741 | ],
742 | "totalTime": 5000,
743 | "successCount": 2,
744 | "failureCount": 0
745 | }
746 | }
747 | ```
748 |
749 | ### 15. `health_check`
750 |
751 | **Description**: Server health, API limits, and system monitoring.
752 |
753 | **Parameters**:
754 | ```json
755 | {
756 | "checks": ["api-limits", "cache-status", "system-health", "dependencies"],
757 | "options": {
758 | "include_metrics": true,
759 | "include_diagnostics": false
760 | }
761 | }
762 | ```
763 |
764 | **Response**:
765 | ```json
766 | {
767 | "success": true,
768 | "data": {
769 | "status": "healthy",
770 | "timestamp": "2024-01-17T23:41:31.079Z",
771 | "checks": {
772 | "api-limits": {
773 | "github": {
774 | "remaining": 4950,
775 | "limit": 5000,
776 |           "reset": "2024-01-18T00:00:00Z"
777 | },
778 | "openrouter": {
779 | "status": "healthy"
780 | }
781 | },
782 | "system-health": {
783 | "status": "healthy",
784 | "memory": {
785 | "used": 128,
786 | "total": 512
787 | },
788 | "uptime": 3600
789 | }
790 | },
791 | "metrics": {
792 | "uptime": 3600,
793 | "memory": {
794 | "rss": 50331648,
795 | "heapTotal": 20971520,
796 | "heapUsed": 15728640
797 | },
798 | "version": "1.0.0"
799 | }
800 | }
801 | }
802 | ```
803 |
804 | ## AI-Enhanced Tools
805 |
806 | ### 16. `ai_code_review`
807 |
808 | **Description**: AI-powered code review with intelligent insights.
809 |
810 | **Parameters**:
811 | ```json
812 | {
813 | "url": "https://github.com/owner/repo",
814 | "file_paths": ["src/index.js"], // optional
815 | "review_focus": ["security", "performance", "maintainability", "best-practices"],
816 | "options": {
817 | "ai_model": "auto|anthropic/claude-3.5-sonnet|openai/gpt-4o",
818 | "severity_threshold": "low|medium|high",
819 | "include_examples": true,
820 | "language_specific": true
821 | }
822 | }
823 | ```
824 |
825 | **Response**:
826 | ```json
827 | {
828 | "success": true,
829 | "data": {
830 | "repository": {
831 | "name": "repo-name",
832 | "description": "Repository description",
833 | "language": "JavaScript",
834 | "owner": "owner-name"
835 | },
836 | "review": {
837 | "files_reviewed": ["src/index.js"],
838 | "focus_areas": ["security", "performance"],
839 | "ai_model_used": "anthropic/claude-3.5-sonnet",
840 | "ai_model_requested": "auto",
841 | "analysis": "Comprehensive AI-generated review content...",
842 | "severity_threshold": "medium",
843 | "timestamp": "2024-01-17T23:41:31.079Z",
844 | "model_warning": null
845 | },
846 | "recommendations": {
847 | "priority_fixes": [
848 | "Fix XSS vulnerability in line 42",
849 | "Optimize database queries in user service"
850 | ],
851 | "suggestions": [
852 | "Consider implementing caching",
853 | "Add input validation"
854 | ],
855 | "best_practices": [
856 | "Use TypeScript for better type safety",
857 | "Add comprehensive tests"
858 | ]
859 | }
860 | }
861 | }
862 | ```
863 |
864 | ### 17. `ai_explain_code`
865 |
866 | **Description**: AI-generated explanations and documentation.
867 |
868 | **Parameters**:
869 | ```json
870 | {
871 | "url": "https://github.com/owner/repo",
872 | "file_paths": ["src/index.js"], // optional
873 | "explanation_type": "overview|detailed|architecture|tutorial|integration",
874 | "options": {
875 | "ai_model": "auto|anthropic/claude-3.5-sonnet|openai/gpt-4o",
876 | "target_audience": "beginner|intermediate|advanced",
877 | "include_examples": true,
878 | "include_diagrams": true,
879 | "focus_on_patterns": true
880 | }
881 | }
882 | ```
883 |
884 | **Response**:
885 | ```json
886 | {
887 | "success": true,
888 | "data": {
889 | "repository": {
890 | "name": "repo-name",
891 | "description": "Repository description",
892 | "language": "JavaScript",
893 | "owner": "owner-name"
894 | },
895 | "explanation": {
896 | "type": "overview",
897 | "files_analyzed": ["src/index.js"],
898 | "ai_model_used": "openai/gpt-4o",
899 | "ai_model_requested": "auto",
900 | "target_audience": "intermediate",
901 | "content": "Detailed AI-generated explanation...",
902 | "timestamp": "2024-01-17T23:41:31.079Z",
903 | "model_warning": null
904 | },
905 | "metadata": {
906 | "file_count": 5,
907 | "total_lines": 1200
908 | }
909 | }
910 | }
911 | ```
912 |
913 | ### 18. `ai_refactor_suggestions`
914 |
915 | **Description**: AI-powered refactoring strategies and recommendations.
916 |
917 | **Parameters**:
918 | ```json
919 | {
920 | "url": "https://github.com/owner/repo",
921 | "file_paths": ["src/index.js"], // optional
922 | "refactoring_goals": ["modernize", "performance", "maintainability", "security"],
923 | "target_framework": "react|vue|angular", // optional
924 | "options": {
925 | "ai_model": "auto|anthropic/claude-3.5-sonnet|openai/gpt-4o",
926 | "include_code_examples": true,
927 | "priority_level": "low|medium|high",
928 | "estimate_effort": true
929 | }
930 | }
931 | ```
932 |
933 | **Response**:
934 | ```json
935 | {
936 | "success": true,
937 | "data": {
938 | "repository": {
939 | "name": "repo-name",
940 | "description": "Repository description",
941 | "language": "JavaScript",
942 | "owner": "owner-name"
943 | },
944 | "refactoring": {
945 | "goals": ["modernize", "performance"],
946 | "target_framework": "react",
947 | "files_analyzed": ["src/index.js"],
948 | "ai_model_used": "anthropic/claude-3.5-sonnet",
949 | "ai_model_requested": "auto",
950 | "suggestions": "Detailed AI-generated refactoring plan...",
951 | "priority_level": "medium",
952 | "timestamp": "2024-01-17T23:41:31.079Z",
953 | "model_warning": null
954 | },
955 | "metadata": {
956 | "file_count": 5,
957 | "total_lines": 1200,
958 | "estimated_effort": "2-3 days for experienced developer"
959 | }
960 | }
961 | }
962 | ```
963 |
964 | ## Error Handling
965 |
966 | ### Error Codes
967 |
968 | - `INVALID_URL`: Invalid GitHub repository URL
969 | - `REPOSITORY_NOT_FOUND`: Repository does not exist or is private
970 | - `RATE_LIMIT_EXCEEDED`: API rate limit exceeded
971 | - `PROCESSING_ERROR`: General processing error
972 | - `VALIDATION_ERROR`: Input validation failed
973 | - `OPENROUTER_ERROR`: OpenRouter API error
974 | - `NETWORK_ERROR`: Network connectivity issue
975 |
976 | ### Error Response Format
977 |
978 | ```json
979 | {
980 | "success": false,
981 | "error": {
982 | "code": "RATE_LIMIT_EXCEEDED",
983 | "message": "GitHub API rate limit exceeded",
984 | "details": {
985 | "limit": 5000,
986 | "remaining": 0,
987 |       "reset": "2024-01-18T00:00:00Z"
988 | }
989 | },
990 | "metadata": {
991 | "processing_time": 1234
992 | }
993 | }
994 | ```
995 |
996 | ## Rate Limiting
997 |
998 | ### GitHub API
999 | - **Unauthenticated**: 60 requests/hour
1000 | - **With token**: 5,000 requests/hour
1001 | - **GraphQL**: 5,000 points/hour
1002 |
1003 | ### OpenRouter API
1004 | - Varies by model and subscription plan
1005 | - Check OpenRouter documentation for specific limits
1006 |
1007 | ### Best Practices
1008 | - Use GitHub token for better rate limits
1009 | - Implement exponential backoff
1010 | - Monitor rate limits with `health_check`
1011 | - Cache responses when possible
1012 |
1013 | ## Model Selection Guide
1014 |
1015 | ### Auto Selection Logic
1016 | - **Code Review**: `anthropic/claude-3.5-sonnet`
1017 | - **Explanation**: `openai/gpt-4o`
1018 | - **Refactoring**: `anthropic/claude-3.5-sonnet`
1019 | - **Batch Jobs**: `openai/gpt-4o-mini`
1020 |
1021 | ### Model Characteristics
1022 | - **Speed**: `fastest` → `slowest`
1023 | - **Cost**: `low` → `highest`
1024 | - **Quality**: `good` → `highest`
1025 | - **Recommended**: Production-ready models
1026 |
1027 | ### Model Transparency
1028 | All AI responses include:
1029 | - `ai_model_used`: Actual model used
1030 | - `ai_model_requested`: Requested model
1031 | - `model_warning`: Performance/cost warnings
1032 |
1033 | This comprehensive API reference covers all 18 tools with detailed parameters, responses, and usage examples.
```
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
```typescript
1 | #!/usr/bin/env node
2 |
3 | import { Server } from '@modelcontextprotocol/sdk/server/index.js';
4 | import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
5 | import {
6 | CallToolRequestSchema,
7 | ErrorCode,
8 | ListToolsRequestSchema,
9 | McpError,
10 | } from '@modelcontextprotocol/sdk/types.js';
11 | import { GitHubService } from './services/github.js';
12 | import { RefactorService } from './services/refactor.js';
13 | import { OpenAIService } from './services/openai.js';
14 | import { consolidatedTools } from './tools/consolidated.js';
15 | import { ToolResponse, ErrorCodes } from './types/responses.js';
16 | import { getConfig } from './utils/config.js';
17 | import { batchProcessFiles, processSingleFile, validateFilePath, extractFileMetadata, getFileStatistics } from './utils/file-processor.js';
18 | import { log, createPerformanceTimer } from './utils/logger.js';
19 | import { monitoring, monitorTool } from './utils/monitoring.js';
20 | import { z } from 'zod';
21 |
22 | // Initialize configuration
23 | const config = getConfig();
24 |
25 | // Log server startup
26 | log.info('CodeCompass MCP Server starting up', {
27 | nodeVersion: process.version,
28 | nodeEnv: process.env.NODE_ENV,
29 | configSummary: {
30 | hasGitHubToken: !!config.github.token,
31 | hasOpenRouterKey: !!config.openrouter.apiKey,
32 | defaultModel: config.openrouter.defaultModel,
33 | maxResponseTokens: config.response.maxTokens,
34 | logLevel: config.logging.level,
35 | },
36 | });
37 |
38 | const server = new Server(
39 | {
40 | name: 'codecompass-mcp',
41 | version: '1.0.0',
42 | },
43 | {
44 | capabilities: {
45 | tools: {},
46 | },
47 | }
48 | );
49 |
50 | // Initialize services with configuration
51 | const githubService = new GitHubService();
52 | const refactorService = new RefactorService();
53 | const openaiService = new OpenAIService();
54 |
55 | // Helper function to create standardized responses
56 | function createResponse<T>(data: T, error?: any, metadata?: any): ToolResponse<T> {
57 | const response: ToolResponse<T> = {
58 | success: !error,
59 | metadata: {
60 | processing_time: Date.now(),
61 | ...metadata,
62 | },
63 | };
64 |
65 | if (error) {
66 | // Enhanced error handling with contextual messages
67 | const errorMessage = error.message || 'An error occurred';
68 | const errorCode = error.code || ErrorCodes.PROCESSING_ERROR;
69 |
70 | // Add contextual suggestions based on error type
71 | let suggestion = '';
72 | if (errorMessage.includes('rate limit')) {
73 | suggestion = 'Try reducing request frequency or adding GitHub token for higher limits.';
74 | } else if (errorMessage.includes('not found') || errorMessage.includes('404')) {
75 | suggestion = 'Verify the repository URL is correct and the repository is publicly accessible.';
76 | } else if (errorMessage.includes('timeout')) {
77 | suggestion = 'Use chunking mode for large repositories: set chunk_mode=true in options.';
78 | } else if (errorMessage.includes('token')) {
79 | suggestion = 'Check your GitHub and OpenRouter API tokens in environment variables.';
80 | } else if (errorMessage.includes('permission') || errorMessage.includes('403')) {
81 | suggestion = 'Ensure your GitHub token has the necessary permissions for this repository.';
82 | }
83 |
84 | response.error = {
85 | code: errorCode,
86 | message: errorMessage,
87 | details: error.details || error,
88 | suggestion: suggestion || 'Check the API documentation for more details.',
89 | timestamp: new Date().toISOString(),
90 | context: {
91 | tool: metadata?.tool || 'unknown',
92 | url: metadata?.url || 'unknown'
93 | }
94 | };
95 | } else {
96 | response.data = data;
97 | }
98 |
99 | return response;
100 | }
101 |
102 | // Helper function to chunk large responses
103 | function chunkResponse<T>(response: ToolResponse<T>, chunkIndex: number, chunkSize: string): ToolResponse<T> {
104 | if (!response.success || !response.data) {
105 | return response;
106 | }
107 |
108 | const data = response.data as any;
109 | const chunkedResponse = { ...response };
110 | const chunkedData = { ...data };
111 |
112 | // Get chunk size limits from configuration
113 | const chunkLimits = config.response.chunkSizes;
114 | const limits = chunkLimits[chunkSize as keyof typeof chunkLimits] || chunkLimits.medium;
115 |
116 | // Chunk key files
117 | if (data.structure?.keyFiles) {
118 | const keyFiles = data.structure.keyFiles;
119 | const fileEntries = Object.entries(keyFiles);
120 | const startIndex = chunkIndex * limits.filesPerChunk;
121 | const endIndex = startIndex + limits.filesPerChunk;
122 | const chunkedFiles = fileEntries.slice(startIndex, endIndex);
123 |
124 | chunkedData.structure = {
125 | ...data.structure,
126 | keyFiles: Object.fromEntries(chunkedFiles.map(([filename, content]) => [
127 | filename,
128 | typeof content === 'string' && content.length > limits.fileContent
129 | ? content.substring(0, limits.fileContent) + '\n\n... [Content truncated for chunking] ...'
130 | : content
131 | ]))
132 | };
133 |
134 | // Add chunking metadata
135 | chunkedData.chunkInfo = {
136 | chunkIndex,
137 | chunkSize,
138 | totalFiles: fileEntries.length,
139 | totalChunks: Math.ceil(fileEntries.length / limits.filesPerChunk),
140 | filesInChunk: chunkedFiles.length,
141 | hasMore: endIndex < fileEntries.length,
142 | nextChunkIndex: endIndex < fileEntries.length ? chunkIndex + 1 : null
143 | };
144 | }
145 |
146 | chunkedResponse.data = chunkedData;
147 | return chunkedResponse;
148 | }
149 |
150 | // Helper function to truncate large responses (fallback for non-chunked mode)
151 | function truncateResponse<T>(response: ToolResponse<T>, maxTokens: number = 25000, maxFileContentLength: number = 1000): ToolResponse<T> {
152 | const jsonString = JSON.stringify(response, null, 2);
153 |
154 | // Rough token estimation: 1 token ≈ 4 characters
155 | const estimatedTokens = jsonString.length / 4;
156 |
157 | if (estimatedTokens <= maxTokens) {
158 | return response;
159 | }
160 |
161 | // If response is too large, truncate data while preserving structure
162 | const truncatedResponse = { ...response };
163 |
164 | if (truncatedResponse.success && truncatedResponse.data) {
165 | const data = truncatedResponse.data as any;
166 |
167 | // Truncate file contents first
168 | if (data.structure?.keyFiles) {
169 | const keyFiles = data.structure.keyFiles;
170 |
171 | Object.keys(keyFiles).forEach(filename => {
172 | if (keyFiles[filename] && keyFiles[filename].length > maxFileContentLength) {
173 | keyFiles[filename] = keyFiles[filename].substring(0, maxFileContentLength) +
174 | '\n\n... [Content truncated due to size limits] ...';
175 | }
176 | });
177 | }
178 |
179 | // Truncate file tree if still too large
180 | if (data.structure?.fileTree) {
181 | const truncateFileTree = (tree: any[], maxDepth: number = 3, currentDepth: number = 0): any[] => {
182 | if (currentDepth >= maxDepth) {
183 | return [{ name: '...', type: 'truncated', message: 'Directory tree truncated due to size limits' }];
184 | }
185 |
186 | return tree.slice(0, 50).map(node => ({
187 | ...node,
188 | children: node.children ? truncateFileTree(node.children, maxDepth, currentDepth + 1) : undefined
189 | }));
190 | };
191 |
192 | data.structure.fileTree = truncateFileTree(data.structure.fileTree);
193 | }
194 |
195 | // Add truncation warning
196 | if (!data.metadata) {
197 | data.metadata = {};
198 | }
199 | data.metadata.truncated = true;
200 | data.metadata.truncationReason = 'Response size exceeded maximum token limit';
201 | data.metadata.maxTokens = maxTokens;
202 | data.metadata.estimatedTokens = Math.round(estimatedTokens);
203 | data.metadata.suggestion = 'Use chunk_mode=true for complete data access or adjust max_response_tokens and max_file_content_length';
204 | }
205 |
206 | return truncatedResponse;
207 | }
208 |
209 | // Helper function to format tool responses for MCP
210 | function formatToolResponse<T>(
211 | response: ToolResponse<T>,
212 | maxTokens: number = 25000,
213 | maxFileContentLength: number = 1000,
214 | chunkMode: boolean = false,
215 | chunkIndex: number = 0,
216 | chunkSize: string = 'medium'
217 | ) {
218 | let processedResponse;
219 |
220 | if (chunkMode) {
221 | processedResponse = chunkResponse(response, chunkIndex, chunkSize);
222 | } else {
223 | processedResponse = truncateResponse(response, maxTokens, maxFileContentLength);
224 | }
225 |
226 | return {
227 | content: [
228 | {
229 | type: 'text',
230 | text: JSON.stringify(processedResponse, null, 2),
231 | },
232 | ],
233 | };
234 | }
235 |
236 | server.setRequestHandler(ListToolsRequestSchema, async () => {
237 | return {
238 | tools: consolidatedTools,
239 | };
240 | });
241 |
242 | server.setRequestHandler(CallToolRequestSchema, async (request) => {
243 | const { name, arguments: args } = request.params;
244 | const requestId = monitoring.generateRequestId();
245 | const startTime = Date.now();
246 |
247 | // Start monitoring this request
248 | monitoring.startRequest(name, requestId);
249 |
250 | try {
251 | let result;
252 | switch (name) {
253 | // Core Data Tools (6 tools)
254 | case 'get_repository_info':
255 | result = await handleGetRepositoryInfo(args);
256 | break;
257 | case 'get_file_tree':
258 | result = await handleGetFileTree(args);
259 | break;
260 | case 'search_repository':
261 | result = await handleSearchRepository(args);
262 | break;
263 | case 'get_file_content':
264 | result = await handleGetFileContent(args);
265 | break;
266 | case 'analyze_dependencies':
267 | result = await handleAnalyzeDependencies(args);
268 | break;
269 | case 'analyze_codebase':
270 | result = await handleAnalyzeCodebase(args);
271 | break;
272 |
273 | // AI-Enhanced Tools (3 tools)
274 | case 'review_code':
275 | result = await handleReviewCode(args);
276 | break;
277 | case 'explain_code':
278 | result = await handleExplainCode(args);
279 | break;
280 | case 'suggest_improvements':
281 | result = await handleSuggestImprovements(args);
282 | break;
283 |
284 | // Transformation Tools (1 tool)
285 | case 'transform_code':
286 | result = await handleTransformCode(args);
287 | break;
288 |
289 | // Utility Tools (1 tool)
290 | case 'health_check':
291 | result = await handleHealthCheck(args);
292 | break;
293 |
294 | default:
295 | throw new McpError(ErrorCode.MethodNotFound, `Unknown tool: ${name}`);
296 | }
297 |
298 | // Mark request as successful
299 | monitoring.completeRequest(name, startTime, true, undefined, requestId);
300 | return result;
301 |
302 | } catch (error) {
303 | // Mark request as failed
304 | monitoring.completeRequest(name, startTime, false, (error as Error).message, requestId);
305 |
306 | const response = createResponse(null, error);
307 | return formatToolResponse(response);
308 | }
309 | });
310 |
311 | // Tool handlers using CodeCompass-main core functionality
312 | async function handleGetRepositoryInfo(args: any) {
313 | const { url, options = {} } = args;
314 |
315 | try {
316 | const info = await githubService.getRepositoryInfo(url);
317 |
318 | const response = {
319 | repository: {
320 | name: info.name,
321 | description: info.description,
322 | owner: info.owner,
323 | language: info.language,
324 | defaultBranch: info.defaultBranch,
325 | createdAt: info.createdAt,
326 | updatedAt: info.updatedAt,
327 | license: info.license,
328 | ...(options.include_stats && {
329 | stats: {
330 | stars: info.stars,
331 | fileCount: info.fileCount,
332 | lineCount: info.lineCount,
333 | }
334 | }),
335 | ...(options.include_languages && {
336 | languages: info.languages
337 | }),
338 | ...(options.include_topics && {
339 | topics: [] // Add topics to GitHubRepoInfo interface if needed
340 | })
341 | }
342 | };
343 |
344 | return formatToolResponse(createResponse(response, null, { tool: 'get_repository_info', url }));
345 | } catch (error) {
346 | return formatToolResponse(createResponse(null, error, { tool: 'get_repository_info', url }));
347 | }
348 | }
349 |
350 | async function handleGetFileTree(args: any) {
351 | const { url, options = {} } = args;
352 |
353 | try {
354 | const tree = await githubService.getFileTree(url);
355 |
356 | const response = {
357 | file_tree: tree,
358 | metadata: {
359 | max_depth: options.max_depth || 10,
360 | include_hidden: options.include_hidden || false,
361 | total_files: tree.length,
362 | filtered_extensions: options.file_extensions || null,
363 | excluded_paths: options.exclude_paths || ['node_modules', 'dist', 'build', '.git']
364 | }
365 | };
366 |
367 | return formatToolResponse(createResponse(response, null, { tool: 'get_file_tree', url }));
368 | } catch (error) {
369 | return formatToolResponse(createResponse(null, error, { tool: 'get_file_tree', url }));
370 | }
371 | }
372 |
373 | // Legacy handler - remove after testing
374 | async function handleFetchRepositoryData(args: any) {
375 | try {
376 | const { url, options = {} } = args;
377 |
378 | // Extract size control options with config defaults
379 | const maxTokens = options.max_response_tokens || config.response.maxTokens;
380 | const maxFileContentLength = options.max_file_content_length || config.response.maxFileContentLength;
381 | const chunkMode = options.chunk_mode || false;
382 | const chunkIndex = options.chunk_index || 0;
383 | const chunkSize = options.chunk_size || 'medium';
384 |
385 | // Use CodeCompass-main's core GitHub service functionality
386 | const repositoryInfo = await githubService.getRepositoryInfo(url);
387 | const analysis = await githubService.analyzeRepository(url);
388 |
389 | const result = {
390 | info: repositoryInfo,
391 | structure: {
392 | fileCount: repositoryInfo.fileCount,
393 | lineCount: repositoryInfo.lineCount,
394 | fileTree: repositoryInfo.fileTree,
395 | keyFiles: repositoryInfo.keyFiles,
396 | },
397 | dependencies: analysis.dependencies,
398 | architecture: analysis.architecture,
399 | };
400 |
401 | const response = createResponse(result);
402 | return formatToolResponse(response, maxTokens, maxFileContentLength, chunkMode, chunkIndex, chunkSize);
403 | } catch (error) {
404 | const response = createResponse(null, error, { tool: 'fetch_repository_data', url: args.url });
405 | return formatToolResponse(response);
406 | }
407 | }
408 |
409 | async function handleSearchRepository(args: any) {
410 | try {
411 | const { url, query, search_type = 'text', options = {} } = args;
412 |
413 | // Get repository content and perform search
414 | const repositoryInfo = await githubService.getRepositoryInfo(url);
415 | const searchResults = await githubService.searchInRepository(url, query, {
416 | type: search_type,
417 | ...options,
418 | });
419 |
420 | const response = createResponse(searchResults);
421 | return formatToolResponse(response);
422 | } catch (error) {
423 | const response = createResponse(null, error, { tool: 'search_repository', url: args.url, query: args.query });
424 | return formatToolResponse(response);
425 | }
426 | }
427 |
428 | async function handleGetFileContent(args: any) {
429 | try {
430 | const { url, file_paths, options = {} } = args;
431 |
432 | // Validate file paths first
433 | const pathValidationErrors: string[] = [];
434 | for (const filePath of file_paths) {
435 | const validation = validateFilePath(filePath);
436 | if (!validation.valid) {
437 | pathValidationErrors.push(`${filePath}: ${validation.error}`);
438 | }
439 | }
440 |
441 | if (pathValidationErrors.length > 0) {
442 | throw new Error(`Invalid file paths detected:\n${pathValidationErrors.join('\n')}`);
443 | }
444 |
445 | // Fetch file contents from GitHub
446 | const fileContents: Array<{ path: string; content: string }> = [];
447 | const fetchErrors: Record<string, string> = {};
448 |
449 | for (const filePath of file_paths) {
450 | try {
451 | const content = await githubService.getFileContent(url, filePath);
452 | fileContents.push({ path: filePath, content });
453 | } catch (error: any) {
454 | fetchErrors[filePath] = error.message;
455 | }
456 | }
457 |
458 | // Process files using batch processing
459 | const batchOptions = {
460 | maxConcurrent: options.max_concurrent || config.limits.maxConcurrentRequests,
461 | continueOnError: options.continue_on_error !== false,
462 | validatePaths: false, // Already validated above
463 | includeMetadata: options.include_metadata !== false,
464 | maxFileSize: options.max_size || config.limits.maxFileSize,
465 | allowedExtensions: options.file_extensions,
466 | excludePatterns: options.exclude_patterns,
467 | };
468 |
469 | const batchResult = await batchProcessFiles(fileContents, batchOptions);
470 |
471 | // Combine results with fetch errors
472 | const results: Record<string, any> = {};
473 |
474 | // Add successful and failed processing results
475 | batchResult.results.forEach(result => {
476 | if (result.success) {
477 | results[result.filePath] = {
478 | content: result.content,
479 | metadata: result.metadata,
480 | size: result.metadata?.size || 0,
481 | truncated: result.metadata?.size ? result.metadata.size > (options.max_size || config.limits.maxFileSize) : false,
482 | };
483 | } else {
484 | results[result.filePath] = {
485 | error: result.error?.message || 'Processing failed',
486 | details: result.error?.details,
487 | };
488 | }
489 | });
490 |
491 | // Add fetch errors
492 | Object.entries(fetchErrors).forEach(([filePath, error]) => {
493 | results[filePath] = {
494 | error: `Failed to fetch: ${error}`,
495 | };
496 | });
497 |
498 | // Add processing statistics
499 | const statistics = getFileStatistics(batchResult.results.filter(r => r.success));
500 |
501 | const response = createResponse({
502 | files: results,
503 | summary: {
504 | ...batchResult.summary,
505 | fetchErrors: Object.keys(fetchErrors).length,
506 | statistics,
507 | },
508 | });
509 |
510 | return formatToolResponse(response);
511 | } catch (error) {
512 | const response = createResponse(null, error, { tool: 'get_file_content', url: args.url });
513 | return formatToolResponse(response);
514 | }
515 | }
516 |
517 | async function handleAnalyzeCodebase(args: any) {
518 | try {
519 | const { url, file_paths, options = {} } = args;
520 |
521 | const analysis = await githubService.analyzeCodeStructure(url, file_paths, options);
522 |
523 | const response = createResponse(analysis);
524 | return formatToolResponse(response);
525 | } catch (error) {
526 | const response = createResponse(null, error);
527 | return formatToolResponse(response);
528 | }
529 | }
530 |
531 | async function handleAnalyzeDependencies(args: any) {
532 | try {
533 | const { url, options = {} } = args;
534 |
535 | const dependencies = await githubService.analyzeDependencies(url);
536 |
537 | const response = createResponse(dependencies);
538 | return formatToolResponse(response);
539 | } catch (error) {
540 | const response = createResponse(null, error);
541 | return formatToolResponse(response);
542 | }
543 | }
544 |
545 | async function handleCalculateMetrics(args: any) {
546 | try {
547 | const { url, options = {} } = args;
548 |
549 | const metrics = await githubService.calculateMetrics(url, options);
550 |
551 | const response = createResponse(metrics);
552 | return formatToolResponse(response);
553 | } catch (error) {
554 | const response = createResponse(null, error);
555 | return formatToolResponse(response);
556 | }
557 | }
558 |
559 | async function handleTransformCode(args: any) {
560 | try {
561 | const { code, transformations, language, target_language, options = {} } = args;
562 |
563 | const result = await refactorService.transformCode(
564 | code,
565 | transformations,
566 | language,
567 | target_language,
568 | options
569 | );
570 |
571 | const response = createResponse(result);
572 | return formatToolResponse(response);
573 | } catch (error) {
574 | const response = createResponse(null, error);
575 | return formatToolResponse(response);
576 | }
577 | }
578 |
579 | async function handleExtractComponents(args: any) {
580 | try {
581 | const { url, extraction_types = ['components', 'functions', 'utilities'], options = {} } = args;
582 |
583 | const components = await refactorService.extractReusableComponents(url, extraction_types);
584 |
585 | const response = createResponse(components);
586 | return formatToolResponse(response);
587 | } catch (error) {
588 | const response = createResponse(null, error);
589 | return formatToolResponse(response);
590 | }
591 | }
592 |
593 | async function handleAdaptCodeStructure(args: any) {
594 | try {
595 | const { url, target_structure, options = {} } = args;
596 |
597 | const result = await refactorService.adaptCodeStructure(url, target_structure, options);
598 |
599 | const response = createResponse(result);
600 | return formatToolResponse(response);
601 | } catch (error) {
602 | const response = createResponse(null, error);
603 | return formatToolResponse(response);
604 | }
605 | }
606 |
607 | async function handleGenerateProjectTemplate(args: any) {
608 | try {
609 | const { url, template_type, options = {} } = args;
610 |
611 | const template = await refactorService.generateBoilerplate(url, template_type, options);
612 |
613 | const response = createResponse(template);
614 | return formatToolResponse(response);
615 | } catch (error) {
616 | const response = createResponse(null, error);
617 | return formatToolResponse(response);
618 | }
619 | }
620 |
621 | async function handleAnalyzeArchitecture(args: any) {
622 | try {
623 | const { url, options = {} } = args;
624 |
625 | const architecture = await githubService.analyzeArchitecturePublic(url, options);
626 |
627 | const response = createResponse(architecture);
628 | return formatToolResponse(response);
629 | } catch (error) {
630 | const response = createResponse(null, error);
631 | return formatToolResponse(response);
632 | }
633 | }
634 |
635 | async function handleCompareImplementations(args: any) {
636 | try {
637 | const { implementations, comparison_criteria, options = {} } = args;
638 |
639 | const comparison = await githubService.compareRepositories(implementations, comparison_criteria, options);
640 |
641 | const response = createResponse(comparison);
642 | return formatToolResponse(response);
643 | } catch (error) {
644 | const response = createResponse(null, error);
645 | return formatToolResponse(response);
646 | }
647 | }
648 |
649 | async function handleValidateCodeQuality(args: any) {
650 | try {
651 | const { url, validation_types, options = {} } = args;
652 |
653 | const validation = await githubService.validateCodeQuality(url, validation_types, options);
654 |
655 | const response = createResponse(validation);
656 | return formatToolResponse(response);
657 | } catch (error) {
658 | const response = createResponse(null, error);
659 | return formatToolResponse(response);
660 | }
661 | }
662 |
/**
 * Handles the `batch_process` tool: runs a list of tool operations one after
 * another and aggregates per-operation results, timings and success counts.
 * Individual failures are recorded rather than aborting the whole batch.
 *
 * NOTE(review): `options` is accepted but currently unused.
 */
async function handleBatchProcess(args: any) {
  try {
    const { operations, options = {} } = args;

    const results = [];
    const startTime = Date.now();

    for (const operation of operations) {
      const operationStartTime = Date.now();
      try {
        // Recursively call the appropriate handler
        // NOTE(review): server.request issues a JSON-RPC request over the
        // server's transport (i.e. toward the connected client). Confirm this
        // actually re-dispatches to this server's own tool handlers rather
        // than round-tripping to the client — it looks like it may not.
        const result = await server.request(
          { method: 'tools/call', params: { name: operation.tool, arguments: operation.params } } as any,
          {} as any
        );

        results.push({
          id: operation.id,
          success: true,
          data: result,
          processingTime: Date.now() - operationStartTime,
        });
      } catch (error: any) {
        results.push({
          id: operation.id,
          success: false,
          error: error.message,
          processingTime: Date.now() - operationStartTime,
        });
      }
    }

    const batchResult = {
      // Echo back the requested operations so callers can correlate by id.
      operations: operations.map((op: any) => ({ type: op.tool, params: op.params, id: op.id })),
      results,
      totalTime: Date.now() - startTime,
      successCount: results.filter(r => r.success).length,
      failureCount: results.filter(r => !r.success).length,
    };

    const response = createResponse(batchResult);
    return formatToolResponse(response);
  } catch (error) {
    const response = createResponse(null, error);
    return formatToolResponse(response);
  }
}
710 |
/**
 * Handles the `health_check` tool: builds an aggregated health report from the
 * monitoring subsystem, plus per-check details for the requested check names,
 * and optional metrics / performance insights / recent log lines.
 */
async function handleHealthCheck(args: any) {
  try {
    const { checks = ['api-limits', 'system-health', 'monitoring'], options = {} } = args;

    // Get comprehensive health status from monitoring system
    const monitoringHealth = monitoring.getHealthStatus();
    const serverMetrics = monitoring.getMetrics();

    const health: any = {
      status: monitoringHealth.status,
      timestamp: new Date().toISOString(),
      checks: { ...monitoringHealth.checks },
      // Metrics are opt-in to keep the default payload small.
      metrics: options.include_metrics ? {
        uptime: serverMetrics.uptime,
        memory: serverMetrics.memory,
        version: '1.0.0',
        requestCount: serverMetrics.requestCount,
        errorCount: serverMetrics.errorCount,
        averageResponseTime: serverMetrics.responseTime.average,
        toolUsage: serverMetrics.toolUsage,
      } : undefined,
    };

    // Add additional checks based on requested types
    for (const check of checks) {
      switch (check) {
        case 'api-limits':
          try {
            health.checks[check] = await githubService.checkApiLimits();
          } catch (error: any) {
            // A failing probe is reported, not thrown — the report must still render.
            health.checks[check] = { status: 'error', error: error.message };
          }
          break;
        case 'monitoring':
          health.checks[check] = {
            status: 'healthy',
            totalRequests: serverMetrics.requestCount,
            // Integer percentage; guarded against divide-by-zero on a fresh server.
            errorRate: serverMetrics.requestCount > 0 ? Math.round((serverMetrics.errorCount / serverMetrics.requestCount) * 100) : 0,
            uptime: serverMetrics.uptime,
            memoryUsage: Math.round((serverMetrics.memory.heapUsed / serverMetrics.memory.heapTotal) * 100),
          };
          break;
        case 'dependencies':
          // NOTE(review): hard-coded healthy — no real dependency probe exists yet.
          health.checks[check] = { status: 'healthy' };
          break;
        case 'configuration':
          health.checks[check] = {
            status: 'healthy',
            hasGitHubToken: !!config.github.token,
            hasOpenRouterKey: !!config.openrouter.apiKey,
            logLevel: config.logging.level,
            maxResponseTokens: config.response.maxTokens,
          };
          break;
      }
    }

    // Add performance insights if requested
    if (options.include_insights) {
      const insights = monitoring.getPerformanceInsights();
      health.insights = insights;
    }

    // Add recent logs if requested
    if (options.include_logs) {
      const logBuffer = log.getLogBuffer();
      health.recentLogs = logBuffer.slice(-10);
    }

    const response = createResponse(health);
    return formatToolResponse(response);
  } catch (error) {
    const response = createResponse(null, error);
    return formatToolResponse(response);
  }
}
787 |
788 | // AI-Enhanced Tool Handlers
789 | async function handleReviewCode(args: any) {
790 | try {
791 | const { url, file_paths, review_focus = ['security', 'performance', 'maintainability'], options = {} } = args;
792 |
793 | // Get repository info and code content
794 | const repoInfo = await githubService.getRepositoryInfo(url);
795 | let filesToReview: Record<string, string> = {};
796 |
797 | if (file_paths && file_paths.length > 0) {
798 | // Get specific files
799 | for (const filePath of file_paths) {
800 | try {
801 | const content = await githubService.getFileContent(url, filePath);
802 | filesToReview[filePath] = content;
803 | } catch (error) {
804 | // Skip files that can't be fetched
805 | }
806 | }
807 | } else {
808 | // Use key files from repository
809 | filesToReview = repoInfo.keyFiles;
810 | }
811 |
812 | if (Object.keys(filesToReview).length === 0) {
813 | throw new Error('No files found to review');
814 | }
815 |
816 | // Prepare code for AI review
817 | const codeContext = Object.entries(filesToReview)
818 | .map(([path, content]) => `--- ${path} ---\n${content}`)
819 | .join('\n\n');
820 |
821 | const focusAreas = review_focus.join(', ');
822 |
823 | // Generate AI review with specified model
824 | const aiReviewResult = await openaiService.generateCodeReview(
825 | codeContext,
826 | repoInfo.language || 'javascript',
827 | review_focus,
828 | options.ai_model
829 | );
830 |
831 | const result = {
832 | repository: {
833 | name: repoInfo.name,
834 | description: repoInfo.description,
835 | language: repoInfo.language,
836 | owner: repoInfo.owner,
837 | },
838 | review: {
839 | files_reviewed: Object.keys(filesToReview),
840 | focus_areas: review_focus,
841 | ai_model_used: aiReviewResult.modelUsed,
842 | ai_model_requested: options.ai_model || 'auto',
843 | analysis: aiReviewResult.content,
844 | severity_threshold: options.severity_threshold || 'medium',
845 | timestamp: new Date().toISOString(),
846 | model_warning: aiReviewResult.warning,
847 | },
848 | recommendations: {
849 | priority_fixes: [],
850 | suggestions: [],
851 | best_practices: [],
852 | },
853 | };
854 |
855 | const response = createResponse(result);
856 | return formatToolResponse(response);
857 | } catch (error) {
858 | const response = createResponse(null, error);
859 | return formatToolResponse(response);
860 | }
861 | }
862 |
/**
 * Handles the `explain_code` tool: generates an AI explanation of the
 * repository (overview / detailed / tutorial / integration / architecture),
 * over either the requested files or the repository's key files.
 */
async function handleExplainCode(args: any) {
  try {
    const { url, file_paths, explanation_type = 'overview', options = {} } = args;

    // Get repository info and code content
    const repoInfo = await githubService.getRepositoryInfo(url);
    let filesToExplain: Record<string, string> = {};

    if (file_paths && file_paths.length > 0) {
      // Get specific files
      for (const filePath of file_paths) {
        try {
          const content = await githubService.getFileContent(url, filePath);
          filesToExplain[filePath] = content;
        } catch (error) {
          // Skip files that can't be fetched
        }
      }
    } else {
      // Use key files from repository
      filesToExplain = repoInfo.keyFiles;
    }

    if (Object.keys(filesToExplain).length === 0) {
      throw new Error('No files found to explain');
    }

    // Generate AI explanation based on type
    let aiExplanation: string;
    let aiExplanationResult: { content: string; modelUsed: string; warning?: string };

    switch (explanation_type) {
      case 'architecture':
        // NOTE(review): explainArchitecture is not passed options.ai_model, so
        // the modelUsed recorded below is assumed, not reported by the service.
        aiExplanation = await openaiService.explainArchitecture(url, repoInfo);
        // For architecture, create a mock result for consistency
        aiExplanationResult = {
          content: aiExplanation,
          modelUsed: options.ai_model || 'anthropic/claude-3.5-sonnet',
          warning: undefined
        };
        break;
      case 'overview':
      case 'detailed':
      case 'tutorial':
      case 'integration':
      default:
        // Create a prompt for the specific explanation type
        const codeContext = Object.entries(filesToExplain)
          .map(([path, content]) => `--- ${path} ---\n${content}`)
          .join('\n\n');

        const prompt = `Please provide a ${explanation_type} explanation of this ${repoInfo.language || 'code'} repository:

Repository: ${repoInfo.name}
Description: ${repoInfo.description || 'No description'}
Language: ${repoInfo.language || 'Multiple'}

Code:
${codeContext}

Please focus on:
${options.focus_on_patterns ? '- Design patterns and architecture' : ''}
${options.include_examples ? '- Code examples and usage' : ''}
${options.include_diagrams ? '- Visual diagrams where helpful' : ''}

Target audience: ${options.target_audience || 'intermediate'}`;

        aiExplanationResult = await openaiService.chatWithRepository(url, prompt, undefined, options.ai_model);
        aiExplanation = aiExplanationResult.content;
        break;
    }

    const result = {
      repository: {
        name: repoInfo.name,
        description: repoInfo.description,
        language: repoInfo.language,
        owner: repoInfo.owner,
      },
      explanation: {
        type: explanation_type,
        files_analyzed: Object.keys(filesToExplain),
        ai_model_used: aiExplanationResult.modelUsed,
        ai_model_requested: options.ai_model || 'auto',
        target_audience: options.target_audience || 'intermediate',
        content: aiExplanation,
        timestamp: new Date().toISOString(),
        model_warning: aiExplanationResult.warning,
      },
      metadata: {
        file_count: Object.keys(filesToExplain).length,
        // Total source lines across all analyzed files.
        total_lines: Object.values(filesToExplain).reduce((sum, content) => sum + content.split('\n').length, 0),
      },
    };

    const response = createResponse(result);
    return formatToolResponse(response);
  } catch (error) {
    const response = createResponse(null, error);
    return formatToolResponse(response);
  }
}
965 |
966 | async function handleSuggestImprovements(args: any) {
967 | try {
968 | const { url, file_paths, refactoring_goals = ['modernize', 'maintainability'], target_framework, options = {} } = args;
969 |
970 | // Get repository info and code content
971 | const repoInfo = await githubService.getRepositoryInfo(url);
972 | let filesToRefactor: Record<string, string> = {};
973 |
974 | if (file_paths && file_paths.length > 0) {
975 | // Get specific files
976 | for (const filePath of file_paths) {
977 | try {
978 | const content = await githubService.getFileContent(url, filePath);
979 | filesToRefactor[filePath] = content;
980 | } catch (error) {
981 | // Skip files that can't be fetched
982 | }
983 | }
984 | } else {
985 | // Use key files from repository
986 | filesToRefactor = repoInfo.keyFiles;
987 | }
988 |
989 | if (Object.keys(filesToRefactor).length === 0) {
990 | throw new Error('No files found to analyze for refactoring');
991 | }
992 |
993 | // Generate AI refactoring suggestions
994 | const targetProject = {
995 | framework: target_framework || 'Not specified',
996 | language: repoInfo.language || 'javascript',
997 | constraints: [],
998 | timeline: 'Not specified',
999 | };
1000 |
1001 | const aiSuggestionsResult = await openaiService.suggestRefactoringPlan(url, targetProject, refactoring_goals, options.ai_model);
1002 |
1003 | const result = {
1004 | repository: {
1005 | name: repoInfo.name,
1006 | description: repoInfo.description,
1007 | language: repoInfo.language,
1008 | owner: repoInfo.owner,
1009 | },
1010 | refactoring: {
1011 | goals: refactoring_goals,
1012 | target_framework: target_framework,
1013 | files_analyzed: Object.keys(filesToRefactor),
1014 | ai_model_used: aiSuggestionsResult.modelUsed,
1015 | ai_model_requested: options.ai_model || 'auto',
1016 | suggestions: aiSuggestionsResult.content,
1017 | priority_level: options.priority_level || 'medium',
1018 | timestamp: new Date().toISOString(),
1019 | model_warning: aiSuggestionsResult.warning,
1020 | },
1021 | metadata: {
1022 | file_count: Object.keys(filesToRefactor).length,
1023 | total_lines: Object.values(filesToRefactor).reduce((sum, content) => sum + content.split('\n').length, 0),
1024 | estimated_effort: options.estimate_effort ? 'Will be provided by AI' : null,
1025 | },
1026 | };
1027 |
1028 | const response = createResponse(result);
1029 | return formatToolResponse(response);
1030 | } catch (error) {
1031 | const response = createResponse(null, error);
1032 | return formatToolResponse(response);
1033 | }
1034 | }
1035 |
1036 | // Start the server
1037 | async function main() {
1038 | const transport = new StdioServerTransport();
1039 | await server.connect(transport);
1040 | }
1041 |
1042 | main().catch(console.error);
```
--------------------------------------------------------------------------------
/src/services/github.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { Octokit } from '@octokit/rest';
2 | import { GitHubRepoInfo, FileNode, DependencyInfo, RepositoryAnalysis } from '../types/index.js';
3 |
4 | export class GitHubService {
5 | private octokit: Octokit;
6 | private isAuthenticated: boolean;
7 | private cache: Map<string, any> = new Map();
8 | private cacheTimeout: number = 5 * 60 * 1000; // 5 minutes
9 |
10 | constructor() {
11 | const token = process.env.GITHUB_TOKEN || process.env.GITHUB_API_KEY;
12 |
13 | // Initialize with or without authentication
14 | this.octokit = new Octokit({
15 | auth: token,
16 | });
17 |
18 | this.isAuthenticated = !!token;
19 |
20 | if (!this.isAuthenticated) {
21 | console.warn('GitHub token not provided. Using public API with rate limits.');
22 | }
23 | }
24 |
25 | private getCacheKey(method: string, params: any): string {
26 | return `${method}:${JSON.stringify(params)}`;
27 | }
28 |
29 | private getCachedResult<T>(key: string): T | null {
30 | const cached = this.cache.get(key);
31 | if (cached && Date.now() - cached.timestamp < this.cacheTimeout) {
32 | return cached.data;
33 | }
34 | return null;
35 | }
36 |
37 | private setCachedResult<T>(key: string, data: T): void {
38 | this.cache.set(key, {
39 | data,
40 | timestamp: Date.now(),
41 | });
42 | }
43 |
  /**
   * Runs `operation`, retrying up to `maxRetries` times with exponential
   * backoff. A 403 whose x-ratelimit-remaining header is exhausted instead
   * waits until the advertised reset time (if under 5 minutes) and retries
   * without consuming the backoff delay.
   */
  private async withRetry<T>(
    operation: () => Promise<T>,
    maxRetries: number = 3,
    delay: number = 1000
  ): Promise<T> {
    for (let i = 0; i < maxRetries; i++) {
      try {
        return await operation();
      } catch (error: any) {
        if (error.status === 403 && error.response?.headers?.['x-ratelimit-remaining'] === '0') {
          // Reset header is epoch seconds; convert to milliseconds.
          const resetTime = parseInt(error.response.headers['x-ratelimit-reset']) * 1000;
          const waitTime = resetTime - Date.now();

          if (waitTime > 0 && waitTime < 5 * 60 * 1000) { // Only wait if less than 5 minutes
            console.log(`Rate limit exceeded. Waiting ${Math.ceil(waitTime / 1000)} seconds...`);
            await new Promise(resolve => setTimeout(resolve, waitTime));
            continue;
          }
        }

        if (i === maxRetries - 1) {
          throw error;
        }

        // Exponential backoff
        await new Promise(resolve => setTimeout(resolve, delay * Math.pow(2, i)));
      }
    }

    // Reached only when the final attempt was consumed by a rate-limit `continue`.
    throw new Error('Max retries exceeded');
  }
75 |
76 | async analyzeRepository(url: string): Promise<RepositoryAnalysis> {
77 | const info = await this.getRepositoryInfo(url);
78 | const dependencies = await this.analyzeDependencies(url);
79 |
80 | return {
81 | info,
82 | dependencies,
83 | architecture: await this.analyzeArchitecture(url, info),
84 | codeQuality: await this.analyzeCodeQuality(url, info),
85 | refactoringPotential: await this.analyzeRefactoringPotential(url, info),
86 | };
87 | }
88 |
  /**
   * Fetches repository metadata, languages, file tree and key-file contents,
   * with a 5-minute cache and rate-limit-aware retries. When the tree API is
   * unavailable it falls back to fetching a handful of well-known files.
   * Reported lineCount is an estimate extrapolated from the fetched key files.
   */
  async getRepositoryInfo(url: string): Promise<GitHubRepoInfo> {
    const { owner, repo } = this.parseGitHubUrl(url);
    const cacheKey = this.getCacheKey('getRepositoryInfo', { owner, repo });

    // Check cache first
    const cached = this.getCachedResult<GitHubRepoInfo>(cacheKey);
    if (cached) {
      return cached;
    }

    try {
      // Get repository info with retry logic
      const { data: repoData } = await this.withRetry(() =>
        this.octokit.rest.repos.get({ owner, repo })
      );

      // Get languages with retry logic
      const { data: languages } = await this.withRetry(() =>
        this.octokit.rest.repos.listLanguages({ owner, repo })
      );

      // Get file tree with retry logic and fallback for rate limits
      let fileTree: FileNode[] = [];
      let fileCount = 0;
      let keyFiles: Record<string, string> = {};

      try {
        const { data: treeData } = await this.withRetry(() =>
          this.octokit.rest.git.getTree({
            owner,
            repo,
            tree_sha: repoData.default_branch,
            recursive: 'true',
          })
        );

        fileTree = this.buildFileTree(treeData.tree);
        // Only blobs are files; tree entries are directories.
        fileCount = treeData.tree.filter(item => item.type === 'blob').length;

        // Fetch key files for comprehensive analysis
        console.log(`Fetching key files for ${repoData.name}...`);
        keyFiles = await this.getKeyRepositoryFiles(url, fileTree);
        console.log(`Fetched ${Object.keys(keyFiles).length} key files`);
      } catch (treeError: any) {
        // If we hit rate limits on tree API, try to get basic structure
        console.warn('Failed to fetch full file tree, falling back to basic analysis');

        // Try to get at least README and package.json
        try {
          const basicFiles = ['README.md', 'README.txt', 'README', 'package.json'];
          for (const fileName of basicFiles) {
            try {
              const content = await this.getFileContent(url, fileName);
              keyFiles[fileName] = content;
            } catch (fileError) {
              // Skip files that don't exist
            }
          }
        } catch (fallbackError) {
          console.warn('Failed to fetch basic files, continuing with minimal info');
        }
      }

      // Calculate actual line count from fetched files
      const actualLineCount = Object.values(keyFiles).reduce((total, content) => {
        return total + content.split('\n').length;
      }, 0);

      // Estimate total line count based on fetched files ratio:
      // average lines per fetched key file × file count (floor of 10 files),
      // or a flat 50 lines/file guess when nothing could be fetched.
      const estimatedLineCount = actualLineCount > 0
        ? Math.floor((actualLineCount / Math.max(1, Object.keys(keyFiles).length)) * Math.max(fileCount, 10))
        : Math.floor(Math.max(fileCount, 10) * 50);

      const result: GitHubRepoInfo = {
        name: repoData.name,
        description: repoData.description,
        owner: repoData.owner.login,
        stars: repoData.stargazers_count,
        language: repoData.language,
        languages,
        fileCount,
        lineCount: estimatedLineCount,
        fileTree,
        keyFiles,
        license: repoData.license?.name,
        defaultBranch: repoData.default_branch,
        createdAt: repoData.created_at,
        updatedAt: repoData.updated_at,
      };

      // Cache the result
      this.setCachedResult(cacheKey, result);
      return result;
    } catch (error: any) {
      if (error.status === 404) {
        throw new Error('Repository not found or not accessible');
      }
      if (error.status === 403 && error.message.includes('rate limit')) {
        throw new Error(`GitHub API rate limit exceeded. Please provide a GitHub token for higher limits. Error: ${error.message}`);
      }
      throw new Error(`Failed to fetch repository: ${error.message}`);
    }
  }
192 |
193 | async getFileTree(url: string, path?: string): Promise<FileNode[]> {
194 | const { owner, repo } = this.parseGitHubUrl(url);
195 |
196 | try {
197 | const { data: repoData } = await this.octokit.rest.repos.get({
198 | owner,
199 | repo,
200 | });
201 |
202 | const { data: treeData } = await this.octokit.rest.git.getTree({
203 | owner,
204 | repo,
205 | tree_sha: repoData.default_branch,
206 | recursive: 'true',
207 | });
208 |
209 | const fileTree = this.buildFileTree(treeData.tree);
210 |
211 | if (path) {
212 | return this.filterTreeByPath(fileTree, path);
213 | }
214 |
215 | return fileTree;
216 | } catch (error: any) {
217 | throw new Error(`Failed to fetch file tree: ${error.message}`);
218 | }
219 | }
220 |
221 | async getFileContent(url: string, filePath: string): Promise<string> {
222 | const { owner, repo } = this.parseGitHubUrl(url);
223 | const cacheKey = this.getCacheKey('getFileContent', { owner, repo, filePath });
224 |
225 | // Check cache first
226 | const cached = this.getCachedResult<string>(cacheKey);
227 | if (cached) {
228 | return cached;
229 | }
230 |
231 | try {
232 | const { data } = await this.withRetry(() =>
233 | this.octokit.rest.repos.getContent({
234 | owner,
235 | repo,
236 | path: filePath,
237 | })
238 | );
239 |
240 | if ('content' in data) {
241 | const content = Buffer.from(data.content, 'base64').toString('utf-8');
242 | this.setCachedResult(cacheKey, content);
243 | return content;
244 | }
245 |
246 | throw new Error('File content not available');
247 | } catch (error: any) {
248 | if (error.status === 404) {
249 | throw new Error(`File not found: ${filePath}`);
250 | }
251 | if (error.status === 403 && error.message.includes('rate limit')) {
252 | throw new Error(`GitHub API rate limit exceeded. Please provide a GitHub token for higher limits. Error: ${error.message}`);
253 | }
254 | throw new Error(`Failed to fetch file content: ${error.message}`);
255 | }
256 | }
257 |
258 | async getKeyFiles(url: string): Promise<Record<string, string>> {
259 | const fileTree = await this.getFileTree(url);
260 | return await this.getKeyRepositoryFiles(url, fileTree);
261 | }
262 |
  /**
   * Collects declared dependencies from known manifest files: package.json
   * (dependencies / devDependencies / peerDependencies) and requirements.txt.
   * Missing manifests are silently skipped; returns whatever was gathered.
   */
  async analyzeDependencies(url: string): Promise<DependencyInfo[]> {
    const dependencies: DependencyInfo[] = [];

    try {
      // Check for package.json
      try {
        const packageJson = await this.getFileContent(url, 'package.json');
        const pkg = JSON.parse(packageJson);

        // Add regular dependencies
        if (pkg.dependencies) {
          for (const [name, version] of Object.entries(pkg.dependencies)) {
            dependencies.push({
              name,
              version: version as string,
              type: 'dependency',
              source: 'package.json',
            });
          }
        }

        // Add dev dependencies
        if (pkg.devDependencies) {
          for (const [name, version] of Object.entries(pkg.devDependencies)) {
            dependencies.push({
              name,
              version: version as string,
              type: 'devDependency',
              source: 'package.json',
            });
          }
        }

        // Add peer dependencies
        if (pkg.peerDependencies) {
          for (const [name, version] of Object.entries(pkg.peerDependencies)) {
            dependencies.push({
              name,
              version: version as string,
              type: 'peerDependency',
              source: 'package.json',
            });
          }
        }
      } catch (error) {
        // package.json not found, continue with other dependency files
      }

      // Check for requirements.txt
      try {
        const requirementsTxt = await this.getFileContent(url, 'requirements.txt');
        const lines = requirementsTxt.split('\n').filter(line => line.trim() && !line.startsWith('#'));

        for (const line of lines) {
          // NOTE(review): this pattern handles ==/>=/<= style pins; forms like
          // `~=`, extras (`pkg[extra]`) or environment markers are not parsed —
          // confirm whether those occur in target repositories.
          const match = line.match(/^([^=><]+)([=><]=?.*)?$/);
          if (match) {
            dependencies.push({
              name: match[1].trim(),
              version: match[2] || '*',
              type: 'dependency',
              source: 'requirements.txt',
            });
          }
        }
      } catch (error) {
        // requirements.txt not found
      }

      // Add more dependency file parsers as needed (Gemfile, Cargo.toml, etc.)

    } catch (error: any) {
      // Best-effort: log and return whatever was collected so far.
      console.error('Error analyzing dependencies:', error.message);
    }

    return dependencies;
  }
339 |
340 | private async analyzeArchitecture(url: string, info: GitHubRepoInfo): Promise<any> {
341 | // Analyze architecture patterns based on file structure and content
342 | const patterns: string[] = [];
343 | const frameworks: string[] = [];
344 |
345 | // Detect frameworks based on dependencies and file patterns
346 | const keyFiles = info.keyFiles;
347 |
348 | // Check for React
349 | if (keyFiles['package.json']?.includes('react')) {
350 | frameworks.push('React');
351 | }
352 |
353 | // Check for Vue
354 | if (keyFiles['package.json']?.includes('vue')) {
355 | frameworks.push('Vue');
356 | }
357 |
358 | // Check for Angular
359 | if (keyFiles['package.json']?.includes('@angular')) {
360 | frameworks.push('Angular');
361 | }
362 |
363 | // Check for Express
364 | if (keyFiles['package.json']?.includes('express')) {
365 | frameworks.push('Express');
366 | }
367 |
368 | // Detect patterns based on file structure
369 | const fileTree = info.fileTree;
370 | const folders = this.extractFolders(fileTree);
371 |
372 | // Check for MVC pattern
373 | if (folders.includes('models') && folders.includes('views') && folders.includes('controllers')) {
374 | patterns.push('MVC');
375 | }
376 |
377 | // Check for component-based architecture
378 | if (folders.includes('components')) {
379 | patterns.push('Component-based');
380 | }
381 |
382 | // Check for layered architecture
383 | if (folders.includes('services') && folders.includes('models')) {
384 | patterns.push('Layered');
385 | }
386 |
387 | return {
388 | patterns,
389 | frameworks,
390 | structure: this.analyzeProjectStructure(fileTree),
391 | entryPoints: this.findEntryPoints(keyFiles),
392 | configFiles: this.findConfigFiles(keyFiles),
393 | testFiles: this.findTestFiles(fileTree),
394 | documentationFiles: this.findDocumentationFiles(keyFiles),
395 | };
396 | }
397 |
398 | private async analyzeCodeQuality(url: string, info: GitHubRepoInfo): Promise<any> {
399 | // Basic code quality analysis
400 | const keyFiles = info.keyFiles;
401 | const codeSmells: string[] = [];
402 |
403 | // Check for common code smells
404 | for (const [filePath, content] of Object.entries(keyFiles)) {
405 | if (content.length > 10000) {
406 | codeSmells.push(`Large file: ${filePath}`);
407 | }
408 |
409 | if (content.includes('TODO') || content.includes('FIXME')) {
410 | codeSmells.push(`TODO/FIXME found in: ${filePath}`);
411 | }
412 |
413 | // Check for long lines
414 | const longLines = content.split('\n').filter(line => line.length > 120);
415 | if (longLines.length > 5) {
416 | codeSmells.push(`Long lines in: ${filePath}`);
417 | }
418 | }
419 |
420 | return {
421 | complexity: this.calculateComplexity(keyFiles),
422 | maintainability: this.calculateMaintainability(keyFiles),
423 | duplicateCode: this.detectDuplicateCode(keyFiles),
424 | codeSmells,
425 | };
426 | }
427 |
428 | private async analyzeRefactoringPotential(url: string, info: GitHubRepoInfo): Promise<any> {
429 | const keyFiles = info.keyFiles;
430 | const extractableComponents: any[] = [];
431 | const reusableUtilities: any[] = [];
432 | const modernizationOpportunities: any[] = [];
433 |
434 | // Analyze files for refactoring potential
435 | for (const [filePath, content] of Object.entries(keyFiles)) {
436 | // Look for extractable components
437 | if (filePath.includes('component') || filePath.includes('Component')) {
438 | extractableComponents.push({
439 | name: this.extractComponentName(filePath),
440 | path: filePath,
441 | type: 'component',
442 | dependencies: this.extractDependencies(content),
443 | complexity: this.calculateFileComplexity(content),
444 | reusabilityScore: this.calculateReusabilityScore(content),
445 | description: this.extractDescription(content),
446 | });
447 | }
448 |
449 | // Look for utility functions
450 | if (filePath.includes('util') || filePath.includes('helper')) {
451 | reusableUtilities.push({
452 | name: this.extractUtilityName(filePath),
453 | path: filePath,
454 | functions: this.extractFunctions(content),
455 | description: this.extractDescription(content),
456 | dependencies: this.extractDependencies(content),
457 | });
458 | }
459 |
460 | // Look for modernization opportunities
461 | if (content.includes('var ') && !content.includes('const ') && !content.includes('let ')) {
462 | modernizationOpportunities.push({
463 | type: 'syntax',
464 | description: 'Use const/let instead of var',
465 | files: [filePath],
466 | suggestion: 'Replace var declarations with const/let',
467 | impact: 'low',
468 | });
469 | }
470 | }
471 |
472 | return {
473 | extractableComponents,
474 | reusableUtilities,
475 | configurationFiles: this.findConfigFiles(keyFiles),
476 | boilerplateCode: this.findBoilerplateCode(keyFiles),
477 | modernizationOpportunities,
478 | };
479 | }
480 |
481 | private parseGitHubUrl(url: string): { owner: string; repo: string } {
482 | const match = url.match(/github\.com\/([^\/]+)\/([^\/]+)/);
483 | if (!match) {
484 | throw new Error('Invalid GitHub URL format');
485 | }
486 | return { owner: match[1], repo: match[2] };
487 | }
488 |
489 | private buildFileTree(gitTree: any[]): FileNode[] {
490 | const tree: FileNode[] = [];
491 | const pathMap = new Map();
492 |
493 | // Sort by path to ensure proper ordering
494 | const sortedTree = gitTree.sort((a, b) => a.path.localeCompare(b.path));
495 |
496 | for (const item of sortedTree) {
497 | const pathParts = item.path.split('/');
498 | let currentLevel = tree;
499 | let currentPath = '';
500 |
501 | for (let i = 0; i < pathParts.length; i++) {
502 | const part = pathParts[i];
503 | currentPath = currentPath ? `${currentPath}/${part}` : part;
504 |
505 | let existingItem = currentLevel.find(node => node.name === part);
506 |
507 | if (!existingItem) {
508 | const isFile = i === pathParts.length - 1 && item.type === 'blob';
509 | existingItem = {
510 | name: part,
511 | path: currentPath,
512 | type: isFile ? 'file' : 'directory',
513 | children: isFile ? undefined : [],
514 | size: isFile ? item.size : undefined,
515 | sha: item.sha,
516 | };
517 | currentLevel.push(existingItem);
518 | }
519 |
520 | if (existingItem.children) {
521 | currentLevel = existingItem.children;
522 | }
523 | }
524 | }
525 |
526 | return tree;
527 | }
528 |
529 | private filterTreeByPath(tree: FileNode[], path: string): FileNode[] {
530 | const pathParts = path.split('/');
531 | let currentLevel = tree;
532 |
533 | for (const part of pathParts) {
534 | const found = currentLevel.find(node => node.name === part);
535 | if (!found || !found.children) {
536 | return [];
537 | }
538 | currentLevel = found.children;
539 | }
540 |
541 | return currentLevel;
542 | }
543 |
544 | private async getKeyRepositoryFiles(url: string, fileTree: FileNode[]): Promise<Record<string, string>> {
545 | const keyFiles: Record<string, string> = {};
546 |
547 | // Priority files to include for comprehensive analysis
548 | const priorityPatterns = [
549 | /^README\.md$/i,
550 | /^README\.txt$/i,
551 | /^CONTRIBUTING\.md$/i,
552 | /^LICENSE$/i,
553 | /^package\.json$/i,
554 | /^pyproject\.toml$/i,
555 | /^requirements\.txt$/i,
556 | /^Cargo\.toml$/i,
557 | /^go\.mod$/i,
558 | /^pom\.xml$/i,
559 | /^build\.gradle$/i,
560 | /^Dockerfile$/i,
561 | /^docker-compose\.yml$/i,
562 | /^\.gitignore$/i,
563 | /^tsconfig\.json$/i,
564 | /^webpack\.config\./i,
565 | /^vite\.config\./i,
566 | /^next\.config\./i,
567 | /^tailwind\.config\./i,
568 | ];
569 |
570 | // Get all files, prioritizing smaller ones and key configuration files
571 | const allFiles: Array<{
572 | path: string;
573 | size: number;
574 | priority: number;
575 | isSmall: boolean;
576 | }> = [];
577 |
578 | const collectFiles = (nodes: FileNode[], currentPath = '') => {
579 | for (const node of nodes) {
580 | if (node.type === 'file') {
581 | const filePath = currentPath ? `${currentPath}/${node.name}` : node.name;
582 | const isHighPriority = priorityPatterns.some(pattern => pattern.test(node.name));
583 | const isSmallFile = (node.size || 0) < 10000; // Files under 10KB
584 | const isCodeFile = /\.(js|ts|jsx|tsx|py|java|cpp|c|h|go|rs|php|rb|swift|kt|dart)$/i.test(node.name);
585 |
586 | allFiles.push({
587 | path: filePath,
588 | size: node.size || 0,
589 | priority: isHighPriority ? 3 : (isCodeFile ? 2 : 1),
590 | isSmall: isSmallFile
591 | });
592 | } else if (node.children) {
593 | const newPath = currentPath ? `${currentPath}/${node.name}` : node.name;
594 | collectFiles(node.children, newPath);
595 | }
596 | }
597 | };
598 |
599 | collectFiles(fileTree);
600 |
601 | // Sort by priority, then by size (smaller first)
602 | allFiles.sort((a, b) => {
603 | if (a.priority !== b.priority) return b.priority - a.priority;
604 | return a.size - b.size;
605 | });
606 |
607 | // Fetch files up to ~500KB total content to stay within reasonable limits
608 | let totalSize = 0;
609 | const maxTotalSize = 500000; // 500KB
610 |
611 | for (const file of allFiles) {
612 | if (totalSize + file.size > maxTotalSize && Object.keys(keyFiles).length > 10) {
613 | break; // Stop if we've reached size limit and have enough files
614 | }
615 |
616 | try {
617 | const content = await this.getFileContent(url, file.path);
618 | keyFiles[file.path] = content;
619 | totalSize += content.length;
620 |
621 | // Always include high priority files regardless of size constraints
622 | if (file.priority < 3 && Object.keys(keyFiles).length > 20) {
623 | break; // Limit to ~20 files for non-priority files
624 | }
625 | } catch (error) {
626 | // Skip files that can't be fetched (binary, too large, etc.)
627 | continue;
628 | }
629 | }
630 |
631 | return keyFiles;
632 | }
633 |
634 | // Helper methods for analysis
635 | private extractFolders(tree: FileNode[]): string[] {
636 | const folders: string[] = [];
637 | const traverse = (nodes: FileNode[]) => {
638 | for (const node of nodes) {
639 | if (node.type === 'directory') {
640 | folders.push(node.name);
641 | if (node.children) {
642 | traverse(node.children);
643 | }
644 | }
645 | }
646 | };
647 | traverse(tree);
648 | return folders;
649 | }
650 |
651 | private analyzeProjectStructure(fileTree: FileNode[]): any {
652 | const folders = this.extractFolders(fileTree);
653 |
654 | // Determine project type
655 | let type = 'single-package';
656 | if (folders.includes('packages') || folders.includes('apps')) {
657 | type = 'monorepo';
658 | } else if (folders.includes('lib') && folders.includes('dist')) {
659 | type = 'multi-package';
660 | }
661 |
662 | // Identify common folder purposes
663 | const folderMapping: Record<string, string> = {};
664 | if (folders.includes('src')) folderMapping.src = 'src';
665 | if (folders.includes('lib')) folderMapping.src = 'lib';
666 | if (folders.includes('test') || folders.includes('tests')) folderMapping.tests = folders.includes('test') ? 'test' : 'tests';
667 | if (folders.includes('docs') || folders.includes('documentation')) folderMapping.docs = folders.includes('docs') ? 'docs' : 'documentation';
668 | if (folders.includes('config') || folders.includes('configs')) folderMapping.config = folders.includes('config') ? 'config' : 'configs';
669 | if (folders.includes('build') || folders.includes('dist')) folderMapping.build = folders.includes('build') ? 'build' : 'dist';
670 | if (folders.includes('public') || folders.includes('static')) folderMapping.public = folders.includes('public') ? 'public' : 'static';
671 |
672 | return {
673 | type,
674 | folders: folderMapping,
675 | };
676 | }
677 |
678 | private findEntryPoints(keyFiles: Record<string, string>): string[] {
679 | const entryPoints: string[] = [];
680 |
681 | for (const filePath of Object.keys(keyFiles)) {
682 | if (filePath.includes('index.') || filePath.includes('main.') || filePath.includes('app.')) {
683 | entryPoints.push(filePath);
684 | }
685 | }
686 |
687 | return entryPoints;
688 | }
689 |
690 | private findConfigFiles(keyFiles: Record<string, string>): string[] {
691 | const configFiles: string[] = [];
692 |
693 | for (const filePath of Object.keys(keyFiles)) {
694 | if (filePath.includes('config') || filePath.includes('.config.') ||
695 | filePath.includes('webpack') || filePath.includes('vite') ||
696 | filePath.includes('tsconfig') || filePath.includes('babel') ||
697 | filePath.includes('eslint') || filePath.includes('prettier')) {
698 | configFiles.push(filePath);
699 | }
700 | }
701 |
702 | return configFiles;
703 | }
704 |
705 | private findTestFiles(fileTree: FileNode[]): string[] {
706 | const testFiles: string[] = [];
707 |
708 | const traverse = (nodes: FileNode[]) => {
709 | for (const node of nodes) {
710 | if (node.type === 'file' && (
711 | node.name.includes('.test.') ||
712 | node.name.includes('.spec.') ||
713 | node.path.includes('test') ||
714 | node.path.includes('spec')
715 | )) {
716 | testFiles.push(node.path);
717 | } else if (node.children) {
718 | traverse(node.children);
719 | }
720 | }
721 | };
722 |
723 | traverse(fileTree);
724 | return testFiles;
725 | }
726 |
727 | private findDocumentationFiles(keyFiles: Record<string, string>): string[] {
728 | const docFiles: string[] = [];
729 |
730 | for (const filePath of Object.keys(keyFiles)) {
731 | if (filePath.includes('README') || filePath.includes('CHANGELOG') ||
732 | filePath.includes('CONTRIBUTING') || filePath.includes('.md') ||
733 | filePath.includes('docs/') || filePath.includes('documentation/')) {
734 | docFiles.push(filePath);
735 | }
736 | }
737 |
738 | return docFiles;
739 | }
740 |
741 | private calculateComplexity(keyFiles: Record<string, string>): number {
742 | // Basic complexity calculation
743 | let totalComplexity = 0;
744 | let fileCount = 0;
745 |
746 | for (const content of Object.values(keyFiles)) {
747 | totalComplexity += this.calculateFileComplexity(content);
748 | fileCount++;
749 | }
750 |
751 | return fileCount > 0 ? totalComplexity / fileCount : 0;
752 | }
753 |
754 | private calculateFileComplexity(content: string): number {
755 | // Count cyclomatic complexity indicators
756 | const complexityPatterns = [
757 | { pattern: /\bif\b/g, type: 'if' },
758 | { pattern: /\belse\b/g, type: 'else' },
759 | { pattern: /\bfor\b/g, type: 'for' },
760 | { pattern: /\bwhile\b/g, type: 'while' },
761 | { pattern: /\bswitch\b/g, type: 'switch' },
762 | { pattern: /\bcase\b/g, type: 'case' },
763 | { pattern: /\bcatch\b/g, type: 'catch' },
764 | { pattern: /\bthrow\b/g, type: 'throw' },
765 | { pattern: /&&/g, type: '&&' },
766 | { pattern: /\|\|/g, type: '||' },
767 | { pattern: /\?/g, type: '?' },
768 | { pattern: /:/g, type: ':' },
769 | { pattern: /\breturn\b/g, type: 'return' }
770 | ];
771 |
772 | let complexity = 1; // Base complexity
773 |
774 | for (const { pattern } of complexityPatterns) {
775 | try {
776 | const matches = content.match(pattern);
777 | if (matches) {
778 | complexity += matches.length;
779 | }
780 | } catch (error) {
781 | // Skip invalid regex patterns
782 | console.warn(`Invalid regex pattern: ${pattern}`);
783 | }
784 | }
785 |
786 | return complexity;
787 | }
788 |
789 | private calculateMaintainability(keyFiles: Record<string, string>): number {
790 | // Basic maintainability score
791 | let score = 100;
792 |
793 | for (const [filePath, content] of Object.entries(keyFiles)) {
794 | const lines = content.split('\n');
795 |
796 | // Penalize long files
797 | if (lines.length > 500) score -= 10;
798 |
799 | // Penalize long lines
800 | const longLines = lines.filter(line => line.length > 120);
801 | score -= longLines.length * 0.1;
802 |
803 | // Penalize lack of comments
804 | const commentLines = lines.filter(line => line.trim().startsWith('//') || line.trim().startsWith('/*'));
805 | if (commentLines.length / lines.length < 0.1) score -= 5;
806 | }
807 |
808 | return Math.max(0, score);
809 | }
810 |
811 | private detectDuplicateCode(keyFiles: Record<string, string>): number {
812 | // Basic duplicate detection
813 | const codeBlocks = new Map<string, number>();
814 |
815 | for (const content of Object.values(keyFiles)) {
816 | const lines = content.split('\n');
817 |
818 | // Look for duplicate blocks of 3+ lines
819 | for (let i = 0; i < lines.length - 2; i++) {
820 | const block = lines.slice(i, i + 3).join('\n').trim();
821 | if (block.length > 50) { // Only consider substantial blocks
822 | codeBlocks.set(block, (codeBlocks.get(block) || 0) + 1);
823 | }
824 | }
825 | }
826 |
827 | let duplicateCount = 0;
828 | for (const count of codeBlocks.values()) {
829 | if (count > 1) duplicateCount++;
830 | }
831 |
832 | return duplicateCount;
833 | }
834 |
835 | private extractComponentName(filePath: string): string {
836 | const parts = filePath.split('/');
837 | const fileName = parts[parts.length - 1];
838 | return fileName.replace(/\.(js|ts|jsx|tsx)$/, '');
839 | }
840 |
841 | private extractUtilityName(filePath: string): string {
842 | const parts = filePath.split('/');
843 | const fileName = parts[parts.length - 1];
844 | return fileName.replace(/\.(js|ts)$/, '');
845 | }
846 |
847 | private extractDependencies(content: string): string[] {
848 | const dependencies: string[] = [];
849 | const importRegex = /import\s+.*?\s+from\s+['"]([^'"]+)['"]/g;
850 | const requireRegex = /require\(['"]([^'"]+)['"]\)/g;
851 |
852 | let match;
853 | while ((match = importRegex.exec(content)) !== null) {
854 | dependencies.push(match[1]);
855 | }
856 |
857 | while ((match = requireRegex.exec(content)) !== null) {
858 | dependencies.push(match[1]);
859 | }
860 |
861 | return dependencies;
862 | }
863 |
864 | private extractFunctions(content: string): string[] {
865 | const functions: string[] = [];
866 | const functionRegex = /function\s+(\w+)|const\s+(\w+)\s*=\s*\(|(\w+)\s*:\s*\(/g;
867 |
868 | let match;
869 | while ((match = functionRegex.exec(content)) !== null) {
870 | const functionName = match[1] || match[2] || match[3];
871 | if (functionName) {
872 | functions.push(functionName);
873 | }
874 | }
875 |
876 | return functions;
877 | }
878 |
879 | private extractDescription(content: string): string {
880 | // Look for JSDoc or comment descriptions
881 | const jsdocMatch = content.match(/\/\*\*\s*\n\s*\*\s*([^*]+)/);
882 | if (jsdocMatch) {
883 | return jsdocMatch[1].trim();
884 | }
885 |
886 | const commentMatch = content.match(/\/\/\s*(.+)/);
887 | if (commentMatch) {
888 | return commentMatch[1].trim();
889 | }
890 |
891 | return 'No description available';
892 | }
893 |
894 | private calculateReusabilityScore(content: string): number {
895 | let score = 50; // Base score
896 |
897 | // Increase score for pure functions
898 | if (content.includes('function') && !content.includes('this.') && !content.includes('document.')) {
899 | score += 20;
900 | }
901 |
902 | // Increase score for TypeScript types
903 | if (content.includes('interface') || content.includes('type ')) {
904 | score += 15;
905 | }
906 |
907 | // Increase score for good documentation
908 | if (content.includes('/**') || content.includes('//')) {
909 | score += 10;
910 | }
911 |
912 | // Decrease score for dependencies on specific libraries
913 | if (content.includes('import') && content.includes('react')) {
914 | score -= 10;
915 | }
916 |
917 | return Math.min(100, Math.max(0, score));
918 | }
919 |
920 | private findBoilerplateCode(keyFiles: Record<string, string>): string[] {
921 | const boilerplate: string[] = [];
922 |
923 | for (const [filePath, content] of Object.entries(keyFiles)) {
924 | // Look for common boilerplate patterns
925 | if (content.includes('export default') && content.includes('import React')) {
926 | boilerplate.push(`React component boilerplate in ${filePath}`);
927 | }
928 |
929 | if (content.includes('app.use(') && content.includes('express')) {
930 | boilerplate.push(`Express setup boilerplate in ${filePath}`);
931 | }
932 |
933 | if (content.includes('describe(') && content.includes('it(')) {
934 | boilerplate.push(`Test boilerplate in ${filePath}`);
935 | }
936 | }
937 |
938 | return boilerplate;
939 | }
940 |
941 | // Additional methods for MCP tool handlers
942 | async searchInRepository(url: string, query: string, options: any = {}): Promise<any> {
943 | const keyFiles = await this.getKeyFiles(url);
944 | const searchResults = [];
945 |
946 | for (const [filePath, content] of Object.entries(keyFiles)) {
947 | const lines = content.split('\n');
948 | let lineNumber = 0;
949 |
950 | for (const line of lines) {
951 | lineNumber++;
952 | if (line.toLowerCase().includes(query.toLowerCase())) {
953 | searchResults.push({
954 | file: filePath,
955 | line: lineNumber,
956 | content: line.trim(),
957 | context: options.include_context ? lines.slice(Math.max(0, lineNumber - 3), lineNumber + 3) : [],
958 | type: 'exact',
959 | });
960 | }
961 | }
962 | }
963 |
964 | return {
965 | query,
966 | results: searchResults,
967 | totalMatches: searchResults.length,
968 | filesSearched: Object.keys(keyFiles).length,
969 | searchTime: Date.now(),
970 | };
971 | }
972 |
973 | async analyzeCodeStructure(url: string, file_paths?: string[], options: any = {}): Promise<any> {
974 | const keyFiles = await this.getKeyFiles(url);
975 | const codeStructure: any = {
976 | functions: [],
977 | classes: [],
978 | imports: [],
979 | exports: [],
980 | complexity: {
981 | cyclomatic: 0,
982 | cognitive: 0,
983 | maintainability: 0,
984 | },
985 | };
986 |
987 | for (const [filePath, content] of Object.entries(keyFiles)) {
988 | if (file_paths && !file_paths.includes(filePath)) continue;
989 |
990 | // Extract functions
991 | const functions = this.extractFunctions(content);
992 | codeStructure.functions.push(...functions.map(func => ({
993 | name: func,
994 | signature: `function ${func}()`,
995 | startLine: 0,
996 | endLine: 0,
997 | complexity: 1,
998 | parameters: [],
999 | documentation: '',
1000 | })));
1001 |
1002 | // Extract imports
1003 | const imports = this.extractDependencies(content);
1004 | codeStructure.imports.push(...imports.map(imp => ({
1005 | source: imp,
1006 | imports: [],
1007 | type: 'import',
1008 | isExternal: !imp.startsWith('.'),
1009 | })));
1010 |
1011 | // Calculate complexity
1012 | codeStructure.complexity.cyclomatic += this.calculateFileComplexity(content);
1013 | }
1014 |
1015 | codeStructure.complexity.maintainability = this.calculateMaintainability(keyFiles);
1016 |
1017 | return codeStructure;
1018 | }
1019 |
1020 | async calculateMetrics(url: string, options: any = {}): Promise<any> {
1021 | const keyFiles = await this.getKeyFiles(url);
1022 | const repoInfo = await this.getRepositoryInfo(url);
1023 |
1024 | return {
1025 | complexity: {
1026 | cyclomatic: this.calculateComplexity(keyFiles),
1027 | cognitive: this.calculateComplexity(keyFiles),
1028 | maintainability: this.calculateMaintainability(keyFiles),
1029 | },
1030 | quality: {
1031 | score: this.calculateMaintainability(keyFiles),
1032 | issues: [],
1033 | },
1034 | size: {
1035 | lines: repoInfo.lineCount,
1036 | files: repoInfo.fileCount,
1037 | functions: this.extractFunctions(Object.values(keyFiles).join('\n')).length,
1038 | classes: 0,
1039 | },
1040 | dependencies: {
1041 | external: (await this.analyzeDependencies(url)).length,
1042 | internal: 0,
1043 | circular: [],
1044 | },
1045 | };
1046 | }
1047 |
1048 | async analyzeArchitecturePublic(url: string, options: any = {}): Promise<any> {
1049 | const repoInfo = await this.getRepositoryInfo(url);
1050 | return this.analyzeArchitecture(url, repoInfo);
1051 | }
1052 |
1053 | async compareRepositories(implementations: any[], comparison_criteria: string[], options: any = {}): Promise<any> {
1054 | const comparisons = [];
1055 |
1056 | for (const impl of implementations) {
1057 | const analysis = await this.analyzeRepository(impl.url);
1058 | comparisons.push({
1059 | name: impl.name,
1060 | url: impl.url,
1061 | analysis,
1062 | });
1063 | }
1064 |
1065 | return {
1066 | implementations: comparisons,
1067 | criteria: comparison_criteria,
1068 | summary: 'Repository comparison completed',
1069 | };
1070 | }
1071 |
1072 | async validateCodeQuality(url: string, validation_types: string[], options: any = {}): Promise<any> {
1073 | const repoInfo = await this.getRepositoryInfo(url);
1074 | const codeQuality = await this.analyzeCodeQuality(url, repoInfo);
1075 |
1076 | return {
1077 | validations: validation_types.map(type => ({
1078 | type,
1079 | status: 'passed',
1080 | issues: [],
1081 | score: 80,
1082 | })),
1083 | overall: {
1084 | score: codeQuality.maintainability,
1085 | issues: codeQuality.codeSmells,
1086 | recommendations: ['Add more tests', 'Improve documentation'],
1087 | },
1088 | };
1089 | }
1090 |
1091 | async checkApiLimits(): Promise<any> {
1092 | try {
1093 | const { data } = await this.octokit.rest.rateLimit.get();
1094 | return {
1095 | status: 'healthy',
1096 | core: data.resources.core,
1097 | search: data.resources.search,
1098 | authenticated: this.isAuthenticated,
1099 | };
1100 | } catch (error: any) {
1101 | return {
1102 | status: 'error',
1103 | error: error.message,
1104 | authenticated: this.isAuthenticated,
1105 | };
1106 | }
1107 | }
1108 | }
```
--------------------------------------------------------------------------------
/src/services/refactor.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { RefactorResult, RefactorOptions, ExtractableComponent, ReusableUtility, GeneratedTemplate, ComponentLibrary } from '../types/index.js';
2 | import { GitHubService } from './github.js';
3 |
4 | export class RefactorService {
5 | private githubService: GitHubService;
6 |
7 | constructor() {
8 | this.githubService = new GitHubService();
9 | }
10 |
11 | async extractFunctions(url: string, filePath: string, functionNames?: string[]): Promise<{ functions: any[], dependencies: string[] }> {
12 | const content = await this.githubService.getFileContent(url, filePath);
13 | const functions = this.parseFunctions(content, functionNames);
14 | const dependencies = this.extractDependencies(content);
15 |
16 | return {
17 | functions,
18 | dependencies,
19 | };
20 | }
21 |
22 | async analyzeDependencies(url: string): Promise<any> {
23 | const repositoryInfo = await this.githubService.getRepositoryInfo(url);
24 | const dependencies = await this.githubService.analyzeDependencies(url);
25 |
26 | return {
27 | external: dependencies,
28 | internal: this.analyzeInternalDependencies(repositoryInfo.keyFiles),
29 | graph: this.buildDependencyGraph(repositoryInfo.keyFiles),
30 | };
31 | }
32 |
33 | async refactorForProject(url: string, targetProject: any, refactorOptions: RefactorOptions): Promise<RefactorResult> {
34 | const repositoryInfo = await this.githubService.getRepositoryInfo(url);
35 | const keyFiles = repositoryInfo.keyFiles;
36 |
37 | let refactoredCode = '';
38 | const changes: any[] = [];
39 | const warnings: string[] = [];
40 | const dependencies: string[] = [];
41 | const instructions: string[] = [];
42 |
43 | // Process each file
44 | for (const [filePath, content] of Object.entries(keyFiles)) {
45 | let processedContent = content;
46 |
47 | // Apply naming convention transformation
48 | if (refactorOptions.namingConvention) {
49 | const result = await this.transformNamingConvention(
50 | processedContent,
51 | refactorOptions.namingConvention
52 | );
53 | processedContent = result.code;
54 | changes.push(...result.changes);
55 | }
56 |
57 | // Modernize code if requested
58 | if (refactorOptions.modernizationLevel && refactorOptions.modernizationLevel !== 'minimal') {
59 | const result = await this.modernizeCode(
60 | processedContent,
61 | this.detectLanguage(filePath),
62 | refactorOptions.modernizationLevel
63 | );
64 | processedContent = result.refactoredCode;
65 | changes.push(...result.changes);
66 | }
67 |
68 | // Remove project-specific coupling
69 | if (refactorOptions.removeProjectSpecific) {
70 | const result = await this.removeProjectCoupling(
71 | processedContent,
72 | this.detectLanguage(filePath)
73 | );
74 | processedContent = result.refactoredCode;
75 | changes.push(...result.changes);
76 | }
77 |
78 | // Add TypeScript if requested
79 | if (refactorOptions.addTypeScript && this.isJavaScriptFile(filePath)) {
80 | const result = await this.addTypeScript(processedContent, filePath);
81 | processedContent = result.code;
82 | changes.push(...result.changes);
83 | }
84 |
85 | refactoredCode += `\n\n// === ${filePath} ===\n${processedContent}`;
86 | }
87 |
88 | // Generate integration instructions
89 | instructions.push('1. Review the refactored code for compatibility with your project');
90 | instructions.push('2. Update import paths to match your project structure');
91 | instructions.push('3. Install any required dependencies');
92 | instructions.push('4. Run tests to ensure functionality is preserved');
93 |
94 | if (refactorOptions.extractComponents) {
95 | instructions.push('5. Consider extracting reusable components into separate files');
96 | }
97 |
98 | return {
99 | originalCode: Object.values(keyFiles).join('\n\n'),
100 | refactoredCode,
101 | changes,
102 | warnings,
103 | dependencies,
104 | instructions,
105 | };
106 | }
107 |
108 | async extractReusableComponents(url: string, componentTypes?: string[]): Promise<ExtractableComponent[]> {
109 | const repositoryInfo = await this.githubService.getRepositoryInfo(url);
110 | const components: ExtractableComponent[] = [];
111 |
112 | for (const [filePath, content] of Object.entries(repositoryInfo.keyFiles)) {
113 | if (this.isComponentFile(filePath, content)) {
114 | const component = await this.analyzeComponent(filePath, content);
115 |
116 | if (!componentTypes || componentTypes.includes(component.type)) {
117 | components.push(component);
118 | }
119 | }
120 | }
121 |
122 | return components.sort((a, b) => b.reusabilityScore - a.reusabilityScore);
123 | }
124 |
125 | async adaptDependencies(code: string, dependencyMappings: Record<string, string>): Promise<string> {
126 | let adaptedCode = code;
127 |
128 | // Replace import statements
129 | for (const [oldDep, newDep] of Object.entries(dependencyMappings)) {
130 | const importRegex = new RegExp(`from\\s+['"]${oldDep}['"]`, 'g');
131 | adaptedCode = adaptedCode.replace(importRegex, `from '${newDep}'`);
132 |
133 | const requireRegex = new RegExp(`require\\(['"]${oldDep}['"]\\)`, 'g');
134 | adaptedCode = adaptedCode.replace(requireRegex, `require('${newDep}')`);
135 | }
136 |
137 | return adaptedCode;
138 | }
139 |
140 | async transformNamingConventions(code: string, fromConvention: string, toConvention: string): Promise<string> {
141 | const { code: transformedCode } = await this.transformNamingConvention(code, {
142 | variables: toConvention as any,
143 | functions: toConvention as any,
144 | classes: toConvention as any,
145 | files: toConvention as any,
146 | folders: toConvention as any,
147 | });
148 |
149 | return transformedCode;
150 | }
151 |
152 | async modernizeCode(code: string, language: string, targetVersion?: string): Promise<RefactorResult> {
153 | const changes: any[] = [];
154 | let modernizedCode = code;
155 |
156 | if (language === 'javascript' || language === 'typescript') {
157 | // Convert var to const/let
158 | const varMatches = code.match(/var\s+(\w+)/g);
159 | if (varMatches) {
160 | modernizedCode = modernizedCode.replace(/var\s+(\w+)/g, 'const $1');
161 | changes.push({
162 | type: 'modify',
163 | file: 'current',
164 | description: 'Replaced var with const/let',
165 | oldValue: 'var',
166 | newValue: 'const/let',
167 | });
168 | }
169 |
170 | // Convert function declarations to arrow functions where appropriate
171 | const functionRegex = /function\s+(\w+)\s*\(([^)]*)\)\s*\{/g;
172 | modernizedCode = modernizedCode.replace(functionRegex, 'const $1 = ($2) => {');
173 |
174 | // Use template literals instead of string concatenation
175 | const concatRegex = /['"]([^'"]*)['"]\s*\+\s*(\w+)\s*\+\s*['"]([^'"]*)['"]/g;
176 | modernizedCode = modernizedCode.replace(concatRegex, '`$1${$2}$3`');
177 |
178 | // Convert promises to async/await if possible
179 | if (modernizedCode.includes('.then(') && !modernizedCode.includes('async ')) {
180 | changes.push({
181 | type: 'modify',
182 | file: 'current',
183 | description: 'Consider converting promises to async/await',
184 | });
185 | }
186 | }
187 |
188 | return {
189 | originalCode: code,
190 | refactoredCode: modernizedCode,
191 | changes,
192 | warnings: [],
193 | dependencies: [],
194 | instructions: ['Review modernized code for compatibility'],
195 | };
196 | }
197 |
198 | async removeProjectCoupling(code: string, language: string): Promise<RefactorResult> {
199 | let decoupledCode = code;
200 | const changes: any[] = [];
201 |
202 | // Remove hard-coded URLs and endpoints
203 | const urlRegex = /https?:\/\/[^\s'"]+/g;
204 | const urls = code.match(urlRegex);
205 | if (urls) {
206 | for (const url of urls) {
207 | decoupledCode = decoupledCode.replace(url, '${API_BASE_URL}/endpoint');
208 | changes.push({
209 | type: 'modify',
210 | file: 'current',
211 | description: `Parameterized hard-coded URL: ${url}`,
212 | oldValue: url,
213 | newValue: '${API_BASE_URL}/endpoint',
214 | });
215 | }
216 | }
217 |
218 | // Remove environment-specific imports
219 | const envImports = [
220 | 'process.env',
221 | 'window.',
222 | 'document.',
223 | 'localStorage.',
224 | 'sessionStorage.',
225 | ];
226 |
227 | for (const envImport of envImports) {
228 | if (decoupledCode.includes(envImport)) {
229 | changes.push({
230 | type: 'modify',
231 | file: 'current',
232 | description: `Environment-specific code found: ${envImport}`,
233 | });
234 | }
235 | }
236 |
237 | // Replace hard-coded values with configuration
238 | const configValues = this.extractConfigurableValues(code);
239 | for (const value of configValues) {
240 | decoupledCode = decoupledCode.replace(value.pattern, value.replacement);
241 | changes.push({
242 | type: 'modify',
243 | file: 'current',
244 | description: `Made configurable: ${value.description}`,
245 | oldValue: value.pattern,
246 | newValue: value.replacement,
247 | });
248 | }
249 |
250 | return {
251 | originalCode: code,
252 | refactoredCode: decoupledCode,
253 | changes,
254 | warnings: [],
255 | dependencies: [],
256 | instructions: [
257 | 'Create a configuration file for parameterized values',
258 | 'Set up environment-specific configuration',
259 | 'Test with different configurations',
260 | ],
261 | };
262 | }
263 |
  /**
   * Generate a boilerplate project template modelled on an existing repository.
   *
   * @param url - GitHub repository URL used as the template source.
   * @param templateType - 'starter' | 'component-library' | 'microservice';
   *   any other value yields only package.json plus the optional extras below.
   * @param options - Overrides and flags: name, description, includeConfig,
   *   includeTests, includeDocs.
   * @returns Assembled template with files, dependencies, scripts, instructions.
   */
  async generateBoilerplate(url: string, templateType: string, options: any): Promise<GeneratedTemplate> {
    const repositoryInfo = await this.githubService.getRepositoryInfo(url);
    // NOTE(review): `analysis` is fetched but never read below — presumably
    // reserved for future template enrichment; confirm before removing.
    const analysis = await this.githubService.analyzeRepository(url);

    const template: GeneratedTemplate = {
      name: options.name || `${repositoryInfo.name}-template`,
      description: options.description || `Template based on ${repositoryInfo.name}`,
      files: [],
      dependencies: [],
      scripts: {},
      instructions: [],
    };

    // Generate package.json (always included, regardless of template type)
    const packageJson = await this.generatePackageJson(repositoryInfo, options);
    template.files.push({
      path: 'package.json',
      content: JSON.stringify(packageJson, null, 2),
      type: 'config',
    });

    // Generate main files based on template type
    switch (templateType) {
      case 'starter':
        template.files.push(...await this.generateStarterFiles(repositoryInfo, options));
        break;
      case 'component-library':
        template.files.push(...await this.generateComponentLibraryFiles(repositoryInfo, options));
        break;
      case 'microservice':
        template.files.push(...await this.generateMicroserviceFiles(repositoryInfo, options));
        break;
    }

    // Generate configuration files
    if (options.includeConfig) {
      template.files.push(...await this.generateConfigFiles(repositoryInfo, options));
    }

    // Generate test files
    if (options.includeTests) {
      template.files.push(...await this.generateTestFiles(repositoryInfo, options));
    }

    // Generate documentation
    if (options.includeDocs) {
      template.files.push(...await this.generateDocumentationFiles(repositoryInfo, options));
    }

    return template;
  }
315 |
  /**
   * Build a component library from a repository's reusable UI components.
   *
   * @param url - GitHub repository URL to mine.
   * @param componentPaths - Optional allow-list of component file paths; when
   *   omitted, every extracted component is included.
   * @returns Library record with components, utilities, and a package.json.
   */
  async createComponentLibrary(url: string, componentPaths?: string[]): Promise<ComponentLibrary> {
    const repositoryInfo = await this.githubService.getRepositoryInfo(url);
    const components = await this.extractReusableComponents(url, ['ui-components']);

    const library: ComponentLibrary = {
      name: `${repositoryInfo.name}-components`,
      description: `Component library extracted from ${repositoryInfo.name}`,
      components: [],
      utilities: [],
      types: [],
      styles: [],
      documentation: '',
      packageJson: {},
    };

    // Process components (filtered by componentPaths when provided)
    for (const component of components) {
      if (!componentPaths || componentPaths.includes(component.path)) {
        const libraryComponent = await this.convertToLibraryComponent(component);
        library.components.push(libraryComponent);
      }
    }

    // Extract utilities and normalize each into the library's richer shape:
    // function names become stub records with placeholder parameter/type info.
    const utilities = await this.extractReusableUtilities(url);
    library.utilities = utilities.map(util => ({
      ...util,
      documentation: util.description,
      functions: util.functions.map(func => ({
        name: func,
        parameters: [],
        returnType: 'any',
        description: 'Function extracted from code',
        examples: []
      }))
    }));

    // Generate package.json for the library
    library.packageJson = await this.generateLibraryPackageJson(repositoryInfo);

    return library;
  }
358 |
359 | async scaffoldProjectStructure(url: string, projectType: string): Promise<any> {
360 | const repositoryInfo = await this.githubService.getRepositoryInfo(url);
361 | const analysis = await this.githubService.analyzeRepository(url);
362 |
363 | const structure = {
364 | name: `${repositoryInfo.name}-scaffold`,
365 | type: projectType,
366 | folders: this.generateFolderStructure(projectType, analysis),
367 | files: this.generateScaffoldFiles(projectType, repositoryInfo),
368 | scripts: this.generateScripts(projectType, repositoryInfo),
369 | dependencies: this.extractRelevantDependencies(analysis.dependencies),
370 | };
371 |
372 | return structure;
373 | }
374 |
375 | // Helper methods
  /**
   * Find function declarations in source text and return extracted info for each.
   *
   * @param content - Source text to scan.
   * @param functionNames - Optional allow-list; when given, only matching
   *   names are returned.
   * @returns One record per match from extractFunctionInfo.
   */
  private parseFunctions(content: string, functionNames?: string[]): any[] {
    const functions: any[] = [];
    // Heuristic: group 1 captures `function <name>`, group 2 captures
    // `const <name> = ...` arrow/function-expression declarations.
    const functionRegex = /(?:function\s+(\w+)|const\s+(\w+)\s*=\s*(?:async\s+)?(?:\([^)]*\)\s*=>|\([^)]*\)\s*\{|function))/g;

    let match;
    while ((match = functionRegex.exec(content)) !== null) {
      const functionName = match[1] || match[2];
      if (functionName && (!functionNames || functionNames.includes(functionName))) {
        const functionInfo = this.extractFunctionInfo(content, functionName, match.index);
        functions.push(functionInfo);
      }
    }

    return functions;
  }
391 |
392 | private extractFunctionInfo(content: string, name: string, startIndex: number): any {
393 | // Find function body
394 | const lines = content.split('\n');
395 | let currentLine = 0;
396 | let currentIndex = 0;
397 |
398 | while (currentIndex < startIndex) {
399 | currentIndex = content.indexOf('\n', currentIndex) + 1;
400 | currentLine++;
401 | }
402 |
403 | // Extract function signature and body
404 | const functionStart = content.indexOf('{', startIndex);
405 | const functionEnd = this.findMatchingBrace(content, functionStart);
406 |
407 | return {
408 | name,
409 | signature: content.substring(startIndex, functionStart).trim(),
410 | body: content.substring(functionStart + 1, functionEnd).trim(),
411 | startLine: currentLine,
412 | endLine: currentLine + content.substring(functionStart, functionEnd).split('\n').length,
413 | };
414 | }
415 |
416 | private findMatchingBrace(content: string, start: number): number {
417 | let braceCount = 1;
418 | let index = start + 1;
419 |
420 | while (index < content.length && braceCount > 0) {
421 | if (content[index] === '{') braceCount++;
422 | else if (content[index] === '}') braceCount--;
423 | index++;
424 | }
425 |
426 | return index - 1;
427 | }
428 |
429 | private extractDependencies(content: string): string[] {
430 | const dependencies: string[] = [];
431 | const importRegex = /import\s+.*?\s+from\s+['"]([^'"]+)['"]/g;
432 | const requireRegex = /require\(['"]([^'"]+)['"]\)/g;
433 |
434 | let match;
435 | while ((match = importRegex.exec(content)) !== null) {
436 | dependencies.push(match[1]);
437 | }
438 |
439 | while ((match = requireRegex.exec(content)) !== null) {
440 | dependencies.push(match[1]);
441 | }
442 |
443 | return [...new Set(dependencies)];
444 | }
445 |
446 | private analyzeInternalDependencies(keyFiles: Record<string, string>): any {
447 | const dependencies: Record<string, string[]> = {};
448 |
449 | for (const [filePath, content] of Object.entries(keyFiles)) {
450 | const fileDeps = this.extractDependencies(content);
451 | dependencies[filePath] = fileDeps.filter(dep => dep.startsWith('.'));
452 | }
453 |
454 | return dependencies;
455 | }
456 |
457 | private buildDependencyGraph(keyFiles: Record<string, string>): any {
458 | const graph: Record<string, string[]> = {};
459 | const internalDeps = this.analyzeInternalDependencies(keyFiles);
460 |
461 | for (const [file, deps] of Object.entries(internalDeps)) {
462 | graph[file] = deps as string[];
463 | }
464 |
465 | return graph;
466 | }
467 |
  /**
   * Rename variable and function declarations in `code` to the conventions in
   * `convention` ({ variables, functions } — e.g. 'camelCase', 'snake_case').
   *
   * @returns The rewritten code plus one change record per rename performed.
   */
  private async transformNamingConvention(code: string, convention: any): Promise<{ code: string; changes: any[] }> {
    const changes: any[] = [];
    let transformedCode = code;

    // Variable naming — rewrites the declaration site only; other usages of
    // the identifier elsewhere in the code are NOT updated.
    const variableRegex = /(?:let|const|var)\s+(\w+)/g;
    transformedCode = transformedCode.replace(variableRegex, (match, varName) => {
      const newName = this.convertNamingConvention(varName, convention.variables);
      if (newName !== varName) {
        changes.push({
          type: 'modify',
          description: `Renamed variable ${varName} to ${newName}`,
          oldValue: varName,
          newValue: newName,
        });
      }
      // String.replace swaps the first occurrence of the name inside the
      // matched declaration text only.
      return match.replace(varName, newName);
    });

    // Function naming — same declaration-site-only caveat as above.
    const functionRegex = /function\s+(\w+)/g;
    transformedCode = transformedCode.replace(functionRegex, (match, funcName) => {
      const newName = this.convertNamingConvention(funcName, convention.functions);
      if (newName !== funcName) {
        changes.push({
          type: 'modify',
          description: `Renamed function ${funcName} to ${newName}`,
          oldValue: funcName,
          newValue: newName,
        });
      }
      return match.replace(funcName, newName);
    });

    return { code: transformedCode, changes };
  }
504 |
505 | private convertNamingConvention(name: string, targetConvention: string): string {
506 | switch (targetConvention) {
507 | case 'camelCase':
508 | return name.replace(/_([a-z])/g, (_, letter) => letter.toUpperCase());
509 | case 'snake_case':
510 | return name.replace(/([A-Z])/g, '_$1').toLowerCase();
511 | case 'kebab-case':
512 | return name.replace(/([A-Z])/g, '-$1').toLowerCase();
513 | case 'PascalCase':
514 | return name.charAt(0).toUpperCase() + name.slice(1);
515 | default:
516 | return name;
517 | }
518 | }
519 |
520 | private detectLanguage(filePath: string): string {
521 | const ext = filePath.split('.').pop()?.toLowerCase();
522 | switch (ext) {
523 | case 'js':
524 | case 'jsx':
525 | return 'javascript';
526 | case 'ts':
527 | case 'tsx':
528 | return 'typescript';
529 | case 'py':
530 | return 'python';
531 | case 'java':
532 | return 'java';
533 | case 'cpp':
534 | case 'cc':
535 | case 'c':
536 | return 'cpp';
537 | case 'go':
538 | return 'go';
539 | case 'rs':
540 | return 'rust';
541 | default:
542 | return 'unknown';
543 | }
544 | }
545 |
546 | private isJavaScriptFile(filePath: string): boolean {
547 | return /\.(js|jsx)$/.test(filePath);
548 | }
549 |
550 | private async addTypeScript(code: string, filePath: string): Promise<{ code: string; changes: any[] }> {
551 | const changes: any[] = [];
552 | let tsCode = code;
553 |
554 | // Add basic type annotations
555 | tsCode = tsCode.replace(/function\s+(\w+)\s*\(([^)]*)\)/g, (match, funcName, params) => {
556 | const typedParams = params.replace(/(\w+)/g, '$1: any');
557 | changes.push({
558 | type: 'modify',
559 | description: `Added TypeScript types to function ${funcName}`,
560 | });
561 | return `function ${funcName}(${typedParams}): any`;
562 | });
563 |
564 | // Add interface definitions for objects
565 | const objectRegex = /const\s+(\w+)\s*=\s*\{([^}]+)\}/g;
566 | tsCode = tsCode.replace(objectRegex, (match, objName, objContent) => {
567 | changes.push({
568 | type: 'add',
569 | description: `Added interface for object ${objName}`,
570 | });
571 | return `interface ${objName}Interface {\n // Add properties here\n}\n\n${match}: ${objName}Interface`;
572 | });
573 |
574 | return { code: tsCode, changes };
575 | }
576 |
577 | private isComponentFile(filePath: string, content: string): boolean {
578 | return (
579 | filePath.includes('component') ||
580 | filePath.includes('Component') ||
581 | content.includes('export default') ||
582 | content.includes('React.Component') ||
583 | content.includes('function Component')
584 | );
585 | }
586 |
587 | private async analyzeComponent(filePath: string, content: string): Promise<ExtractableComponent> {
588 | const name = this.extractComponentName(filePath);
589 | const dependencies = this.extractDependencies(content);
590 | const complexity = this.calculateComplexity(content);
591 | const reusabilityScore = this.calculateReusabilityScore(content);
592 |
593 | return {
594 | name,
595 | path: filePath,
596 | type: this.determineComponentType(content),
597 | dependencies,
598 | complexity,
599 | reusabilityScore,
600 | description: this.extractDescription(content),
601 | };
602 | }
603 |
604 | private extractComponentName(filePath: string): string {
605 | const parts = filePath.split('/');
606 | const fileName = parts[parts.length - 1];
607 | return fileName.replace(/\.(js|ts|jsx|tsx)$/, '');
608 | }
609 |
610 | private determineComponentType(content: string): 'component' | 'hook' | 'utility' | 'service' | 'model' {
611 | if (content.includes('React.Component') || content.includes('extends Component')) {
612 | return 'component';
613 | } else if (content.includes('useState') || content.includes('useEffect')) {
614 | return 'hook';
615 | } else if (content.includes('export function') || content.includes('export const')) {
616 | return 'utility';
617 | } else if (content.includes('class') && content.includes('constructor')) {
618 | return 'service';
619 | } else if (content.includes('interface') || content.includes('type ')) {
620 | return 'model';
621 | }
622 | return 'utility';
623 | }
624 |
  /**
   * Rough cyclomatic-complexity estimate: 1 plus the count of branching and
   * structural tokens found in the raw text.
   *
   * NOTE(review): the `?` and `:` patterns match every occurrence (object
   * literals, type annotations and ternaries alike), so scores skew high —
   * treat the result as a relative ranking, not an absolute metric.
   */
  private calculateComplexity(content: string): number {
    const complexityPatterns = [
      { pattern: /\bif\b/g, type: 'if' },
      { pattern: /\belse\b/g, type: 'else' },
      { pattern: /\bfor\b/g, type: 'for' },
      { pattern: /\bwhile\b/g, type: 'while' },
      { pattern: /\bswitch\b/g, type: 'switch' },
      { pattern: /\bcase\b/g, type: 'case' },
      { pattern: /\bcatch\b/g, type: 'catch' },
      { pattern: /\bthrow\b/g, type: 'throw' },
      { pattern: /&&/g, type: '&&' },
      { pattern: /\|\|/g, type: '||' },
      { pattern: /\?/g, type: '?' },
      { pattern: /:/g, type: ':' },
      { pattern: /\bfunction\b/g, type: 'function' },
      { pattern: /\bclass\b/g, type: 'class' }
    ];

    // Base complexity of 1, incremented per token occurrence.
    let complexity = 1;
    for (const { pattern } of complexityPatterns) {
      try {
        const matches = content.match(pattern);
        if (matches) {
          complexity += matches.length;
        }
      } catch (error) {
        // Skip invalid patterns
      }
    }

    return complexity;
  }
657 |
  /**
   * Heuristic 0–100 reusability score. Starts at 50, rewards self-contained
   * exports, TypeScript types and documentation; penalizes framework-specific
   * code and each external dependency. Clamped to [0, 100].
   */
  private calculateReusabilityScore(content: string): number {
    let score = 50;

    // Increase score for pure functions (exports with no imports at all)
    if (content.includes('export') && !content.includes('import')) {
      score += 20;
    }

    // Increase score for TypeScript
    if (content.includes('interface') || content.includes('type ')) {
      score += 15;
    }

    // Increase score for documentation
    if (content.includes('/**') || content.includes('//')) {
      score += 10;
    }

    // Decrease score for framework-specific code
    if (content.includes('React') || content.includes('Vue') || content.includes('Angular')) {
      score -= 10;
    }

    // Decrease score for external dependencies (2 points per unique module)
    const dependencies = this.extractDependencies(content);
    score -= dependencies.length * 2;

    return Math.max(0, Math.min(100, score));
  }
687 |
688 | private extractDescription(content: string): string {
689 | const jsdocMatch = content.match(/\/\*\*\s*\n\s*\*\s*([^*]+)/);
690 | if (jsdocMatch) {
691 | return jsdocMatch[1].trim();
692 | }
693 |
694 | const commentMatch = content.match(/\/\/\s*(.+)/);
695 | if (commentMatch) {
696 | return commentMatch[1].trim();
697 | }
698 |
699 | return 'No description available';
700 | }
701 |
  /**
   * Find long hard-coded string literals that look configurable and propose a
   * `${CONFIG.KEY}` replacement for each.
   *
   * NOTE(review): `replacement` is a literal piece of text spliced into the
   * code, not an evaluated template — downstream code must define CONFIG.
   */
  private extractConfigurableValues(code: string): any[] {
    const configurableValues: any[] = [];

    // Hard-coded strings of 10+ chars that might be configurable; the
    // negative lookahead skips strings used as object keys (followed by ':').
    const stringRegex = /['"]([^'"]{10,})['"](?!\s*:)/g;
    let match;

    while ((match = stringRegex.exec(code)) !== null) {
      const value = match[1];
      if (this.isLikelyConfigurable(value)) {
        configurableValues.push({
          pattern: match[0],
          replacement: '${CONFIG.' + this.toConfigKey(value) + '}',
          description: value.substring(0, 30) + '...',
        });
      }
    }

    return configurableValues;
  }
722 |
723 | private isLikelyConfigurable(value: string): boolean {
724 | return (
725 | value.includes('api') ||
726 | value.includes('endpoint') ||
727 | value.includes('url') ||
728 | value.includes('key') ||
729 | value.includes('secret') ||
730 | value.includes('token') ||
731 | value.length > 20
732 | );
733 | }
734 |
735 | private toConfigKey(value: string): string {
736 | return value
737 | .replace(/[^a-zA-Z0-9]/g, '_')
738 | .toUpperCase()
739 | .substring(0, 20);
740 | }
741 |
742 | private async generatePackageJson(repositoryInfo: any, options: any): Promise<any> {
743 | const originalDeps = repositoryInfo.dependencies || [];
744 |
745 | return {
746 | name: options.name || repositoryInfo.name,
747 | version: '1.0.0',
748 | description: options.description || repositoryInfo.description,
749 | main: 'index.js',
750 | scripts: {
751 | start: 'node index.js',
752 | dev: 'nodemon index.js',
753 | test: 'jest',
754 | build: 'webpack --mode production',
755 | },
756 | dependencies: originalDeps.filter((dep: any) => dep.type === 'dependency'),
757 | devDependencies: originalDeps.filter((dep: any) => dep.type === 'devDependency'),
758 | };
759 | }
760 |
  /**
   * Generate the minimal entry file for a 'starter' template.
   *
   * @param repositoryInfo - Source repository metadata (name fallback).
   * @param options - May carry a `name` override.
   */
  private async generateStarterFiles(repositoryInfo: any, options: any): Promise<any[]> {
    const files: any[] = [];

    // Generate main entry file
    files.push({
      path: 'index.js',
      content: `// Main entry point for ${options.name || repositoryInfo.name}

console.log('Starting ${options.name || repositoryInfo.name}...');

// Add your application logic here
`,
      type: 'source',
    });

    return files;
  }
778 |
  /**
   * Generate the entry file for a 'component-library' template.
   * Note: repositoryInfo/options are currently unused here; the export list
   * is a fixed skeleton for the user to extend.
   */
  private async generateComponentLibraryFiles(repositoryInfo: any, options: any): Promise<any[]> {
    const files: any[] = [];

    // Generate index file
    files.push({
      path: 'src/index.js',
      content: `// Component library entry point

export { default as Button } from './components/Button';
export { default as Input } from './components/Input';
// Add more component exports here
`,
      type: 'source',
    });

    return files;
  }
796 |
  /**
   * Generate the entry file for a 'microservice' template: a minimal Express
   * server with a /health endpoint. repositoryInfo/options are unused here.
   */
  private async generateMicroserviceFiles(repositoryInfo: any, options: any): Promise<any[]> {
    const files: any[] = [];

    // Generate main service file
    files.push({
      path: 'src/server.js',
      content: `// Microservice entry point
const express = require('express');
const app = express();

app.use(express.json());

app.get('/health', (req, res) => {
  res.json({ status: 'healthy' });
});

const PORT = process.env.PORT || 3000;
app.listen(PORT, () => {
  console.log(\`Server running on port \${PORT}\`);
});
`,
      type: 'source',
    });

    return files;
  }
823 |
  /**
   * Generate configuration scaffolding — currently just a .env.example
   * template. repositoryInfo/options are unused here.
   */
  private async generateConfigFiles(repositoryInfo: any, options: any): Promise<any[]> {
    const files: any[] = [];

    // Generate .env template
    files.push({
      path: '.env.example',
      content: `# Environment variables template
NODE_ENV=development
PORT=3000
API_URL=http://localhost:3000
`,
      type: 'config',
    });

    return files;
  }
840 |
  /**
   * Generate a placeholder Jest test file named after the template.
   *
   * @param repositoryInfo - Source repository metadata (name fallback).
   * @param options - May carry a `name` override.
   */
  private async generateTestFiles(repositoryInfo: any, options: any): Promise<any[]> {
    const files: any[] = [];

    // Generate test file
    files.push({
      path: 'tests/index.test.js',
      content: `// Test file for ${options.name || repositoryInfo.name}

describe('${options.name || repositoryInfo.name}', () => {
  test('should work correctly', () => {
    expect(true).toBe(true);
  });
});
`,
      type: 'test',
    });

    return files;
  }
860 |
  /**
   * Generate a README.md skeleton for the template, seeded with the
   * template's (or repository's) name and description.
   */
  private async generateDocumentationFiles(repositoryInfo: any, options: any): Promise<any[]> {
    const files: any[] = [];

    // Generate README
    files.push({
      path: 'README.md',
      content: `# ${options.name || repositoryInfo.name}

${options.description || repositoryInfo.description || 'Generated template'}

## Installation

\`\`\`bash
npm install
\`\`\`

## Usage

\`\`\`bash
npm start
\`\`\`

## Documentation

Add your documentation here.
`,
      type: 'documentation',
    });

    return files;
  }
892 |
893 | private async convertToLibraryComponent(component: ExtractableComponent): Promise<any> {
894 | return {
895 | name: component.name,
896 | path: component.path,
897 | props: [], // Extract props from component
898 | examples: [], // Generate examples
899 | documentation: component.description,
900 | dependencies: component.dependencies,
901 | };
902 | }
903 |
904 | private async extractReusableUtilities(url: string): Promise<ReusableUtility[]> {
905 | const repositoryInfo = await this.githubService.getRepositoryInfo(url);
906 | const utilities: ReusableUtility[] = [];
907 |
908 | for (const [filePath, content] of Object.entries(repositoryInfo.keyFiles)) {
909 | if (this.isUtilityFile(filePath, content)) {
910 | const utility = await this.analyzeUtility(filePath, content);
911 | utilities.push(utility);
912 | }
913 | }
914 |
915 | return utilities;
916 | }
917 |
918 | private isUtilityFile(filePath: string, content: string): boolean {
919 | return (
920 | filePath.includes('util') ||
921 | filePath.includes('helper') ||
922 | filePath.includes('lib') ||
923 | content.includes('export function') ||
924 | content.includes('export const')
925 | );
926 | }
927 |
928 | private async analyzeUtility(filePath: string, content: string): Promise<ReusableUtility> {
929 | const name = this.extractUtilityName(filePath);
930 | const functions = this.extractFunctionNames(content);
931 | const dependencies = this.extractDependencies(content);
932 | const description = this.extractDescription(content);
933 |
934 | return {
935 | name,
936 | path: filePath,
937 | functions,
938 | description,
939 | dependencies,
940 | };
941 | }
942 |
943 | private extractUtilityName(filePath: string): string {
944 | const parts = filePath.split('/');
945 | const fileName = parts[parts.length - 1];
946 | return fileName.replace(/\.(js|ts)$/, '');
947 | }
948 |
949 | private extractFunctionNames(content: string): string[] {
950 | const functions: string[] = [];
951 | const functionRegex = /function\s+(\w+)|const\s+(\w+)\s*=\s*\(|(\w+)\s*:\s*\(/g;
952 |
953 | let match;
954 | while ((match = functionRegex.exec(content)) !== null) {
955 | const functionName = match[1] || match[2] || match[3];
956 | if (functionName) {
957 | functions.push(functionName);
958 | }
959 | }
960 |
961 | return functions;
962 | }
963 |
  /**
   * Produce a fixed package.json skeleton for an extracted component library:
   * Rollup build, Jest tests, Storybook, and React declared as a peer
   * dependency so consumers supply their own React version.
   */
  private async generateLibraryPackageJson(repositoryInfo: any): Promise<any> {
    return {
      name: `${repositoryInfo.name}-components`,
      version: '1.0.0',
      description: `Component library extracted from ${repositoryInfo.name}`,
      main: 'dist/index.js',
      module: 'dist/index.esm.js',
      types: 'dist/index.d.ts',
      scripts: {
        build: 'rollup -c',
        dev: 'rollup -c -w',
        test: 'jest',
        storybook: 'start-storybook -p 6006',
      },
      peerDependencies: {
        react: '^16.8.0 || ^17.0.0 || ^18.0.0',
        'react-dom': '^16.8.0 || ^17.0.0 || ^18.0.0',
      },
      devDependencies: {
        '@rollup/plugin-babel': '^5.3.0',
        '@rollup/plugin-commonjs': '^22.0.0',
        '@rollup/plugin-node-resolve': '^13.3.0',
        rollup: '^2.75.0',
        jest: '^28.0.0',
        '@storybook/react': '^6.5.0',
      },
    };
  }
992 |
993 | private generateFolderStructure(projectType: string, analysis: any): any {
994 | const baseStructure = {
995 | src: 'Source code',
996 | tests: 'Test files',
997 | docs: 'Documentation',
998 | config: 'Configuration files',
999 | };
1000 |
1001 | switch (projectType) {
1002 | case 'web-app':
1003 | return {
1004 | ...baseStructure,
1005 | components: 'React components',
1006 | pages: 'Page components',
1007 | hooks: 'Custom hooks',
1008 | utils: 'Utility functions',
1009 | };
1010 | case 'api':
1011 | return {
1012 | ...baseStructure,
1013 | routes: 'API routes',
1014 | controllers: 'Route controllers',
1015 | models: 'Data models',
1016 | middleware: 'Express middleware',
1017 | };
1018 | case 'library':
1019 | return {
1020 | ...baseStructure,
1021 | lib: 'Library source code',
1022 | examples: 'Usage examples',
1023 | types: 'TypeScript definitions',
1024 | };
1025 | default:
1026 | return baseStructure;
1027 | }
1028 | }
1029 |
  /**
   * Generate seed source files for a scaffold by project type. Unknown types
   * produce no files. repositoryInfo is currently unused.
   */
  private generateScaffoldFiles(projectType: string, repositoryInfo: any): any[] {
    const files: any[] = [];

    // Generate based on project type
    switch (projectType) {
      case 'web-app':
        files.push({
          path: 'src/App.jsx',
          content: 'import React from "react";\n\nfunction App() {\n  return <div>Hello World</div>;\n}\n\nexport default App;',
          type: 'source',
        });
        break;
      case 'api':
        files.push({
          path: 'src/server.js',
          content: 'const express = require("express");\nconst app = express();\n\napp.get("/", (req, res) => {\n  res.json({ message: "Hello World" });\n});\n\napp.listen(3000);',
          type: 'source',
        });
        break;
    }

    return files;
  }
1053 |
1054 | private generateScripts(projectType: string, repositoryInfo: any): any {
1055 | const baseScripts = {
1056 | test: 'jest',
1057 | lint: 'eslint src/',
1058 | };
1059 |
1060 | switch (projectType) {
1061 | case 'web-app':
1062 | return {
1063 | ...baseScripts,
1064 | start: 'react-scripts start',
1065 | build: 'react-scripts build',
1066 | dev: 'react-scripts start',
1067 | };
1068 | case 'api':
1069 | return {
1070 | ...baseScripts,
1071 | start: 'node src/server.js',
1072 | dev: 'nodemon src/server.js',
1073 | };
1074 | default:
1075 | return baseScripts;
1076 | }
1077 | }
1078 |
1079 | private extractRelevantDependencies(dependencies: any[]): string[] {
1080 | return dependencies
1081 | .filter(dep => dep.type === 'dependency')
1082 | .map(dep => dep.name);
1083 | }
1084 |
1085 | // Additional methods for MCP tool handlers
1086 | async transformCode(code: string, transformations: any[], language: string, target_language?: string, options: any = {}): Promise<any> {
1087 | let transformedCode = code;
1088 | const changes = [];
1089 | const warnings = [];
1090 |
1091 | for (const transformation of transformations) {
1092 | switch (transformation.type) {
1093 | case 'naming':
1094 | const namingResult = await this.transformNamingConvention(transformedCode, transformation.options);
1095 | transformedCode = namingResult.code;
1096 | changes.push(...namingResult.changes);
1097 | break;
1098 | case 'modernize':
1099 | const modernizeResult = await this.modernizeCode(transformedCode, language);
1100 | transformedCode = modernizeResult.refactoredCode;
1101 | changes.push(...modernizeResult.changes);
1102 | break;
1103 | case 'performance':
1104 | warnings.push('Performance optimization not implemented');
1105 | break;
1106 | case 'security':
1107 | warnings.push('Security transformation not implemented');
1108 | break;
1109 | }
1110 | }
1111 |
1112 | return {
1113 | originalCode: code,
1114 | transformedCode,
1115 | changes,
1116 | warnings,
1117 | instructions: ['Review transformed code for correctness'],
1118 | };
1119 | }
1120 |
1121 | async adaptCodeStructure(url: string, target_structure: any, options: any = {}): Promise<any> {
1122 | const repositoryInfo = await this.githubService.getRepositoryInfo(url);
1123 | const adaptedStructure: any = {
1124 | name: `${repositoryInfo.name}-adapted`,
1125 | structure: target_structure,
1126 | files: [],
1127 | changes: [],
1128 | instructions: [],
1129 | };
1130 |
1131 | // Generate adapted structure based on target framework
1132 | if (target_structure.framework === 'react') {
1133 | adaptedStructure.files.push({
1134 | path: 'src/App.jsx',
1135 | content: 'import React from "react";\n\nfunction App() {\n return <div>Hello World</div>;\n}\n\nexport default App;',
1136 | });
1137 | }
1138 |
1139 | return adaptedStructure;
1140 | }
1141 | }
```