# Directory Structure

```
├── .gitignore
├── .python-version
├── package.json
├── pyproject.toml
├── README.md
├── setup.py
├── src
│   ├── mcp_dev_server
│   │   ├── __init__.py
│   │   ├── __main__.py
│   │   ├── core
│   │   │   ├── __init__.py
│   │   │   └── server.py
│   │   ├── docker
│   │   │   ├── manager.py
│   │   │   ├── streams.py
│   │   │   ├── templates
│   │   │   │   ├── dev.dockerfile
│   │   │   │   ├── node.dockerfile
│   │   │   │   └── python.dockerfile
│   │   │   ├── templates.py
│   │   │   ├── volumes.py
│   │   │   └── xxx.py
│   │   ├── environments
│   │   │   ├── manager.py
│   │   │   ├── tools.py
│   │   │   └── workflow.py
│   │   ├── handlers
│   │   │   ├── __init__.py
│   │   │   └── input_request_handler.py
│   │   ├── managers
│   │   │   ├── __init__.py
│   │   │   ├── base_manager.py
│   │   │   ├── build_manager.py
│   │   │   ├── dependency_manager.py
│   │   │   ├── project_manager.py
│   │   │   ├── template_manager.py
│   │   │   ├── test_manager.py
│   │   │   └── workflow_manager.py
│   │   ├── models
│   │   │   ├── __init__.py
│   │   │   ├── config.py
│   │   │   ├── errors.py
│   │   │   └── input_response.py
│   │   ├── package
│   │   │   └── manager.py
│   │   ├── project_manager
│   │   │   ├── base_project.py
│   │   │   ├── context.py
│   │   │   ├── git.py
│   │   │   ├── manager.py
│   │   │   ├── project_types.py
│   │   │   ├── project.py
│   │   │   └── templates.py
│   │   ├── prompts
│   │   │   ├── handler.py
│   │   │   ├── input_protocol.py
│   │   │   ├── project_templates.py
│   │   │   └── templates.py
│   │   ├── server.py
│   │   ├── test
│   │   │   └── manager.py
│   │   ├── utils
│   │   │   ├── __init__.py
│   │   │   ├── config.py
│   │   │   ├── errors.py
│   │   │   └── logging.py
│   │   └── workflow
│   │       └── manager.py
│   └── resources
│       └── templates
│           └── basic
│               └── files
├── tests
│   └── test_integration.py
└── uv.lock
```

# Files

--------------------------------------------------------------------------------
/.python-version:
--------------------------------------------------------------------------------

```
1 | 3.12
2 | 
```

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------

```
 1 | # Python
 2 | __pycache__/
 3 | *.py[cod]
 4 | *.so
 5 | .Python
 6 | build/
 7 | develop-eggs/
 8 | dist/
 9 | downloads/
10 | eggs/
11 | .eggs/
12 | lib/
13 | lib64/
14 | parts/
15 | sdist/
16 | var/
17 | wheels/
18 | *.egg-info/
19 | .installed.cfg
20 | *.egg
21 | 
22 | # Virtual environments
23 | .env
24 | .venv
25 | env/
26 | venv/
27 | ENV/
28 | 
29 | # IDE
30 | .idea/
31 | .vscode/
32 | *.swp
33 | *.swo
34 | 
35 | # Project specific
36 | *.log
37 | .docker/
38 | .pytest_cache/
39 | .coverage
40 | htmlcov/
```

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------

```markdown
 1 | # MCP Development Server
 2 | 
 3 | A Model Context Protocol (MCP) server that enables Claude to manage software development projects, providing complete project context awareness and handling code execution through Docker environments.
 4 | 
 5 | ## Features
 6 | 
 7 | ### Core Infrastructure
 8 | - Project context management
 9 | - File system operations
10 | - Template-based project creation
11 | - Git integration
12 | 
13 | ### Requirements
14 | - Python 3.12 or higher
15 | - Docker
16 | - Git
17 | 
18 | ## Installation
19 | 
20 | ```bash
21 | # Using pip
22 | pip install mcp-dev-server
23 | 
24 | # Development installation
25 | git clone https://github.com/your-org/mcp-dev-server.git
26 | cd mcp-dev-server
27 | pip install -e .
28 | ```
29 | 
30 | ## Configuration
31 | 
32 | ### Claude Desktop Configuration
33 | 
34 | Add to your Claude Desktop configuration file:
35 | 
36 | On macOS: `~/Library/Application Support/Claude/claude_desktop_config.json`
37 | On Windows: `%APPDATA%/Claude/claude_desktop_config.json`
38 | 
39 | ```json
40 | {
41 |   "mcpServers": {
42 |     "dev": {
43 |       "command": "mcp-dev-server",
44 |       "args": []
45 |     }
46 |   }
47 | }
48 | ```
49 | 
50 | ## Usage
51 | 
52 | The server provides several MCP capabilities:
53 | 
54 | ### Resources
55 | - Project structure and files
56 | - Build status and artifacts
57 | - Test results
58 | - Docker container status
59 | 
60 | ### Tools
61 | - Project initialization
62 | - Build operations
63 | - Test execution
64 | - Docker commands
65 | 
66 | ### Prompts
67 | - Project analysis
68 | - Development suggestions
69 | - Error diagnosis
70 | 
71 | ## Development
72 | 
73 | ### Setting up development environment
74 | 
75 | ```bash
76 | # Create virtual environment
77 | python -m venv .venv
78 | source .venv/bin/activate  # On Windows: .venv\Scripts\activate
79 | 
80 | # Install dependencies
81 | pip install -e ".[dev]"
82 | ```
83 | 
84 | ### Running tests
85 | 
86 | ```bash
87 | pytest tests/
88 | ```
89 | 
90 | ## Contributing
91 | 
92 | Please read [CONTRIBUTING.md](CONTRIBUTING.md) for details on our code of conduct and the process for submitting pull requests.
93 | 
94 | ## License
95 | 
96 | This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
```
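
For a quick local check outside Claude Desktop, the console script's behaviour can also be reproduced programmatically; a minimal sketch, assuming the editable install from the README:

```python
# Minimal sketch: start the server the same way the `mcp-dev-server`
# console script (see setup.py) does, i.e. via mcp_dev_server.main().
from mcp_dev_server import main

if __name__ == "__main__":
    main()  # runs MCPDevServer over stdio until interrupted
```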

--------------------------------------------------------------------------------
/src/mcp_dev_server/docker/xxx.py:
--------------------------------------------------------------------------------

```python
1 | 
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/utils/__init__.py:
--------------------------------------------------------------------------------

```python
1 | 
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/core/__init__.py:
--------------------------------------------------------------------------------

```python
1 | from .server import Server
2 | 
3 | __all__ = ['Server']
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/prompts/input_protocol.py:
--------------------------------------------------------------------------------

```python
1 | """Input request protocol for MCP server."""
2 | [Previous content...]
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/handlers/__init__.py:
--------------------------------------------------------------------------------

```python
1 | from .input_request_handler import InputRequestHandler
2 | 
3 | __all__ = ['InputRequestHandler']
```

--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------

```toml
1 | [build-system]
2 | requires = ["setuptools>=45", "wheel"]
3 | build-backend = "setuptools.build_meta"
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/models/errors.py:
--------------------------------------------------------------------------------

```python
1 | class MCPDevServerError(Exception):
2 |     """Base exception class for MCP Development Server errors."""
3 |     pass
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/__main__.py:
--------------------------------------------------------------------------------

```python
1 | """Main entry point when run with python -m mcp_dev_server"""
2 | from . import main
3 | 
4 | if __name__ == '__main__':
5 |     main()
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/managers/test_manager.py:
--------------------------------------------------------------------------------

```python
1 | class TestManager:
2 |     """Manager class for test-related operations."""
3 |     
4 |     def __init__(self):
5 |         """Initialize the test manager."""
6 |         pass
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/managers/build_manager.py:
--------------------------------------------------------------------------------

```python
1 | class BuildManager:
2 |     """Manager class for build-related operations."""
3 |     
4 |     def __init__(self):
5 |         """Initialize the build manager."""
6 |         pass
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/models/__init__.py:
--------------------------------------------------------------------------------

```python
1 | from .config import Config
2 | from .input_response import InputResponse
3 | from .errors import MCPDevServerError
4 | 
5 | __all__ = ['Config', 'InputResponse', 'MCPDevServerError']
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/managers/template_manager.py:
--------------------------------------------------------------------------------

```python
1 | class TemplateManager:
2 |     """Manager class for template-related operations."""
3 |     
4 |     def __init__(self):
5 |         """Initialize the template manager."""
6 |         pass
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/managers/workflow_manager.py:
--------------------------------------------------------------------------------

```python
1 | class WorkflowManager:
2 |     """Manager class for workflow-related operations."""
3 |     
4 |     def __init__(self):
5 |         """Initialize the workflow manager."""
6 |         pass
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/managers/dependency_manager.py:
--------------------------------------------------------------------------------

```python
1 | class DependencyManager:
2 |     """Manager class for dependency-related operations."""
3 |     
4 |     def __init__(self):
5 |         """Initialize the dependency manager."""
6 |         pass
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/managers/project_manager.py:
--------------------------------------------------------------------------------

```python
 1 | from ..models import Config
 2 | 
 3 | class ProjectManager:
 4 |     """Manager class for project-related operations."""
 5 |     
 6 |     def __init__(self, config: Config):
 7 |         """Initialize the project manager.
 8 |         
 9 |         Args:
10 |             config: Server configuration
11 |         """
12 |         self.config = config
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/managers/__init__.py:
--------------------------------------------------------------------------------

```python
 1 | from .project_manager import ProjectManager
 2 | from .template_manager import TemplateManager
 3 | from .build_manager import BuildManager
 4 | from .dependency_manager import DependencyManager
 5 | from .test_manager import TestManager
 6 | from .workflow_manager import WorkflowManager
 7 | 
 8 | __all__ = [
 9 |     'ProjectManager',
10 |     'TemplateManager',
11 |     'BuildManager',
12 |     'DependencyManager',
13 |     'TestManager',
14 |     'WorkflowManager'
15 | ]
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/managers/base_manager.py:
--------------------------------------------------------------------------------

```python
 1 | """Base manager class with common functionality."""
 2 | import uuid
 3 | from typing import Dict, Any
 4 | 
 5 | class BaseManager:
 6 |     """Base class for all managers."""
 7 |     
 8 |     def _generate_id(self) -> str:
 9 |         """Generate a unique identifier.
10 |         
11 |         Returns:
12 |             str: Unique identifier
13 |         """
14 |         return str(uuid.uuid4())
15 |         
16 |     async def cleanup(self):
17 |         """Clean up resources. Override in subclasses."""
18 |         pass
```
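
A hypothetical subclass (not part of the repository) illustrating how `BaseManager` is intended to be extended:

```python
# Hypothetical example subclass of BaseManager.
from mcp_dev_server.managers.base_manager import BaseManager

class CacheManager(BaseManager):
    """Toy manager that owns an in-memory cache."""

    def __init__(self):
        self.cache_id = self._generate_id()  # unique id from the base class
        self.entries: dict[str, str] = {}

    async def cleanup(self):
        # Override the no-op default to release held resources.
        self.entries.clear()
```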

--------------------------------------------------------------------------------
/src/mcp_dev_server/__init__.py:
--------------------------------------------------------------------------------

```python
 1 | """MCP Development Server Package."""
 2 | from . import server
 3 | import asyncio
 4 | from typing import Optional
 5 | from .utils.logging import setup_logging
 6 | 
 7 | logger = setup_logging(__name__)
 8 | 
 9 | def main():
10 |     """Main entry point for the package."""
11 |     try:
12 |         server_instance = server.MCPDevServer()
13 |         asyncio.run(server_instance.run())
14 |     except KeyboardInterrupt:
15 |         logger.info("Server shutdown requested")
16 |     except Exception as e:
17 |         logger.error(f"Server error: {str(e)}")
18 |         raise
19 | 
20 | # Expose key components at package level
21 | __all__ = ['main', 'server']
```

--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------

```python
 1 | from setuptools import setup, find_packages
 2 | 
 3 | setup(
 4 |     name="mcp-dev-server",
 5 |     version="0.1.0",
 6 |     packages=find_packages(where="src"),
 7 |     package_dir={"": "src"},
 8 |     install_requires=[
 9 |         "mcp",            # Base MCP package
10 |         "aiohttp>=3.8.0",
11 |         "websockets>=10.0",
12 |         "uvicorn>=0.15.0",
13 |         "fastapi>=0.68.0",
14 |         "typing_extensions>=4.5.0",
15 |     ],
16 |     entry_points={
17 |         "console_scripts": [
18 |             "mcp-dev-server=mcp_dev_server:main",
19 |         ],
20 |     },
21 |     python_requires=">=3.8",
22 |     author="Your Name",
23 |     description="MCP Development Server"
24 | )
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/models/input_response.py:
--------------------------------------------------------------------------------

```python
 1 | from typing import Any, Dict
 2 | 
 3 | class InputResponse:
 4 |     """Class representing a user's input response."""
 5 |     
 6 |     def __init__(self, request_id: str, values: Dict[str, Any]):
 7 |         """Initialize an input response.
 8 |         
 9 |         Args:
10 |             request_id: ID of the input request
11 |             values: Dictionary of input values
12 |         """
13 |         self.request_id = request_id
14 |         self.values = values
15 |         
16 |     def validate(self) -> bool:
17 |         """Validate the input response.
18 |         
19 |         Returns:
20 |             bool: True if valid, False otherwise
21 |         """
22 |         return True  # TODO: Implement validation
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/utils/errors.py:
--------------------------------------------------------------------------------

```python
 1 | """Error definitions for MCP Development Server."""
 2 | 
 3 | class MCPDevServerError(Exception):
 4 |     """Base error class for MCP Development Server."""
 5 |     pass
 6 | 
 7 | class ProjectError(MCPDevServerError):
 8 |     """Project-related errors."""
 9 |     pass
10 | 
11 | class BuildError(MCPDevServerError):
12 |     """Build-related errors."""
13 |     pass
14 | 
15 | class TestError(MCPDevServerError):
16 |     """Test-related errors."""
17 |     pass
18 | 
19 | class EnvironmentError(MCPDevServerError):
20 |     """Environment-related errors."""
21 |     pass
22 | 
23 | class ConfigurationError(MCPDevServerError):
24 |     """Configuration-related errors."""
25 |     pass
26 | 
27 | class WorkflowError(MCPDevServerError):
28 |     """Workflow-related errors."""
29 |     pass
30 | 
31 | class DockerError(MCPDevServerError):
32 |     """Docker-related errors."""
33 |     pass
34 | 
35 | class PackageError(MCPDevServerError):
36 |     """Package management errors."""
37 |     pass
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/docker/templates/node.dockerfile:
--------------------------------------------------------------------------------

```dockerfile
 1 | # Node.js development environment
 2 | FROM node:{{ node_version }}
 3 | 
 4 | # Install system dependencies
 5 | RUN apt-get update && apt-get install -y \
 6 |     git \
 7 |     curl \
 8 |     && rm -rf /var/lib/apt/lists/*
 9 | 
10 | # Set working directory
11 | WORKDIR /workspace
12 | 
13 | {% if package_file %}
14 | # Install Node.js dependencies
15 | COPY {{ package_file }} .
16 | {% if package_lock %}
17 | COPY {{ package_lock }} .
18 | RUN npm ci
19 | {% else %}
20 | RUN npm install
21 | {% endif %}
22 | {% endif %}
23 | 
24 | {% if global_packages %}
25 | # Install global packages
26 | RUN npm install -g {% for package in global_packages %}{{ package }} {% endfor %}
27 | {% endif %}
28 | 
29 | # Set Node.js environment variables
30 | ENV NODE_ENV=development
31 | 
32 | {% if command %}
33 | # Default command
34 | CMD {{ command }}
35 | {% endif %}
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/models/config.py:
--------------------------------------------------------------------------------

```python
 1 | class Config:
 2 |     """Configuration class for MCP Development Server."""
 3 |     
 4 |     def __init__(self):
 5 |         """Initialize configuration with default values."""
 6 |         self.host = "localhost"
 7 |         self.port = 8000
 8 |         self.debug = False
 9 |         
10 |     def load_from_file(self, file_path: str):
11 |         """Load configuration from a file.
12 |         
13 |         Args:
14 |             file_path: Path to configuration file
15 |         """
16 |         pass  # TODO: Implement configuration loading
17 |         
18 |     def save_to_file(self, file_path: str):
19 |         """Save current configuration to a file.
20 |         
21 |         Args:
22 |             file_path: Path to save configuration
23 |         """
24 |         pass  # TODO: Implement configuration saving
```

--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------

```json
 1 | {
 2 |   "name": "mcp-dev-server",
 3 |   "version": "1.0.0",
 4 |   "description": "Model Context Protocol Development Server",
 5 |   "main": "dist/app.js",
 6 |   "scripts": {
 7 |     "start": "node dist/app.js",
 8 |     "dev": "nodemon src/app.ts",
 9 |     "build": "tsc",
10 |     "test": "jest",
11 |     "lint": "eslint . --ext .ts"
12 |   },
13 |   "dependencies": {
14 |     "express": "^4.18.2",
15 |     "typescript": "^5.0.0",
16 |     "mongoose": "^7.0.0",
17 |     "dotenv": "^16.0.0",
18 |     "winston": "^3.8.0",
19 |     "cors": "^2.8.5",
20 |     "helmet": "^6.0.0",
21 |     "joi": "^17.0.0"
22 |   },
23 |   "devDependencies": {
24 |     "@types/express": "^4.17.17",
25 |     "@types/node": "^18.0.0",
26 |     "@types/jest": "^29.0.0",
27 |     "@typescript-eslint/eslint-plugin": "^5.0.0",
28 |     "@typescript-eslint/parser": "^5.0.0",
29 |     "eslint": "^8.0.0",
30 |     "jest": "^29.0.0",
31 |     "nodemon": "^2.0.0",
32 |     "ts-jest": "^29.0.0",
33 |     "ts-node": "^10.0.0"
34 |   }
35 | }
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/docker/templates/python.dockerfile:
--------------------------------------------------------------------------------

```dockerfile
 1 | # Python development environment
 2 | FROM python:{{ python_version }}-slim
 3 | 
 4 | # Install system dependencies
 5 | RUN apt-get update && apt-get install -y \
 6 |     git \
 7 |     curl \
 8 |     build-essential \
 9 |     && rm -rf /var/lib/apt/lists/*
10 | 
11 | # Set working directory
12 | WORKDIR /workspace
13 | 
14 | {% if install_poetry %}
15 | # Install Poetry
16 | RUN curl -sSL https://install.python-poetry.org | python3 -
17 | ENV PATH="/root/.local/bin:$PATH"
18 | {% endif %}
19 | 
20 | {% if requirements_file %}
21 | # Install Python dependencies
22 | COPY {{ requirements_file }} .
23 | RUN pip install -r {{ requirements_file }}
24 | {% endif %}
25 | 
26 | {% if additional_packages %}
27 | # Install additional packages
28 | RUN pip install {% for package in additional_packages %}{{ package }} {% endfor %}
29 | {% endif %}
30 | 
31 | # Set Python environment variables
32 | ENV PYTHONUNBUFFERED=1 \
33 |     PYTHONDONTWRITEBYTECODE=1
34 | 
35 | {% if command %}
36 | # Default command
37 | CMD {{ command }}
38 | {% endif %}
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/utils/logging.py:
--------------------------------------------------------------------------------

```python
 1 | """Logging configuration for MCP Development Server."""
 2 | import logging
 3 | import sys
 4 | from typing import Optional
 5 | 
 6 | def setup_logging(name: Optional[str] = None, level: int = logging.INFO) -> logging.Logger:
 7 |     """Setup logging configuration.
 8 |     
 9 |     Args:
10 |         name: Logger name
11 |         level: Logging level
12 |         
13 |     Returns:
14 |         logging.Logger: Configured logger instance
15 |     """
16 |     # Create logger
17 |     logger = logging.getLogger(name or __name__)
18 |     logger.setLevel(level)
19 |     
20 |     # Create stderr handler (MCP protocol requires clean stdout)
21 |     handler = logging.StreamHandler(sys.stderr)
22 |     handler.setLevel(level)
23 |     
24 |     # Create formatter
25 |     formatter = logging.Formatter(
26 |         '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
27 |     )
28 |     handler.setFormatter(formatter)
29 |     
30 |     # Add handler to logger
31 |     logger.addHandler(handler)
32 |     
33 |     return logger
```
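
A short usage sketch of the logging helper; because the MCP stdio transport owns stdout, all diagnostics are routed to stderr:

```python
# Sketch of the logging pattern used throughout the package: messages go to
# stderr so the MCP protocol stream on stdout stays clean.
import logging
from mcp_dev_server.utils.logging import setup_logging

logger = setup_logging(__name__, level=logging.DEBUG)
logger.info("written to stderr, not stdout")
```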

--------------------------------------------------------------------------------
/src/mcp_dev_server/handlers/input_request_handler.py:
--------------------------------------------------------------------------------

```python
 1 | from typing import Dict, Any, Optional
 2 | from ..models import InputResponse
 3 | 
 4 | class InputRequestHandler:
 5 |     """Handler for input requests."""
 6 |     
 7 |     def __init__(self):
 8 |         """Initialize the input request handler."""
 9 |         pass
10 |         
11 |     async def request_input(self, request_type: str, context: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
12 |         """Request input from the user.
13 |         
14 |         Args:
15 |             request_type: Type of input request
16 |             context: Additional context for request
17 |             
18 |         Returns:
19 |             Dict[str, Any]: User's input values
20 |         """
21 |         return {}  # TODO: Implement input request handling
22 |         
23 |     def handle_response(self, response: InputResponse):
24 |         """Handle input response from user.
25 |         
26 |         Args:
27 |             response: User's response
28 |         """
29 |         pass  # TODO: Implement response handling
```

--------------------------------------------------------------------------------
/tests/test_integration.py:
--------------------------------------------------------------------------------

```python
 1 | """Test MCP server integration with Claude."""
 2 | import asyncio
 3 | import pytest
 4 | from mcp_dev_server.server import MCPDevServer
 5 | from mcp_dev_server.utils.config import Config
 6 | 
 7 | @pytest.mark.asyncio
 8 | async def test_server_initialization():
 9 |     """Test server initialization."""
10 |     config = Config()
11 |     server = MCPDevServer()
12 |     
13 |     # Test project creation
14 |     project = await server.project_manager.create_project(
15 |         name="test-project",
16 |         project_type="python",
17 |         project_config={
18 |             "python_version": "3.12",
19 |             "project_type": "fastapi",
20 |             "dependency_management": "poetry"
21 |         }
22 |     )
23 |     
24 |     assert project is not None
25 |     assert project.config["name"] == "test-project"
26 |     
27 |     # Test tool execution
28 |     result = await server.handle_call_tool("build", {
29 |         "environment": "default",
30 |         "command": "build"
31 |     })
32 |     
33 |     assert result[0].type == "text"
34 |     
35 |     # Cleanup
36 |     await server.cleanup()
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/docker/templates.py:
--------------------------------------------------------------------------------

```python
 1 | """Dockerfile templates for different environments."""
 2 | from typing import Any, Dict, Optional
 3 | from jinja2 import Template
 4 | 
 5 | class DockerTemplates:
 6 |     """Manages Dockerfile templates for different environments."""
 7 |     
 8 |     @staticmethod
 9 |     def get_template(environment: str, config: Optional[Dict[str, Any]] = None) -> str:
10 |         """Get Dockerfile template for specific environment."""
11 |         config = config or {}
12 |         
13 |         if environment == "python":
14 |             return Template("""
15 | FROM python:{{ python_version|default('3.12-slim') }}
16 | 
17 | WORKDIR /app
18 | 
19 | {% if requirements_file %}
20 | COPY {{ requirements_file }} .
21 | RUN pip install -r {{ requirements_file }}
22 | {% endif %}
23 | 
24 | {% if install_dev_deps %}
25 | RUN pip install pytest mypy black
26 | {% endif %}
27 | 
28 | {% for cmd in additional_commands|default([]) %}
29 | RUN {{ cmd }}
30 | {% endfor %}
31 | 
32 | COPY . .
33 | 
34 | CMD ["python", "{{ entry_point|default('main.py') }}"]
35 | """).render(config)
36 |             
37 |         elif environment == "node":
38 |             return Template("""
39 | FROM node:{{ node_version|default('20-slim') }}
40 | 
41 | WORKDIR /app
42 | 
43 | COPY package*.json ./
44 | 
45 | RUN npm install {% if install_dev_deps %}--include=dev{% endif %}
46 | 
47 | {% for cmd in additional_commands|default([]) %}
48 | RUN {{ cmd }}
49 | {% endfor %}
50 | 
51 | COPY . .
52 | 
53 | CMD ["npm", "{{ npm_command|default('start') }}"]
54 | """).render(config)
55 |             
56 |         else:
57 |             raise ValueError(f"Unknown environment: {environment}")
```
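
A brief usage sketch of the renderer, assuming the subpackage is importable; the dictionary keys correspond to the Jinja2 variables used in the templates above:

```python
# Render a Python Dockerfile from the built-in template.
from mcp_dev_server.docker.templates import DockerTemplates

dockerfile = DockerTemplates.get_template("python", {
    "python_version": "3.12-slim",
    "requirements_file": "requirements.txt",
    "install_dev_deps": True,
    "entry_point": "main.py",
})
print(dockerfile)  # plain Dockerfile text, ready to write out or build
```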

--------------------------------------------------------------------------------
/src/mcp_dev_server/docker/templates/dev.dockerfile:
--------------------------------------------------------------------------------

```dockerfile
 1 | # Multi-language development environment
 2 | FROM ubuntu:{{ ubuntu_version }}
 3 | 
 4 | # Install system dependencies
 5 | RUN apt-get update && apt-get install -y \
 6 |     git \
 7 |     curl \
 8 |     build-essential \
 9 |     software-properties-common \
10 |     && rm -rf /var/lib/apt/lists/*
11 | 
12 | {% if install_python %}
13 | # Install Python
14 | RUN add-apt-repository ppa:deadsnakes/ppa && \
15 |     apt-get update && \
16 |     apt-get install -y python{{ python_version }} python{{ python_version }}-venv python{{ python_version }}-dev && \
17 |     rm -rf /var/lib/apt/lists/*
18 | {% endif %}
19 | 
20 | {% if install_node %}
21 | # Install Node.js
22 | RUN curl -fsSL https://deb.nodesource.com/setup_{{ node_version }}.x | bash - && \
23 |     apt-get install -y nodejs && \
24 |     rm -rf /var/lib/apt/lists/*
25 | {% endif %}
26 | 
27 | {% if install_docker %}
28 | # Install Docker
29 | RUN curl -fsSL https://get.docker.com | sh && \
30 |     rm -rf /var/lib/apt/lists/*
31 | {% endif %}
32 | 
33 | # Set working directory
34 | WORKDIR /workspace
35 | 
36 | {% if requirements_file %}
37 | # Install Python dependencies
38 | COPY {{ requirements_file }} .
39 | RUN pip{{ python_version }} install -r {{ requirements_file }}
40 | {% endif %}
41 | 
42 | {% if package_file %}
43 | # Install Node.js dependencies
44 | COPY {{ package_file }} .
45 | {% if package_lock %}
46 | COPY {{ package_lock }} .
47 | RUN npm ci
48 | {% else %}
49 | RUN npm install
50 | {% endif %}
51 | {% endif %}
52 | 
53 | {% if additional_tools %}
54 | # Install additional tools
55 | RUN apt-get update && apt-get install -y \
56 |     {% for tool in additional_tools %}{{ tool }} {% endfor %} \
57 |     && rm -rf /var/lib/apt/lists/*
58 | {% endif %}
59 | 
60 | # Set environment variables
61 | ENV PYTHONUNBUFFERED=1 \
62 |     PYTHONDONTWRITEBYTECODE=1 \
63 |     NODE_ENV=development
64 | 
65 | {% if command %}
66 | # Default command
67 | CMD {{ command }}
68 | {% endif %}
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/prompts/handler.py:
--------------------------------------------------------------------------------

```python
 1 | [Previous handler.py content...]
 2 | 
 3 |     async def process_field_dependencies(self, request: InputRequest, field_updates: Dict[str, Any]):
 4 |         """Process field dependencies based on user input.
 5 |         
 6 |         Some fields might need to be updated based on values of other fields.
 7 |         For example, if user selects Python as language, we need to show Python version field.
 8 |         
 9 |         Args:
10 |             request: Current input request
11 |             field_updates: Updated field values
12 |         """
13 |         if request.request_id == "environment_setup":
14 |             language = field_updates.get("language")
15 |             if language:
16 |                 # Update required fields based on language selection
17 |                 for field in request.fields:
18 |                     if field.name == "python_version":
19 |                         field.required = language in ["python", "both"]
20 |                     elif field.name == "node_version":
21 |                         field.required = language in ["node", "both"]
22 | 
23 |         elif request.request_id == "test_configuration":
24 |             test_framework = field_updates.get("test_framework")
25 |             if test_framework:
26 |                 # Update coverage options based on test framework
27 |                 for field in request.fields:
28 |                     if field.name == "include_coverage":
29 |                         field.options = self._get_coverage_options(test_framework)
30 | 
31 |                             
32 |     def _get_coverage_options(self, framework: str) -> List[Dict[str, str]]:
33 |         """Get coverage tool options based on test framework."""
34 |         coverage_tools = {
35 |             "pytest": [
36 |                 {"value": "pytest-cov", "label": "pytest-cov"},
37 |                 {"value": "coverage", "label": "coverage.py"}
38 |             ],
39 |             "unittest": [
40 |                 {"value": "coverage", "label": "coverage.py"}
41 |             ],
42 |             "jest": [
43 |                 {"value": "jest-coverage", "label": "Jest Coverage"}
44 |             ],
45 |             "mocha": [
46 |                 {"value": "nyc", "label": "Istanbul/nyc"}
47 |             ]
48 |         }
49 |         return coverage_tools.get(framework, [])
```

--------------------------------------------------------------------------------
/src/mcp_dev_server/utils/config.py:
--------------------------------------------------------------------------------

```python
 1 | """Configuration management for MCP Development Server."""
 2 | import os
 3 | import json
 4 | from typing import Dict, Any, Optional
 5 | from pathlib import Path
 6 | 
 7 | class Config:
 8 |     """Configuration manager."""
 9 |     
10 |     def __init__(self):
11 |         """Initialize configuration."""
12 |         self.config_dir = self._get_config_dir()
13 |         self.config_file = self.config_dir / "config.json"
14 |         self.config: Dict[str, Any] = self._load_config()
15 |         
16 |     def _get_config_dir(self) -> Path:
17 |         """Get configuration directory path."""
18 |         if os.name == "nt":  # Windows
19 |             config_dir = Path(os.getenv("APPDATA")) / "Claude"
20 |         else:  # macOS/Linux
21 |             config_dir = Path.home() / ".config" / "claude"
22 |             
23 |         config_dir.mkdir(parents=True, exist_ok=True)
24 |         return config_dir
25 |         
26 |     def _load_config(self) -> Dict[str, Any]:
27 |         """Load configuration from file."""
28 |         if self.config_file.exists():
29 |             try:
30 |                 with open(self.config_file, "r") as f:
31 |                     return json.load(f)
32 |             except Exception as e:
33 |                 print(f"Error loading config: {e}")
34 |                 return self._get_default_config()
35 |         else:
36 |             config = self._get_default_config()
37 |             self._save_config(config)
38 |             return config
39 |             
40 |     def _save_config(self, config: Dict[str, Any]):
41 |         """Save configuration to file."""
42 |         try:
43 |             with open(self.config_file, "w") as f:
44 |                 json.dump(config, f, indent=2)
45 |         except Exception as e:
46 |             print(f"Error saving config: {e}")
47 |             
48 |     def _get_default_config(self) -> Dict[str, Any]:
49 |         """Get default configuration."""
50 |         return {
51 |             "projectsDir": str(Path.home() / "Projects"),
52 |             "templatesDir": str(self.config_dir / "templates"),
53 |             "environments": {
54 |                 "default": {
55 |                     "type": "docker",
56 |                     "image": "python:3.12-slim"
57 |                 }
58 |             }
59 |         }
60 |         
61 |     def get(self, key: str, default: Any = None) -> Any:
62 |         """Get configuration value."""
63 |         return self.config.get(key, default)
64 |         
65 |     def set(self, key: str, value: Any):
66 |         """Set configuration value."""
67 |         self.config[key] = value
68 |         self._save_config(self.config)
69 |         
70 |     def update(self, updates: Dict[str, Any]):
71 |         """Update multiple configuration values."""
72 |         self.config.update(updates)
73 |         self._save_config(self.config)
```
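
Usage sketch: `set()` and `update()` write the configuration back to `config.json` immediately:

```python
# Read and override configuration values; changes are persisted to
# <config dir>/config.json on every set()/update().
from mcp_dev_server.utils.config import Config

config = Config()
print(config.get("projectsDir"))                # default: ~/Projects
config.set("projectsDir", "/tmp/mcp-projects")  # persisted immediately
config.update({"debug": True})                  # hypothetical extra key
```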

--------------------------------------------------------------------------------
/src/mcp_dev_server/docker/volumes.py:
--------------------------------------------------------------------------------

```python
 1 | """Docker volume management for MCP Development Server."""
 2 | from typing import Any, Dict, List, Optional
 3 | import docker
 4 | from docker.errors import DockerException
 5 | 
 6 | from ..utils.logging import setup_logging
 7 | from ..utils.errors import DockerError
 8 | 
 9 | logger = setup_logging(__name__)
10 | 
11 | class VolumeManager:
12 |     """Manages Docker volumes for development environments."""
13 |     
14 |     def __init__(self):
15 |         self.client = docker.from_env()
16 |         
17 |     async def create_volume(
18 |         self,
19 |         name: str,
20 |         labels: Optional[Dict[str, str]] = None
21 |     ) -> str:
22 |         """Create a Docker volume."""
23 |         try:
24 |             volume = self.client.volumes.create(
25 |                 name=name,
26 |                 driver='local',
27 |                 labels=labels or {}
28 |             )
29 |             logger.info(f"Created volume: {name}")
30 |             return volume.name
31 |             
32 |         except DockerException as e:
33 |             raise DockerError(f"Failed to create volume: {str(e)}")
34 |             
35 |     async def remove_volume(self, name: str) -> None:
36 |         """Remove a Docker volume."""
37 |         try:
38 |             volume = self.client.volumes.get(name)
39 |             volume.remove()
40 |             logger.info(f"Removed volume: {name}")
41 |             
42 |         except DockerException as e:
43 |             raise DockerError(f"Failed to remove volume: {str(e)}")
44 |             
45 |     async def list_volumes(
46 |         self,
47 |         filters: Optional[Dict[str, str]] = None
48 |     ) -> List[Dict[str, Any]]:
49 |         """List Docker volumes."""
50 |         try:
51 |             volumes = self.client.volumes.list(filters=filters or {})
52 |             return [
53 |                 {
54 |                     "name": v.name,
55 |                     "driver": v.attrs['Driver'],
56 |                     "mountpoint": v.attrs['Mountpoint'],
57 |                     "labels": v.attrs['Labels'] or {}
58 |                 }
59 |                 for v in volumes
60 |             ]
61 |             
62 |         except DockerException as e:
63 |             raise DockerError(f"Failed to list volumes: {str(e)}")
64 |             
65 |     async def get_volume_info(self, name: str) -> Dict[str, Any]:
66 |         """Get detailed information about a volume."""
67 |         try:
68 |             volume = self.client.volumes.get(name)
69 |             return {
70 |                 "name": volume.name,
71 |                 "driver": volume.attrs['Driver'],
72 |                 "mountpoint": volume.attrs['Mountpoint'],
73 |                 "labels": volume.attrs['Labels'] or {},
74 |                 "scope": volume.attrs['Scope'],
75 |                 "status": volume.attrs.get('Status', {})
76 |             }
77 |             
78 |         except DockerException as e:
79 |             raise DockerError(f"Failed to get volume info: {str(e)}")
80 | 
```
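
A minimal async usage sketch; it assumes a reachable Docker daemon and that the subpackage is importable:

```python
# Create, inspect, and remove a labelled volume (requires a Docker daemon).
import asyncio
from mcp_dev_server.docker.volumes import VolumeManager

async def demo():
    volumes = VolumeManager()
    name = await volumes.create_volume("mcp-demo", labels={"mcp": "dev"})
    print(await volumes.get_volume_info(name))
    await volumes.remove_volume(name)

asyncio.run(demo())
```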

--------------------------------------------------------------------------------
/src/mcp_dev_server/package/manager.py:
--------------------------------------------------------------------------------

```python
  1 | """Package management integration for MCP Development Server."""
  2 | 
  3 | from typing import Dict, List, Optional, Any
  4 | from enum import Enum
  5 | from ..utils.errors import PackageError
  6 | from ..utils.logging import setup_logging
  7 | 
  8 | logger = setup_logging(__name__)
  9 | 
 10 | class PackageManager(str, Enum):
 11 |     """Supported package managers."""
 12 |     NPM = "npm"
 13 |     PIP = "pip"
 14 |     CARGO = "cargo"
 15 | 
 16 | class DependencyManager:
 17 |     """Manages project dependencies."""
 18 |     
 19 |     def __init__(self, env_manager):
 20 |         self.env_manager = env_manager
 21 |         
 22 |     async def install_dependencies(
 23 |         self,
 24 |         environment: str,
 25 |         package_manager: PackageManager,
 26 |         dependencies: List[str],
 27 |         dev: bool = False
 28 |     ) -> Dict[str, Any]:
 29 |         """Install project dependencies."""
 30 |         try:
 31 |             command = self._build_install_command(
 32 |                 package_manager,
 33 |                 dependencies,
 34 |                 dev
 35 |             )
 36 |             
 37 |             result = await self.env_manager.execute_in_environment(
 38 |                 environment,
 39 |                 command
 40 |             )
 41 |             
 42 |             return {
 43 |                 "success": result["exit_code"] == 0,
 44 |                 "output": result["output"],
 45 |                 "error": result.get("error")
 46 |             }
 47 |             
 48 |         except Exception as e:
 49 |             raise PackageError(f"Failed to install dependencies: {str(e)}")
 50 |             
 51 |     async def update_dependencies(
 52 |         self,
 53 |         environment: str,
 54 |         package_manager: PackageManager,
 55 |         dependencies: Optional[List[str]] = None
 56 |     ) -> Dict[str, Any]:
 57 |         """Update project dependencies."""
 58 |         try:
 59 |             command = self._build_update_command(package_manager, dependencies)
 60 |             
 61 |             result = await self.env_manager.execute_in_environment(
 62 |                 environment,
 63 |                 command
 64 |             )
 65 |             
 66 |             return {
 67 |                 "success": result["exit_code"] == 0,
 68 |                 "output": result["output"],
 69 |                 "error": result.get("error")
 70 |             }
 71 |             
 72 |         except Exception as e:
 73 |             raise PackageError(f"Failed to update dependencies: {str(e)}")
 74 |             
 75 |     def _build_install_command(
 76 |         self,
 77 |         package_manager: PackageManager,
 78 |         dependencies: List[str],
 79 |         dev: bool
 80 |     ) -> str:
 81 |         """Build dependency installation command."""
 82 |         if package_manager == PackageManager.NPM:
 83 |             dev_flag = "--save-dev" if dev else ""
 84 |             deps = " ".join(dependencies)
 85 |             return f"npm install {dev_flag} {deps}"
 86 |             
 87 |         elif package_manager == PackageManager.PIP:
 88 |             # pip has no dev-dependency flag; install the requested packages directly
 89 |             deps = " ".join(dependencies)
 90 |             return f"pip install {deps}"
 91 |             
 92 |         elif package_manager == PackageManager.CARGO:
 93 |             dev_flag = "--dev" if dev else ""
 94 |             deps = " ".join(dependencies)
 95 |             return f"cargo add {dev_flag} {deps}"
 96 |             
 97 |         else:
 98 |             raise PackageError(f"Unsupported package manager: {package_manager}")
 99 |             
100 |     def _build_update_command(
101 |         self,
102 |         package_manager: PackageManager,
103 |         dependencies: Optional[List[str]] = None
104 |     ) -> str:
105 |         """Build dependency update command."""
106 |         if package_manager == PackageManager.NPM:
107 |             return "npm update" if not dependencies else f"npm update {' '.join(dependencies)}"
108 |             
109 |         elif package_manager == PackageManager.PIP:
110 |             return "pip install -U -r requirements.txt" if not dependencies else f"pip install -U {' '.join(dependencies)}"
111 |             
112 |         elif package_manager == PackageManager.CARGO:
113 |             return "cargo update" if not dependencies else f"cargo update {' '.join(dependencies)}"
114 |             
115 |         else:
116 |             raise PackageError(f"Unsupported package manager: {package_manager}")
117 | 
```
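
The command builders are pure string helpers, so they can be sanity-checked without a real environment manager; a short sketch:

```python
# Inspect the shell commands the dependency manager would run; the command
# builders do not touch the environment manager, so None is passed here.
from mcp_dev_server.package.manager import DependencyManager, PackageManager

deps = DependencyManager(env_manager=None)
print(deps._build_install_command(PackageManager.NPM, ["express", "jest"], dev=True))
# -> npm install --save-dev express jest
print(deps._build_update_command(PackageManager.PIP))
# -> pip install -U -r requirements.txt
```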

--------------------------------------------------------------------------------
/src/mcp_dev_server/test/manager.py:
--------------------------------------------------------------------------------

```python
  1 | """Test system integration for MCP Development Server."""
  2 | 
  3 | import asyncio
  4 | from typing import Dict, List, Optional, Any
  5 | from enum import Enum
  6 | from datetime import datetime
  7 | from ..utils.errors import TestError
  8 | from ..utils.logging import setup_logging
  9 | 
 10 | logger = setup_logging(__name__)
 11 | 
 12 | class TestStatus(str, Enum):
 13 |     """Test execution status."""
 14 |     PENDING = "pending"
 15 |     RUNNING = "running"
 16 |     SUCCESS = "success"
 17 |     FAILED = "failed"
 18 |     ERROR = "error"
 19 | 
 20 | class TestManager:
 21 |     """Manages test execution and reporting."""
 22 |     
 23 |     def __init__(self, env_manager):
 24 |         self.env_manager = env_manager
 25 |         self.test_runs: Dict[str, Dict[str, Any]] = {}
 26 |         
 27 |     async def run_tests(
 28 |         self,
 29 |         environment: str,
 30 |         config: Dict[str, Any]
 31 |     ) -> str:
 32 |         """Start a test run."""
 33 |         try:
 34 |             test_id = f"test_{len(self.test_runs)}"
 35 |             
 36 |             # Initialize test run
 37 |             self.test_runs[test_id] = {
 38 |                 "environment": environment,
 39 |                 "config": config,
 40 |                 "status": TestStatus.PENDING,
 41 |                 "results": [],
 42 |                 "start_time": datetime.now(),
 43 |                 "end_time": None
 44 |             }
 45 |             
 46 |             # Start test execution
 47 |             asyncio.create_task(self._execute_tests(test_id))
 48 |             
 49 |             return test_id
 50 |             
 51 |         except Exception as e:
 52 |             raise TestError(f"Failed to start tests: {str(e)}")
 53 |             
 54 |     async def _execute_tests(self, test_id: str) -> None:
 55 |         """Execute test suite."""
 56 |         try:
 57 |             test_run = self.test_runs[test_id]
 58 |             test_run["status"] = TestStatus.RUNNING
 59 |             
 60 |             # Run test command
 61 |             result = await self.env_manager.execute_in_environment(
 62 |                 test_run["environment"],
 63 |                 test_run["config"].get("command", "npm test"),
 64 |                 workdir=test_run["config"].get("workdir")
 65 |             )
 66 |             
 67 |             # Parse and store results
 68 |             test_run["results"] = self._parse_test_output(
 69 |                 result["output"],
 70 |                 test_run["config"].get("format", "jest")
 71 |             )
 72 |             
 73 |             # Update test status
 74 |             test_run["end_time"] = datetime.now()
 75 |             test_run["status"] = (
 76 |                 TestStatus.SUCCESS
 77 |                 if result["exit_code"] == 0
 78 |                 else TestStatus.FAILED
 79 |             )
 80 |             
 81 |         except Exception as e:
 82 |             logger.error(f"Test execution error: {str(e)}")
 83 |             if test_run := self.test_runs.get(test_id):
 84 |                 test_run.update(status=TestStatus.ERROR, error=str(e))
 85 |             
 86 |     async def get_test_status(self, test_id: str) -> Dict[str, Any]:
 87 |         """Get status and results of a test run."""
 88 |         if test_run := self.test_runs.get(test_id):
 89 |             return {
 90 |                 "id": test_id,
 91 |                 "status": test_run["status"],
 92 |                 "results": test_run["results"],
 93 |                 "start_time": test_run["start_time"],
 94 |                 "end_time": test_run["end_time"],
 95 |                 "error": test_run.get("error")
 96 |             }
 97 |         raise TestError(f"Test run not found: {test_id}")
 98 |         
 99 |     def _parse_test_output(
100 |         self,
101 |         output: str,
102 |         format: str
103 |     ) -> List[Dict[str, Any]]:
104 |         """Parse test output into structured results."""
105 |         if format == "jest":
106 |             return self._parse_jest_output(output)
107 |         elif format == "pytest":
108 |             return self._parse_pytest_output(output)
109 |         else:
110 |             logger.warning(f"Unknown test output format: {format}")
111 |             return [{"raw_output": output}]
112 |             
113 |     def _parse_jest_output(self, output: str) -> List[Dict[str, Any]]:
114 |         """Parse Jest test output."""
115 |         results = []
116 |         # Implement Jest output parsing
117 |         return results
118 |         
119 |     def _parse_pytest_output(self, output: str) -> List[Dict[str, Any]]:
120 |         """Parse pytest output."""
121 |         results = []
122 |         # Implement pytest output parsing
123 |         return results
124 | 
```
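
Because execution is delegated to an environment manager, the `run_tests` / `get_test_status` flow can be exercised with a small stub; the stub below is hypothetical and only mirrors the `execute_in_environment` call used above:

```python
# Hypothetical stub of the environment manager interface expected by TestManager:
# execute_in_environment(environment, command, workdir=None) -> result dict.
import asyncio
from mcp_dev_server.test.manager import TestManager

class StubEnvManager:
    async def execute_in_environment(self, environment, command, workdir=None):
        return {"exit_code": 0, "output": "1 passed"}

async def demo():
    tests = TestManager(StubEnvManager())
    test_id = await tests.run_tests("default", {"command": "pytest", "format": "pytest"})
    await asyncio.sleep(0.1)  # give the background task time to finish
    print(await tests.get_test_status(test_id))

asyncio.run(demo())
```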

--------------------------------------------------------------------------------
/src/mcp_dev_server/server.py:
--------------------------------------------------------------------------------

```python
  1 | """MCP Development Server implementation."""
  2 | from typing import Dict, Any, Optional, Sequence
  3 | import logging
  4 | import sys
  5 | import json
  6 | 
  7 | # Import MCP components
  8 | from mcp.server import Server as MCPServer
  9 | from mcp.server.stdio import stdio_server
 10 | import mcp.types as types
 11 | 
 12 | from .models import Config, InputResponse, MCPDevServerError
 13 | from .managers import (
 14 |     ProjectManager, 
 15 |     TemplateManager,
 16 |     BuildManager,
 17 |     DependencyManager,
 18 |     TestManager,
 19 |     WorkflowManager
 20 | )
 21 | from .handlers import InputRequestHandler
 22 | 
 23 | # Configure logging to stderr to keep stdout clean
 24 | logger = logging.getLogger(__name__)
 25 | handler = logging.StreamHandler(sys.stderr)
 26 | formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
 27 | handler.setFormatter(formatter)
 28 | logger.addHandler(handler)
 29 | logger.setLevel(logging.DEBUG)  # Set to DEBUG for development
 30 | 
 31 | class MCPDevServer:
 32 |     """MCP Development Server implementation."""
 33 |     
 34 |     def __init__(self):
 35 |         """Initialize the MCP Development Server."""
 36 |         logger.info("Initializing MCP Development Server")
 37 |         
 38 |         try:
 39 |             # Initialize server
 40 |             self.server = MCPServer("mcp-dev-server")
 41 |             
 42 |             # Initialize configuration
 43 |             self.config = Config()
 44 |             
 45 |             # Initialize all managers
 46 |             self.project_manager = ProjectManager(self.config)
 47 |             self.template_manager = TemplateManager()
 48 |             self.build_manager = BuildManager()
 49 |             self.dependency_manager = DependencyManager()
 50 |             self.test_manager = TestManager()
 51 |             self.workflow_manager = WorkflowManager()
 52 |             self.input_handler = InputRequestHandler()
 53 |             
 54 |             # Setup request handlers
 55 |             self._setup_resource_handlers()
 56 |             self._setup_tool_handlers()
 57 |             self._setup_prompt_handlers()
 58 |             
 59 |             logger.info("Server initialization completed successfully")
 60 |             
 61 |         except Exception as e:
 62 |             logger.error(f"Failed to initialize server: {e}")
 63 |             raise
 64 | 
 65 |     def _setup_resource_handlers(self):
 66 |         """Set up resource request handlers."""
 67 |         @self.server.list_resources()
 68 |         async def list_resources() -> list[types.Resource]:
 69 |             """List available resources."""
 70 |             logger.debug("Listing resources")
 71 |             return []
 72 | 
 73 |         @self.server.read_resource()
 74 |         async def read_resource(uri: str) -> str:
 75 |             """Read resource content."""
 76 |             logger.debug(f"Reading resource: {uri}")
 77 |             return ""
 78 | 
 79 |     def _setup_tool_handlers(self):
 80 |         """Set up tool request handlers."""
 81 |         @self.server.list_tools()
 82 |         async def list_tools() -> list[types.Tool]:
 83 |             """List available tools."""
 84 |             logger.debug("Listing tools")
 85 |             return []
 86 | 
 87 |         @self.server.call_tool()
 88 |         async def call_tool(name: str, arguments: Dict[str, Any]) -> Sequence[types.TextContent]:
 89 |             """Execute a tool."""
 90 |             logger.debug(f"Calling tool {name} with arguments {arguments}")
 91 |             return [types.TextContent(type="text", text="Tool execution result")]
 92 | 
 93 |     def _setup_prompt_handlers(self):
 94 |         """Set up prompt request handlers."""
 95 |         @self.server.list_prompts()
 96 |         async def list_prompts() -> list[types.Prompt]:
 97 |             """List available prompts."""
 98 |             logger.debug("Listing prompts")
 99 |             return []
100 | 
101 |     async def run(self):
102 |         """Run the MCP Development Server."""
103 |         try:
104 |             logger.info(f"Starting {self.server.name}...")
105 |             
106 |             # Use stdio transport
107 |             async with stdio_server() as streams:
108 |                 logger.info("Using stdio transport")
109 |                 await self.server.run(
110 |                     streams[0],  # read stream
111 |                     streams[1],  # write stream
112 |                     self.server.create_initialization_options(),
113 |                     raise_exceptions=True  # Enable for debugging
114 |                 )
115 |                 
116 |         except Exception as e:
117 |             logger.error(f"Server error: {str(e)}")
118 |             raise MCPDevServerError(f"Server error: {str(e)}")
119 | 
120 |         finally:
121 |             logger.info("Server shutdown")
122 | 
```
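
The resource, tool, and prompt handlers above are stubs that return empty lists. As an illustration only, a concrete `list_tools` registration might look like the sketch below; the `build` tool name and its input schema are placeholders, not something this repository defines:

```python
# Illustrative sketch: registering one concrete tool on an MCP server.
from mcp.server import Server as MCPServer
import mcp.types as types

server = MCPServer("mcp-dev-server")

@server.list_tools()
async def list_tools() -> list[types.Tool]:
    return [
        types.Tool(
            name="build",  # placeholder tool name
            description="Run a build in a managed environment",
            inputSchema={
                "type": "object",
                "properties": {
                    "environment": {"type": "string"},
                    "command": {"type": "string"},
                },
                "required": ["environment"],
            },
        )
    ]
```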

--------------------------------------------------------------------------------
/src/mcp_dev_server/prompts/templates.py:
--------------------------------------------------------------------------------

```python
  1 | """Input request templates for common scenarios."""
  2 | from typing import Dict
  3 | from .input_protocol import InputRequest, InputField
  4 | 
  5 | ENVIRONMENT_SETUP = InputRequest(
  6 |     request_id="environment_setup",
  7 |     title="Setup Development Environment",
  8 |     description="Configure your development environment",
  9 |     fields=[
 10 |         InputField(
 11 |             name="language",
 12 |             type="select",
 13 |             description="Primary programming language",
 14 |             options=[
 15 |                 {"value": "python", "label": "Python"},
 16 |                 {"value": "node", "label": "Node.js"},
 17 |                 {"value": "both", "label": "Python & Node.js"}
 18 |             ]
 19 |         ),
 20 |         InputField(
 21 |             name="python_version",
 22 |             type="select",
 23 |             description="Python version",
 24 |             options=[
 25 |                 {"value": "3.12", "label": "Python 3.12"},
 26 |                 {"value": "3.11", "label": "Python 3.11"},
 27 |                 {"value": "3.10", "label": "Python 3.10"}
 28 |             ],
 29 |             required=False
 30 |         ),
 31 |         InputField(
 32 |             name="node_version",
 33 |             type="select",
 34 |             description="Node.js version",
 35 |             options=[
 36 |                 {"value": "20", "label": "Node.js 20 LTS"},
 37 |                 {"value": "18", "label": "Node.js 18 LTS"}
 38 |             ],
 39 |             required=False
 40 |         ),
 41 |         InputField(
 42 |             name="include_docker",
 43 |             type="confirm",
 44 |             description="Include Docker support?",
 45 |             default=False
 46 |         )
 47 |     ]
 48 | )
 49 | 
 50 | TEST_CONFIGURATION = InputRequest(
 51 |     request_id="test_configuration",
 52 |     title="Configure Test Environment",
 53 |     description="Set up testing parameters",
 54 |     fields=[
 55 |         InputField(
 56 |             name="test_framework",
 57 |             type="select",
 58 |             description="Testing framework",
 59 |             options=[
 60 |                 {"value": "pytest", "label": "pytest"},
 61 |                 {"value": "unittest", "label": "unittest"},
 62 |                 {"value": "jest", "label": "Jest"},
 63 |                 {"value": "mocha", "label": "Mocha"}
 64 |             ]
 65 |         ),
 66 |         InputField(
 67 |             name="include_coverage",
 68 |             type="confirm",
 69 |             description="Include coverage reporting?",
 70 |             default=True
 71 |         ),
 72 |         InputField(
 73 |             name="parallel",
 74 |             type="confirm",
 75 |             description="Run tests in parallel?",
 76 |             default=False
 77 |         ),
 78 |         InputField(
 79 |             name="test_path",
 80 |             type="text",
 81 |             description="Test directory or file pattern",
 82 |             default="tests/",
 83 |             required=False
 84 |         )
 85 |     ]
 86 | )
 87 | 
 88 | DEPLOYMENT_CONFIG = InputRequest(
 89 |     request_id="deployment_config",
 90 |     title="Configure Deployment",
 91 |     description="Set up deployment parameters",
 92 |     fields=[
 93 |         InputField(
 94 |             name="environment",
 95 |             type="select",
 96 |             description="Deployment environment",
 97 |             options=[
 98 |                 {"value": "development", "label": "Development"},
 99 |                 {"value": "staging", "label": "Staging"},
100 |                 {"value": "production", "label": "Production"}
101 |             ]
102 |         ),
103 |         InputField(
104 |             name="deploy_method",
105 |             type="select",
106 |             description="Deployment method",
107 |             options=[
108 |                 {"value": "docker", "label": "Docker Container"},
109 |                 {"value": "kubernetes", "label": "Kubernetes"},
110 |                 {"value": "serverless", "label": "Serverless"}
111 |             ]
112 |         ),
113 |         InputField(
114 |             name="auto_deploy",
115 |             type="confirm",
116 |             description="Enable automatic deployment?",
117 |             default=False
118 |         ),
119 |         InputField(
120 |             name="rollback_enabled",
121 |             type="confirm",
122 |             description="Enable automatic rollback?",
123 |             default=True
124 |         )
125 |     ]
126 | )
127 | 
128 | DEBUG_CONFIG = InputRequest(
129 |     request_id="debug_config",
130 |     title="Configure Debugging Session",
131 |     description="Set up debugging parameters",
132 |     fields=[
133 |         InputField(
134 |             name="debug_type",
135 |             type="select",
136 |             description="Type of debugging",
137 |             options=[
138 |                 {"value": "python", "label": "Python Debugger"},
139 |                 {"value": "node", "label": "Node.js Debugger"},
140 |                 {"value": "remote", "label": "Remote Debugging"}
141 |             ]
142 |         ),
143 |         InputField(
144 |             name="port",
145 |             type="number",
146 |             description="Debug port",
147 |             default=9229,
148 |             validation={"min": 1024, "max": 65535}
149 |         ),
150 |         InputField(
151 |             name="break_on_entry",
152 |             type="confirm",
153 |             description="Break on entry point?",
154 |             default=True
155 |         )
156 |     ]
157 | )
158 | 
159 | TEMPLATE_REQUESTS: Dict[str, InputRequest] = {
160 |     "environment_setup": ENVIRONMENT_SETUP,
161 |     "test_configuration": TEST_CONFIGURATION,
162 |     "deployment_config": DEPLOYMENT_CONFIG,
163 |     "debug_config": DEBUG_CONFIG
164 | }
```
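
The dictionary above gives handlers a single lookup point for the generic input templates. As a rough illustration of how a caller might use it, the sketch below resolves a template by `request_id` and checks a set of answers against its fields. It assumes this module is `mcp_dev_server/prompts/templates.py` (as the directory layout suggests) and that `InputRequest`/`InputField` expose their constructor arguments as attributes; it is not part of the server itself.

```python
# Hypothetical validation helper -- illustration only, not part of the server.
from mcp_dev_server.prompts.templates import TEMPLATE_REQUESTS

def validate_response(request_id: str, answers: dict) -> list[str]:
    """Return a list of problems with the supplied answers (empty means OK)."""
    request = TEMPLATE_REQUESTS[request_id]
    problems = []
    for field in request.fields:
        # Assumes fields without an explicit required=False are mandatory.
        if getattr(field, "required", True) and field.name not in answers:
            problems.append(f"missing required field: {field.name}")
            continue
        if field.type == "select" and field.name in answers:
            allowed = {option["value"] for option in field.options}
            if answers[field.name] not in allowed:
                problems.append(f"invalid value for {field.name}: {answers[field.name]!r}")
    return problems

print(validate_response("environment_setup", {"language": "python", "include_docker": True}))
```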

--------------------------------------------------------------------------------
/src/mcp_dev_server/project_manager/project_types.py:
--------------------------------------------------------------------------------

```python
  1 | """Project type definitions and configurations."""
  2 | from typing import Dict, Any, List
  3 | from enum import Enum
  4 | 
  5 | class BuildSystem(str, Enum):
  6 |     """Build system types."""
  7 |     MAVEN = "maven"
  8 |     GRADLE = "gradle"
  9 |     NPM = "npm"
 10 |     YARN = "yarn"
 11 |     PIP = "pip"
 12 |     POETRY = "poetry"
 13 |     DOTNET = "dotnet"
 14 |     CARGO = "cargo"
 15 |     GO = "go"
 16 |     SBT = "sbt"
 17 | 
 18 | class ProjectType:
 19 |     """Base project type configuration."""
 20 |     
 21 |     def __init__(
 22 |         self,
 23 |         name: str,
 24 |         description: str,
 25 |         file_structure: Dict[str, Any],
 26 |         build_systems: List[BuildSystem],
 27 |         default_build_system: BuildSystem,
 28 |         config_files: List[str],
 29 |         environment_variables: Dict[str, str],
 30 |         docker_templates: List[str],
 31 |         input_templates: List[str]
 32 |     ):
 33 |         self.name = name
 34 |         self.description = description
 35 |         self.file_structure = file_structure
 36 |         self.build_systems = build_systems
 37 |         self.default_build_system = default_build_system
 38 |         self.config_files = config_files
 39 |         self.environment_variables = environment_variables
 40 |         self.docker_templates = docker_templates
 41 |         self.input_templates = input_templates
 42 | 
 43 | # Define standard project types
 44 | JAVA_PROJECT = ProjectType(
 45 |     name="java",
 46 |     description="Java project",
 47 |     file_structure={
 48 |         "src/": {
 49 |             "main/": {
 50 |                 "java/": {},
 51 |                 "resources/": {}
 52 |             },
 53 |             "test/": {
 54 |                 "java/": {},
 55 |                 "resources/": {}
 56 |             }
 57 |         },
 58 |         "target/": {},
 59 |     },
 60 |     build_systems=[BuildSystem.MAVEN, BuildSystem.GRADLE],
 61 |     default_build_system=BuildSystem.MAVEN,
 62 |     config_files=["pom.xml", "build.gradle", ".gitignore", "README.md"],
 63 |     environment_variables={
 64 |         "JAVA_HOME": "",
 65 |         "MAVEN_HOME": "",
 66 |         "GRADLE_HOME": ""
 67 |     },
 68 |     docker_templates=["java-maven", "java-gradle"],
 69 |     input_templates=["java_config", "maven_config", "gradle_config"]
 70 | )
 71 | 
 72 | DOTNET_PROJECT = ProjectType(
 73 |     name="dotnet",
 74 |     description=".NET project",
 75 |     file_structure={
 76 |         "src/": {},
 77 |         "tests/": {},
 78 |         "docs/": {}
 79 |     },
 80 |     build_systems=[BuildSystem.DOTNET],
 81 |     default_build_system=BuildSystem.DOTNET,
 82 |     config_files=[".csproj", ".sln", "global.json", ".gitignore", "README.md"],
 83 |     environment_variables={
 84 |         "DOTNET_ROOT": "",
 85 |         "ASPNETCORE_ENVIRONMENT": "Development"
 86 |     },
 87 |     docker_templates=["dotnet-sdk", "dotnet-runtime"],
 88 |     input_templates=["dotnet_config", "aspnet_config"]
 89 | )
 90 | 
 91 | NODE_PROJECT = ProjectType(
 92 |     name="node",
 93 |     description="Node.js project",
 94 |     file_structure={
 95 |         "src/": {},
 96 |         "tests/": {},
 97 |         "dist/": {},
 98 |         "public/": {}
 99 |     },
100 |     build_systems=[BuildSystem.NPM, BuildSystem.YARN],
101 |     default_build_system=BuildSystem.NPM,
102 |     config_files=["package.json", "tsconfig.json", ".gitignore", "README.md"],
103 |     environment_variables={
104 |         "NODE_ENV": "development",
105 |         "NPM_TOKEN": ""
106 |     },
107 |     docker_templates=["node-dev", "node-prod"],
108 |     input_templates=["node_config", "npm_config", "typescript_config"]
109 | )
110 | 
111 | PYTHON_PROJECT = ProjectType(
112 |     name="python",
113 |     description="Python project",
114 |     file_structure={
115 |         "src/": {},
116 |         "tests/": {},
117 |         "docs/": {},
118 |         "notebooks/": {}
119 |     },
120 |     build_systems=[BuildSystem.PIP, BuildSystem.POETRY],
121 |     default_build_system=BuildSystem.POETRY,
122 |     config_files=["pyproject.toml", "setup.py", "requirements.txt", ".gitignore", "README.md"],
123 |     environment_variables={
124 |         "PYTHONPATH": "src",
125 |         "PYTHON_ENV": "development"
126 |     },
127 |     docker_templates=["python-dev", "python-prod"],
128 |     input_templates=["python_config", "poetry_config", "pytest_config"]
129 | )
130 | 
131 | GOLANG_PROJECT = ProjectType(
132 |     name="golang",
133 |     description="Go project",
134 |     file_structure={
135 |         "cmd/": {},
136 |         "internal/": {},
137 |         "pkg/": {},
138 |         "api/": {}
139 |     },
140 |     build_systems=[BuildSystem.GO],
141 |     default_build_system=BuildSystem.GO,
142 |     config_files=["go.mod", "go.sum", ".gitignore", "README.md"],
143 |     environment_variables={
144 |         "GOPATH": "",
145 |         "GO111MODULE": "on"
146 |     },
147 |     docker_templates=["golang-dev", "golang-prod"],
148 |     input_templates=["golang_config", "go_mod_config"]
149 | )
150 | 
151 | RUST_PROJECT = ProjectType(
152 |     name="rust",
153 |     description="Rust project",
154 |     file_structure={
155 |         "src/": {},
156 |         "tests/": {},
157 |         "benches/": {},
158 |         "examples/": {}
159 |     },
160 |     build_systems=[BuildSystem.CARGO],
161 |     default_build_system=BuildSystem.CARGO,
162 |     config_files=["Cargo.toml", "Cargo.lock", ".gitignore", "README.md"],
163 |     environment_variables={
164 |         "RUST_BACKTRACE": "1",
165 |         "CARGO_HOME": ""
166 |     },
167 |     docker_templates=["rust-dev", "rust-prod"],
168 |     input_templates=["rust_config", "cargo_config"]
169 | )
170 | 
171 | # Map of all available project types
172 | PROJECT_TYPES: Dict[str, ProjectType] = {
173 |     "java": JAVA_PROJECT,
174 |     "dotnet": DOTNET_PROJECT,
175 |     "node": NODE_PROJECT,
176 |     "python": PYTHON_PROJECT,
177 |     "golang": GOLANG_PROJECT,
178 |     "rust": RUST_PROJECT
179 | }
```
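
`PROJECT_TYPES` maps a language name to its declared layout and build options. A minimal, hypothetical use of that mapping is sketched below: it walks `file_structure` and creates the corresponding empty directory tree on disk. The target path is a placeholder.

```python
# Illustrative sketch only: materialize a ProjectType's declared directory layout.
import os
from mcp_dev_server.project_manager.project_types import PROJECT_TYPES

def create_skeleton(project_type: str, root: str) -> None:
    """Create the nested directories declared in file_structure under root."""
    def walk(structure: dict, base: str) -> None:
        for name, children in structure.items():
            path = os.path.join(base, name.rstrip("/"))
            os.makedirs(path, exist_ok=True)
            if isinstance(children, dict) and children:
                walk(children, path)
    walk(PROJECT_TYPES[project_type].file_structure, root)

create_skeleton("python", "/tmp/demo-project")  # placeholder path
```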

--------------------------------------------------------------------------------
/src/mcp_dev_server/project_manager/git.py:
--------------------------------------------------------------------------------

```python
  1 | """Git integration for MCP Development Server."""
  2 | import os
  3 | from typing import List, Optional
  4 | from git import Repo, GitCommandError
  5 | from git.objects import Commit
  6 | 
  7 | from ..utils.logging import setup_logging
  8 | from ..utils.errors import GitError
  9 | 
 10 | logger = setup_logging(__name__)
 11 | 
 12 | class GitManager:
 13 |     """Manages Git operations for a project."""
 14 |     
 15 |     def __init__(self, project_path: str):
 16 |         self.project_path = project_path
 17 |         self.repo: Optional[Repo] = None
 18 |         
 19 |     async def initialize(self) -> None:
 20 |         """Initialize Git repository."""
 21 |         try:
 22 |             self.repo = Repo.init(self.project_path)
 23 |             
 24 |             # Create default .gitignore if it doesn't exist
 25 |             gitignore_path = os.path.join(self.project_path, '.gitignore')
 26 |             if not os.path.exists(gitignore_path):
 27 |                 with open(gitignore_path, 'w') as f:
 28 |                     f.write('\n'.join([
 29 |                         '# Python',
 30 |                         '__pycache__/',
 31 |                         '*.pyc',
 32 |                         '*.pyo',
 33 |                         '*.pyd',
 34 |                         '.Python',
 35 |                         'env/',
 36 |                         'venv/',
 37 |                         '.env',
 38 |                         '.venv',
 39 |                         '',
 40 |                         '# IDE',
 41 |                         '.idea/',
 42 |                         '.vscode/',
 43 |                         '*.swp',
 44 |                         '*.swo',
 45 |                         '',
 46 |                         '# Project specific',
 47 |                         '.mcp/',
 48 |                         'dist/',
 49 |                         'build/',
 50 |                         '*.egg-info/',
 51 |                         ''
 52 |                     ]))
 53 |                 
 54 |             # Initial commit
 55 |             if not self.repo.heads:
 56 |                 self.repo.index.add(['.gitignore'])
 57 |                 self.repo.index.commit("Initial commit")
 58 |                 
 59 |             logger.info(f"Initialized Git repository at {self.project_path}")
 60 |             
 61 |         except Exception as e:
 62 |             raise GitError(f"Git initialization failed: {str(e)}")
 63 |             
 64 |     async def get_status(self) -> dict:
 65 |         """Get repository status."""
 66 |         try:
 67 |             if not self.repo:
 68 |                 raise GitError("Git repository not initialized")
 69 |                 
 70 |             return {
 71 |                 "branch": self.repo.active_branch.name,
 72 |                 "changed_files": [item.a_path for item in self.repo.index.diff(None)],
 73 |                 "untracked_files": self.repo.untracked_files,
 74 |                 "is_dirty": self.repo.is_dirty(),
 75 |                 "head_commit": {
 76 |                     "hash": self.repo.head.commit.hexsha,
 77 |                     "message": self.repo.head.commit.message,
 78 |                     "author": str(self.repo.head.commit.author),
 79 |                     "date": str(self.repo.head.commit.authored_datetime)
 80 |                 }
 81 |             }
 82 |             
 83 |         except Exception as e:
 84 |             raise GitError(f"Failed to get Git status: {str(e)}")
 85 |             
 86 |     async def commit(self, message: str, files: Optional[List[str]] = None) -> str:
 87 |         """Create a new commit."""
 88 |         try:
 89 |             if not self.repo:
 90 |                 raise GitError("Git repository not initialized")
 91 |                 
 92 |             # Add specified files or all changes
 93 |             if files:
 94 |                 self.repo.index.add(files)
 95 |             else:
 96 |                 self.repo.git.add(all=True)  # stage all changes, including untracked files
 97 |                 
 98 |             # Create commit
 99 |             commit = self.repo.index.commit(message)
100 |             logger.info(f"Created commit: {commit.hexsha}")
101 |             
102 |             return commit.hexsha
103 |             
104 |         except Exception as e:
105 |             raise GitError(f"Failed to create commit: {str(e)}")
106 |             
107 |     async def get_commit_history(
108 |         self,
109 |         max_count: Optional[int] = None
110 |     ) -> List[dict]:
111 |         """Get commit history."""
112 |         try:
113 |             if not self.repo:
114 |                 raise GitError("Git repository not initialized")
115 |                 
116 |             commits = []
117 |             for commit in self.repo.iter_commits(max_count=max_count):
118 |                 commits.append({
119 |                     "hash": commit.hexsha,
120 |                     "message": commit.message,
121 |                     "author": str(commit.author),
122 |                     "date": str(commit.authored_datetime),
123 |                     "files": list(commit.stats.files.keys())
124 |                 })
125 |                 
126 |             return commits
127 |             
128 |         except Exception as e:
129 |             raise GitError(f"Failed to get commit history: {str(e)}")
130 |             
131 |     async def create_branch(self, name: str) -> None:
132 |         """Create a new branch."""
133 |         try:
134 |             if not self.repo:
135 |                 raise GitError("Git repository not initialized")
136 |                 
137 |             self.repo.create_head(name)
138 |             logger.info(f"Created branch: {name}")
139 |             
140 |         except Exception as e:
141 |             raise GitError(f"Failed to create branch: {str(e)}")
142 |             
143 |     async def checkout(self, branch: str) -> None:
144 |         """Checkout a branch."""
145 |         try:
146 |             if not self.repo:
147 |                 raise GitError("Git repository not initialized")
148 |                 
149 |             self.repo.git.checkout(branch)
150 |             logger.info(f"Checked out branch: {branch}")
151 |             
152 |         except Exception as e:
153 |             raise GitError(f"Failed to checkout branch: {str(e)}")
154 |             
155 |     async def get_diff(
156 |         self,
157 |         commit_a: Optional[str] = None,
158 |         commit_b: Optional[str] = None
159 |     ) -> str:
160 |         """Get diff between commits or working directory."""
161 |         try:
162 |             if not self.repo:
163 |                 raise GitError("Git repository not initialized")
164 |                 
165 |             return self.repo.git.diff(commit_a, commit_b)
166 |             
167 |         except Exception as e:
168 |             raise GitError(f"Failed to get diff: {str(e)}")
169 |             
170 |     async def cleanup(self) -> None:
171 |         """Clean up Git resources."""
172 |         self.repo = None
```
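
`GitManager` wraps GitPython behind async methods that raise `GitError` on failure. The snippet below is a hedged usage sketch: the project path is a placeholder, GitPython must be installed for the `git` import in this module to resolve, and only methods defined above are called.

```python
import asyncio
from mcp_dev_server.project_manager.git import GitManager

async def main() -> None:
    git_mgr = GitManager("/tmp/demo-project")       # placeholder path
    await git_mgr.initialize()                      # init repo, write .gitignore, first commit
    status = await git_mgr.get_status()
    print(status["branch"], status["is_dirty"])
    await git_mgr.commit("Add project skeleton")    # no file list: stages all changes
    for entry in await git_mgr.get_commit_history(max_count=5):
        print(entry["hash"][:8], entry["message"].strip())

asyncio.run(main())
```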

--------------------------------------------------------------------------------
/src/mcp_dev_server/environments/manager.py:
--------------------------------------------------------------------------------

```python
  1 | """Environment management for MCP Development Server."""
  2 | import os
  3 | import json
  4 | from typing import Dict, List, Optional, Any
  5 | from pathlib import Path
  6 | 
  7 | from ..docker.manager import DockerManager
  8 | from ..docker.volumes import VolumeManager
  9 | from ..docker.templates import DockerTemplates
 10 | from ..utils.logging import setup_logging
 11 | from ..utils.errors import EnvironmentError
 12 | 
 13 | logger = setup_logging(__name__)
 14 | 
 15 | class EnvironmentManager:
 16 |     """Manages development environments."""
 17 |     
 18 |     def __init__(self):
 19 |         self.docker_manager = DockerManager()
 20 |         self.volume_manager = VolumeManager()
 21 |         self.environments: Dict[str, Dict[str, Any]] = {}
 22 |         
 23 |     async def create_environment(
 24 |         self,
 25 |         name: str,
 26 |         project_path: str,
 27 |         env_type: str,
 28 |         config: Optional[Dict[str, Any]] = None
 29 |     ) -> str:
 30 |         """Create a new development environment."""
 31 |         try:
 32 |             config = config or {}
 33 |             
 34 |             # Create environment directory
 35 |             env_path = os.path.join(project_path, '.mcp', 'environments', name)
 36 |             os.makedirs(env_path, exist_ok=True)
 37 |             
 38 |             # Generate Dockerfile
 39 |             dockerfile_content = DockerTemplates.get_template(env_type, config)
 40 |             dockerfile_path = os.path.join(env_path, 'Dockerfile')
 41 |             with open(dockerfile_path, 'w') as f:
 42 |                 f.write(dockerfile_content)
 43 |             
 44 |             # Create volumes for persistence
 45 |             volumes = {}
 46 |             for volume_name in ['src', 'deps', 'cache']:
 47 |                 volume = await self.volume_manager.create_volume(
 48 |                     f"mcp-{name}-{volume_name}",
 49 |                     labels={
 50 |                         'mcp.environment': name,
 51 |                         'mcp.volume.type': volume_name
 52 |                     }
 53 |                 )
 54 |                 volumes[volume] = {'bind': f'/app/{volume_name}', 'mode': 'rw'}
 55 |             
 56 |             # Create container
 57 |             container_id = await self.docker_manager.create_container(
 58 |                 project_path=project_path,
 59 |                 environment=name,
 60 |                 dockerfile=dockerfile_path,
 61 |                 volumes=volumes,
 62 |                 environment_vars=config.get('env_vars'),
 63 |                 ports=config.get('ports')
 64 |             )
 65 |             
 66 |             # Store environment configuration
 67 |             self.environments[name] = {
 68 |                 'id': container_id,
 69 |                 'type': env_type,
 70 |                 'path': env_path,
 71 |                 'config': config,
 72 |                 'volumes': volumes
 73 |             }
 74 |             
 75 |             # Save environment metadata
 76 |             self._save_environment_metadata(name)
 77 |             
 78 |             logger.info(f"Created environment: {name}")
 79 |             return container_id
 80 |             
 81 |         except Exception as e:
 82 |             raise EnvironmentError(f"Failed to create environment: {str(e)}")
 83 |             
 84 |     async def remove_environment(self, name: str) -> None:
 85 |         """Remove a development environment."""
 86 |         try:
 87 |             if env := self.environments.get(name):
 88 |                 # Stop container
 89 |                 await self.docker_manager.stop_container(name)
 90 |                 
 91 |                 # Remove volumes
 92 |                 for volume in env['volumes']:
 93 |                     await self.volume_manager.remove_volume(volume)
 94 |                 
 95 |                 # Remove environment directory
 96 |                 import shutil
 97 |                 shutil.rmtree(env['path'])
 98 |                 
 99 |                 # Remove from environments dict
100 |                 del self.environments[name]
101 |                 
102 |                 logger.info(f"Removed environment: {name}")
103 |             else:
104 |                 raise EnvironmentError(f"Environment not found: {name}")
105 |                 
106 |         except Exception as e:
107 |             raise EnvironmentError(f"Failed to remove environment: {str(e)}")
108 |             
109 |     async def execute_in_environment(
110 |         self,
111 |         name: str,
112 |         command: str,
113 |         workdir: Optional[str] = None
114 |     ) -> Dict[str, Any]:
115 |         """Execute a command in an environment."""
116 |         try:
117 |             if name not in self.environments:
118 |                 raise EnvironmentError(f"Environment not found: {name}")
119 |                 
120 |             return await self.docker_manager.execute_command(
121 |                 environment=name,
122 |                 command=command,
123 |                 workdir=workdir
124 |             )
125 |             
126 |         except Exception as e:
127 |             raise EnvironmentError(f"Failed to execute command: {str(e)}")
128 |             
129 |     async def get_environment_status(self, name: str) -> Dict[str, Any]:
130 |         """Get environment status including container and volumes."""
131 |         try:
132 |             if env := self.environments.get(name):
133 |                 container_status = await self.docker_manager.get_container_status(name)
134 |                 
135 |                 volumes_status = {}
136 |                 for volume in env['volumes']:
137 |                     volumes_status[volume] = await self.volume_manager.get_volume_info(volume)
138 |                 
139 |                 return {
140 |                     'container': container_status,
141 |                     'volumes': volumes_status,
142 |                     'type': env['type'],
143 |                     'config': env['config']
144 |                 }
145 |             else:
146 |                 raise EnvironmentError(f"Environment not found: {name}")
147 |                 
148 |         except Exception as e:
149 |             raise EnvironmentError(f"Failed to get environment status: {str(e)}")
150 |             
151 |     def _save_environment_metadata(self, name: str) -> None:
152 |         """Save environment metadata to disk."""
153 |         if env := self.environments.get(name):
154 |             metadata_path = os.path.join(env['path'], 'metadata.json')
155 |             with open(metadata_path, 'w') as f:
156 |                 json.dump({
157 |                     'name': name,
158 |                     'type': env['type'],
159 |                     'config': env['config'],
160 |                     'volumes': list(env['volumes'].keys())
161 |                 }, f, indent=2)
162 |                 
163 |     async def cleanup(self) -> None:
164 |         """Clean up all environments."""
165 |         for name in list(self.environments.keys()):
166 |             try:
167 |                 await self.remove_environment(name)
168 |             except Exception as e:
169 |                 logger.error(f"Error cleaning up environment {name}: {str(e)}")
170 | 
```
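
`EnvironmentManager` composes the Docker and volume managers into a per-project environment lifecycle. The sketch below shows the intended call pattern; it assumes a reachable Docker daemon and that the `DockerManager`/`VolumeManager` methods delegated to here (`create_container`, `stop_container`, `get_container_status`, and so on) are available with these signatures. Paths and config values are placeholders.

```python
import asyncio
from mcp_dev_server.environments.manager import EnvironmentManager

async def main() -> None:
    manager = EnvironmentManager()
    await manager.create_environment(
        name="dev",
        project_path="/tmp/demo-project",            # placeholder path
        env_type="python",
        config={"env_vars": {"PYTHON_ENV": "development"}},
    )
    print(await manager.execute_in_environment("dev", "python --version"))
    print(await manager.get_environment_status("dev"))
    await manager.cleanup()                          # removes containers, volumes, metadata

asyncio.run(main())
```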

--------------------------------------------------------------------------------
/src/mcp_dev_server/prompts/project_templates.py:
--------------------------------------------------------------------------------

```python
  1 | """Project-specific input templates."""
  2 | from typing import Dict
  3 | from .input_protocol import InputRequest, InputField
  4 | 
  5 | # Java Project Templates
  6 | JAVA_CONFIG = InputRequest(
  7 |     request_id="java_config",
  8 |     title="Java Project Configuration",
  9 |     description="Configure Java project settings",
 10 |     fields=[
 11 |         InputField(
 12 |             name="java_version",
 13 |             type="select",
 14 |             description="Java version",
 15 |             options=[
 16 |                 {"value": "21", "label": "Java 21 (LTS)"},
 17 |                 {"value": "17", "label": "Java 17 (LTS)"},
 18 |                 {"value": "11", "label": "Java 11 (LTS)"},
 19 |                 {"value": "8", "label": "Java 8"}
 20 |             ]
 21 |         ),
 22 |         InputField(
 23 |             name="project_type",
 24 |             type="select",
 25 |             description="Project type",
 26 |             options=[
 27 |                 {"value": "spring-boot", "label": "Spring Boot"},
 28 |                 {"value": "jakarta-ee", "label": "Jakarta EE"},
 29 |                 {"value": "android", "label": "Android"},
 30 |                 {"value": "library", "label": "Java Library"}
 31 |             ]
 32 |         ),
 33 |         InputField(
 34 |             name="packaging",
 35 |             type="select",
 36 |             description="Packaging type",
 37 |             options=[
 38 |                 {"value": "jar", "label": "JAR"},
 39 |                 {"value": "war", "label": "WAR"},
 40 |                 {"value": "ear", "label": "EAR"}
 41 |             ]
 42 |         )
 43 |     ]
 44 | )
 45 | 
 46 | # .NET Project Templates
 47 | DOTNET_CONFIG = InputRequest(
 48 |     request_id="dotnet_config",
 49 |     title=".NET Project Configuration",
 50 |     description="Configure .NET project settings",
 51 |     fields=[
 52 |         InputField(
 53 |             name="dotnet_version",
 54 |             type="select",
 55 |             description=".NET version",
 56 |             options=[
 57 |                 {"value": "8.0", "label": ".NET 8.0"},
 58 |                 {"value": "7.0", "label": ".NET 7.0"},
 59 |                 {"value": "6.0", "label": ".NET 6.0 (LTS)"}
 60 |             ]
 61 |         ),
 62 |         InputField(
 63 |             name="project_type",
 64 |             type="select",
 65 |             description="Project type",
 66 |             options=[
 67 |                 {"value": "webapi", "label": "ASP.NET Core Web API"},
 68 |                 {"value": "mvc", "label": "ASP.NET Core MVC"},
 69 |                 {"value": "blazor", "label": "Blazor"},
 70 |                 {"value": "maui", "label": ".NET MAUI"},
 71 |                 {"value": "library", "label": "Class Library"}
 72 |             ]
 73 |         ),
 74 |         InputField(
 75 |             name="authentication",
 76 |             type="select",
 77 |             description="Authentication type",
 78 |             options=[
 79 |                 {"value": "none", "label": "None"},
 80 |                 {"value": "individual", "label": "Individual Accounts"},
 81 |                 {"value": "microsoft", "label": "Microsoft Identity Platform"},
 82 |                 {"value": "windows", "label": "Windows Authentication"}
 83 |             ]
 84 |         )
 85 |     ]
 86 | )
 87 | 
 88 | # Node.js Project Templates
 89 | NODE_CONFIG = InputRequest(
 90 |     request_id="node_config",
 91 |     title="Node.js Project Configuration",
 92 |     description="Configure Node.js project settings",
 93 |     fields=[
 94 |         InputField(
 95 |             name="node_version",
 96 |             type="select",
 97 |             description="Node.js version",
 98 |             options=[
 99 |                 {"value": "20", "label": "Node.js 20 (LTS)"},
100 |                 {"value": "18", "label": "Node.js 18 (LTS)"}
101 |             ]
102 |         ),
103 |         InputField(
104 |             name="project_type",
105 |             type="select",
106 |             description="Project type",
107 |             options=[
108 |                 {"value": "express", "label": "Express.js"},
109 |                 {"value": "next", "label": "Next.js"},
110 |                 {"value": "nest", "label": "NestJS"},
111 |                 {"value": "library", "label": "NPM Package"}
112 |             ]
113 |         ),
114 |         InputField(
115 |             name="typescript",
116 |             type="confirm",
117 |             description="Use TypeScript?",
118 |             default=True
119 |         )
120 |     ]
121 | )
122 | 
123 | # Python Project Templates
124 | PYTHON_CONFIG = InputRequest(
125 |     request_id="python_config",
126 |     title="Python Project Configuration",
127 |     description="Configure Python project settings",
128 |     fields=[
129 |         InputField(
130 |             name="python_version",
131 |             type="select",
132 |             description="Python version",
133 |             options=[
134 |                 {"value": "3.12", "label": "Python 3.12"},
135 |                 {"value": "3.11", "label": "Python 3.11"},
136 |                 {"value": "3.10", "label": "Python 3.10"}
137 |             ]
138 |         ),
139 |         InputField(
140 |             name="project_type",
141 |             type="select",
142 |             description="Project type",
143 |             options=[
144 |                 {"value": "fastapi", "label": "FastAPI"},
145 |                 {"value": "django", "label": "Django"},
146 |                 {"value": "flask", "label": "Flask"},
147 |                 {"value": "library", "label": "Python Package"}
148 |             ]
149 |         ),
150 |         InputField(
151 |             name="dependency_management",
152 |             type="select",
153 |             description="Dependency management",
154 |             options=[
155 |                 {"value": "poetry", "label": "Poetry"},
156 |                 {"value": "pip", "label": "pip + requirements.txt"},
157 |                 {"value": "pipenv", "label": "Pipenv"}
158 |             ]
159 |         )
160 |     ]
161 | )
162 | 
163 | # Golang Project Templates
164 | GOLANG_CONFIG = InputRequest(
165 |     request_id="golang_config",
166 |     title="Go Project Configuration",
167 |     description="Configure Go project settings",
168 |     fields=[
169 |         InputField(
170 |             name="go_version",
171 |             type="select",
172 |             description="Go version",
173 |             options=[
174 |                 {"value": "1.22", "label": "Go 1.22"},
175 |                 {"value": "1.21", "label": "Go 1.21"},
176 |                 {"value": "1.20", "label": "Go 1.20"}
177 |             ]
178 |         ),
179 |         InputField(
180 |             name="project_type",
181 |             type="select",
182 |             description="Project type",
183 |             options=[
184 |                 {"value": "gin", "label": "Gin Web Framework"},
185 |                 {"value": "echo", "label": "Echo Framework"},
186 |                 {"value": "cli", "label": "CLI Application"},
187 |                 {"value": "library", "label": "Go Module"}
188 |             ]
189 |         ),
190 |         InputField(
191 |             name="module_path",
192 |             type="text",
193 |             description="Module path (e.g., github.com/user/repo)",
194 |             validation={"pattern": r"^[a-zA-Z0-9_.-]+/[a-zA-Z0-9_.-]+(/[a-zA-Z0-9_.-]+)?$"}
195 |         )
196 |     ]
197 | )
198 | 
199 | # All project templates
200 | PROJECT_TEMPLATES: Dict[str, InputRequest] = {
201 |     "java_config": JAVA_CONFIG,
202 |     "dotnet_config": DOTNET_CONFIG,
203 |     "node_config": NODE_CONFIG,
204 |     "python_config": PYTHON_CONFIG,
205 |     "golang_config": GOLANG_CONFIG
206 | }
```
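
These language-specific requests complement the generic templates and are referenced by id from each `ProjectType`'s `input_templates` list. A small, hypothetical helper tying the two maps together is shown below; template ids that a project type references but that are not defined here (for example `maven_config`) are simply skipped, and attribute access on `InputRequest` is assumed.

```python
from mcp_dev_server.prompts.project_templates import PROJECT_TEMPLATES
from mcp_dev_server.project_manager.project_types import PROJECT_TYPES

def input_requests_for(project_type: str):
    """Yield the InputRequest templates defined for a given project type."""
    for template_id in PROJECT_TYPES[project_type].input_templates:
        request = PROJECT_TEMPLATES.get(template_id)
        if request is not None:
            yield request

for request in input_requests_for("python"):
    print(request.request_id, "-", request.title)
```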

--------------------------------------------------------------------------------
/src/mcp_dev_server/project_manager/templates.py:
--------------------------------------------------------------------------------

```python
  1 | """Template system for project creation."""
  2 | import os
  3 | import shutil
  4 | from pathlib import Path
  5 | from typing import Dict, Any, List
  6 | import jinja2
  7 | import yaml
  8 | 
  9 | from ..utils.logging import setup_logging
 10 | from ..utils.errors import ProjectError
 11 | 
 12 | logger = setup_logging(__name__)
 13 | 
 14 | class TemplateManager:
 15 |     """Manages project templates."""
 16 |     
 17 |     def __init__(self):
 18 |         """Initialize template manager."""
 19 |         self.template_dir = self._get_template_dir()
 20 |         self.env = jinja2.Environment(
 21 |             loader=jinja2.FileSystemLoader(str(self.template_dir)),
 22 |             autoescape=jinja2.select_autoescape()
 23 |         )
 24 |         
 25 |     def _get_template_dir(self) -> Path:
 26 |         """Get templates directory path."""
 27 |         if os.name == "nt":  # Windows
 28 |             template_dir = Path(os.getenv("APPDATA")) / "Claude" / "templates"
 29 |         else:  # macOS/Linux
 30 |             template_dir = Path.home() / ".config" / "claude" / "templates"
 31 |             
 32 |         template_dir.mkdir(parents=True, exist_ok=True)
 33 |         
 34 |         # Initialize with basic template if empty
 35 |         if not any(template_dir.iterdir()):
 36 |             self._initialize_basic_template(template_dir)
 37 |             
 38 |         return template_dir
 39 |         
 40 |     def _initialize_basic_template(self, template_dir: Path):
 41 |         """Initialize basic project template.
 42 |         
 43 |         Args:
 44 |             template_dir: Templates directory path
 45 |         """
 46 |         basic_dir = template_dir / "basic"
 47 |         basic_dir.mkdir(exist_ok=True)
 48 |         
 49 |         # Create template configuration
 50 |         config = {
 51 |             "name": "basic",
 52 |             "description": "Basic project template",
 53 |             "version": "1.0.0",
 54 |             "files": [
 55 |                 "README.md",
 56 |                 "requirements.txt",
 57 |                 ".gitignore",
 58 |                 "src/__init__.py",
 59 |                 "tests/__init__.py"
 60 |             ],
 61 |             "variables": {
 62 |                 "project_name": "",
 63 |                 "description": ""
 64 |             },
 65 |             "features": {
 66 |                 "git": True,
 67 |                 "tests": True,
 68 |                 "docker": False
 69 |             }
 70 |         }
 71 |         
 72 |         with open(basic_dir / "template.yaml", "w") as f:
 73 |             yaml.dump(config, f)
 74 |             
 75 |         # Create template files
 76 |         readme_content = """# {{ project_name }}
 77 | 
 78 | {{ description }}
 79 | 
 80 | ## Installation
 81 | 
 82 | ```bash
 83 | pip install -r requirements.txt
 84 | ```
 85 | 
 86 | ## Usage
 87 | 
 88 | ```python
 89 | from {{ project_name.lower() }} import main
 90 | ```
 91 | 
 92 | ## Testing
 93 | 
 94 | ```bash
 95 | pytest tests/
 96 | ```
 97 | """
 98 |         
 99 |         with open(basic_dir / "README.md", "w") as f:
100 |             f.write(readme_content)
101 |             
102 |         # Create source directory
103 |         src_dir = basic_dir / "src"
104 |         src_dir.mkdir(exist_ok=True)
105 |         
106 |         with open(src_dir / "__init__.py", "w") as f:
107 |             f.write('"""{{ project_name }} package."""\n')
108 |             
109 |         # Create tests directory
110 |         tests_dir = basic_dir / "tests"
111 |         tests_dir.mkdir(exist_ok=True)
112 |         
113 |         with open(tests_dir / "__init__.py", "w") as f:
114 |             f.write('"""Tests for {{ project_name }}."""\n')
115 |             
116 |         # Create requirements.txt
117 |         with open(basic_dir / "requirements.txt", "w") as f:
118 |             f.write("pytest>=7.0.0\n")
119 |             
120 |         # Create .gitignore
121 |         gitignore_content = """__pycache__/
122 | *.py[cod]
123 | *$py.class
124 | *.so
125 | .Python
126 | build/
127 | develop-eggs/
128 | dist/
129 | downloads/
130 | eggs/
131 | .eggs/
132 | lib/
133 | lib64/
134 | parts/
135 | sdist/
136 | var/
137 | wheels/
138 | *.egg-info/
139 | .installed.cfg
140 | *.egg
141 | MANIFEST
142 | """
143 |         
144 |         with open(basic_dir / ".gitignore", "w") as f:
145 |             f.write(gitignore_content)
146 |             
147 |     async def apply_template(self, template_name: str, project: Any) -> None:
148 |         """Apply template to project.
149 |         
150 |         Args:
151 |             template_name: Name of template to apply
152 |             project: Project instance
153 |         """
154 |         try:
155 |             template_path = self.template_dir / template_name
156 |             if not template_path.exists():
157 |                 raise ProjectError(f"Template not found: {template_name}")
158 |                 
159 |             # Load template configuration
160 |             with open(template_path / "template.yaml", "r") as f:
161 |                 template_config = yaml.safe_load(f)
162 |                 
163 |             # Prepare template variables
164 |             variables = {
165 |                 "project_name": project.config.name,
166 |                 "description": project.config.description
167 |             }
168 |             
169 |             # Process each template file
170 |             for file_path in template_config["files"]:
171 |                 template_file = template_path / file_path
172 |                 if template_file.exists():
173 |                     # Create target directory if needed
174 |                     target_path = Path(project.path) / file_path
175 |                     target_path.parent.mkdir(parents=True, exist_ok=True)
176 |                     
177 |                     # Render template content
178 |                     template = self.env.get_template(f"{template_name}/{file_path}")
179 |                     content = template.render(**variables)
180 |                     
181 |                     # Write rendered content
182 |                     with open(target_path, "w") as f:
183 |                         f.write(content)
184 |                         
185 |             logger.info(f"Applied template {template_name} to project {project.config.name}")
186 |             
187 |         except Exception as e:
188 |             logger.error(f"Failed to apply template: {str(e)}")
189 |             raise ProjectError(f"Template application failed: {str(e)}")
190 |             
191 |     async def template_has_git(self, template_name: str) -> bool:
192 |         """Check if template includes Git initialization.
193 |         
194 |         Args:
195 |             template_name: Template name
196 |             
197 |         Returns:
198 |             bool: True if template includes Git
199 |         """
200 |         try:
201 |             template_path = self.template_dir / template_name
202 |             if not template_path.exists():
203 |                 return False
204 |                 
205 |             # Load template configuration
206 |             with open(template_path / "template.yaml", "r") as f:
207 |                 template_config = yaml.safe_load(f)
208 |                 
209 |             return template_config.get("features", {}).get("git", False)
210 |             
211 |         except Exception:
212 |             return False
213 |             
214 |     def list_templates(self) -> List[Dict[str, Any]]:
215 |         """Get list of available templates.
216 |         
217 |         Returns:
218 |             List[Dict[str, Any]]: Template information
219 |         """
220 |         templates = []
221 |         
222 |         for template_dir in self.template_dir.iterdir():
223 |             if template_dir.is_dir():
224 |                 config_path = template_dir / "template.yaml"
225 |                 if config_path.exists():
226 |                     with open(config_path, "r") as f:
227 |                         config = yaml.safe_load(f)
228 |                         templates.append(config)
229 |                         
230 |         return templates
```
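
`TemplateManager.apply_template` only reads `project.path`, `project.config.name`, and `project.config.description`, so the driver below uses a `SimpleNamespace` stand-in purely for illustration; a real caller would pass a Project instance. The target path is a placeholder and `basic` is the template seeded by `_initialize_basic_template`.

```python
import asyncio
from types import SimpleNamespace
from mcp_dev_server.project_manager.templates import TemplateManager

async def main() -> None:
    manager = TemplateManager()
    print([t["name"] for t in manager.list_templates()])     # e.g. ['basic']
    demo_project = SimpleNamespace(                           # stand-in, illustration only
        path="/tmp/demo-project",
        config=SimpleNamespace(name="demo", description="Demo project"),
    )
    await manager.apply_template("basic", demo_project)

asyncio.run(main())
```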

--------------------------------------------------------------------------------
/src/mcp_dev_server/project_manager/context.py:
--------------------------------------------------------------------------------

```python
  1 | """Project context management for MCP Development Server."""
  2 | import os
  3 | import json
  4 | import uuid
  5 | from datetime import datetime
  6 | from pathlib import Path
  7 | from typing import Dict, List, Optional, Any
  8 | 
  9 | from pydantic import BaseModel
 10 | from ..utils.config import ProjectConfig
 11 | from ..utils.logging import setup_logging
 12 | from ..utils.errors import ProjectError, FileOperationError
 13 | 
 14 | logger = setup_logging(__name__)
 15 | 
 16 | class ProjectState(BaseModel):
 17 |     """Project state tracking."""
 18 |     initialized: bool = False
 19 |     last_build_time: Optional[datetime] = None
 20 |     last_build_status: Optional[str] = None
 21 |     last_test_time: Optional[datetime] = None
 22 |     last_test_status: Optional[str] = None
 23 |     git_initialized: bool = False
 24 | 
 25 | class ProjectContext:
 26 |     """Manages the context and state of a development project."""
 27 |     
 28 |     def __init__(self, config: ProjectConfig):
 29 |         self.id = str(uuid.uuid4())
 30 |         self.config = config
 31 |         self.path = config.path
 32 |         self.state = ProjectState()
 33 |         self._file_watchers: Dict[str, Any] = {}
 34 |         
 35 |     async def initialize(self) -> None:
 36 |         """Initialize project structure and state."""
 37 |         try:
 38 |             # Create project directory
 39 |             os.makedirs(self.path, exist_ok=True)
 40 |             
 41 |             # Create project structure
 42 |             await self._create_project_structure()
 43 |             
 44 |             # Initialize state file
 45 |             await self._init_state_file()
 46 |             
 47 |             # Set up file watchers
 48 |             await self._setup_file_watchers()
 49 |             
 50 |             self.state.initialized = True
 51 |             logger.info(f"Initialized project {self.config.name} at {self.path}")
 52 |             
 53 |         except Exception as e:
 54 |             raise ProjectError(f"Project initialization failed: {str(e)}")
 55 |             
 56 |     async def _create_project_structure(self) -> None:
 57 |         """Create initial project directory structure."""
 58 |         try:
 59 |             # Create standard directories
 60 |             for dir_name in ['.mcp', 'src', 'tests', 'docs']:
 61 |                 os.makedirs(os.path.join(self.path, dir_name), exist_ok=True)
 62 |                 
 63 |             # Create basic configuration files
 64 |             config_path = os.path.join(self.path, '.mcp', 'project.json')
 65 |             with open(config_path, 'w') as f:
 66 |                 json.dump(self.config.dict(), f, indent=2, default=str)
 67 |                 
 68 |         except Exception as e:
 69 |             raise FileOperationError(f"Failed to create project structure: {str(e)}")
 70 |             
 71 |     async def _init_state_file(self) -> None:
 72 |         """Initialize project state file."""
 73 |         try:
 74 |             state_path = os.path.join(self.path, '.mcp', 'state.json')
 75 |             with open(state_path, 'w') as f:
 76 |                 json.dump(self.state.dict(), f, indent=2, default=str)
 77 |                 
 78 |         except Exception as e:
 79 |             raise FileOperationError(f"Failed to initialize state file: {str(e)}")
 80 |             
 81 |     async def _setup_file_watchers(self) -> None:
 82 |         """Set up file system watchers for project directories."""
 83 |         # To be implemented with file watching functionality
 84 |         pass
 85 |         
 86 |     def get_structure(self) -> Dict[str, Any]:
 87 |         """Get project structure as a dictionary."""
 88 |         structure = {"name": self.config.name, "type": "directory", "children": []}
 89 |         
 90 |         def scan_directory(path: Path, current_dict: Dict[str, Any]) -> None:
 91 |             try:
 92 |                 for item in path.iterdir():
 93 |                     # Skip hidden files and .mcp directory
 94 |                     if item.name.startswith('.'):
 95 |                         continue
 96 |                         
 97 |                     if item.is_file():
 98 |                         current_dict["children"].append({
 99 |                             "name": item.name,
100 |                             "type": "file",
101 |                             "size": item.stat().st_size
102 |                         })
103 |                     elif item.is_dir():
104 |                         dir_dict = {
105 |                             "name": item.name,
106 |                             "type": "directory",
107 |                             "children": []
108 |                         }
109 |                         current_dict["children"].append(dir_dict)
110 |                         scan_directory(item, dir_dict)
111 |                         
112 |             except Exception as e:
113 |                 logger.error(f"Error scanning directory {path}: {str(e)}")
114 |                 
115 |         scan_directory(Path(self.path), structure)
116 |         return structure
117 |         
118 |     def get_file_content(self, relative_path: str) -> str:
119 |         """Get content of a project file."""
120 |         try:
121 |             file_path = os.path.join(self.path, relative_path)
122 |             if not os.path.exists(file_path):
123 |                 raise FileOperationError(f"File not found: {relative_path}")
124 |                 
125 |             # Basic security check
126 |             if not os.path.normpath(file_path).startswith(str(self.path)):
127 |                 raise FileOperationError("Invalid file path")
128 |                 
129 |             with open(file_path, 'r') as f:
130 |                 return f.read()
131 |                 
132 |         except Exception as e:
133 |             raise FileOperationError(f"Failed to read file {relative_path}: {str(e)}")
134 |             
135 |     async def update_file(self, relative_path: str, content: str) -> None:
136 |         """Update content of a project file."""
137 |         try:
138 |             file_path = os.path.join(self.path, relative_path)
139 |             
140 |             # Security check before touching the filesystem
141 |             if not os.path.normpath(file_path).startswith(str(self.path)):
142 |                 raise FileOperationError("Invalid file path")
143 | 
144 |             # Create directories if needed
145 |             os.makedirs(os.path.dirname(file_path), exist_ok=True)
146 |                 
147 |             with open(file_path, 'w') as f:
148 |                 f.write(content)
149 |                 
150 |             logger.info(f"Updated file: {relative_path}")
151 |             
152 |         except Exception as e:
153 |             raise FileOperationError(f"Failed to update file {relative_path}: {str(e)}")
154 |             
155 |     async def delete_file(self, relative_path: str) -> None:
156 |         """Delete a project file."""
157 |         try:
158 |             file_path = os.path.join(self.path, relative_path)
159 |             
160 |             # Security check
161 |             if not os.path.normpath(file_path).startswith(str(self.path)):
162 |                 raise FileOperationError("Invalid file path")
163 |                 
164 |             if os.path.exists(file_path):
165 |                 os.remove(file_path)
166 |                 logger.info(f"Deleted file: {relative_path}")
167 |             else:
168 |                 logger.warning(f"File not found: {relative_path}")
169 |                 
170 |         except Exception as e:
171 |             raise FileOperationError(f"Failed to delete file {relative_path}: {str(e)}")
172 |             
173 |     async def update_state(self, **kwargs) -> None:
174 |         """Update project state."""
175 |         try:
176 |             # Update state object
177 |             for key, value in kwargs.items():
178 |                 if hasattr(self.state, key):
179 |                     setattr(self.state, key, value)
180 |                     
181 |             # Save to state file
182 |             state_path = os.path.join(self.path, '.mcp', 'state.json')
183 |             with open(state_path, 'w') as f:
184 |                 json.dump(self.state.dict(), f, indent=2, default=str)
185 |                 
186 |             logger.info(f"Updated project state: {kwargs}")
187 |             
188 |         except Exception as e:
189 |             raise ProjectError(f"Failed to update project state: {str(e)}")
190 |             
191 |     async def cleanup(self) -> None:
192 |         """Clean up project resources."""
193 |         try:
194 |             # Stop file watchers
195 |             for watcher in self._file_watchers.values():
196 |                 await watcher.stop()
197 |                 
198 |             logger.info(f"Cleaned up project resources for {self.config.name}")
199 |             
200 |         except Exception as e:
201 |             logger.error(f"Error during project cleanup: {str(e)}")
```
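
`ProjectContext` owns the on-disk `.mcp` layout and the state file. The lifecycle sketch below assumes the `ProjectConfig` model in `utils/config.py` accepts `name`, `description`, and `path` keyword arguments, which is how this class reads them; the path is a placeholder.

```python
import asyncio
from mcp_dev_server.utils.config import ProjectConfig
from mcp_dev_server.project_manager.context import ProjectContext

async def main() -> None:
    config = ProjectConfig(name="demo", description="Demo project", path="/tmp/demo-project")
    context = ProjectContext(config)
    await context.initialize()                         # creates .mcp/, src/, tests/, docs/
    await context.update_file("src/main.py", "print('hello')\n")
    print(context.get_file_content("src/main.py"))
    await context.update_state(last_build_status="success")
    await context.cleanup()

asyncio.run(main())
```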

--------------------------------------------------------------------------------
/src/mcp_dev_server/docker/manager.py:
--------------------------------------------------------------------------------

```python
  1 | """Docker integration for MCP Development Server."""
  2 | import asyncio
  3 | import docker
  4 | from typing import Dict, Any, Optional, List
  5 | from pathlib import Path
  6 | import tempfile
  7 | import yaml
  8 | import jinja2
  9 | 
 10 | from ..utils.logging import setup_logging
 11 | from ..utils.errors import MCPDevServerError
 12 | 
 13 | logger = setup_logging(__name__)
 14 | 
 15 | class DockerManager:
 16 |     """Manages Docker containers and environments."""
 17 |     
 18 |     def __init__(self):
 19 |         """Initialize Docker manager."""
 20 |         self.client = docker.from_env()
 21 |         self.active_containers: Dict[str, Any] = {}
 22 |         self._setup_template_environment()
 23 |         
 24 |     def _setup_template_environment(self):
 25 |         """Set up Jinja2 template environment."""
 26 |         template_dir = Path(__file__).parent / "templates"
 27 |         self.template_env = jinja2.Environment(
 28 |             loader=jinja2.FileSystemLoader(str(template_dir)),
 29 |             autoescape=jinja2.select_autoescape()
 30 |         )
 31 |         
 32 |     async def create_environment(
 33 |         self,
 34 |         name: str,
 35 |         image: str,
 36 |         project_path: str,
 37 |         env_vars: Optional[Dict[str, str]] = None,
 38 |         ports: Optional[Dict[str, str]] = None,
 39 |         volumes: Optional[Dict[str, Dict[str, str]]] = None
 40 |     ) -> str:
 41 |         """Create a new Docker environment.
 42 |         
 43 |         Args:
 44 |             name: Environment name
 45 |             image: Docker image name
 46 |             project_path: Project directory path
 47 |             env_vars: Environment variables
 48 |             ports: Port mappings
 49 |             volumes: Additional volume mappings
 50 |             
 51 |         Returns:
 52 |             str: Environment ID
 53 |         """
 54 |         try:
 55 |             # Ensure image is available
 56 |             try:
 57 |                 self.client.images.get(image)
 58 |             except docker.errors.ImageNotFound:
 59 |                 logger.info(f"Pulling image: {image}")
 60 |                 self.client.images.pull(image)
 61 |                 
 62 |             # Setup default volumes
 63 |             container_volumes = {
 64 |                 project_path: {
 65 |                     "bind": "/workspace",
 66 |                     "mode": "rw"
 67 |                 }
 68 |             }
 69 |             if volumes:
 70 |                 container_volumes.update(volumes)
 71 |                 
 72 |             # Create container
 73 |             container = self.client.containers.run(
 74 |                 image=image,
 75 |                 name=f"mcp-env-{name}",
 76 |                 detach=True,
 77 |                 volumes=container_volumes,
 78 |                 environment=env_vars or {},
 79 |                 ports=ports or {},
 80 |                 working_dir="/workspace",
 81 |                 remove=True
 82 |             )
 83 |             
 84 |             env_id = container.id
 85 |             self.active_containers[env_id] = {
 86 |                 "name": name,
 87 |                 "container": container,
 88 |                 "status": "running"
 89 |             }
 90 |             
 91 |             logger.info(f"Created environment: {name} ({env_id})")
 92 |             return env_id
 93 |             
 94 |         except Exception as e:
 95 |             logger.error(f"Failed to create environment: {str(e)}")
 96 |             raise MCPDevServerError(f"Environment creation failed: {str(e)}")
 97 |             
 98 |     async def generate_dockerfile(
 99 |         self,
100 |         template: str,
101 |         variables: Dict[str, Any],
102 |         output_path: Optional[str] = None
103 |     ) -> str:
104 |         """Generate Dockerfile from template.
105 |         
106 |         Args:
107 |             template: Template name
108 |             variables: Template variables
109 |             output_path: Optional path to save Dockerfile
110 |             
111 |         Returns:
112 |             str: Generated Dockerfile content
113 |         """
114 |         try:
115 |             dockerfile_template = self.template_env.get_template(f"{template}.dockerfile")
116 |             content = dockerfile_template.render(**variables)
117 |             
118 |             if output_path:
119 |                 with open(output_path, "w") as f:
120 |                     f.write(content)
121 |                     
122 |             return content
123 |             
124 |         except Exception as e:
125 |             logger.error(f"Failed to generate Dockerfile: {str(e)}")
126 |             raise MCPDevServerError(f"Dockerfile generation failed: {str(e)}")
127 |             
128 |     async def create_compose_config(
129 |         self,
130 |         name: str,
131 |         services: Dict[str, Any],
132 |         output_path: Optional[str] = None
133 |     ) -> str:
134 |         """Create Docker Compose configuration.
135 |         
136 |         Args:
137 |             name: Project name
138 |             services: Service configurations
139 |             output_path: Optional path to save docker-compose.yml
140 |             
141 |         Returns:
142 |             str: Generated docker-compose.yml content
143 |         """
144 |         try:
145 |             compose_config = {
146 |                 "version": "3.8",
147 |                 "services": services,
148 |                 "networks": {
149 |                     "mcp-network": {
150 |                         "driver": "bridge"
151 |                     }
152 |                 }
153 |             }
154 |             
155 |             content = yaml.dump(compose_config, default_flow_style=False)
156 |             
157 |             if output_path:
158 |                 with open(output_path, "w") as f:
159 |                     f.write(content)
160 |                     
161 |             return content
162 |             
163 |         except Exception as e:
164 |             logger.error(f"Failed to create Docker Compose config: {str(e)}")
165 |             raise MCPDevServerError(f"Compose config creation failed: {str(e)}")
166 |             
167 |     async def execute_command(
168 |         self,
169 |         env_id: str,
170 |         command: str,
171 |         workdir: Optional[str] = None,
172 |         stream: bool = False
173 |     ) -> Dict[str, Any]:
174 |         """Execute command in Docker environment.
175 |         
176 |         Args:
177 |             env_id: Environment ID
178 |             command: Command to execute
179 |             workdir: Working directory
180 |             stream: Stream output in real-time
181 |             
182 |         Returns:
183 |             Dict[str, Any]: Command execution results
184 |         """
185 |         try:
186 |             if env_id not in self.active_containers:
187 |                 raise MCPDevServerError(f"Environment not found: {env_id}")
188 |                 
189 |             container = self.active_containers[env_id]["container"]
 190 |             exec_result = container.exec_run(
 191 |                 command,
 192 |                 workdir=workdir or "/workspace",
 193 |                 stream=stream
 194 |             )
 195 |             
 196 |             if stream:
 197 |                 # Streaming exec: output is a generator of byte chunks and
 198 |                 # exit_code may be None in docker-py's ExecResult.
 199 |                 output = []
 200 |                 for line in exec_result.output:
 201 |                     output.append(line.decode().strip())
 202 |                     
 203 |                 return {
 204 |                     "exit_code": exec_result.exit_code,
 205 |                     "output": output
 206 |                 }
 207 |             else:
 208 |                 # Buffered exec: output is a single bytes object; decode and
 209 |                 # split it so both branches return the same shape.
 210 |                 output = exec_result.output.decode().splitlines()
 211 |                 
 212 |                 return {
 213 |                     "exit_code": exec_result.exit_code,
 214 |                     "output": output
 215 |                 }
216 |                 
217 |         except Exception as e:
218 |             logger.error(f"Command execution failed: {str(e)}")
219 |             raise MCPDevServerError(f"Command execution failed: {str(e)}")
220 |             
221 |     async def cleanup(self):
222 |         """Clean up Docker resources."""
223 |         try:
224 |             for env_id in list(self.active_containers.keys()):
225 |                 await self.destroy_environment(env_id)
226 |                 
227 |         except Exception as e:
228 |             logger.error(f"Docker cleanup failed: {str(e)}")
229 |             raise MCPDevServerError(f"Docker cleanup failed: {str(e)}")
230 |             
231 |     def get_logs(self, env_id: str, tail: Optional[int] = None) -> str:
232 |         """Get container logs.
233 |         
234 |         Args:
235 |             env_id: Environment ID
236 |             tail: Number of lines to return from the end
237 |             
238 |         Returns:
239 |             str: Container logs
240 |         """
241 |         try:
242 |             if env_id not in self.active_containers:
243 |                 raise MCPDevServerError(f"Environment not found: {env_id}")
244 |                 
245 |             container = self.active_containers[env_id]["container"]
246 |             return container.logs(tail=tail).decode()
247 |             
248 |         except Exception as e:
249 |             logger.error(f"Failed to get logs: {str(e)}")
250 |             raise MCPDevServerError(f"Log retrieval failed: {str(e)}")
251 | 
```
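
For orientation, here is a minimal usage sketch of the Dockerfile and Compose helpers above. It assumes the class is the `DockerManager` exposed by `mcp_dev_server.docker.manager` (as imported elsewhere in this repository) and that it can be constructed without arguments; both points are assumptions, and the template variables shown are illustrative.

```python
# Hypothetical usage sketch: assumes DockerManager() needs no constructor
# arguments and that a "python" Dockerfile template is bundled.
import asyncio

from mcp_dev_server.docker.manager import DockerManager


async def main() -> None:
    manager = DockerManager()

    # Render a Dockerfile from a bundled template (variable names are assumed).
    dockerfile = await manager.generate_dockerfile(
        template="python",
        variables={"python_version": "3.12"},
    )
    print(dockerfile)

    # Build a docker-compose.yml with one service attached to the default network.
    compose = await manager.create_compose_config(
        name="demo",
        services={
            "app": {
                "image": "python:3.12-slim",
                "command": "python -m http.server",
                "networks": ["mcp-network"],
            }
        },
    )
    print(compose)


if __name__ == "__main__":
    asyncio.run(main())
```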

--------------------------------------------------------------------------------
/src/mcp_dev_server/project_manager/project.py:
--------------------------------------------------------------------------------

```python
  1 | """Project representation and management."""
  2 | import os, uuid  # os is used by analyze_code()
  3 | from typing import Dict, Any, Optional, List
  4 | from pathlib import Path
  5 | import git
  6 | from pydantic import BaseModel
  7 | 
  8 | class ProjectConfig(BaseModel):
  9 |     """Project configuration model."""
 10 |     
 11 |     name: str
 12 |     template: str
 13 |     description: str = ""
 14 |     version: str = "0.1.0"
 15 |     
 16 | class ProjectState:
 17 |     """Project state tracking."""
 18 |     
 19 |     def __init__(self):
 20 |         """Initialize project state."""
 21 |         self.git_initialized: bool = False
 22 |         self.last_build: Optional[Dict[str, Any]] = None
 23 |         self.last_test_run: Optional[Dict[str, Any]] = None
 24 |         self.active_environments: List[str] = []
 25 |         
 26 | class Project:
 27 |     """Project instance representation."""
 28 |     
 29 |     def __init__(self, path: str, config: ProjectConfig, state: ProjectState):
 30 |         """Initialize project instance.
 31 |         
 32 |         Args:
 33 |             path: Project directory path
 34 |             config: Project configuration
 35 |             state: Project state
 36 |         """
 37 |         self.id = str(uuid.uuid4())
 38 |         self.path = path
 39 |         self.config = config
 40 |         self.state = state
 41 |         
 42 |     def get_structure(self) -> Dict[str, Any]:
 43 |         """Get project directory structure.
 44 |         
 45 |         Returns:
 46 |             Dict[str, Any]: Directory structure
 47 |         """
 48 |         def scan_dir(path: Path) -> Dict[str, Any]:
 49 |             structure = {}
 50 |             
 51 |             for item in path.iterdir():
 52 |                 if item.name.startswith("."):
 53 |                     continue
 54 |                     
 55 |                 if item.is_file():
 56 |                     structure[item.name] = "file"
 57 |                 elif item.is_dir():
 58 |                     structure[item.name] = scan_dir(item)
 59 |                     
 60 |             return structure
 61 |             
 62 |         return scan_dir(Path(self.path))
 63 |         
 64 |     def get_git_status(self) -> Dict[str, Any]:
 65 |         """Get Git repository status.
 66 |         
 67 |         Returns:
 68 |             Dict[str, Any]: Git status information
 69 |         """
 70 |         if not self.state.git_initialized:
 71 |             return {"initialized": False}
 72 |             
 73 |         try:
 74 |             repo = git.Repo(self.path)
 75 |             return {
 76 |                 "initialized": True,
 77 |                 "branch": repo.active_branch.name,
 78 |                 "changed_files": [item.a_path for item in repo.index.diff(None)],
 79 |                 "untracked_files": repo.untracked_files,
 80 |                 "ahead": sum(1 for c in repo.iter_commits("origin/main..main")),
 81 |                 "behind": sum(1 for c in repo.iter_commits("main..origin/main"))
 82 |             }
 83 |         except Exception as e:
 84 |             return {
 85 |                 "initialized": False,
 86 |                 "error": str(e)
 87 |             }
 88 |             
 89 |     async def create_git_commit(self, message: str, files: Optional[List[str]] = None) -> Dict[str, Any]:
 90 |         """Create a Git commit.
 91 |         
 92 |         Args:
 93 |             message: Commit message
 94 |             files: Optional list of files to commit
 95 |             
 96 |         Returns:
 97 |             Dict[str, Any]: Commit information
 98 |         """
 99 |         if not self.state.git_initialized:
100 |             raise ValueError("Git is not initialized for this project")
101 |             
102 |         try:
103 |             repo = git.Repo(self.path)
104 |             
105 |             if files:
106 |                 repo.index.add(files)
107 |             else:
 108 |                 repo.index.add(["*"])  # index.add expects an iterable of path specs
109 |                 
110 |             commit = repo.index.commit(message)
111 |             
112 |             return {
113 |                 "commit_id": commit.hexsha,
114 |                 "message": message,
115 |                 "author": str(commit.author),
116 |                 "files": [item.a_path for item in commit.stats.files]
117 |             }
118 |         except Exception as e:
119 |             raise ValueError(f"Failed to create commit: {str(e)}")
120 |             
121 |     def get_dependencies(self) -> Dict[str, Any]:
122 |         """Get project dependencies.
123 |         
124 |         Returns:
125 |             Dict[str, Any]: Dependency information
126 |         """
127 |         dependencies = {}
128 |         
129 |         # Check Python dependencies
130 |         req_file = Path(self.path) / "requirements.txt"
131 |         if req_file.exists():
132 |             with open(req_file, "r") as f:
133 |                 dependencies["python"] = f.read().splitlines()
134 |                 
135 |         # Check Node.js dependencies
136 |         package_file = Path(self.path) / "package.json"
137 |         if package_file.exists():
138 |             import json
139 |             with open(package_file, "r") as f:
140 |                 package_data = json.load(f)
141 |                 dependencies["node"] = {
142 |                     "dependencies": package_data.get("dependencies", {}),
143 |                     "devDependencies": package_data.get("devDependencies", {})
144 |                 }
145 |                 
146 |         return dependencies
147 |         
148 |     def analyze_code(self) -> Dict[str, Any]:
149 |         """Analyze project code.
150 |         
151 |         Returns:
152 |             Dict[str, Any]: Code analysis results
153 |         """
154 |         analysis = {
155 |             "files": {},
156 |             "summary": {
157 |                 "total_files": 0,
158 |                 "total_lines": 0,
159 |                 "code_lines": 0,
160 |                 "comment_lines": 0,
161 |                 "blank_lines": 0
162 |             }
163 |         }
164 |         
165 |         def analyze_file(path: Path) -> Dict[str, Any]:
166 |             with open(path, "r", encoding="utf-8") as f:
167 |                 lines = f.readlines()
168 |                 
169 |             total_lines = len(lines)
170 |             blank_lines = sum(1 for line in lines if not line.strip())
171 |             comment_lines = sum(1 for line in lines if line.strip().startswith("#"))
172 |             code_lines = total_lines - blank_lines - comment_lines
173 |             
174 |             return {
175 |                 "total_lines": total_lines,
176 |                 "code_lines": code_lines,
177 |                 "comment_lines": comment_lines,
178 |                 "blank_lines": blank_lines
179 |             }
180 |             
181 |         for root, _, files in os.walk(self.path):
182 |             for file in files:
183 |                 if file.endswith(".py"):
184 |                     file_path = Path(root) / file
185 |                     try:
186 |                         file_analysis = analyze_file(file_path)
187 |                         relative_path = str(file_path.relative_to(self.path))
188 |                         analysis["files"][relative_path] = file_analysis
189 |                         
190 |                         # Update summary
191 |                         for key in ["total_lines", "code_lines", "comment_lines", "blank_lines"]:
192 |                             analysis["summary"][key] += file_analysis[key]
193 |                             
194 |                         analysis["summary"]["total_files"] += 1
195 |                     except Exception:
196 |                         continue
197 |                         
198 |         return analysis
199 |         
200 |     def get_test_coverage(self) -> Dict[str, Any]:
201 |         """Get test coverage information.
202 |         
203 |         Returns:
204 |             Dict[str, Any]: Test coverage data
205 |         """
206 |         try:
207 |             import coverage
208 |             
209 |             cov = coverage.Coverage()
210 |             cov.load()
211 |             
 212 |             return {
 213 |                 "total_coverage": cov.report(),
 214 |                 "measured_files": list(cov.get_data().measured_files()),
 215 |                 "branch_coverage": cov.get_option("run:branch"),
 216 |                 "excluded_lines": cov.get_exclude_list()
 217 |             }
218 |         except Exception:
219 |             return {
220 |                 "error": "Coverage data not available"
221 |             }
222 |             
223 |     def get_ci_config(self) -> Dict[str, Any]:
224 |         """Get CI configuration.
225 |         
226 |         Returns:
227 |             Dict[str, Any]: CI configuration data
228 |         """
229 |         ci_configs = {}
230 |         
231 |         # Check GitHub Actions
232 |         github_dir = Path(self.path) / ".github" / "workflows"
233 |         if github_dir.exists():
234 |             ci_configs["github_actions"] = []
235 |             for workflow in github_dir.glob("*.yml"):
236 |                 with open(workflow, "r") as f:
237 |                     ci_configs["github_actions"].append({
238 |                         "name": workflow.stem,
239 |                         "config": f.read()
240 |                     })
241 |                     
242 |         # Check GitLab CI
243 |         gitlab_file = Path(self.path) / ".gitlab-ci.yml"
244 |         if gitlab_file.exists():
245 |             with open(gitlab_file, "r") as f:
246 |                 ci_configs["gitlab"] = f.read()
247 |                 
248 |         return ci_configs
249 |         
250 |     async def cleanup(self):
251 |         """Clean up project resources."""
252 |         # Implementation will depend on what resources need cleanup
253 |         pass
```
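
A short usage sketch of the `Project` API above, assuming the package is importable under the path shown in the directory tree; the project name, template, and inspected directory are placeholders.

```python
# Minimal sketch: inspect the current directory with the Project class above.
from pathlib import Path

from mcp_dev_server.project_manager.project import (
    Project,
    ProjectConfig,
    ProjectState,
)

config = ProjectConfig(name="demo", template="basic", description="Example project")
state = ProjectState()  # git_initialized defaults to False
project = Project(path=str(Path.cwd()), config=config, state=state)

print(project.get_structure())     # nested dict of files and directories
print(project.get_dependencies())  # requirements.txt / package.json, if present
print(project.get_git_status())    # {"initialized": False} until Git is set up
```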

--------------------------------------------------------------------------------
/src/mcp_dev_server/core/server.py:
--------------------------------------------------------------------------------

```python
  1 | import asyncio
  2 | import json
  3 | import websockets
  4 | from typing import Callable, Any, Dict, Optional
  5 | import logging
  6 | import traceback
  7 | 
  8 | logger = logging.getLogger(__name__)
  9 | 
 10 | class Server:
 11 |     """Core server class implementing JSON-RPC 2.0 protocol."""
 12 |     
 13 |     def __init__(self, name: str):
 14 |         """Initialize the server.
 15 |         
 16 |         Args:
 17 |             name: Server name
 18 |         """
 19 |         self.name = name
 20 |         self.websocket = None
 21 |         self.input_request_handlers = {}
 22 |         self.input_response_handlers = {}
 23 |         self.initialized = False
 24 |         self.capabilities = {}
 25 |         
 26 |     async def start(self, host: str = "localhost", port: int = 8000):
 27 |         """Start the WebSocket server.
 28 |         
 29 |         Args:
 30 |             host: Host to bind to
 31 |             port: Port to listen on
 32 |         """
 33 |         async def handler(websocket, path):
 34 |             self.websocket = websocket
 35 |             try:
 36 |                 logger.info(f"New WebSocket connection from {websocket.remote_address}")
 37 |                 async for message in websocket:
  38 |                     data, response = None, None
 39 |                     try:
 40 |                         # Parse JSON-RPC message
 41 |                         data = json.loads(message)
 42 |                         if not isinstance(data, dict):
 43 |                             raise ValueError("Invalid JSON-RPC message")
 44 |                             
 45 |                         # Handle message
 46 |                         response = await self.handle_jsonrpc(data)
 47 |                         
 48 |                     except json.JSONDecodeError as e:
 49 |                         logger.error(f"JSON decode error: {str(e)}")
 50 |                         response = {
 51 |                             "jsonrpc": "2.0",
 52 |                             "error": {
 53 |                                 "code": -32700,
 54 |                                 "message": "Parse error",
 55 |                                 "data": str(e)
 56 |                             },
 57 |                             "id": None
 58 |                         }
 59 |                         
 60 |                     except Exception as e:
 61 |                         logger.error(f"Error handling message: {str(e)}", exc_info=True)
 62 |                         response = {
 63 |                             "jsonrpc": "2.0",
 64 |                             "error": {
 65 |                                 "code": -32603,
 66 |                                 "message": "Internal error",
 67 |                                 "data": {
 68 |                                     "error": str(e),
 69 |                                     "traceback": traceback.format_exc()
 70 |                                 }
 71 |                             },
  72 |                             "id": data.get("id") if isinstance(data, dict) else None
 73 |                         }
 74 | 
 75 |                     # Ensure we always send a properly formatted JSON-RPC response
 76 |                     if response:
 77 |                         try:
 78 |                             if not isinstance(response, dict):
 79 |                                 response = {"result": response}
 80 |                             
 81 |                             response["jsonrpc"] = "2.0"
 82 |                             if isinstance(data, dict) and "id" in data:
 83 |                                 response["id"] = data["id"]
 84 |                                 
 85 |                             # Validate JSON before sending
 86 |                             response_str = json.dumps(response)
 87 |                             await websocket.send(response_str)
 88 |                             
 89 |                         except Exception as e:
 90 |                             logger.error(f"Error sending response: {str(e)}", exc_info=True)
 91 |                             error_response = {
 92 |                                 "jsonrpc": "2.0",
 93 |                                 "error": {
 94 |                                     "code": -32603,
 95 |                                     "message": "Error sending response",
 96 |                                     "data": str(e)
 97 |                                 },
 98 |                                 "id": data.get("id") if isinstance(data, dict) else None
 99 |                             }
100 |                             await websocket.send(json.dumps(error_response))
101 |                         
102 |             except websockets.exceptions.ConnectionClosed:
103 |                 logger.info("WebSocket connection closed")
104 |             finally:
105 |                 self.websocket = None
106 |                 
107 |         try:
108 |             self.server = await websockets.serve(
109 |                 handler,
110 |                 host,
111 |                 port,
112 |                 ping_interval=20,
113 |                 ping_timeout=20
114 |             )
115 |             logger.info(f"Server started on ws://{host}:{port}")
116 |         except Exception as e:
117 |             logger.error(f"Failed to start server: {str(e)}", exc_info=True)
118 |             raise
119 |         
120 |     async def handle_jsonrpc(self, data: Dict) -> Optional[Dict]:
121 |         """Handle JSON-RPC message.
122 |         
123 |         Args:
124 |             data: Parsed JSON-RPC message
125 |             
126 |         Returns:
127 |             Optional response message
128 |         """
129 |         try:
130 |             method = data.get("method")
131 |             params = data.get("params", {})
132 |             
133 |             logger.info(f"Handling method: {method} with params: {params}")
134 |             
135 |             if method == "initialize":
136 |                 self.capabilities = params.get("capabilities", {})
137 |                 self.initialized = True
138 |                 return {
139 |                     "result": {
140 |                         "capabilities": self.capabilities
141 |                     }
142 |                 }
143 |                 
144 |             if not self.initialized:
145 |                 return {
146 |                     "error": {
147 |                         "code": -32002,
148 |                         "message": "Server not initialized"
149 |                     }
150 |                 }
151 |                 
152 |             if method == "input/request":
153 |                 handler = self.input_request_handlers.get("input_request")
154 |                 if handler:
155 |                     try:
156 |                         result = await handler(
157 |                             params.get("type", ""),
158 |                             params.get("context", {})
159 |                         )
160 |                         return {"result": result}
161 |                     except Exception as e:
162 |                         logger.error(f"Error in input request handler: {str(e)}", exc_info=True)
163 |                         return {
164 |                             "error": {
165 |                                 "code": -32000,
166 |                                 "message": str(e),
167 |                                 "data": {
168 |                                     "traceback": traceback.format_exc()
169 |                                 }
170 |                             }
171 |                         }
172 |                         
173 |             elif method == "input/response":
174 |                 handler = self.input_response_handlers.get("input_response")
175 |                 if handler:
176 |                     try:
177 |                         await handler(params)
178 |                         return {"result": None}
179 |                     except Exception as e:
180 |                         logger.error(f"Error in input response handler: {str(e)}", exc_info=True)
181 |                         return {
182 |                             "error": {
183 |                                 "code": -32000,
184 |                                 "message": str(e),
185 |                                 "data": {
186 |                                     "traceback": traceback.format_exc()
187 |                                 }
188 |                             }
189 |                         }
190 |                         
191 |             return {
192 |                 "error": {
193 |                     "code": -32601,
194 |                     "message": f"Method not found: {method}"
195 |                 }
196 |             }
197 |             
198 |         except Exception as e:
199 |             logger.error(f"Error in handle_jsonrpc: {str(e)}", exc_info=True)
200 |             return {
201 |                 "error": {
202 |                     "code": -32603,
203 |                     "message": "Internal error",
204 |                     "data": {
205 |                         "error": str(e),
206 |                         "traceback": traceback.format_exc()
207 |                     }
208 |                 }
209 |             }
210 |                 
211 |     def request_input(self) -> Callable:
212 |         """Decorator for input request handlers."""
213 |         def decorator(func: Callable) -> Callable:
214 |             self.input_request_handlers["input_request"] = func
215 |             return func
216 |         return decorator
217 |         
218 |     def handle_input(self) -> Callable:
219 |         """Decorator for input response handlers."""
220 |         def decorator(func: Callable) -> Callable:
221 |             self.input_response_handlers["input_response"] = func
222 |             return func
223 |         return decorator
```
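
A sketch of wiring up the server above: register input handlers through the provided decorators and start listening. The host, port, and handler bodies are illustrative only.

```python
# Minimal sketch: register handlers and run the WebSocket JSON-RPC server.
import asyncio

from mcp_dev_server.core.server import Server

server = Server("demo-server")


@server.request_input()
async def on_input_request(input_type: str, context: dict) -> dict:
    # A real handler would prompt the user; this one just echoes the request.
    return {"type": input_type, "context": context}


@server.handle_input()
async def on_input_response(params: dict) -> None:
    print("received input response:", params)


async def main() -> None:
    await server.start(host="localhost", port=8000)
    await asyncio.Future()  # keep the process alive until cancelled


if __name__ == "__main__":
    asyncio.run(main())
```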

--------------------------------------------------------------------------------
/src/mcp_dev_server/project_manager/base_project.py:
--------------------------------------------------------------------------------

```python
  1 | """Base project class definition."""
  2 | import subprocess
  3 | import uuid
  4 | import xml.etree.ElementTree as ET
  5 | import json
  6 | import tomli
  7 | from pathlib import Path
  8 | from typing import Dict, Any, Optional, List
  9 | import git
 10 | 
 11 | from .project_types import ProjectType, BuildSystem
 12 | from ..utils.errors import ProjectError
 13 | from ..utils.logging import setup_logging
 14 | 
 15 | logger = setup_logging(__name__)
 16 | 
 17 | class Project:
 18 |     """Base project class."""
 19 |     
 20 |     def __init__(self, path: str, config: Dict[str, Any], project_type: ProjectType):
 21 |         """Initialize project instance."""
 22 |         self.id = str(uuid.uuid4())
 23 |         self.path = path
 24 |         self.config = config
 25 |         self.project_type = project_type
 26 |         self.build_system = BuildSystem(config["build_system"])
 27 |         
 28 |     def get_dependencies(self) -> Dict[str, Any]:
 29 |         """Get project dependencies."""
 30 |         if self.build_system == BuildSystem.MAVEN:
 31 |             return self._get_maven_dependencies()
 32 |         elif self.build_system == BuildSystem.GRADLE:
 33 |             return self._get_gradle_dependencies()
 34 |         elif self.build_system in [BuildSystem.NPM, BuildSystem.YARN]:
 35 |             return self._get_node_dependencies()
 36 |         elif self.build_system == BuildSystem.POETRY:
 37 |             return self._get_poetry_dependencies()
 38 |         elif self.build_system == BuildSystem.DOTNET:
 39 |             return self._get_dotnet_dependencies()
 40 |         elif self.build_system == BuildSystem.GO:
 41 |             return self._get_go_dependencies()
 42 |         else:
 43 |             return {}
 44 | 
 45 |     def _get_maven_dependencies(self) -> Dict[str, Any]:
 46 |         """Get Maven project dependencies."""
 47 |         pom_path = Path(self.path) / "pom.xml"
 48 |         if not pom_path.exists():
 49 |             return {}
 50 | 
 51 |         try:
 52 |             tree = ET.parse(pom_path)
 53 |             root = tree.getroot()
 54 |             ns = {'maven': 'http://maven.apache.org/POM/4.0.0'}
 55 |             
 56 |             dependencies = []
 57 |             for dep in root.findall('.//maven:dependency', ns):
 58 |                 dependencies.append({
 59 |                     'groupId': dep.find('maven:groupId', ns).text,
 60 |                     'artifactId': dep.find('maven:artifactId', ns).text,
 61 |                     'version': dep.find('maven:version', ns).text if dep.find('maven:version', ns) is not None else None,
 62 |                     'scope': dep.find('maven:scope', ns).text if dep.find('maven:scope', ns) is not None else 'compile'
 63 |                 })
 64 |                 
 65 |             return {'maven': dependencies}
 66 |         except Exception as e:
 67 |             logger.error(f"Error parsing Maven dependencies: {e}")
 68 |             return {}
 69 | 
 70 |     def _get_node_dependencies(self) -> Dict[str, Any]:
 71 |         """Get Node.js project dependencies."""
 72 |         package_path = Path(self.path) / "package.json"
 73 |         if not package_path.exists():
 74 |             return {}
 75 | 
 76 |         try:
 77 |             with open(package_path) as f:
 78 |                 package_data = json.load(f)
 79 |                 return {
 80 |                     'dependencies': package_data.get('dependencies', {}),
 81 |                     'devDependencies': package_data.get('devDependencies', {})
 82 |                 }
 83 |         except Exception as e:
 84 |             logger.error(f"Error parsing Node.js dependencies: {e}")
 85 |             return {}
 86 | 
 87 |     def _get_poetry_dependencies(self) -> Dict[str, Any]:
 88 |         """Get Poetry project dependencies."""
 89 |         pyproject_path = Path(self.path) / "pyproject.toml"
 90 |         if not pyproject_path.exists():
 91 |             return {}
 92 | 
 93 |         try:
 94 |             with open(pyproject_path, "rb") as f:
 95 |                 pyproject_data = tomli.load(f)
 96 |                 tool_poetry = pyproject_data.get('tool', {}).get('poetry', {})
 97 |                 return {
 98 |                     'dependencies': tool_poetry.get('dependencies', {}),
 99 |                     'dev-dependencies': tool_poetry.get('dev-dependencies', {})
100 |                 }
101 |         except Exception as e:
102 |             logger.error(f"Error parsing Poetry dependencies: {e}")
103 |             return {}
104 | 
105 |     def _get_dotnet_dependencies(self) -> Dict[str, Any]:
106 |         """Get .NET project dependencies."""
107 |         try:
108 |             # Find all .csproj files
109 |             csproj_files = list(Path(self.path).glob("**/*.csproj"))
110 |             dependencies = {}
111 |             
112 |             for csproj in csproj_files:
113 |                 tree = ET.parse(csproj)
114 |                 root = tree.getroot()
115 |                 project_deps = []
116 |                 
117 |                 for item_group in root.findall('.//PackageReference'):
118 |                     project_deps.append({
119 |                         'Include': item_group.get('Include'),
120 |                         'Version': item_group.get('Version')
121 |                     })
122 |                     
123 |                 dependencies[csproj.stem] = project_deps
124 |                 
125 |             return dependencies
126 |         except Exception as e:
127 |             logger.error(f"Error parsing .NET dependencies: {e}")
128 |             return {}
129 | 
130 |     def _get_go_dependencies(self) -> Dict[str, Any]:
131 |         """Get Go project dependencies."""
132 |         go_mod_path = Path(self.path) / "go.mod"
133 |         if not go_mod_path.exists():
134 |             return {}
135 | 
136 |         try:
137 |             result = subprocess.run(
138 |                 ['go', 'list', '-m', 'all'],
139 |                 capture_output=True,
140 |                 text=True,
141 |                 cwd=self.path
142 |             )
 143 |             if result.returncode != 0:
 144 |                 return {}
 145 |                 
 146 |             dependencies = []
 147 |             for line in result.stdout.splitlines()[1:]:  # skip the module itself
 148 |                 parts = line.split()
 149 |                 if len(parts) >= 2:
 150 |                     dependencies.append({'module': parts[0], 'version': parts[1]})
 151 |                     
 152 |             return {'modules': dependencies}
153 |         except Exception as e:
154 |             logger.error(f"Error parsing Go dependencies: {e}")
155 |             return {}
156 | 
157 |     async def update_dependencies(self, options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
158 |         """Update project dependencies."""
159 |         if self.build_system == BuildSystem.MAVEN:
160 |             cmd = "mvn versions:use-latest-versions"
161 |         elif self.build_system == BuildSystem.GRADLE:
162 |             cmd = "./gradlew dependencyUpdates"
163 |         elif self.build_system == BuildSystem.NPM:
164 |             cmd = "npm update"
165 |         elif self.build_system == BuildSystem.YARN:
166 |             cmd = "yarn upgrade"
167 |         elif self.build_system == BuildSystem.POETRY:
168 |             cmd = "poetry update"
169 |         elif self.build_system == BuildSystem.DOTNET:
170 |             cmd = "dotnet restore"
171 |         else:
172 |             raise ProjectError(f"Dependency updates not supported for {self.build_system}")
173 |             
174 |         return await self.execute_command(cmd)
175 | 
176 |     async def get_project_analysis(self) -> Dict[str, Any]:
177 |         """Get project analysis results."""
178 |         analysis = {
179 |             "structure": self.get_structure(),
180 |             "dependencies": self.get_dependencies(),
181 |             "metadata": {
182 |                 "name": self.config["name"],
183 |                 "type": self.project_type.name,
184 |                 "build_system": self.build_system.value,
185 |                 "config": self.config
186 |             }
187 |         }
188 | 
189 |         # Add Git information if available
190 |         git_info = self.get_git_status()
191 |         if git_info.get("initialized", False):
192 |             analysis["git"] = git_info
193 | 
194 |         # Add build/test status if available
195 |         if hasattr(self, 'last_build'):
196 |             analysis["last_build"] = self.last_build
197 |         if hasattr(self, 'last_test_run'):
198 |             analysis["last_test_run"] = self.last_test_run
199 | 
200 |         return analysis
201 | 
202 |     def get_structure(self) -> Dict[str, Any]:
203 |         """Get project structure."""
204 |         def scan_dir(path: Path) -> Dict[str, Any]:
205 |             structure = {}
206 |             ignore_patterns = ['.git', '__pycache__', 'node_modules', 'target', 'build']
207 |             
208 |             for item in path.iterdir():
209 |                 if item.name in ignore_patterns:
210 |                     continue
211 |                     
212 |                 if item.is_file():
213 |                     structure[item.name] = {
214 |                         "type": "file",
215 |                         "size": item.stat().st_size
216 |                     }
217 |                 elif item.is_dir():
218 |                     structure[item.name] = {
219 |                         "type": "directory",
220 |                         "contents": scan_dir(item)
221 |                     }
222 |                     
223 |             return structure
224 |             
225 |         return scan_dir(Path(self.path))
226 | 
227 |     async def cleanup(self):
228 |         """Clean up project resources."""
229 |         try:
230 |             # Clean build artifacts
231 |             if self.build_system == BuildSystem.MAVEN:
232 |                 await self.execute_command("mvn clean")
233 |             elif self.build_system == BuildSystem.GRADLE:
234 |                 await self.execute_command("./gradlew clean")
235 |             elif self.build_system == BuildSystem.NPM:
236 |                 await self.execute_command("npm run clean")
237 | 
238 |             logger.info(f"Cleaned up project: {self.config['name']}")
239 |         except Exception as e:
240 |             logger.error(f"Project cleanup failed: {e}")
241 |             raise ProjectError(f"Cleanup failed: {str(e)}")
242 | 
```
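
A dependency-inspection sketch for the base `Project` class above. The `PROJECT_TYPES` lookup key `"node"` is an assumption taken from the project manager's docstring examples, and the build system value is passed via the enum itself rather than a guessed string literal.

```python
# Minimal sketch: read Node.js dependencies of an existing checkout.
from mcp_dev_server.project_manager.base_project import Project
from mcp_dev_server.project_manager.project_types import PROJECT_TYPES, BuildSystem

node_type = PROJECT_TYPES["node"]  # assumed key; see get_available_project_types()

config = {
    "name": "demo-app",
    "build_system": BuildSystem.NPM.value,  # round-trips through BuildSystem(...)
}
project = Project(path=".", config=config, project_type=node_type)

print(project.get_dependencies())  # parses package.json when present
print(project.get_structure())     # skips .git, node_modules, build artifacts
```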

--------------------------------------------------------------------------------
/src/mcp_dev_server/environments/workflow.py:
--------------------------------------------------------------------------------

```python
  1 | """Development workflow management for environments."""
  2 | from typing import Dict, List, Optional, Any, Callable
  3 | from enum import Enum
  4 | import asyncio
  5 | 
  6 | from ..utils.logging import setup_logging
  7 | from ..utils.errors import WorkflowError
  8 | 
  9 | logger = setup_logging(__name__)
 10 | 
 11 | class TaskStatus(str, Enum):
 12 |     """Workflow task status."""
 13 |     PENDING = "pending"
 14 |     RUNNING = "running"
 15 |     COMPLETED = "completed"
 16 |     FAILED = "failed"
 17 |     SKIPPED = "skipped"
 18 | 
 19 | class Task:
 20 |     """Represents a workflow task."""
 21 |     
 22 |     def __init__(
 23 |         self,
 24 |         name: str,
 25 |         command: str,
 26 |         environment: str,
 27 |         dependencies: Optional[List[str]] = None,
 28 |         timeout: Optional[int] = None,
 29 |         retry_count: int = 0,
 30 |         on_success: Optional[Callable] = None,
 31 |         on_failure: Optional[Callable] = None
 32 |     ):
 33 |         self.name = name
 34 |         self.command = command
 35 |         self.environment = environment
 36 |         self.dependencies = dependencies or []
 37 |         self.timeout = timeout
 38 |         self.retry_count = retry_count
 39 |         self.status = TaskStatus.PENDING
 40 |         self.result: Optional[Dict[str, Any]] = None
 41 |         self.on_success = on_success
 42 |         self.on_failure = on_failure
 43 |         self.attempts = 0
 44 | 
 45 | class Workflow:
 46 |     """Manages development workflows."""
 47 |     
 48 |     def __init__(self, env_manager):
 49 |         self.env_manager = env_manager
 50 |         self.tasks: Dict[str, Task] = {}
 51 |         self.running = False
 52 |         
 53 |     def add_task(self, task: Task) -> None:
 54 |         """Add a task to the workflow."""
 55 |         self.tasks[task.name] = task
 56 |         
 57 |     def remove_task(self, task_name: str) -> None:
 58 |         """Remove a task from the workflow."""
 59 |         if task_name in self.tasks:
 60 |             del self.tasks[task_name]
 61 |             
 62 |     async def execute(self) -> Dict[str, Any]:
 63 |         """Execute the workflow."""
 64 |         try:
 65 |             self.running = True
 66 |             results = {}
 67 |             
 68 |             # Build dependency graph
 69 |             graph = self._build_dependency_graph()
 70 |             
 71 |             # Execute tasks in order
 72 |             for task_group in graph:
 73 |                 # Execute tasks in group concurrently
 74 |                 tasks = [self._execute_task(task_name) for task_name in task_group]
 75 |                 group_results = await asyncio.gather(*tasks, return_exceptions=True)
 76 |                 
 77 |                 # Process results
 78 |                 for task_name, result in zip(task_group, group_results):
 79 |                     if isinstance(result, Exception):
 80 |                         self.tasks[task_name].status = TaskStatus.FAILED
 81 |                         results[task_name] = {
 82 |                             "status": TaskStatus.FAILED,
 83 |                             "error": str(result)
 84 |                         }
 85 |                     else:
 86 |                         results[task_name] = result
 87 |                 
 88 |             return results
 89 |             
 90 |         except Exception as e:
 91 |             raise WorkflowError(f"Workflow execution failed: {str(e)}")
 92 |         finally:
 93 |             self.running = False
 94 |             
 95 |     async def _execute_task(self, task_name: str) -> Dict[str, Any]:
 96 |         """Execute a single task."""
 97 |         task = self.tasks[task_name]
 98 |         
 99 |         # Check dependencies
100 |         for dep in task.dependencies:
101 |             dep_task = self.tasks.get(dep)
102 |             if not dep_task or dep_task.status != TaskStatus.COMPLETED:
103 |                 task.status = TaskStatus.SKIPPED
104 |                 return {
105 |                     "status": TaskStatus.SKIPPED,
106 |                     "reason": f"Dependency {dep} not satisfied"
107 |                 }
108 |         
109 |         task.status = TaskStatus.RUNNING
110 |         task.attempts += 1
111 |         
112 |         try:
113 |             # Execute the command
114 |             result = await asyncio.wait_for(
115 |                 self.env_manager.execute_in_environment(
116 |                     task.environment,
117 |                     task.command
118 |                 ),
119 |                 timeout=task.timeout
120 |             )
121 |             
122 |             # Handle execution result
123 |             if result['exit_code'] == 0:
124 |                 task.status = TaskStatus.COMPLETED
125 |                 if task.on_success:
126 |                     await task.on_success(result)
127 |                 return {
128 |                     "status": TaskStatus.COMPLETED,
129 |                     "result": result
130 |                 }
131 |             else:
132 |                 # Handle retry logic
133 |                 if task.attempts < task.retry_count + 1:
134 |                     logger.info(f"Retrying task {task_name} (attempt {task.attempts})")
135 |                     return await self._execute_task(task_name)
136 |                 
137 |                 task.status = TaskStatus.FAILED
138 |                 if task.on_failure:
139 |                     await task.on_failure(result)
140 |                 return {
141 |                     "status": TaskStatus.FAILED,
142 |                     "result": result
143 |                 }
144 |                 
145 |         except asyncio.TimeoutError:
146 |             task.status = TaskStatus.FAILED
147 |             return {
148 |                 "status": TaskStatus.FAILED,
149 |                 "error": "Task timeout"
150 |             }
151 |             
152 |         except Exception as e:
153 |             task.status = TaskStatus.FAILED
154 |             return {
155 |                 "status": TaskStatus.FAILED,
156 |                 "error": str(e)
157 |             }
158 |             
159 |     def _build_dependency_graph(self) -> List[List[str]]:
160 |         """Build ordered list of task groups based on dependencies."""
161 |         # Initialize variables
162 |         graph: List[List[str]] = []
163 |         completed = set()
164 |         remaining = set(self.tasks.keys())
165 |         
166 |         while remaining:
167 |             # Find tasks with satisfied dependencies
168 |             group = set()
169 |             for task_name in remaining:
170 |                 task = self.tasks[task_name]
171 |                 if all(dep in completed for dep in task.dependencies):
172 |                     group.add(task_name)
173 |             
174 |             if not group:
175 |                 # Circular dependency detected
176 |                 raise WorkflowError("Circular dependency detected in workflow")
177 |             
178 |             # Add group to graph
179 |             graph.append(list(group))
180 |             completed.update(group)
181 |             remaining.difference_update(group)
182 |             
183 |         return graph
184 |         
185 |     def get_status(self) -> Dict[str, Any]:
186 |         """Get workflow status."""
187 |         return {
188 |             "running": self.running,
189 |             "tasks": {
190 |                 name: {
191 |                     "status": task.status,
192 |                     "attempts": task.attempts,
193 |                     "dependencies": task.dependencies
194 |                 }
195 |                 for name, task in self.tasks.items()
196 |             }
197 |         }
198 |         
199 |     def reset(self) -> None:
200 |         """Reset workflow state."""
201 |         for task in self.tasks.values():
202 |             task.status = TaskStatus.PENDING
203 |             task.attempts = 0
204 |             task.result = None
205 |         self.running = False
206 | 
207 | # Example workflow definitions for common development tasks
208 | class CommonWorkflows:
209 |     """Predefined development workflows."""
210 |     
211 |     @staticmethod
212 |     def create_build_workflow(env_manager, environment: str) -> Workflow:
213 |         """Create a standard build workflow."""
214 |         workflow = Workflow(env_manager)
215 |         
216 |         # Install dependencies
217 |         workflow.add_task(Task(
218 |             name="install_deps",
219 |             command="npm install",
220 |             environment=environment,
221 |             retry_count=2
222 |         ))
223 |         
224 |         # Run linter
225 |         workflow.add_task(Task(
226 |             name="lint",
227 |             command="npm run lint",
228 |             environment=environment,
229 |             dependencies=["install_deps"]
230 |         ))
231 |         
232 |         # Run tests
233 |         workflow.add_task(Task(
234 |             name="test",
235 |             command="npm run test",
236 |             environment=environment,
237 |             dependencies=["install_deps"]
238 |         ))
239 |         
240 |         # Build
241 |         workflow.add_task(Task(
242 |             name="build",
243 |             command="npm run build",
244 |             environment=environment,
245 |             dependencies=["lint", "test"]
246 |         ))
247 |         
248 |         return workflow
249 |         
250 |     @staticmethod
251 |     def create_test_workflow(env_manager, environment: str) -> Workflow:
252 |         """Create a standard test workflow."""
253 |         workflow = Workflow(env_manager)
254 |         
255 |         # Install test dependencies
256 |         workflow.add_task(Task(
257 |             name="install_test_deps",
258 |             command="npm install --only=dev",
259 |             environment=environment,
260 |             retry_count=2
261 |         ))
262 |         
263 |         # Run unit tests
264 |         workflow.add_task(Task(
265 |             name="unit_tests",
266 |             command="npm run test:unit",
267 |             environment=environment,
268 |             dependencies=["install_test_deps"]
269 |         ))
270 |         
271 |         # Run integration tests
272 |         workflow.add_task(Task(
273 |             name="integration_tests",
274 |             command="npm run test:integration",
275 |             environment=environment,
276 |             dependencies=["install_test_deps"]
277 |         ))
278 |         
279 |         # Generate coverage report
280 |         workflow.add_task(Task(
281 |             name="coverage",
282 |             command="npm run coverage",
283 |             environment=environment,
284 |             dependencies=["unit_tests", "integration_tests"]
285 |         ))
286 |         
287 |         return workflow
```
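
To see the dependency ordering and retry handling in action without Docker, here is a self-contained sketch that drives `Workflow` with a stub environment manager; only `execute_in_environment()` is required by the engine, and the stub simply reports success.

```python
# Run the Workflow engine above against a stub environment manager.
import asyncio

from mcp_dev_server.environments.workflow import Task, Workflow


class StubEnvManager:
    async def execute_in_environment(self, environment: str, command: str) -> dict:
        print(f"[{environment}] $ {command}")
        return {"exit_code": 0, "output": ""}


async def main() -> None:
    workflow = Workflow(StubEnvManager())
    workflow.add_task(Task(name="install", command="npm install", environment="node-env"))
    workflow.add_task(Task(
        name="test",
        command="npm test",
        environment="node-env",
        dependencies=["install"],
        timeout=60,
    ))

    results = await workflow.execute()   # "install" runs before "test"
    print(results)
    print(workflow.get_status())


if __name__ == "__main__":
    asyncio.run(main())
```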

--------------------------------------------------------------------------------
/src/mcp_dev_server/environments/tools.py:
--------------------------------------------------------------------------------

```python
  1 | """Development tools integration for environments."""
  2 | import shutil
  3 | import subprocess
  4 | from typing import Dict, Optional, Any
  5 | from pathlib import Path
  6 | 
  7 | from ..utils.logging import setup_logging
  8 | from ..utils.errors import ToolError
  9 | 
 10 | logger = setup_logging(__name__)
 11 | 
 12 | class ToolManager:
 13 |     """Manages development tools in environments."""
 14 |     
 15 |     def __init__(self, env_manager):
 16 |         self.env_manager = env_manager
 17 |         
 18 |     async def setup_package_manager(
 19 |         self,
 20 |         environment: str,
 21 |         package_manager: str,
 22 |         config: Optional[Dict[str, Any]] = None
 23 |     ) -> Dict[str, Any]:
 24 |         """Set up package manager in an environment."""
 25 |         try:
 26 |             config = config or {}
 27 |             
 28 |             if package_manager == "npm":
 29 |                 return await self._setup_npm(environment, config)
 30 |             elif package_manager == "pip":
 31 |                 return await self._setup_pip(environment, config)
 32 |             else:
 33 |                 raise ToolError(f"Unsupported package manager: {package_manager}")
 34 |                 
 35 |         except Exception as e:
 36 |             raise ToolError(f"Failed to setup package manager: {str(e)}")
 37 |             
 38 |     async def setup_build_tool(
 39 |         self,
 40 |         environment: str,
 41 |         build_tool: str,
 42 |         config: Optional[Dict[str, Any]] = None
 43 |     ) -> Dict[str, Any]:
 44 |         """Set up build tool in an environment."""
 45 |         try:
 46 |             config = config or {}
 47 |             
 48 |             if build_tool == "webpack":
 49 |                 return await self._setup_webpack(environment, config)
 50 |             elif build_tool == "vite":
 51 |                 return await self._setup_vite(environment, config)
 52 |             else:
 53 |                 raise ToolError(f"Unsupported build tool: {build_tool}")
 54 |                 
 55 |         except Exception as e:
 56 |             raise ToolError(f"Failed to setup build tool: {str(e)}")
 57 |             
 58 |     async def setup_test_framework(
 59 |         self,
 60 |         environment: str,
 61 |         test_framework: str,
 62 |         config: Optional[Dict[str, Any]] = None
 63 |     ) -> Dict[str, Any]:
 64 |         """Set up testing framework in an environment."""
 65 |         try:
 66 |             config = config or {}
 67 |             
 68 |             if test_framework == "jest":
 69 |                 return await self._setup_jest(environment, config)
 70 |             elif test_framework == "pytest":
 71 |                 return await self._setup_pytest(environment, config)
 72 |             else:
 73 |                 raise ToolError(f"Unsupported test framework: {test_framework}")
 74 |                 
 75 |         except Exception as e:
 76 |             raise ToolError(f"Failed to setup test framework: {str(e)}")
 77 |             
 78 |     async def _setup_npm(self, environment: str, config: Dict[str, Any]) -> Dict[str, Any]:
 79 |         """Set up NPM package manager."""
 80 |         try:
 81 |             # Initialize package.json if needed
 82 |             if not config.get('skip_init'):
 83 |                 result = await self.env_manager.execute_in_environment(
 84 |                     environment,
 85 |                     'npm init -y'
 86 |                 )
 87 |                 if result['exit_code'] != 0:
 88 |                     raise ToolError(f"npm init failed: {result['error']}")
 89 |             
 90 |             # Install dependencies if specified
 91 |             if deps := config.get('dependencies'):
 92 |                 deps_str = ' '.join(deps)
 93 |                 result = await self.env_manager.execute_in_environment(
 94 |                     environment,
 95 |                     f'npm install {deps_str}'
 96 |                 )
 97 |                 if result['exit_code'] != 0:
 98 |                     raise ToolError(f"npm install failed: {result['error']}")
 99 |                     
100 |             return {"status": "success"}
101 |             
102 |         except Exception as e:
103 |             raise ToolError(f"NPM setup failed: {str(e)}")
104 |             
105 |     async def _setup_pip(self, environment: str, config: Dict[str, Any]) -> Dict[str, Any]:
106 |         """Set up Pip package manager."""
107 |         try:
108 |             # Create virtual environment if needed
109 |             if not config.get('skip_venv'):
110 |                 result = await self.env_manager.execute_in_environment(
111 |                     environment,
112 |                     'python -m venv .venv'
113 |                 )
114 |                 if result['exit_code'] != 0:
115 |                     raise ToolError(f"venv creation failed: {result['error']}")
116 |             
117 |             # Install dependencies if specified
118 |             if deps := config.get('dependencies'):
119 |                 deps_str = ' '.join(deps)
120 |                 result = await self.env_manager.execute_in_environment(
121 |                     environment,
122 |                     f'pip install {deps_str}'
123 |                 )
124 |                 if result['exit_code'] != 0:
125 |                     raise ToolError(f"pip install failed: {result['error']}")
126 |                     
127 |             return {"status": "success"}
128 |             
129 |         except Exception as e:
130 |             raise ToolError(f"Pip setup failed: {str(e)}")
131 |             
132 |     async def _setup_webpack(self, environment: str, config: Dict[str, Any]) -> Dict[str, Any]:
133 |         """Set up Webpack build tool."""
134 |         try:
135 |             # Install webpack and dependencies
136 |             result = await self.env_manager.execute_in_environment(
137 |                 environment,
138 |                 'npm install webpack webpack-cli --save-dev'
139 |             )
140 |             if result['exit_code'] != 0:
141 |                 raise ToolError(f"webpack installation failed: {result['error']}")
142 |                 
143 |             # Create webpack config if not exists
144 |             config_content = """
145 |             const path = require('path');
146 |             
147 |             module.exports = {
148 |               entry: './src/index.js',
149 |               output: {
150 |                 path: path.resolve(__dirname, 'dist'),
151 |                 filename: 'bundle.js'
152 |               }
153 |             };
154 |             """
155 |             
156 |             config_path = Path(self.env_manager.environments[environment]['path']) / 'webpack.config.js'
157 |             config_path.write_text(config_content)
158 |             
159 |             return {"status": "success"}
160 |             
161 |         except Exception as e:
162 |             raise ToolError(f"Webpack setup failed: {str(e)}")
163 |             
164 |     async def _setup_vite(self, environment: str, config: Dict[str, Any]) -> Dict[str, Any]:
165 |         """Set up Vite build tool."""
166 |         try:
167 |             # Install vite
168 |             result = await self.env_manager.execute_in_environment(
169 |                 environment,
170 |                 'npm install vite --save-dev'
171 |             )
172 |             if result['exit_code'] != 0:
173 |                 raise ToolError(f"vite installation failed: {result['error']}")
174 |                 
175 |             # Create vite config if not exists
176 |             config_content = """
177 |             export default {
178 |               root: 'src',
179 |               build: {
180 |                 outDir: '../dist'
181 |               }
182 |             }
183 |             """
184 |             
185 |             config_path = Path(self.env_manager.environments[environment]['path']) / 'vite.config.js'
186 |             config_path.write_text(config_content)
187 |             
188 |             return {"status": "success"}
189 |             
190 |         except Exception as e:
191 |             raise ToolError(f"Vite setup failed: {str(e)}")
192 |             
193 |     async def _setup_jest(self, environment: str, config: Dict[str, Any]) -> Dict[str, Any]:
194 |         """Set up Jest testing framework."""
195 |         try:
196 |             # Install jest and dependencies
197 |             result = await self.env_manager.execute_in_environment(
198 |                 environment,
199 |                 'npm install jest @types/jest --save-dev'
200 |             )
201 |             if result['exit_code'] != 0:
202 |                 raise ToolError(f"jest installation failed: {result['error']}")
203 |                 
204 |             # Create jest config if not exists
205 |             config_content = """
206 |             module.exports = {
207 |               testEnvironment: 'node',
208 |               testMatch: ['**/*.test.js'],
209 |               collectCoverage: true
210 |             };
211 |             """
212 |             
213 |             config_path = Path(self.env_manager.environments[environment]['path']) / 'jest.config.js'
214 |             config_path.write_text(config_content)
215 |             
216 |             return {"status": "success"}
217 |             
218 |         except Exception as e:
219 |             raise ToolError(f"Jest setup failed: {str(e)}")
220 |             
221 |     async def _setup_pytest(self, environment: str, config: Dict[str, Any]) -> Dict[str, Any]:
222 |         """Set up Pytest testing framework."""
223 |         try:
224 |             # Install pytest and dependencies
225 |             result = await self.env_manager.execute_in_environment(
226 |                 environment,
227 |                 'pip install pytest pytest-cov'
228 |             )
229 |             if result['exit_code'] != 0:
230 |                 raise ToolError(f"pytest installation failed: {result['error']}")
231 |                 
232 |             # Create pytest config if not exists
 233 |             config_content = (
 234 |                 "[pytest]\n"
 235 |                 "testpaths = tests\n"
 236 |                 "python_files = test_*.py\n"
 237 |                 "addopts = --cov=src\n"
 238 |             )
239 |             
240 |             config_path = Path(self.env_manager.environments[environment]['path']) / 'pytest.ini'
241 |             config_path.write_text(config_content)
242 |             
243 |             return {"status": "success"}
244 |             
245 |         except Exception as e:
246 |             raise ToolError(f"Pytest setup failed: {str(e)}")
247 | 
```
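
A usage sketch for `ToolManager`: the stub below stands in for the environment manager and implements only what the npm path needs; the build-tool and test-framework helpers additionally expect an `environments` mapping with a `path` entry per environment, which the stub includes for completeness.

```python
# Drive ToolManager with a stub environment manager (npm setup path only).
import asyncio

from mcp_dev_server.environments.tools import ToolManager


class StubEnvManager:
    environments = {"node-env": {"path": "."}}

    async def execute_in_environment(self, environment: str, command: str) -> dict:
        print(f"[{environment}] $ {command}")
        return {"exit_code": 0, "output": "", "error": ""}


async def main() -> None:
    tools = ToolManager(StubEnvManager())
    result = await tools.setup_package_manager(
        "node-env",
        "npm",
        {"dependencies": ["express"]},
    )
    print(result)  # {"status": "success"} when both npm commands report exit code 0


if __name__ == "__main__":
    asyncio.run(main())
```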

--------------------------------------------------------------------------------
/src/mcp_dev_server/project_manager/manager.py:
--------------------------------------------------------------------------------

```python
  1 | """Project management system for MCP Development Server."""
  2 | import asyncio
  3 | import json
  4 | from pathlib import Path
  5 | from typing import Dict, Any, Optional, List
  6 | import git
  7 | 
  8 | from .project_types import PROJECT_TYPES, ProjectType, BuildSystem
  9 | from .templates import TemplateManager
 10 | from ..prompts.project_templates import PROJECT_TEMPLATES
 11 | from ..utils.logging import setup_logging
 12 | from ..utils.errors import ProjectError
 13 | from ..docker.manager import DockerManager
 14 | 
 15 | logger = setup_logging(__name__)
 16 | 
 17 | class ProjectManager:
 18 |     """Manages development projects."""
 19 |     
 20 |     def __init__(self, config):
 21 |         """Initialize project manager.
 22 |         
 23 |         Args:
 24 |             config: Server configuration instance
 25 |         """
 26 |         self.config = config
 27 |         self.template_manager = TemplateManager()
 28 |         self.docker_manager = DockerManager()
 29 |         self.current_project = None
 30 |         self.projects = {}
 31 |         
 32 |     def get_available_project_types(self) -> Dict[str, Dict[str, Any]]:
 33 |         """Get list of available project types.
 34 |         
 35 |         Returns:
 36 |             Dict[str, Dict[str, Any]]: Project type information
 37 |         """
 38 |         return {
 39 |             name: {
 40 |                 "name": pt.name,
 41 |                 "description": pt.description,
 42 |                 "build_systems": [bs.value for bs in pt.build_systems],
 43 |                 "default_build_system": pt.default_build_system.value
 44 |             }
 45 |             for name, pt in PROJECT_TYPES.items()
 46 |         }
 47 |         
 48 |     async def create_project(
 49 |         self,
 50 |         name: str,
 51 |         project_type: str,
 52 |         project_config: Dict[str, Any],
 53 |         path: Optional[str] = None,
 54 |         description: str = ""
 55 |     ) -> Any:
 56 |         """Create a new project.
 57 |         
 58 |         Args:
 59 |             name: Project name
 60 |             project_type: Type of project (e.g., java, dotnet, node)
 61 |             project_config: Project-specific configuration
 62 |             path: Project directory path (optional)
 63 |             description: Project description
 64 |             
 65 |         Returns:
 66 |             Project instance
 67 |         """
 68 |         try:
 69 |             if project_type not in PROJECT_TYPES:
 70 |                 raise ProjectError(f"Unsupported project type: {project_type}")
 71 |                 
 72 |             project_type_info = PROJECT_TYPES[project_type]
 73 |             
 74 |             # Determine project path
 75 |             if not path:
 76 |                 projects_dir = Path(self.config.get("projectsDir"))
 77 |                 path = str(projects_dir / name)
 78 |                 
 79 |             project_path = Path(path)
 80 |             if project_path.exists():
 81 |                 raise ProjectError(f"Project path already exists: {path}")
 82 |                 
 83 |             # Create project directory
 84 |             project_path.mkdir(parents=True, exist_ok=True)
 85 |             
 86 |             # Create project configuration
 87 |             project_config.update({
 88 |                 "name": name,
 89 |                 "type": project_type,
 90 |                 "description": description,
 91 |                 "build_system": project_config.get("build_system", 
 92 |                     project_type_info.default_build_system.value)
 93 |             })
 94 |             
 95 |             # Save project configuration
 96 |             config_path = project_path / "project.json"
 97 |             with open(config_path, "w") as f:
 98 |                 json.dump(project_config, f, indent=2)
 99 |                 
100 |             # Create project structure
101 |             await self._create_project_structure(project_path, project_type_info)
102 |             
103 |             # Initialize build system
104 |             await self._initialize_build_system(
105 |                 project_path, 
106 |                 project_type_info, 
107 |                 project_config
108 |             )
109 |             
110 |             # Set up Docker environment if requested
111 |             if project_config.get("setup_docker", False):
112 |                 await self._setup_docker_environment(
113 |                     project_path,
114 |                     project_type_info,
115 |                     project_config
116 |                 )
117 |                 
118 |             # Initialize Git repository if requested
119 |             if project_config.get("initialize_git", True):
120 |                 repo = git.Repo.init(path)
121 |                 repo.git.add(all=True)
122 |                 repo.index.commit("Initial commit")
123 |                 
124 |             # Create project instance
125 |             project = await self._create_project_instance(
126 |                 path,
127 |                 project_config,
128 |                 project_type_info
129 |             )
130 |             
131 |             # Store project reference
132 |             self.projects[project.id] = project
133 |             self.current_project = project
134 |             
135 |             logger.info(f"Created {project_type} project: {name} at {path}")
136 |             return project
137 |             
138 |         except Exception as e:
139 |             logger.error(f"Failed to create project: {str(e)}")
140 |             raise ProjectError(f"Project creation failed: {str(e)}")
141 |             
142 |     async def _create_project_structure(
143 |         self,
144 |         project_path: Path,
145 |         project_type: ProjectType
146 |     ):
147 |         """Create project directory structure.
148 |         
149 |         Args:
150 |             project_path: Project directory path
151 |             project_type: Project type information
152 |         """
153 |         def create_directory_structure(base_path: Path, structure: Dict[str, Any]):
154 |             for name, content in structure.items():
155 |                 path = base_path / name
156 |                 if isinstance(content, dict):
157 |                     path.mkdir(exist_ok=True)
158 |                     create_directory_structure(path, content)
159 |                     
160 |         create_directory_structure(project_path, project_type.file_structure)
161 |         
162 |     async def _initialize_build_system(
163 |         self,
164 |         project_path: Path,
165 |         project_type: ProjectType,
166 |         project_config: Dict[str, Any]
167 |     ):
168 |         """Initialize project build system.
169 |         
170 |         Args:
171 |             project_path: Project directory path
172 |             project_type: Project type information
173 |             project_config: Project configuration
174 |         """
175 |         build_system = BuildSystem(project_config["build_system"])
176 |         
177 |         # Generate build system configuration files
178 |         if build_system == BuildSystem.MAVEN:
179 |             await self.template_manager.generate_maven_pom(
180 |                 project_path, project_config
181 |             )
182 |         elif build_system == BuildSystem.GRADLE:
183 |             await self.template_manager.generate_gradle_build(
184 |                 project_path, project_config
185 |             )
186 |         elif build_system == BuildSystem.DOTNET:
187 |             await self.template_manager.generate_dotnet_project(
188 |                 project_path, project_config
189 |             )
190 |         elif build_system in [BuildSystem.NPM, BuildSystem.YARN]:
191 |             await self.template_manager.generate_package_json(
192 |                 project_path, project_config
193 |             )
194 |         elif build_system == BuildSystem.POETRY:
195 |             await self.template_manager.generate_pyproject_toml(
196 |                 project_path, project_config
197 |             )
198 |             
199 |     async def _setup_docker_environment(
200 |         self,
201 |         project_path: Path,
202 |         project_type: ProjectType,
203 |         project_config: Dict[str, Any]
204 |     ):
205 |         """Set up Docker environment for the project.
206 |         
207 |         Args:
208 |             project_path: Project directory path
209 |             project_type: Project type information
210 |             project_config: Project configuration
211 |         """
212 |         # Generate Dockerfile from template
213 |         dockerfile_template = project_type.docker_templates[0]  # Use first template
214 |         dockerfile_content = await self.docker_manager.generate_dockerfile(
215 |             dockerfile_template,
216 |             project_config
217 |         )
218 |         
219 |         dockerfile_path = project_path / "Dockerfile"
220 |         with open(dockerfile_path, "w") as f:
221 |             f.write(dockerfile_content)
222 |             
223 |         # Generate docker-compose.yml if needed
224 |         if project_config.get("use_docker_compose", False):
225 |             services = {
226 |                 "app": {
227 |                     "build": ".",
228 |                     "volumes": [
229 |                         "./:/workspace"
230 |                     ],
231 |                     "environment": project_type.environment_variables
232 |                 }
233 |             }
234 |             
235 |             await self.docker_manager.create_compose_config(
236 |                 project_config["name"],
237 |                 services,
238 |                 project_path / "docker-compose.yml"
239 |             )
240 |             
241 |     async def _create_project_instance(
242 |         self,
243 |         path: str,
244 |         config: Dict[str, Any],
245 |         project_type: ProjectType
246 |     ) -> Any:
247 |         """Create project instance based on type.
248 |         
249 |         Args:
250 |             path: Project directory path
251 |             config: Project configuration
252 |             project_type: Project type information
253 |             
254 |         Returns:
255 |             Project instance
256 |         """
257 |         # Import appropriate project class based on type
258 |         if project_type.name == "java":
259 |             from .java_project import JavaProject
260 |             return JavaProject(path, config, project_type)
261 |         elif project_type.name == "dotnet":
262 |             from .dotnet_project import DotNetProject
263 |             return DotNetProject(path, config, project_type)
264 |         elif project_type.name == "node":
265 |             from .node_project import NodeProject
266 |             return NodeProject(path, config, project_type)
267 |         elif project_type.name == "python":
268 |             from .python_project import PythonProject
269 |             return PythonProject(path, config, project_type)
270 |         elif project_type.name == "golang":
271 |             from .golang_project import GolangProject
272 |             return GolangProject(path, config, project_type)
273 |         else:
274 |             from .base_project import Project
275 |             return Project(path, config, project_type)
276 | 
```
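
`ProjectManager.create_project` only needs a config object that can answer `get("projectsDir")`, then builds the directory structure, build-system files, and optional Docker/Git setup for the chosen type. The sketch below is a minimal usage example under stated assumptions: the stub config, the `python` project type, and the temporary directory are illustrative, and constructing `ProjectManager` also constructs a `DockerManager`, which may expect a reachable Docker daemon.

```python
# Minimal usage sketch, assuming the server config only needs to answer get("projectsDir").
import asyncio
from mcp_dev_server.project_manager.manager import ProjectManager

class _StubConfig:
    """Illustrative stand-in for the real server configuration."""
    def get(self, key, default=None):
        return {"projectsDir": "/tmp/projects"}.get(key, default)

async def main() -> None:
    manager = ProjectManager(_StubConfig())
    print(manager.get_available_project_types())  # inspect supported types first
    project = await manager.create_project(
        name="demo-service",
        project_type="python",                     # assumed to exist in PROJECT_TYPES
        project_config={"initialize_git": False},
    )
    print(project.id)

# asyncio.run(main())
```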

--------------------------------------------------------------------------------
/src/mcp_dev_server/workflow/manager.py:
--------------------------------------------------------------------------------

```python
  1 | """Development workflow management for MCP Development Server."""
  2 | 
  3 | from typing import Dict, List, Optional, Any
  4 | from enum import Enum
  5 | from datetime import datetime
  6 | import asyncio
  7 | 
  8 | from ..utils.errors import WorkflowError
  9 | from ..utils.logging import setup_logging
 10 | 
 11 | logger = setup_logging(__name__)
 12 | 
 13 | class WorkflowStatus(str, Enum):
 14 |     """Workflow execution status."""
 15 |     PENDING = "pending"
 16 |     RUNNING = "running"
 17 |     COMPLETED = "completed"
 18 |     FAILED = "failed"
 19 | 
 20 | class WorkflowStep:
 21 |     """Individual step in a workflow."""
 22 |     
 23 |     def __init__(
 24 |         self,
 25 |         name: str,
 26 |         command: str,
 27 |         environment: str,
 28 |         depends_on: Optional[List[str]] = None,
 29 |         timeout: Optional[int] = None,
 30 |         retry_count: int = 0
 31 |     ):
 32 |         self.name = name
 33 |         self.command = command
 34 |         self.environment = environment
 35 |         self.depends_on = depends_on or []
 36 |         self.timeout = timeout
 37 |         self.retry_count = retry_count
 38 |         self.status = WorkflowStatus.PENDING
 39 |         self.result: Optional[Dict[str, Any]] = None
 40 |         self.attempts = 0
 41 | 
 42 | class WorkflowManager:
 43 |     """Manages development workflows."""
 44 |     
 45 |     def __init__(self, env_manager):
 46 |         self.env_manager = env_manager
 47 |         self.workflows: Dict[str, Dict[str, Any]] = {}
 48 |         
 49 |     async def create_workflow(
 50 |         self,
 51 |         steps: List[WorkflowStep],
 52 |         config: Optional[Dict[str, Any]] = None
 53 |     ) -> str:
 54 |         """Create a new workflow."""
 55 |         try:
 56 |             workflow_id = f"workflow_{len(self.workflows)}"
 57 |             
 58 |             # Initialize workflow
 59 |             self.workflows[workflow_id] = {
 60 |                 "steps": steps,
 61 |                 "config": config or {},
 62 |                 "status": WorkflowStatus.PENDING,
 63 |                 "start_time": None,
 64 |                 "end_time": None
 65 |             }
 66 |             
 67 |             return workflow_id
 68 |             
 69 |         except Exception as e:
 70 |             raise WorkflowError(f"Failed to create workflow: {str(e)}")
 71 |             
 72 |     async def start_workflow(self, workflow_id: str) -> None:
 73 |         """Start workflow execution."""
 74 |         try:
 75 |             if workflow := self.workflows.get(workflow_id):
 76 |                 workflow["status"] = WorkflowStatus.RUNNING
 77 |                 workflow["start_time"] = datetime.now()
 78 |                 
 79 |                 # Execute workflow steps
 80 |                 asyncio.create_task(self._execute_workflow(workflow_id))
 81 |                 
 82 |             else:
 83 |                 raise WorkflowError(f"Workflow not found: {workflow_id}")
 84 |                 
 85 |         except Exception as e:
 86 |             raise WorkflowError(f"Failed to start workflow: {str(e)}")
 87 |             
 88 |     async def _execute_workflow(self, workflow_id: str) -> None:
 89 |         """Execute workflow steps in order."""
 90 |         workflow = self.workflows[workflow_id]
 91 |         
 92 |         try:
 93 |             # Build execution graph
 94 |             graph = self._build_execution_graph(workflow["steps"])
 95 |             
 96 |             # Execute steps in dependency order
 97 |             for step_group in graph:
 98 |                 results = await asyncio.gather(
 99 |                     *[self._execute_step(workflow_id, step) for step in step_group],
100 |                     return_exceptions=True
101 |                 )
102 |                 
103 |                 # Check for failures
104 |                 if any(isinstance(r, Exception) for r in results):
105 |                     workflow["status"] = WorkflowStatus.FAILED
106 |                     return
107 |                     
108 |             workflow["status"] = WorkflowStatus.COMPLETED
109 |             
110 |         except Exception as e:
111 |             logger.error(f"Workflow execution error: {str(e)}")
112 |             workflow["status"] = WorkflowStatus.FAILED
113 |             workflow["error"] = str(e)
114 |             
115 |         finally:
116 |             workflow["end_time"] = datetime.now()
117 |             
118 |     async def _execute_step(
119 |         self,
120 |         workflow_id: str,
121 |         step: WorkflowStep
122 |     ) -> None:
123 |         """Execute a single workflow step."""
124 |         try:
125 |             step.status = WorkflowStatus.RUNNING
126 |             step.attempts += 1
127 |             
128 |             # Execute step command
129 |             result = await asyncio.wait_for(
130 |                 self.env_manager.execute_in_environment(
131 |                     step.environment,
132 |                     step.command
133 |                 ),
134 |                 timeout=step.timeout
135 |             )
136 |             
137 |             # Handle step result
138 |             success = result["exit_code"] == 0
139 |             step.result = {
140 |                 "output": result["output"],
141 |                 "error": result.get("error"),
142 |                 "exit_code": result["exit_code"]
143 |             }
144 |             
145 |             if success:
146 |                 step.status = WorkflowStatus.COMPLETED
147 |             else:
148 |                 # Handle retry logic
149 |                 if step.attempts < step.retry_count + 1:
150 |                     logger.info(f"Retrying step {step.name} (attempt {step.attempts})")
151 |                     return await self._execute_step(workflow_id, step)
152 |                 step.status = WorkflowStatus.FAILED
153 |                 
154 |         except asyncio.TimeoutError:
155 |             step.status = WorkflowStatus.FAILED
156 |             step.result = {
157 |                 "error": "Step execution timed out"
158 |             }
159 |         except Exception as e:
160 |             step.status = WorkflowStatus.FAILED
161 |             step.result = {
162 |                 "error": str(e)
163 |             }
164 |             
165 |     def _build_execution_graph(
166 |         self,
167 |         steps: List[WorkflowStep]
168 |     ) -> List[List[WorkflowStep]]:
169 |         """Build ordered list of step groups based on dependencies."""
170 |         # Initialize variables
171 |         graph: List[List[WorkflowStep]] = []
172 |         completed = set()
173 |         remaining = set(step.name for step in steps)
174 |         steps_by_name = {step.name: step for step in steps}
175 |         
176 |         while remaining:
177 |             # Find steps with satisfied dependencies
178 |             group = set()
179 |             for step_name in remaining:
180 |                 step = steps_by_name[step_name]
181 |                 if all(dep in completed for dep in step.depends_on):
182 |                     group.add(step_name)
183 |             
184 |             if not group:
185 |                 # Circular dependency detected
186 |                 raise WorkflowError("Circular dependency detected in workflow steps")
187 |             
188 |             # Add group to graph
189 |             graph.append([steps_by_name[name] for name in group])
190 |             completed.update(group)
191 |             remaining.difference_update(group)
192 |             
193 |         return graph
194 |         
195 |     async def get_workflow_status(self, workflow_id: str) -> Dict[str, Any]:
196 |         """Get status and results of a workflow."""
197 |         if workflow := self.workflows.get(workflow_id):
198 |             return {
199 |                 "id": workflow_id,
200 |                 "status": workflow["status"],
201 |                 "steps": [
202 |                     {
203 |                         "name": step.name,
204 |                         "status": step.status,
205 |                         "result": step.result,
206 |                         "attempts": step.attempts
207 |                     }
208 |                     for step in workflow["steps"]
209 |                 ],
210 |                 "start_time": workflow["start_time"],
211 |                 "end_time": workflow["end_time"],
212 |                 "error": workflow.get("error")
213 |             }
214 |         raise WorkflowError(f"Workflow not found: {workflow_id}")
215 | 
216 |     def get_common_workflows(self) -> Dict[str, List[WorkflowStep]]:
217 |         """Get predefined common workflow templates."""
218 |         return {
219 |             "build": [
220 |                 WorkflowStep(
221 |                     name="install",
222 |                     command="npm install",
223 |                     environment="default"
224 |                 ),
225 |                 WorkflowStep(
226 |                     name="lint",
227 |                     command="npm run lint",
228 |                     environment="default",
229 |                     depends_on=["install"]
230 |                 ),
231 |                 WorkflowStep(
232 |                     name="test",
233 |                     command="npm test",
234 |                     environment="default", 
235 |                     depends_on=["install"]
236 |                 ),
237 |                 WorkflowStep(
238 |                     name="build",
239 |                     command="npm run build",
240 |                     environment="default",
241 |                     depends_on=["lint", "test"]
242 |                 )
243 |             ],
244 |             "test": [
245 |                 WorkflowStep(
246 |                     name="install_deps",
247 |                     command="npm install",
248 |                     environment="default"
249 |                 ),
250 |                 WorkflowStep(
251 |                     name="unit_tests",
252 |                     command="npm run test:unit",
253 |                     environment="default",
254 |                     depends_on=["install_deps"]
255 |                 ),
256 |                 WorkflowStep(
257 |                     name="integration_tests", 
258 |                     command="npm run test:integration",
259 |                     environment="default",
260 |                     depends_on=["install_deps"]
261 |                 ),
262 |                 WorkflowStep(
263 |                     name="coverage",
264 |                     command="npm run coverage",
265 |                     environment="default",
266 |                     depends_on=["unit_tests", "integration_tests"]
267 |                 )
268 |             ],
269 |             "release": [
270 |                 WorkflowStep(
271 |                     name="bump_version",
272 |                     command="npm version patch",
273 |                     environment="default"
274 |                 ),
275 |                 WorkflowStep(
276 |                     name="build",
277 |                     command="npm run build",
278 |                     environment="default",
279 |                     depends_on=["bump_version"]
280 |                 ),
281 |                 WorkflowStep(
282 |                     name="publish",
283 |                     command="npm publish",
284 |                     environment="default",
285 |                     depends_on=["build"]
286 |                 )
287 |             ]
288 |         }
```
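
`_build_execution_graph` groups steps into levels whose dependencies are already satisfied, so the predefined `build` workflow resolves to `[[install], [lint, test], [build]]` (ordering within a level is arbitrary) and each level runs concurrently via `asyncio.gather`. A small end-to-end sketch follows; `_EchoEnvManager` is an assumed stand-in that reports every command as successful, not part of the codebase.

```python
# Sketch only: _EchoEnvManager is an illustrative stub environment manager.
import asyncio
from mcp_dev_server.workflow.manager import WorkflowManager

class _EchoEnvManager:
    async def execute_in_environment(self, environment: str, command: str):
        # Mimic the result shape expected by _execute_step.
        return {"exit_code": 0, "output": f"ran: {command}", "error": None}

async def main() -> None:
    manager = WorkflowManager(_EchoEnvManager())
    steps = manager.get_common_workflows()["build"]

    groups = manager._build_execution_graph(steps)
    print([[step.name for step in group] for group in groups])
    # e.g. [['install'], ['lint', 'test'], ['build']]

    workflow_id = await manager.create_workflow(steps)
    await manager.start_workflow(workflow_id)   # executes in a background task
    await asyncio.sleep(0.1)                    # give the task time to finish
    status = await manager.get_workflow_status(workflow_id)
    print(status["status"], [s["status"] for s in status["steps"]])

asyncio.run(main())
```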

--------------------------------------------------------------------------------
/src/mcp_dev_server/docker/streams.py:
--------------------------------------------------------------------------------

```python
  1 | """Container output streaming and file synchronization."""
  2 | import os
  3 | import time
  4 | import asyncio
  5 | import hashlib
  6 | import collections
  7 | from enum import Enum
  8 | from datetime import datetime
  9 | from typing import Dict, List, Optional, AsyncGenerator, Any
 10 | from pathlib import Path
 11 | from watchdog.observers import Observer
 12 | from watchdog.events import FileSystemEventHandler
 13 | 
 14 | from ..utils.logging import setup_logging
 15 | from ..utils.errors import StreamError, SyncError
 16 | 
 17 | logger = setup_logging(__name__)
 18 | 
 19 | class OutputFormat(str, Enum):
 20 |     """Output stream formats."""
 21 |     STDOUT = "stdout"
 22 |     STDERR = "stderr"
 23 |     COMBINED = "combined"
 24 |     FORMATTED = "formatted"
 25 | 
 26 | class StreamConfig:
 27 |     """Stream configuration."""
 28 |     def __init__(
 29 |         self,
 30 |         format: OutputFormat = OutputFormat.COMBINED,
 31 |         buffer_size: int = 1024,
 32 |         filters: Optional[List[str]] = None,
 33 |         timestamp: bool = False
 34 |     ):
 35 |         self.format = format
 36 |         self.buffer_size = buffer_size
 37 |         self.filters = filters or []
 38 |         self.timestamp = timestamp
 39 | 
 40 | class SyncConfig:
 41 |     """Synchronization configuration."""
 42 |     def __init__(
 43 |         self,
 44 |         ignore_patterns: Optional[List[str]] = None,
 45 |         sync_interval: float = 1.0,
 46 |         atomic: bool = True
 47 |     ):
 48 |         self.ignore_patterns = ignore_patterns or []
 49 |         self.sync_interval = sync_interval
 50 |         self.atomic = atomic
 51 | 
 52 | class StreamInfo:
 53 |     """Information about an active stream."""
 54 |     def __init__(self, task: asyncio.Task, config: StreamConfig):
 55 |         self.task = task
 56 |         self.config = config
 57 |         self.start_time = datetime.now()
 58 | 
 59 | class EnhancedOutputStreamManager:
 60 |     """Enhanced streaming output manager."""
 61 |     
 62 |     def __init__(self, docker_manager):
 63 |         self.docker_manager = docker_manager
 64 |         self.active_streams: Dict[str, StreamInfo] = {}
 65 |         self._buffer = collections.deque(maxlen=1000)  # Keep last 1000 messages
 66 |         
 67 |     async def start_stream(
 68 |         self,
 69 |         container_name: str,
 70 |         command: str,
 71 |         config: StreamConfig,
 72 |         callback: Optional[callable] = None
 73 |     ) -> AsyncGenerator[str, None]:
 74 |         """Start enhanced output stream."""
 75 |         try:
 76 |             container = self.docker_manager.containers.get(container_name)
 77 |             if not container:
 78 |                 raise StreamError(f"Container not found: {container_name}")
 79 | 
 80 |             # Create execution; docker-py returns a blocking generator of output chunks
 81 |             exec_result = container.exec_run(
 82 |                 command,
 83 |                 stream=True,
 84 |                 demux=True
 85 |             )
 86 |             output_iter = iter(exec_result.output)
 87 | 
 88 |             async def stream_handler():
 89 |                 buffer = []
 90 |                 try:
 91 |                     # Pull chunks off the blocking generator in a worker thread
 92 |                     while (data := await asyncio.to_thread(next, output_iter, None)) is not None:
 93 |                         processed_data = self._process_stream_data(data, config)
 94 |                         
 95 |                         if processed_data:
 96 |                             buffer.extend(processed_data)
 97 |                             if len(buffer) >= config.buffer_size:
 98 |                                 output = ''.join(buffer)
 99 |                                 buffer.clear()
100 |                                 
101 |                                 self._buffer.append(output)
102 |                                 
103 |                                 if callback:
104 |                                     await callback(output)
105 |                                 yield output
106 |                 except Exception as e:
107 |                     logger.error(f"Stream processing error: {str(e)}")
108 |                     raise StreamError(f"Stream processing error: {str(e)}")
109 |                 finally:
110 |                     if buffer:
111 |                         output = ''.join(buffer)
112 |                         self._buffer.append(output)
113 |                         if callback:
114 |                             await callback(output)
115 |                         yield output
116 | 
117 |                     if container_name in self.active_streams:
118 |                         del self.active_streams[container_name]
119 | 
120 |             # Register the consuming task so stop_stream() can cancel it, then relay output
121 |             stream_gen = stream_handler()
122 |             self.active_streams[container_name] = StreamInfo(asyncio.current_task(), config)
123 | 
124 |             async for output in stream_gen:
125 |                 yield output
126 | 
127 |         except Exception as e:
128 |             logger.error(f"Failed to start stream: {str(e)}")
129 |             raise StreamError(f"Failed to start stream: {str(e)}")
130 | 
131 |     def _process_stream_data(
132 |         self,
133 |         data: bytes,
134 |         config: StreamConfig
135 |     ) -> Optional[str]:
136 |         """Process stream data according to config."""
137 |         if not data:
138 |             return None
139 |             
140 |         # Split streams if demuxed
141 |         stdout, stderr = data if isinstance(data, tuple) else (data, None)
142 |         
143 |         # Apply format
144 |         if config.format == OutputFormat.STDOUT and stdout:
145 |             output = stdout.decode()
146 |         elif config.format == OutputFormat.STDERR and stderr:
147 |             output = stderr.decode()
148 |         elif config.format == OutputFormat.COMBINED:
149 |             output = ''
150 |             if stdout:
151 |                 output += stdout.decode()
152 |             if stderr:
153 |                 output += stderr.decode()
154 |         elif config.format == OutputFormat.FORMATTED:
155 |             output = self._format_output(stdout, stderr)
156 |         else:
157 |             return None
158 |             
159 |         # Apply filters
160 |         for filter_pattern in config.filters:
161 |             if filter_pattern in output:
162 |                 return None
163 |                 
164 |         # Add timestamp if requested
165 |         if config.timestamp:
166 |             output = f"[{datetime.now().isoformat()}] {output}"
167 |             
168 |         return output
169 |         
170 |     @staticmethod
171 |     def _format_output(stdout: Optional[bytes], stderr: Optional[bytes]) -> str:
172 |         """Format output with colors and prefixes."""
173 |         output = []
174 |         
175 |         if stdout:
176 |             output.append(f"\033[32m[OUT]\033[0m {stdout.decode()}")
177 |         if stderr:
178 |             output.append(f"\033[31m[ERR]\033[0m {stderr.decode()}")
179 |             
180 |         return '\n'.join(output)
181 | 
182 |     async def stop_stream(self, container_name: str) -> None:
183 |         """Stop streaming from a container."""
184 |         if stream_info := self.active_streams.get(container_name):
185 |             stream_info.task.cancel()
186 |             try:
187 |                 await stream_info.task
188 |             except asyncio.CancelledError:
189 |                 pass
190 |             del self.active_streams[container_name]
191 | 
192 | class BiDirectionalSync:
193 |     """Enhanced bi-directional file synchronization."""
194 |     
195 |     def __init__(self, docker_manager):
196 |         self.docker_manager = docker_manager
197 |         self.sync_handlers: Dict[str, EnhancedSyncHandler] = {}
198 |         self.observer = Observer()
199 |         self.observer.start()
200 |         
201 |     async def start_sync(
202 |         self,
203 |         container_name: str,
204 |         host_path: str,
205 |         container_path: str,
206 |         config: SyncConfig
207 |     ) -> None:
208 |         """Start bi-directional file sync."""
209 |         try:
210 |             # Validate paths
211 |             if not os.path.exists(host_path):
212 |                 raise SyncError(f"Host path does not exist: {host_path}")
213 |             
214 |             container = self.docker_manager.containers.get(container_name)
215 |             if not container:
216 |                 raise SyncError(f"Container not found: {container_name}")
217 |             
218 |             # Create sync handler
219 |             handler = EnhancedSyncHandler(
220 |                 container=container,
221 |                 container_path=container_path,
222 |                 host_path=host_path,
223 |                 config=config
224 |             )
225 |             
226 |             # Start watching both directions
227 |             self.observer.schedule(
228 |                 handler,
229 |                 host_path,
230 |                 recursive=True
231 |             )
232 |             
233 |             # Start container file watcher
234 |             await handler.start_container_watcher()
235 |             
236 |             self.sync_handlers[container_name] = handler
237 |             logger.info(f"Started bi-directional sync for container: {container_name}")
238 |             
239 |         except Exception as e:
240 |             raise SyncError(f"Failed to start sync: {str(e)}")
241 | 
242 |     async def stop_sync(self, container_name: str) -> None:
243 |         """Stop synchronization for a container."""
244 |         if handler := self.sync_handlers.get(container_name):
245 |             self.observer.unschedule_all()
246 |             await handler.stop_container_watcher()
247 |             del self.sync_handlers[container_name]
248 |             logger.info(f"Stopped sync for container: {container_name}")
249 | 
250 |     async def cleanup(self) -> None:
251 |         """Clean up all synchronization handlers."""
252 |         for container_name in list(self.sync_handlers.keys()):
253 |             await self.stop_sync(container_name)
254 |         self.observer.stop()
255 |         self.observer.join()
256 | 
257 | class EnhancedSyncHandler(FileSystemEventHandler):
258 |     """Enhanced sync handler with bi-directional support."""
259 |     
260 |     def __init__(
261 |         self,
262 |         container,
263 |         container_path: str,
264 |         host_path: str,
265 |         config: SyncConfig
266 |     ):
267 |         super().__init__()
268 |         self.container = container
269 |         self.container_path = container_path
270 |         self.host_path = host_path
271 |         self.config = config
272 |         self.sync_lock = asyncio.Lock()
273 |         self.pending_syncs: Dict[str, float] = {}
274 |         self._container_watcher: Optional[asyncio.Task] = None
275 |         
276 |     async def start_container_watcher(self) -> None:
277 |         """Start watching container files."""
278 |         cmd = f"""
279 |         inotifywait -m -r -e modify,create,delete,move {self.container_path}
280 |         """
281 |         
282 |         exec_result = self.container.exec_run(
283 |             cmd,
284 |             stream=True,
285 |             demux=False  # detach=True would return no output to stream
286 |         )
287 |         
288 |         self._container_watcher = asyncio.create_task(
289 |             self._handle_container_events(exec_result.output)
290 |         )
291 |         
292 |     async def stop_container_watcher(self) -> None:
293 |         """Stop container file watcher."""
294 |         if self._container_watcher:
295 |             self._container_watcher.cancel()
296 |             try:
297 |                 await self._container_watcher
298 |             except asyncio.CancelledError:
299 |                 pass
300 |             self._container_watcher = None
301 |         
302 |     async def _handle_container_events(self, output_stream: Any) -> None:
303 |         """Handle container file events from the blocking docker-py output stream."""
304 |         try:
305 |             while (event := await asyncio.to_thread(next, iter(output_stream), None)) is not None:
306 |                 await self._handle_container_change(event.decode())
307 |         except Exception as e:
308 |             logger.error(f"Container watcher error: {str(e)}")
309 |             
310 |     async def _handle_container_change(self, event: str) -> None:
311 |         """Handle container file change."""
312 |         try:
313 |             # Parse inotify event
314 |             parts = event.strip().split()
315 |             if len(parts) >= 3:
316 |                 path = parts[0]
317 |                 change_type = parts[1]
318 |                 filename = parts[2]
319 |                 
320 |                 container_path = os.path.join(path, filename)
321 |                 host_path = self._container_to_host_path(container_path)
322 |                 
323 |                 # Apply filters
324 |                 if self._should_ignore(host_path):
325 |                     return
326 |                     
327 |                 async with self.sync_lock:
328 |                     # Check if change is from host sync
329 |                     if host_path in self.pending_syncs:
330 |                         if time.time() - self.pending_syncs[host_path] < self.config.sync_interval:
331 |                             return
332 |                             
333 |                     # Sync from container to host
334 |                     await self._sync_to_host(container_path, host_path)
335 |                     
336 |         except Exception as e:
337 |             logger.error(f"Error handling container change: {str(e)}")
338 |             
339 |     def _container_to_host_path(self, container_path: str) -> str:
340 |         """Convert container path to host path."""
341 |         rel_path = os.path.relpath(container_path, self.container_path)
342 |         return os.path.join(self.host_path, rel_path)
343 | 
344 |     def _should_ignore(self, path: str) -> bool:
345 |         """Check if path should be ignored."""
346 |         return any(pattern in path for pattern in self.config.ignore_patterns)
347 |         
348 |     async def _sync_to_host(
349 |         self,
350 |         container_path: str,
351 |         host_path: str
352 |     ) -> None:
353 |         """Sync file from container to host."""
354 |         try:
355 |             # Get file from container
356 |             stream, stat = self.container.get_archive(container_path)
357 |             
358 |             # Create parent directories
359 |             os.makedirs(os.path.dirname(host_path), exist_ok=True)
360 |             
361 |             if self.config.atomic:
362 |                 # Save file atomically using temporary file
363 |                 tmp_path = f"{host_path}.tmp"
364 |                 with open(tmp_path, 'wb') as f:
365 |                     for chunk in stream:
366 |                         f.write(chunk)
367 |                 os.rename(tmp_path, host_path)
368 |             else:
369 |                 # Direct write
370 |                 with open(host_path, 'wb') as f:
371 |                     for chunk in stream:
372 |                         f.write(chunk)
373 |             
374 |             # Update sync tracking
375 |             self.pending_syncs[host_path] = time.time()
376 |             
377 |         except Exception as e:
378 |             logger.error(f"Error syncing to host: {str(e)}")
379 |             raise SyncError(f"Failed to sync file {container_path}: {str(e)}")
```
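
A consumption sketch for the streaming side is shown below; it assumes a `DockerManager` whose `containers` dict already holds a running container registered under the name `dev`, neither of which is set up in this file.

```python
# Sketch only: "docker_manager" and the "dev" container name are assumptions.
import asyncio
from mcp_dev_server.docker.streams import (
    EnhancedOutputStreamManager,
    OutputFormat,
    StreamConfig,
)

async def tail_build(docker_manager) -> None:
    streams = EnhancedOutputStreamManager(docker_manager)
    config = StreamConfig(
        format=OutputFormat.FORMATTED,  # prefix lines with [OUT]/[ERR]
        buffer_size=1,                  # flush every processed chunk
        timestamp=True,
    )
    async for chunk in streams.start_stream("dev", "npm run build", config):
        print(chunk, end="")

# asyncio.run(tail_build(docker_manager))
```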