# Directory Structure
```
├── .cursorrules
├── .env.example
├── .github
│ └── workflows
│ ├── build.yml
│ ├── ci-cd.yml
│ ├── ci.yml
│ ├── deploy.yml
│ ├── ecosystem-monitor.yml
│ ├── fan-out-orchestrator.yml
│ └── release.yml
├── .gitignore
├── .pre-commit-config.yaml
├── AUTOMOTIVE_CAMERA_SYSTEM_SUMMARY.md
├── automotive-camera-system
│ ├── docs
│ │ └── IMPLEMENTACE_CS.md
│ └── README.md
├── AWS_MCP_IMPLEMENTATION_SUMMARY.md
├── AWS_MCP_QUICKSTART.md
├── AWS_SIP_TRUNK_DEPLOYMENT_COMPLETE.md
├── aws-sip-trunk
│ ├── .gitignore
│ ├── config
│ │ ├── extensions.conf.j2
│ │ └── pjsip.conf.j2
│ ├── DEPLOYMENT_SUMMARY.md
│ ├── docs
│ │ ├── DEPLOYMENT.md
│ │ └── TROUBLESHOOTING.md
│ ├── PROJECT_INDEX.md
│ ├── pyproject.toml
│ ├── QUICKSTART.md
│ ├── README.md
│ ├── scripts
│ │ ├── deploy-asterisk-aws.sh
│ │ └── user-data.sh
│ ├── terraform
│ │ ├── ec2.tf
│ │ ├── main.tf
│ │ ├── monitoring.tf
│ │ ├── networking.tf
│ │ ├── outputs.tf
│ │ ├── storage.tf
│ │ ├── terraform.tfvars.example
│ │ └── variables.tf
│ ├── tests
│ │ └── test_sip_connectivity.py
│ └── VERIFICATION_CHECKLIST.md
├── CLAUDE.md
├── component_templates.json
├── conanfile.py
├── config
│ ├── default.json
│ └── project_orchestration.json
├── Containerfile
├── cursor-templates
│ └── openssl
│ ├── linux-dev.mdc.jinja2
│ └── shared.mdc.jinja2
├── data
│ └── prompts
│ └── templates
│ ├── advanced-multi-server-template.json
│ ├── analysis-assistant.json
│ ├── analyze-mermaid-diagram.json
│ ├── architecture-design-assistant.json
│ ├── code-diagram-documentation-creator.json
│ ├── code-refactoring-assistant.json
│ ├── code-review-assistant.json
│ ├── collaborative-development.json
│ ├── consolidated-interfaces-template.json
│ ├── could-you-interpret-the-assumed-applicat.json
│ ├── data-analysis-template.json
│ ├── database-query-assistant.json
│ ├── debugging-assistant.json
│ ├── development-system-prompt-zcna0.json
│ ├── development-system-prompt.json
│ ├── development-workflow.json
│ ├── docker-compose-prompt-combiner.json
│ ├── docker-containerization-guide.json
│ ├── docker-mcp-servers-orchestration.json
│ ├── foresight-assistant.json
│ ├── generate-different-types-of-questions-ab.json
│ ├── generate-mermaid-diagram.json
│ ├── image-1-describe-the-icon-in-one-sen.json
│ ├── initialize-project-setup-for-a-new-micro.json
│ ├── install-dependencies-build-run-test.json
│ ├── mcp-code-generator.json
│ ├── mcp-integration-assistant.json
│ ├── mcp-resources-explorer.json
│ ├── mcp-resources-integration.json
│ ├── mcp-server-configurator.json
│ ├── mcp-server-dev-prompt-combiner.json
│ ├── mcp-server-integration-template.json
│ ├── mcp-template-system.json
│ ├── mermaid-analysis-expert.json
│ ├── mermaid-class-diagram-generator.json
│ ├── mermaid-diagram-generator.json
│ ├── mermaid-diagram-modifier.json
│ ├── modify-mermaid-diagram.json
│ ├── monorepo-migration-guide.json
│ ├── multi-resource-context.json
│ ├── project-analysis-assistant.json
│ ├── prompt-combiner-interface.json
│ ├── prompt-templates.json
│ ├── repository-explorer.json
│ ├── research-assistant.json
│ ├── sequential-data-analysis.json
│ ├── solid-code-analysis-visualizer.json
│ ├── task-list-helper-8ithy.json
│ ├── template-based-mcp-integration.json
│ ├── templates.json
│ ├── test-prompt.json
│ └── you-are-limited-to-respond-yes-or-no-onl.json
├── docs
│ ├── AWS_MCP.md
│ ├── AWS.md
│ ├── CONAN.md
│ └── integration.md
├── elevenlabs-agents
│ ├── agent-prompts.json
│ └── README.md
├── IMPLEMENTATION_STATUS.md
├── integration_plan.md
├── LICENSE
├── MANIFEST.in
├── mcp-project-orchestrator
│ └── openssl
│ ├── .github
│ │ └── workflows
│ │ └── validate-cursor-config.yml
│ ├── conanfile.py
│ ├── CURSOR_DEPLOYMENT_POLISH.md
│ ├── cursor-rules
│ │ ├── mcp.json.jinja2
│ │ ├── prompts
│ │ │ ├── fips-compliance.md.jinja2
│ │ │ ├── openssl-coding-standards.md.jinja2
│ │ │ └── pr-review.md.jinja2
│ │ └── rules
│ │ ├── ci-linux.mdc.jinja2
│ │ ├── linux-dev.mdc.jinja2
│ │ ├── macos-dev.mdc.jinja2
│ │ ├── shared.mdc.jinja2
│ │ └── windows-dev.mdc.jinja2
│ ├── docs
│ │ └── cursor-configuration-management.md
│ ├── examples
│ │ └── example-workspace
│ │ ├── .cursor
│ │ │ ├── mcp.json
│ │ │ └── rules
│ │ │ ├── linux-dev.mdc
│ │ │ └── shared.mdc
│ │ ├── .gitignore
│ │ ├── CMakeLists.txt
│ │ ├── conanfile.py
│ │ ├── profiles
│ │ │ ├── linux-gcc-debug.profile
│ │ │ └── linux-gcc-release.profile
│ │ ├── README.md
│ │ └── src
│ │ ├── crypto_utils.cpp
│ │ ├── crypto_utils.h
│ │ └── main.cpp
│ ├── IMPLEMENTATION_SUMMARY.md
│ ├── mcp_orchestrator
│ │ ├── __init__.py
│ │ ├── cli.py
│ │ ├── conan_integration.py
│ │ ├── cursor_config.py
│ │ ├── cursor_deployer.py
│ │ ├── deploy_cursor.py
│ │ ├── env_config.py
│ │ ├── platform_detector.py
│ │ └── yaml_validator.py
│ ├── openssl-cursor-example-workspace-20251014_121133.zip
│ ├── pyproject.toml
│ ├── README.md
│ ├── requirements.txt
│ ├── scripts
│ │ └── create_example_workspace.py
│ ├── setup.py
│ ├── test_deployment.py
│ └── tests
│ ├── __init__.py
│ ├── test_cursor_deployer.py
│ └── test_template_validation.py
├── printcast-agent
│ ├── .env.example
│ ├── config
│ │ └── asterisk
│ │ └── extensions.conf
│ ├── Containerfile
│ ├── docker-compose.yml
│ ├── pyproject.toml
│ ├── README.md
│ ├── scripts
│ │ └── docker-entrypoint.sh
│ ├── src
│ │ ├── integrations
│ │ │ ├── __init__.py
│ │ │ ├── asterisk.py
│ │ │ ├── content.py
│ │ │ ├── delivery.py
│ │ │ ├── elevenlabs.py
│ │ │ └── printing.py
│ │ ├── mcp_server
│ │ │ ├── __init__.py
│ │ │ ├── main.py
│ │ │ └── server.py
│ │ └── orchestration
│ │ ├── __init__.py
│ │ └── workflow.py
│ └── tests
│ └── test_mcp_server.py
├── project_orchestration.json
├── project_templates.json
├── pyproject.toml
├── README.md
├── REFACTORING_COMPLETED.md
├── REFACTORING_RECOMMENDATIONS.md
├── requirements.txt
├── scripts
│ ├── archive
│ │ ├── init_claude_test.sh
│ │ ├── init_postgres.sh
│ │ ├── start_mcp_servers.sh
│ │ └── test_claude_desktop.sh
│ ├── consolidate_mermaid.py
│ ├── consolidate_prompts.py
│ ├── consolidate_resources.py
│ ├── consolidate_templates.py
│ ├── INSTRUCTIONS.md
│ ├── README.md
│ ├── setup_aws_mcp.sh
│ ├── setup_mcp.sh
│ ├── setup_orchestrator.sh
│ ├── setup_project.py
│ └── test_mcp.sh
├── src
│ └── mcp_project_orchestrator
│ ├── __init__.py
│ ├── __main__.py
│ ├── aws_mcp.py
│ ├── cli
│ │ └── __init__.py
│ ├── cli.py
│ ├── commands
│ │ └── openssl_cli.py
│ ├── core
│ │ ├── __init__.py
│ │ ├── base.py
│ │ ├── config.py
│ │ ├── exceptions.py
│ │ ├── fastmcp.py
│ │ ├── logging.py
│ │ └── managers.py
│ ├── cursor_deployer.py
│ ├── ecosystem_monitor.py
│ ├── fan_out_orchestrator.py
│ ├── fastmcp.py
│ ├── mcp-py
│ │ ├── AggregateVersions.py
│ │ ├── CustomBashTool.py
│ │ ├── FileAnnotator.py
│ │ ├── mcp-client.py
│ │ ├── mcp-server.py
│ │ ├── MermaidDiagramGenerator.py
│ │ ├── NamingAgent.py
│ │ └── solid-analyzer-agent.py
│ ├── mermaid
│ │ ├── __init__.py
│ │ ├── generator.py
│ │ ├── mermaid_orchestrator.py
│ │ ├── renderer.py
│ │ ├── templates
│ │ │ ├── AbstractFactory-diagram.json
│ │ │ ├── Adapter-diagram.json
│ │ │ ├── Analyze_Mermaid_Diagram.json
│ │ │ ├── Builder-diagram.json
│ │ │ ├── Chain-diagram.json
│ │ │ ├── Code_Diagram_Documentation_Creator.json
│ │ │ ├── Command-diagram.json
│ │ │ ├── Decorator-diagram.json
│ │ │ ├── Facade-diagram.json
│ │ │ ├── Factory-diagram.json
│ │ │ ├── flowchart
│ │ │ │ ├── AbstractFactory-diagram.json
│ │ │ │ ├── Adapter-diagram.json
│ │ │ │ ├── Analyze_Mermaid_Diagram.json
│ │ │ │ ├── Builder-diagram.json
│ │ │ │ ├── Chain-diagram.json
│ │ │ │ ├── Code_Diagram_Documentation_Creator.json
│ │ │ │ ├── Command-diagram.json
│ │ │ │ ├── Decorator-diagram.json
│ │ │ │ ├── Facade-diagram.json
│ │ │ │ ├── Factory-diagram.json
│ │ │ │ ├── Generate_Mermaid_Diagram.json
│ │ │ │ ├── generated_diagram.json
│ │ │ │ ├── integration.json
│ │ │ │ ├── Iterator-diagram.json
│ │ │ │ ├── Mediator-diagram.json
│ │ │ │ ├── Memento-diagram.json
│ │ │ │ ├── Mermaid_Analysis_Expert.json
│ │ │ │ ├── Mermaid_Class_Diagram_Generator.json
│ │ │ │ ├── Mermaid_Diagram_Generator.json
│ │ │ │ ├── Mermaid_Diagram_Modifier.json
│ │ │ │ ├── Modify_Mermaid_Diagram.json
│ │ │ │ ├── Observer-diagram.json
│ │ │ │ ├── Prototype-diagram.json
│ │ │ │ ├── Proxy-diagram.json
│ │ │ │ ├── README.json
│ │ │ │ ├── Singleton-diagram.json
│ │ │ │ ├── State-diagram.json
│ │ │ │ ├── Strategy-diagram.json
│ │ │ │ ├── TemplateMethod-diagram.json
│ │ │ │ ├── theme_dark.json
│ │ │ │ ├── theme_default.json
│ │ │ │ ├── theme_pastel.json
│ │ │ │ ├── theme_vibrant.json
│ │ │ │ └── Visitor-diagram.json
│ │ │ ├── Generate_Mermaid_Diagram.json
│ │ │ ├── generated_diagram.json
│ │ │ ├── index.json
│ │ │ ├── integration.json
│ │ │ ├── Iterator-diagram.json
│ │ │ ├── Mediator-diagram.json
│ │ │ ├── Memento-diagram.json
│ │ │ ├── Mermaid_Analysis_Expert.json
│ │ │ ├── Mermaid_Class_Diagram_Generator.json
│ │ │ ├── Mermaid_Diagram_Generator.json
│ │ │ ├── Mermaid_Diagram_Modifier.json
│ │ │ ├── Modify_Mermaid_Diagram.json
│ │ │ ├── Observer-diagram.json
│ │ │ ├── Prototype-diagram.json
│ │ │ ├── Proxy-diagram.json
│ │ │ ├── README.json
│ │ │ ├── Singleton-diagram.json
│ │ │ ├── State-diagram.json
│ │ │ ├── Strategy-diagram.json
│ │ │ ├── TemplateMethod-diagram.json
│ │ │ ├── theme_dark.json
│ │ │ ├── theme_default.json
│ │ │ ├── theme_pastel.json
│ │ │ ├── theme_vibrant.json
│ │ │ └── Visitor-diagram.json
│ │ └── types.py
│ ├── project_orchestration.py
│ ├── prompt_manager
│ │ ├── __init__.py
│ │ ├── loader.py
│ │ ├── manager.py
│ │ └── template.py
│ ├── prompts
│ │ ├── __dirname.json
│ │ ├── __image_1___describe_the_icon_in_one_sen___.json
│ │ ├── __init__.py
│ │ ├── __type.json
│ │ ├── _.json
│ │ ├── _DEFAULT_OPEN_DELIMITER.json
│ │ ├── _emojiRegex.json
│ │ ├── _UUID_CHARS.json
│ │ ├── a.json
│ │ ├── A.json
│ │ ├── Aa.json
│ │ ├── aAnnotationPadding.json
│ │ ├── absoluteThresholdGroup.json
│ │ ├── add.json
│ │ ├── ADDITIONAL_PROPERTY_FLAG.json
│ │ ├── Advanced_Multi-Server_Integration_Template.json
│ │ ├── allOptionsList.json
│ │ ├── analysis
│ │ │ ├── Data_Analysis_Template.json
│ │ │ ├── index.json
│ │ │ ├── Mermaid_Analysis_Expert.json
│ │ │ ├── Sequential_Data_Analysis_with_MCP_Integration.json
│ │ │ └── SOLID_Code_Analysis_Visualizer.json
│ │ ├── Analysis_Assistant.json
│ │ ├── Analyze_Mermaid_Diagram.json
│ │ ├── ANDROID_EVERGREEN_FIRST.json
│ │ ├── ANSI_ESCAPE_BELL.json
│ │ ├── architecture
│ │ │ ├── index.json
│ │ │ └── PromptCombiner_Interface.json
│ │ ├── Architecture_Design_Assistant.json
│ │ ├── argsTag.json
│ │ ├── ARROW.json
│ │ ├── assistant
│ │ │ ├── Analysis_Assistant.json
│ │ │ ├── Architecture_Design_Assistant.json
│ │ │ ├── Code_Refactoring_Assistant.json
│ │ │ ├── Code_Review_Assistant.json
│ │ │ ├── Database_Query_Assistant.json
│ │ │ ├── Debugging_Assistant.json
│ │ │ ├── Foresight_Assistant.json
│ │ │ ├── index.json
│ │ │ ├── MCP_Integration_Assistant.json
│ │ │ ├── Project_Analysis_Assistant.json
│ │ │ └── Research_Assistant.json
│ │ ├── astralRange.json
│ │ ├── at.json
│ │ ├── authorization_endpoint.json
│ │ ├── b.json
│ │ ├── BABELIGNORE_FILENAME.json
│ │ ├── BACKSLASH.json
│ │ ├── backupId.json
│ │ ├── BANG.json
│ │ ├── BASE64_MAP.json
│ │ ├── baseFlags.json
│ │ ├── Basic_Template.json
│ │ ├── bgModel.json
│ │ ├── bignum.json
│ │ ├── blockKeywordsStr.json
│ │ ├── BOMChar.json
│ │ ├── boundary.json
│ │ ├── brackets.json
│ │ ├── BROWSER_VAR.json
│ │ ├── bt.json
│ │ ├── BUILTIN.json
│ │ ├── BULLET.json
│ │ ├── c.json
│ │ ├── C.json
│ │ ├── CACHE_VERSION.json
│ │ ├── cacheControl.json
│ │ ├── cacheProp.json
│ │ ├── category.py
│ │ ├── CHANGE_EVENT.json
│ │ ├── CHAR_CODE_0.json
│ │ ├── chars.json
│ │ ├── cjsPattern.json
│ │ ├── cKeywords.json
│ │ ├── classForPercent.json
│ │ ├── classStr.json
│ │ ├── clientFirstMessageBare.json
│ │ ├── cmd.json
│ │ ├── Code_Diagram_Documentation_Creator.json
│ │ ├── Code_Refactoring_Assistant.json
│ │ ├── Code_Review_Assistant.json
│ │ ├── code.json
│ │ ├── coding
│ │ │ ├── __dirname.json
│ │ │ ├── _.json
│ │ │ ├── _DEFAULT_OPEN_DELIMITER.json
│ │ │ ├── _emojiRegex.json
│ │ │ ├── _UUID_CHARS.json
│ │ │ ├── a.json
│ │ │ ├── A.json
│ │ │ ├── aAnnotationPadding.json
│ │ │ ├── absoluteThresholdGroup.json
│ │ │ ├── add.json
│ │ │ ├── ADDITIONAL_PROPERTY_FLAG.json
│ │ │ ├── allOptionsList.json
│ │ │ ├── ANDROID_EVERGREEN_FIRST.json
│ │ │ ├── ANSI_ESCAPE_BELL.json
│ │ │ ├── argsTag.json
│ │ │ ├── ARROW.json
│ │ │ ├── astralRange.json
│ │ │ ├── at.json
│ │ │ ├── authorization_endpoint.json
│ │ │ ├── BABELIGNORE_FILENAME.json
│ │ │ ├── BACKSLASH.json
│ │ │ ├── BANG.json
│ │ │ ├── BASE64_MAP.json
│ │ │ ├── baseFlags.json
│ │ │ ├── bgModel.json
│ │ │ ├── bignum.json
│ │ │ ├── blockKeywordsStr.json
│ │ │ ├── BOMChar.json
│ │ │ ├── boundary.json
│ │ │ ├── brackets.json
│ │ │ ├── BROWSER_VAR.json
│ │ │ ├── bt.json
│ │ │ ├── BUILTIN.json
│ │ │ ├── BULLET.json
│ │ │ ├── c.json
│ │ │ ├── C.json
│ │ │ ├── CACHE_VERSION.json
│ │ │ ├── cacheControl.json
│ │ │ ├── cacheProp.json
│ │ │ ├── CHANGE_EVENT.json
│ │ │ ├── CHAR_CODE_0.json
│ │ │ ├── chars.json
│ │ │ ├── cjsPattern.json
│ │ │ ├── cKeywords.json
│ │ │ ├── classForPercent.json
│ │ │ ├── classStr.json
│ │ │ ├── clientFirstMessageBare.json
│ │ │ ├── cmd.json
│ │ │ ├── code.json
│ │ │ ├── colorCode.json
│ │ │ ├── comma.json
│ │ │ ├── command.json
│ │ │ ├── configJsContent.json
│ │ │ ├── connectionString.json
│ │ │ ├── cssClassStr.json
│ │ │ ├── currentBoundaryParse.json
│ │ │ ├── d.json
│ │ │ ├── data.json
│ │ │ ├── DATA.json
│ │ │ ├── dataWebpackPrefix.json
│ │ │ ├── debug.json
│ │ │ ├── decodeStateVectorV2.json
│ │ │ ├── DEFAULT_DELIMITER.json
│ │ │ ├── DEFAULT_DIAGRAM_DIRECTION.json
│ │ │ ├── DEFAULT_JS_PATTERN.json
│ │ │ ├── DEFAULT_LOG_TARGET.json
│ │ │ ├── defaultHelpOpt.json
│ │ │ ├── defaultHost.json
│ │ │ ├── deferY18nLookupPrefix.json
│ │ │ ├── DELIM.json
│ │ │ ├── delimiter.json
│ │ │ ├── DEPRECATION.json
│ │ │ ├── destMain.json
│ │ │ ├── DID_NOT_THROW.json
│ │ │ ├── direction.json
│ │ │ ├── displayValue.json
│ │ │ ├── DNS.json
│ │ │ ├── doc.json
│ │ │ ├── DOCUMENTATION_NOTE.json
│ │ │ ├── DOT.json
│ │ │ ├── DOTS.json
│ │ │ ├── dummyCompoundId.json
│ │ │ ├── e.json
│ │ │ ├── E.json
│ │ │ ├── earlyHintsLink.json
│ │ │ ├── elide.json
│ │ │ ├── EMPTY.json
│ │ │ ├── end.json
│ │ │ ├── endpoint.json
│ │ │ ├── environment.json
│ │ │ ├── ERR_CODE.json
│ │ │ ├── errMessage.json
│ │ │ ├── errMsg.json
│ │ │ ├── ERROR_MESSAGE.json
│ │ │ ├── error.json
│ │ │ ├── ERROR.json
│ │ │ ├── ERRORCLASS.json
│ │ │ ├── errorMessage.json
│ │ │ ├── es6Default.json
│ │ │ ├── ESC.json
│ │ │ ├── Escapable.json
│ │ │ ├── escapedChar.json
│ │ │ ├── escapeFuncStr.json
│ │ │ ├── escSlash.json
│ │ │ ├── ev.json
│ │ │ ├── event.json
│ │ │ ├── execaMessage.json
│ │ │ ├── EXPECTED_LABEL.json
│ │ │ ├── expected.json
│ │ │ ├── expectedString.json
│ │ │ ├── expression1.json
│ │ │ ├── EXTENSION.json
│ │ │ ├── f.json
│ │ │ ├── FAIL_TEXT.json
│ │ │ ├── FILE_BROWSER_FACTORY.json
│ │ │ ├── fill.json
│ │ │ ├── findPackageJson.json
│ │ │ ├── fnKey.json
│ │ │ ├── FORMAT.json
│ │ │ ├── formatted.json
│ │ │ ├── from.json
│ │ │ ├── fullpaths.json
│ │ │ ├── FUNC_ERROR_TEXT.json
│ │ │ ├── GenStateSuspendedStart.json
│ │ │ ├── GENSYNC_EXPECTED_START.json
│ │ │ ├── gutter.json
│ │ │ ├── h.json
│ │ │ ├── handlerFuncName.json
│ │ │ ├── HASH_UNDEFINED.json
│ │ │ ├── head.json
│ │ │ ├── helpMessage.json
│ │ │ ├── HINT_ARG.json
│ │ │ ├── HOOK_RETURNED_NOTHING_ERROR_MESSAGE.json
│ │ │ ├── i.json
│ │ │ ├── id.json
│ │ │ ├── identifier.json
│ │ │ ├── Identifier.json
│ │ │ ├── INDENT.json
│ │ │ ├── indentation.json
│ │ │ ├── index.json
│ │ │ ├── INDIRECTION_FRAGMENT.json
│ │ │ ├── input.json
│ │ │ ├── inputText.json
│ │ │ ├── insert.json
│ │ │ ├── insertPromptQuery.json
│ │ │ ├── INSPECT_MAX_BYTES.json
│ │ │ ├── intToCharMap.json
│ │ │ ├── IS_ITERABLE_SENTINEL.json
│ │ │ ├── IS_KEYED_SENTINEL.json
│ │ │ ├── isConfigType.json
│ │ │ ├── isoSentinel.json
│ │ │ ├── isSourceNode.json
│ │ │ ├── j.json
│ │ │ ├── JAKE_CMD.json
│ │ │ ├── JEST_GLOBAL_NAME.json
│ │ │ ├── JEST_GLOBALS_MODULE_NAME.json
│ │ │ ├── JSON_SYNTAX_CHAR.json
│ │ │ ├── json.json
│ │ │ ├── jsonType.json
│ │ │ ├── jupyter_namespaceObject.json
│ │ │ ├── JUPYTERLAB_DOCMANAGER_PLUGIN_ID.json
│ │ │ ├── k.json
│ │ │ ├── KERNEL_STATUS_ERROR_CLASS.json
│ │ │ ├── key.json
│ │ │ ├── l.json
│ │ │ ├── labelId.json
│ │ │ ├── LATEST_PROTOCOL_VERSION.json
│ │ │ ├── LETTERDASHNUMBER.json
│ │ │ ├── LF.json
│ │ │ ├── LIMIT_REPLACE_NODE.json
│ │ │ ├── logTime.json
│ │ │ ├── lstatkey.json
│ │ │ ├── lt.json
│ │ │ ├── m.json
│ │ │ ├── maliciousPayload.json
│ │ │ ├── mask.json
│ │ │ ├── match.json
│ │ │ ├── matchingDelim.json
│ │ │ ├── MAXIMUM_MESSAGE_SIZE.json
│ │ │ ├── mdcContent.json
│ │ │ ├── MERMAID_DOM_ID_PREFIX.json
│ │ │ ├── message.json
│ │ │ ├── messages.json
│ │ │ ├── meth.json
│ │ │ ├── minimatch.json
│ │ │ ├── MOCK_CONSTRUCTOR_NAME.json
│ │ │ ├── MOCKS_PATTERN.json
│ │ │ ├── moduleDirectory.json
│ │ │ ├── msg.json
│ │ │ ├── mtr.json
│ │ │ ├── multipartType.json
│ │ │ ├── n.json
│ │ │ ├── N.json
│ │ │ ├── name.json
│ │ │ ├── NATIVE_PLATFORM.json
│ │ │ ├── newUrl.json
│ │ │ ├── NM.json
│ │ │ ├── NO_ARGUMENTS.json
│ │ │ ├── NO_DIFF_MESSAGE.json
│ │ │ ├── NODE_MODULES.json
│ │ │ ├── nodeInternalPrefix.json
│ │ │ ├── nonASCIIidentifierStartChars.json
│ │ │ ├── nonKey.json
│ │ │ ├── NOT_A_DOT.json
│ │ │ ├── notCharacterOrDash.json
│ │ │ ├── notebookURL.json
│ │ │ ├── notSelector.json
│ │ │ ├── nullTag.json
│ │ │ ├── num.json
│ │ │ ├── NUMBER.json
│ │ │ ├── o.json
│ │ │ ├── O.json
│ │ │ ├── octChar.json
│ │ │ ├── octetStreamType.json
│ │ │ ├── operators.json
│ │ │ ├── out.json
│ │ │ ├── OUTSIDE_JEST_VM_PROTOCOL.json
│ │ │ ├── override.json
│ │ │ ├── p.json
│ │ │ ├── PACKAGE_FILENAME.json
│ │ │ ├── PACKAGE_JSON.json
│ │ │ ├── packageVersion.json
│ │ │ ├── paddedNumber.json
│ │ │ ├── page.json
│ │ │ ├── parseClass.json
│ │ │ ├── path.json
│ │ │ ├── pathExt.json
│ │ │ ├── pattern.json
│ │ │ ├── PatternBoolean.json
│ │ │ ├── pBuiltins.json
│ │ │ ├── pFloatForm.json
│ │ │ ├── pkg.json
│ │ │ ├── PLUGIN_ID_DOC_MANAGER.json
│ │ │ ├── plusChar.json
│ │ │ ├── PN_CHARS.json
│ │ │ ├── point.json
│ │ │ ├── prefix.json
│ │ │ ├── PRETTY_PLACEHOLDER.json
│ │ │ ├── property_prefix.json
│ │ │ ├── pubkey256.json
│ │ │ ├── Q.json
│ │ │ ├── qmark.json
│ │ │ ├── QO.json
│ │ │ ├── query.json
│ │ │ ├── querystringType.json
│ │ │ ├── queryText.json
│ │ │ ├── r.json
│ │ │ ├── R.json
│ │ │ ├── rangeStart.json
│ │ │ ├── re.json
│ │ │ ├── reI.json
│ │ │ ├── REQUIRED_FIELD_SYMBOL.json
│ │ │ ├── reserve.json
│ │ │ ├── resolvedDestination.json
│ │ │ ├── resolverDir.json
│ │ │ ├── responseType.json
│ │ │ ├── result.json
│ │ │ ├── ROOT_DESCRIBE_BLOCK_NAME.json
│ │ │ ├── ROOT_NAMESPACE_NAME.json
│ │ │ ├── ROOT_TASK_NAME.json
│ │ │ ├── route.json
│ │ │ ├── RUNNING_TEXT.json
│ │ │ ├── s.json
│ │ │ ├── SCHEMA_PATH.json
│ │ │ ├── se.json
│ │ │ ├── SEARCHABLE_CLASS.json
│ │ │ ├── secret.json
│ │ │ ├── selector.json
│ │ │ ├── SEMVER_SPEC_VERSION.json
│ │ │ ├── sensitiveHeaders.json
│ │ │ ├── sep.json
│ │ │ ├── separator.json
│ │ │ ├── SHAPE_STATE.json
│ │ │ ├── shape.json
│ │ │ ├── SHARED.json
│ │ │ ├── short.json
│ │ │ ├── side.json
│ │ │ ├── SNAPSHOT_VERSION.json
│ │ │ ├── SOURCE_MAPPING_PREFIX.json
│ │ │ ├── source.json
│ │ │ ├── sourceMapContent.json
│ │ │ ├── SPACE_SYMBOL.json
│ │ │ ├── SPACE.json
│ │ │ ├── sqlKeywords.json
│ │ │ ├── sranges.json
│ │ │ ├── st.json
│ │ │ ├── ST.json
│ │ │ ├── stack.json
│ │ │ ├── START_HIDING.json
│ │ │ ├── START_OF_LINE.json
│ │ │ ├── startNoTraversal.json
│ │ │ ├── STATES.json
│ │ │ ├── stats.json
│ │ │ ├── statSync.json
│ │ │ ├── storageStatus.json
│ │ │ ├── storageType.json
│ │ │ ├── str.json
│ │ │ ├── stringifiedObject.json
│ │ │ ├── stringPath.json
│ │ │ ├── stringResult.json
│ │ │ ├── stringTag.json
│ │ │ ├── strValue.json
│ │ │ ├── style.json
│ │ │ ├── SUB_NAME.json
│ │ │ ├── subkey.json
│ │ │ ├── SUBPROTOCOL.json
│ │ │ ├── SUITE_NAME.json
│ │ │ ├── symbolPattern.json
│ │ │ ├── symbolTag.json
│ │ │ ├── t.json
│ │ │ ├── T.json
│ │ │ ├── templateDir.json
│ │ │ ├── tempName.json
│ │ │ ├── text.json
│ │ │ ├── time.json
│ │ │ ├── titleSeparator.json
│ │ │ ├── tmpl.json
│ │ │ ├── tn.json
│ │ │ ├── toValue.json
│ │ │ ├── transform.json
│ │ │ ├── trustProxyDefaultSymbol.json
│ │ │ ├── typeArgumentsKey.json
│ │ │ ├── typeKey.json
│ │ │ ├── typeMessage.json
│ │ │ ├── typesRegistryPackageName.json
│ │ │ ├── u.json
│ │ │ ├── UNDEFINED.json
│ │ │ ├── unit.json
│ │ │ ├── UNMATCHED_SURROGATE_PAIR_REPLACE.json
│ │ │ ├── ur.json
│ │ │ ├── USAGE.json
│ │ │ ├── value.json
│ │ │ ├── Vr.json
│ │ │ ├── watchmanURL.json
│ │ │ ├── webkit.json
│ │ │ ├── xhtml.json
│ │ │ ├── XP_DEFAULT_PATHEXT.json
│ │ │ └── y.json
│ │ ├── Collaborative_Development_with_MCP_Integration.json
│ │ ├── colorCode.json
│ │ ├── comma.json
│ │ ├── command.json
│ │ ├── completionShTemplate.json
│ │ ├── configJsContent.json
│ │ ├── connectionString.json
│ │ ├── Consolidated_TypeScript_Interfaces_Template.json
│ │ ├── Could_you_interpret_the_assumed_applicat___.json
│ │ ├── cssClassStr.json
│ │ ├── currentBoundaryParse.json
│ │ ├── d.json
│ │ ├── Data_Analysis_Template.json
│ │ ├── data.json
│ │ ├── DATA.json
│ │ ├── Database_Query_Assistant.json
│ │ ├── dataWebpackPrefix.json
│ │ ├── debug.json
│ │ ├── Debugging_Assistant.json
│ │ ├── decodeStateVectorV2.json
│ │ ├── DEFAULT_DELIMITER.json
│ │ ├── DEFAULT_DIAGRAM_DIRECTION.json
│ │ ├── DEFAULT_INDENT.json
│ │ ├── DEFAULT_JS_PATTERN.json
│ │ ├── DEFAULT_LOG_TARGET.json
│ │ ├── defaultHelpOpt.json
│ │ ├── defaultHost.json
│ │ ├── deferY18nLookupPrefix.json
│ │ ├── DELIM.json
│ │ ├── delimiter.json
│ │ ├── DEPRECATION.json
│ │ ├── DESCENDING.json
│ │ ├── destMain.json
│ │ ├── development
│ │ │ ├── Collaborative_Development_with_MCP_Integration.json
│ │ │ ├── Consolidated_TypeScript_Interfaces_Template.json
│ │ │ ├── Development_Workflow.json
│ │ │ ├── index.json
│ │ │ ├── MCP_Server_Development_Prompt_Combiner.json
│ │ │ └── Monorepo_Migration_and_Code_Organization_Guide.json
│ │ ├── Development_System_Prompt.json
│ │ ├── Development_Workflow.json
│ │ ├── devops
│ │ │ ├── Docker_Compose_Prompt_Combiner.json
│ │ │ ├── Docker_Containerization_Guide.json
│ │ │ └── index.json
│ │ ├── DID_NOT_THROW.json
│ │ ├── direction.json
│ │ ├── displayValue.json
│ │ ├── DNS.json
│ │ ├── doc.json
│ │ ├── Docker_Compose_Prompt_Combiner.json
│ │ ├── Docker_Containerization_Guide.json
│ │ ├── Docker_MCP_Servers_Orchestration_Guide.json
│ │ ├── DOCUMENTATION_NOTE.json
│ │ ├── DOT.json
│ │ ├── DOTS.json
│ │ ├── dummyCompoundId.json
│ │ ├── e.json
│ │ ├── E.json
│ │ ├── earlyHintsLink.json
│ │ ├── elide.json
│ │ ├── EMPTY.json
│ │ ├── encoded.json
│ │ ├── end.json
│ │ ├── endpoint.json
│ │ ├── environment.json
│ │ ├── ERR_CODE.json
│ │ ├── errMessage.json
│ │ ├── errMsg.json
│ │ ├── ERROR_MESSAGE.json
│ │ ├── error.json
│ │ ├── ERROR.json
│ │ ├── ERRORCLASS.json
│ │ ├── errorMessage.json
│ │ ├── es6Default.json
│ │ ├── ESC.json
│ │ ├── Escapable.json
│ │ ├── escapedChar.json
│ │ ├── escapeFuncStr.json
│ │ ├── escSlash.json
│ │ ├── ev.json
│ │ ├── event.json
│ │ ├── execaMessage.json
│ │ ├── EXPECTED_LABEL.json
│ │ ├── expected.json
│ │ ├── expectedString.json
│ │ ├── expression1.json
│ │ ├── EXTENSION.json
│ │ ├── f.json
│ │ ├── FAIL_TEXT.json
│ │ ├── FILE_BROWSER_FACTORY.json
│ │ ├── fill.json
│ │ ├── findPackageJson.json
│ │ ├── fnKey.json
│ │ ├── Foresight_Assistant.json
│ │ ├── FORMAT.json
│ │ ├── formatted.json
│ │ ├── from.json
│ │ ├── fullpaths.json
│ │ ├── FUNC_ERROR_TEXT.json
│ │ ├── general
│ │ │ └── index.json
│ │ ├── Generate_different_types_of_questions_ab___.json
│ │ ├── Generate_Mermaid_Diagram.json
│ │ ├── GenStateSuspendedStart.json
│ │ ├── GENSYNC_EXPECTED_START.json
│ │ ├── GitHub_Repository_Explorer.json
│ │ ├── gutter.json
│ │ ├── h.json
│ │ ├── handlerFuncName.json
│ │ ├── HASH_UNDEFINED.json
│ │ ├── head.json
│ │ ├── helpMessage.json
│ │ ├── HINT_ARG.json
│ │ ├── HOOK_RETURNED_NOTHING_ERROR_MESSAGE.json
│ │ ├── i.json
│ │ ├── id.json
│ │ ├── identifier.json
│ │ ├── Identifier.json
│ │ ├── INDENT.json
│ │ ├── indentation.json
│ │ ├── index.json
│ │ ├── INDIRECTION_FRAGMENT.json
│ │ ├── Initialize_project_setup_for_a_new_micro___.json
│ │ ├── input.json
│ │ ├── inputText.json
│ │ ├── insert.json
│ │ ├── insertPromptQuery.json
│ │ ├── INSPECT_MAX_BYTES.json
│ │ ├── install_dependencies__build__run__test____.json
│ │ ├── intToCharMap.json
│ │ ├── IS_ITERABLE_SENTINEL.json
│ │ ├── IS_KEYED_SENTINEL.json
│ │ ├── isConfigType.json
│ │ ├── isoSentinel.json
│ │ ├── isSourceNode.json
│ │ ├── j.json
│ │ ├── J.json
│ │ ├── JAKE_CMD.json
│ │ ├── JEST_GLOBAL_NAME.json
│ │ ├── JEST_GLOBALS_MODULE_NAME.json
│ │ ├── JSON_SYNTAX_CHAR.json
│ │ ├── json.json
│ │ ├── jsonType.json
│ │ ├── jupyter_namespaceObject.json
│ │ ├── JUPYTERLAB_DOCMANAGER_PLUGIN_ID.json
│ │ ├── k.json
│ │ ├── KERNEL_STATUS_ERROR_CLASS.json
│ │ ├── key.json
│ │ ├── l.json
│ │ ├── labelId.json
│ │ ├── LATEST_PROTOCOL_VERSION.json
│ │ ├── LETTERDASHNUMBER.json
│ │ ├── LF.json
│ │ ├── LIMIT_REPLACE_NODE.json
│ │ ├── LINE_FEED.json
│ │ ├── logTime.json
│ │ ├── lstatkey.json
│ │ ├── lt.json
│ │ ├── m.json
│ │ ├── maliciousPayload.json
│ │ ├── manager.py
│ │ ├── marker.json
│ │ ├── mask.json
│ │ ├── match.json
│ │ ├── matchingDelim.json
│ │ ├── MAXIMUM_MESSAGE_SIZE.json
│ │ ├── MCP_Integration_Assistant.json
│ │ ├── MCP_Resources_Explorer.json
│ │ ├── MCP_Resources_Integration_Guide.json
│ │ ├── MCP_Server_Development_Prompt_Combiner.json
│ │ ├── MCP_Server_Integration_Guide.json
│ │ ├── mcp-code-generator.json
│ │ ├── mdcContent.json
│ │ ├── Mermaid_Analysis_Expert.json
│ │ ├── Mermaid_Class_Diagram_Generator.json
│ │ ├── Mermaid_Diagram_Generator.json
│ │ ├── Mermaid_Diagram_Modifier.json
│ │ ├── MERMAID_DOM_ID_PREFIX.json
│ │ ├── message.json
│ │ ├── messages.json
│ │ ├── meth.json
│ │ ├── minimatch.json
│ │ ├── MOBILE_QUERY.json
│ │ ├── MOCK_CONSTRUCTOR_NAME.json
│ │ ├── MOCKS_PATTERN.json
│ │ ├── Modify_Mermaid_Diagram.json
│ │ ├── moduleDirectory.json
│ │ ├── Monorepo_Migration_and_Code_Organization_Guide.json
│ │ ├── msg.json
│ │ ├── mtr.json
│ │ ├── Multi-Resource_Context_Assistant.json
│ │ ├── multipartType.json
│ │ ├── n.json
│ │ ├── N.json
│ │ ├── name.json
│ │ ├── NATIVE_PLATFORM.json
│ │ ├── newUrl.json
│ │ ├── NM.json
│ │ ├── NO_ARGUMENTS.json
│ │ ├── NO_DIFF_MESSAGE.json
│ │ ├── NODE_MODULES.json
│ │ ├── nodeInternalPrefix.json
│ │ ├── nonASCIIidentifierStartChars.json
│ │ ├── nonKey.json
│ │ ├── NOT_A_DOT.json
│ │ ├── notCharacterOrDash.json
│ │ ├── notebookURL.json
│ │ ├── notSelector.json
│ │ ├── nullTag.json
│ │ ├── num.json
│ │ ├── NUMBER.json
│ │ ├── o.json
│ │ ├── O.json
│ │ ├── octChar.json
│ │ ├── octetStreamType.json
│ │ ├── operators.json
│ │ ├── other
│ │ │ ├── __image_1___describe_the_icon_in_one_sen___.json
│ │ │ ├── __type.json
│ │ │ ├── Advanced_Multi-Server_Integration_Template.json
│ │ │ ├── Analyze_Mermaid_Diagram.json
│ │ │ ├── Basic_Template.json
│ │ │ ├── Code_Diagram_Documentation_Creator.json
│ │ │ ├── Collaborative_Development_with_MCP_Integration.json
│ │ │ ├── completionShTemplate.json
│ │ │ ├── Could_you_interpret_the_assumed_applicat___.json
│ │ │ ├── DEFAULT_INDENT.json
│ │ │ ├── Docker_MCP_Servers_Orchestration_Guide.json
│ │ │ ├── Generate_different_types_of_questions_ab___.json
│ │ │ ├── Generate_Mermaid_Diagram.json
│ │ │ ├── GitHub_Repository_Explorer.json
│ │ │ ├── index.json
│ │ │ ├── Initialize_project_setup_for_a_new_micro___.json
│ │ │ ├── install_dependencies__build__run__test____.json
│ │ │ ├── LINE_FEED.json
│ │ │ ├── MCP_Resources_Explorer.json
│ │ │ ├── MCP_Resources_Integration_Guide.json
│ │ │ ├── MCP_Server_Integration_Guide.json
│ │ │ ├── mcp-code-generator.json
│ │ │ ├── Mermaid_Class_Diagram_Generator.json
│ │ │ ├── Mermaid_Diagram_Generator.json
│ │ │ ├── Mermaid_Diagram_Modifier.json
│ │ │ ├── Modify_Mermaid_Diagram.json
│ │ │ ├── Multi-Resource_Context_Assistant.json
│ │ │ ├── output.json
│ │ │ ├── sseUrl.json
│ │ │ ├── string.json
│ │ │ ├── Task_List_Helper.json
│ │ │ ├── Template-Based_MCP_Integration.json
│ │ │ ├── Test_Prompt.json
│ │ │ ├── type.json
│ │ │ ├── VERSION.json
│ │ │ ├── WIN_SLASH.json
│ │ │ └── You_are_limited_to_respond_Yes_or_No_onl___.json
│ │ ├── out.json
│ │ ├── output.json
│ │ ├── OUTSIDE_JEST_VM_PROTOCOL.json
│ │ ├── override.json
│ │ ├── p.json
│ │ ├── PACKAGE_FILENAME.json
│ │ ├── PACKAGE_JSON.json
│ │ ├── packageVersion.json
│ │ ├── paddedNumber.json
│ │ ├── page.json
│ │ ├── parseClass.json
│ │ ├── PATH_NODE_MODULES.json
│ │ ├── path.json
│ │ ├── pathExt.json
│ │ ├── pattern.json
│ │ ├── PatternBoolean.json
│ │ ├── pBuiltins.json
│ │ ├── pFloatForm.json
│ │ ├── pkg.json
│ │ ├── PLUGIN_ID_DOC_MANAGER.json
│ │ ├── plusChar.json
│ │ ├── PN_CHARS.json
│ │ ├── point.json
│ │ ├── prefix.json
│ │ ├── PRETTY_PLACEHOLDER.json
│ │ ├── Project_Analysis_Assistant.json
│ │ ├── ProjectsUpdatedInBackgroundEvent.json
│ │ ├── PromptCombiner_Interface.json
│ │ ├── promptId.json
│ │ ├── property_prefix.json
│ │ ├── pubkey256.json
│ │ ├── Q.json
│ │ ├── qmark.json
│ │ ├── QO.json
│ │ ├── query.json
│ │ ├── querystringType.json
│ │ ├── queryText.json
│ │ ├── r.json
│ │ ├── R.json
│ │ ├── rangeStart.json
│ │ ├── re.json
│ │ ├── reI.json
│ │ ├── REQUIRED_FIELD_SYMBOL.json
│ │ ├── Research_Assistant.json
│ │ ├── reserve.json
│ │ ├── resolvedDestination.json
│ │ ├── resolverDir.json
│ │ ├── responseType.json
│ │ ├── result.json
│ │ ├── ROOT_DESCRIBE_BLOCK_NAME.json
│ │ ├── ROOT_NAMESPACE_NAME.json
│ │ ├── ROOT_TASK_NAME.json
│ │ ├── route.json
│ │ ├── RUNNING_TEXT.json
│ │ ├── RXstyle.json
│ │ ├── s.json
│ │ ├── SCHEMA_PATH.json
│ │ ├── schemaQuery.json
│ │ ├── se.json
│ │ ├── SEARCHABLE_CLASS.json
│ │ ├── secret.json
│ │ ├── selector.json
│ │ ├── SEMVER_SPEC_VERSION.json
│ │ ├── sensitiveHeaders.json
│ │ ├── sep.json
│ │ ├── separator.json
│ │ ├── Sequential_Data_Analysis_with_MCP_Integration.json
│ │ ├── SHAPE_STATE.json
│ │ ├── shape.json
│ │ ├── SHARED.json
│ │ ├── short.json
│ │ ├── side.json
│ │ ├── SNAPSHOT_VERSION.json
│ │ ├── SOLID_Code_Analysis_Visualizer.json
│ │ ├── SOURCE_MAPPING_PREFIX.json
│ │ ├── source.json
│ │ ├── sourceMapContent.json
│ │ ├── SPACE_SYMBOL.json
│ │ ├── SPACE.json
│ │ ├── sqlKeywords.json
│ │ ├── sranges.json
│ │ ├── sseUrl.json
│ │ ├── st.json
│ │ ├── ST.json
│ │ ├── stack.json
│ │ ├── START_HIDING.json
│ │ ├── START_OF_LINE.json
│ │ ├── startNoTraversal.json
│ │ ├── STATES.json
│ │ ├── stats.json
│ │ ├── statSync.json
│ │ ├── status.json
│ │ ├── storageStatus.json
│ │ ├── storageType.json
│ │ ├── str.json
│ │ ├── string.json
│ │ ├── stringifiedObject.json
│ │ ├── stringPath.json
│ │ ├── stringResult.json
│ │ ├── stringTag.json
│ │ ├── strValue.json
│ │ ├── style.json
│ │ ├── SUB_NAME.json
│ │ ├── subkey.json
│ │ ├── SUBPROTOCOL.json
│ │ ├── SUITE_NAME.json
│ │ ├── symbolPattern.json
│ │ ├── symbolTag.json
│ │ ├── system
│ │ │ ├── Aa.json
│ │ │ ├── b.json
│ │ │ ├── Development_System_Prompt.json
│ │ │ ├── index.json
│ │ │ ├── marker.json
│ │ │ ├── PATH_NODE_MODULES.json
│ │ │ ├── ProjectsUpdatedInBackgroundEvent.json
│ │ │ ├── RXstyle.json
│ │ │ ├── status.json
│ │ │ └── versionMajorMinor.json
│ │ ├── t.json
│ │ ├── T.json
│ │ ├── Task_List_Helper.json
│ │ ├── Template-Based_MCP_Integration.json
│ │ ├── template.py
│ │ ├── templateDir.json
│ │ ├── tempName.json
│ │ ├── Test_Prompt.json
│ │ ├── text.json
│ │ ├── time.json
│ │ ├── titleSeparator.json
│ │ ├── tmpl.json
│ │ ├── tn.json
│ │ ├── TOPBAR_FACTORY.json
│ │ ├── toValue.json
│ │ ├── transform.json
│ │ ├── trustProxyDefaultSymbol.json
│ │ ├── txt.json
│ │ ├── type.json
│ │ ├── typeArgumentsKey.json
│ │ ├── typeKey.json
│ │ ├── typeMessage.json
│ │ ├── typesRegistryPackageName.json
│ │ ├── u.json
│ │ ├── UNDEFINED.json
│ │ ├── unit.json
│ │ ├── UNMATCHED_SURROGATE_PAIR_REPLACE.json
│ │ ├── ur.json
│ │ ├── usage.json
│ │ ├── USAGE.json
│ │ ├── user
│ │ │ ├── backupId.json
│ │ │ ├── DESCENDING.json
│ │ │ ├── encoded.json
│ │ │ ├── index.json
│ │ │ ├── J.json
│ │ │ ├── MOBILE_QUERY.json
│ │ │ ├── promptId.json
│ │ │ ├── schemaQuery.json
│ │ │ ├── TOPBAR_FACTORY.json
│ │ │ ├── txt.json
│ │ │ └── usage.json
│ │ ├── value.json
│ │ ├── VERSION.json
│ │ ├── version.py
│ │ ├── versionMajorMinor.json
│ │ ├── Vr.json
│ │ ├── watchmanURL.json
│ │ ├── webkit.json
│ │ ├── WIN_SLASH.json
│ │ ├── xhtml.json
│ │ ├── XP_DEFAULT_PATHEXT.json
│ │ ├── y.json
│ │ └── You_are_limited_to_respond_Yes_or_No_onl___.json
│ ├── resources
│ │ ├── __init__.py
│ │ ├── code_examples
│ │ │ └── index.json
│ │ ├── config
│ │ │ └── index.json
│ │ ├── documentation
│ │ │ └── index.json
│ │ ├── images
│ │ │ └── index.json
│ │ ├── index.json
│ │ └── other
│ │ └── index.json
│ ├── server.py
│ ├── templates
│ │ ├── __init__.py
│ │ ├── AbstractFactory.json
│ │ ├── Adapter.json
│ │ ├── base.py
│ │ ├── Builder.json
│ │ ├── Chain.json
│ │ ├── Command.json
│ │ ├── component
│ │ │ ├── AbstractFactory.json
│ │ │ ├── Adapter.json
│ │ │ ├── Builder.json
│ │ │ ├── Chain.json
│ │ │ ├── Command.json
│ │ │ ├── Decorator.json
│ │ │ ├── Facade.json
│ │ │ ├── Factory.json
│ │ │ ├── Iterator.json
│ │ │ ├── Mediator.json
│ │ │ ├── Memento.json
│ │ │ ├── Observer.json
│ │ │ ├── Prototype.json
│ │ │ ├── Proxy.json
│ │ │ ├── Singleton.json
│ │ │ ├── State.json
│ │ │ ├── Strategy.json
│ │ │ ├── TemplateMethod.json
│ │ │ └── Visitor.json
│ │ ├── component.py
│ │ ├── Decorator.json
│ │ ├── Facade.json
│ │ ├── Factory.json
│ │ ├── index.json
│ │ ├── Iterator.json
│ │ ├── manager.py
│ │ ├── Mediator.json
│ │ ├── Memento.json
│ │ ├── Observer.json
│ │ ├── project.py
│ │ ├── Prototype.json
│ │ ├── Proxy.json
│ │ ├── renderer.py
│ │ ├── Singleton.json
│ │ ├── State.json
│ │ ├── Strategy.json
│ │ ├── template_manager.py
│ │ ├── TemplateMethod.json
│ │ ├── types.py
│ │ └── Visitor.json
│ └── utils
│ └── __init__.py
├── SUMMARY.md
├── TASK_COMPLETION_SUMMARY.md
├── templates
│ └── openssl
│ ├── files
│ │ ├── CMakeLists.txt.jinja2
│ │ ├── conanfile.py.jinja2
│ │ ├── main.cpp.jinja2
│ │ └── README.md.jinja2
│ ├── openssl-consumer.json
│ └── template.json
├── test_openssl_integration.sh
├── test_package
│ └── conanfile.py
└── tests
├── __init__.py
├── conftest.py
├── integration
│ ├── test_core_integration.py
│ ├── test_mermaid_integration.py
│ ├── test_prompt_manager_integration.py
│ └── test_server_integration.py
├── test_aws_mcp.py
├── test_base_classes.py
├── test_config.py
├── test_exceptions.py
├── test_mermaid.py
├── test_prompts.py
└── test_templates.py
```
# Files
--------------------------------------------------------------------------------
/src/mcp_project_orchestrator/prompts/other/mcp-code-generator.json:
--------------------------------------------------------------------------------
```json
1 | {
2 | "name": "mcp-code-generator",
3 | "description": "An advanced code generation prompt that leverages multiple MCP resources to create contextually-aware, high-quality code with minimal hallucination.",
4 | "type": "prompt",
5 | "category": "other",
6 | "content": " \\\"mcp-code-generator\\\",\\n \\\"version\\\": \\\"1.0.0\\\",\\n \\\"description\\\": \\\"An advanced code generation prompt that leverages multiple MCP resources to create contextually-aware, high-quality code with minimal hallucination.\\\",\\n \\\"prompt_text\\\": \\\"# MCP-Powered Code Generator\\\\n\\\\nYou are an expert coding assistant with access to multiple MCP resources. Your task is to generate high-quality, contextually-appropriate code based on the user's requirements while leveraging the following MCP resources to reduce hallucination and improve accuracy:\\\\n\\\\n- **Filesystem** (@file:// URIs): Access to project files and directory structure\\\\n- **GitHub** (@github:// URIs): Access to repositories, code examples, and documentation\\\\n- **Sequential Thinking** (@thinking:// URIs): Step-by-step reasoning for complex algorithms\\\\n- **Memory** (@memory:// URIs): Previous code snippets and user preferences\\\\n\\\\n## Code Generation Process\\\\n\\\\n1. **Analyze Requirements**\\\\n - Break down the user's request into specific coding tasks\\\\n - Identify key functionalities, interfaces, and constraints\\\\n - Determine appropriate language, framework, or library to use\\\\n\\\\n2. **Resource Collection**\\\\n - Check current project structure (if available): `@file:///project`\\\\n - Find related examples on GitHub: `@github://relevant-repos`\\\\n - Retrieve user preferences if available: `@memory://coding-preferences`\\\\n\\\\n3. **Design Phase**\\\\n - Create a high-level design outline\\\\n - Determine classes, functions, or components needed\\\\n - Establish interfaces and relationships\\\\n\\\\n4. **Implementation Phase**\\\\n - Write clean, well-documented code that follows best practices\\\\n - Include proper error handling and edge cases\\\\n - Ensure compatibility with existing codebase (if applicable)\\\\n - Add appropriate comments and documentation\\\\n\\\\n5. **Testing Considerations**\\\\n - Include unit test examples or strategies when appropriate\\\\n - Consider edge cases and potential failures\\\\n - Provide sample usage examples\\\\n\\\\n## Code Quality Guidelines\\\\n\\\\n- **Readability**: Write clear, self-explanatory code with consistent formatting\\\\n- **Maintainability**: Use descriptive variable names and follow language conventions\\\\n- **Performance**: Consider algorithmic efficiency and resource usage\\\\n- **Security**: Follow security best practices and avoid common vulnerabilities\\\\n- **Reusability**: Create modular, reusable components when appropriate\\\\n\\\\n## Output Format\\\\n\\\\nYour response should include:\\\\n\\\\n1. A brief explanation of the approach taken\\\\n2. The generated code in properly formatted code blocks\\\\n3. Installation instructions or dependencies (if applicable)\\\\n4. Usage examples\\\\n5. 
References to any MCP resources used\\\\n\\\\n## Contextual Adaptation\\\\n\\\\nTailor your code generation based on:\\\\n\\\\n- The programming language specified or inferred from context\\\\n- The user's preferred coding style (if known)\\\\n- The complexity level appropriate for the task\\\\n- The target environment (web, mobile, desktop, server, etc.)\\\\n\\\\n{{code_requirements}}\\\\n\\\\n{{additional_context}}\\\",\\n \\\"variables\\\": [\\n \\\"code_requirements\\\",\\n \\\"additional_context\\\"\\n ],\\n \\\"tags\\\": [\\n \\\"code-generation\\\",\\n \\\"multi-resource\\\",\\n \\\"mcp-integration\\\",\\n \\\"development\\\",\\n \\\"programming\\\"\\n ],\\n \\\"examples\\\": [\\n {\\n \\\"name\\\": \\\"React Component Generator\\\",\\n \\\"variables\\\": {\\n \\\"code_requirements\\\": \\\"Create a React component for a customizable data table with sorting, filtering, and pagination capabilities. The component should be reusable across our application.\\\",\\n \\\"additional_context\\\": \\\"Our project uses TypeScript, React 18, and follows a functional component approach with hooks. We prefer using styled-components for styling. The existing component structure can be found at @file:///project/src/components/.\\\"\\n },\\n \\\"expected_response\\\": \\\"# Customizable Data Table Component\\\\n\\\\nI'll create a reusable React component for a data table with sorting, filtering, and pagination capabilities, following your project's TypeScript and React 18 standards with styled-components for styling.\\\\n\\\\n## Approach\\\\n\\\\nAfter examining your project structure at `@file:///project/src/components/`, I notice you're using a functional component pattern with hooks. I'll create:\\\\n\\\\n1. A main `DataTable` component that handles the core functionality\\\\n2. Several sub-components for table elements (header, row, cell, pagination)\\\\n3. Custom hooks for sorting, filtering, and pagination logic\\\\n4. 
TypeScript interfaces for strong typing\\\\n\\\\n## Component Structure\\\\n\\\\n```typescript\\\\n// DataTable.tsx\\\\nimport React, { useState, useMemo, useCallback } from 'react';\\\\nimport styled from 'styled-components';\\\\nimport { TableHeader } from './TableHeader';\\\\nimport { TableRow } from './TableRow';\\\\nimport { Pagination } from './Pagination';\\\\nimport { useSort } from '../hooks/useSort';\\\\nimport { useFilter } from '../hooks/useFilter';\\\\nimport { usePagination } from '../hooks/usePagination';\\\\n\\\\n// TypeScript interfaces\\\\nexport interface Column<T> {\\\\n key: string;\\\\n header: string;\\\\n renderCell?: (item: T) => React.ReactNode;\\\\n sortable?: boolean;\\\\n filterable?: boolean;\\\\n}\\\\n\\\\nexport interface DataTableProps<T> {\\\\n data: T[];\\\\n columns: Column<T>[];\\\\n initialSortKey?: string;\\\\n initialSortDirection?: 'asc' | 'desc';\\\\n rowsPerPageOptions?: number[];\\\\n initialRowsPerPage?: number;\\\\n onRowClick?: (item: T) => void;\\\\n className?: string;\\\\n}\\\\n\\\\n// Styled components\\\\nconst TableContainer = styled.div`\\\\n width: 100%;\\\\n overflow-x: auto;\\\\n border-radius: 4px;\\\\n box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);\\\\n`;\\\\n\\\\nconst Table = styled.table`\\\\n width: 100%;\\\\n border-collapse: collapse;\\\\n font-size: 14px;\\\\n`;\\\\n\\\\nconst TableBody = styled.tbody`\\\\n & tr:nth-child(odd) {\\\\n background-color: #f9f9f9;\\\\n }\\\\n \\\\n & tr:hover {\\\\n background-color: #f1f1f1;\\\\n }\\\\n`;\\\\n\\\\nconst FilterContainer = styled.div`\\\\n margin-bottom: 16px;\\\\n display: flex;\\\\n justify-content: flex-end;\\\\n`;\\\\n\\\\nconst FilterInput = styled.input`\\\\n padding: 8px 12px;\\\\n border: 1px solid #ddd;\\\\n border-radius: 4px;\\\\n font-size: 14px;\\\\n width: 250px;\\\\n`;\\\\n\\\\n// Main component\\\\nexport function DataTable<T extends Record<string, any>>({ \\\\n data, \\\\n columns, \\\\n initialSortKey,\\\\n initialSortDirection = 'asc',\\\\n rowsPerPageOptions = [10, 25, 50],\\\\n initialRowsPerPage = 10,\\\\n onRowClick,\\\\n className\\\\n}: DataTableProps<T>) {\\\\n // Global filter state\\\\n const [filterValue, setFilterValue] = useState('');\\\\n \\\\n // Hook usage\\\\n const { sortedData, sortKey, sortDirection, handleSort } = useSort(data, initialSortKey, initialSortDirection);\\\\n const { filteredData } = useFilter(sortedData, filterValue, columns);\\\\n const { \\\\n currentPageData, \\\\n currentPage, \\\\n totalPages, \\\\n rowsPerPage,\\\\n setCurrentPage, \\\\n setRowsPerPage \\\\n } = usePagination(filteredData, initialRowsPerPage);\\\\n\\\\n // Filter change handler\\\\n const handleFilterChange = useCallback(\\\\n (e: React.ChangeEvent<HTMLInputElement>) => {\\\\n setFilterValue(e.target.value);\\\\n setCurrentPage(1); // Reset to first page when filtering\\\\n }, \\\\n [setCurrentPage]\\\\n );\\\\n\\\\n // Memoize filterable columns\\\\n const filterableColumns = useMemo(\\\\n () => columns.filter(column => column.filterable),\\\\n [columns]\\\\n );\\\\n\\\\n return (\\\\n <div className={className}>\\\\n {filterableColumns.length > 0 && (\\\\n <FilterContainer>\\\\n <FilterInput \\\\\\n type=\\\\\\\"text\\\\\\\" \\\\\\n placeholder=\\\\\\\"Filter table...\\\\\\\"\\\\\\n value={filterValue}\\\\\\n onChange={handleFilterChange}\\\\\\n />\\\\\\n </FilterContainer>\\\\\\n )}\\\\\\n \\\\\\n <TableContainer>\\\\\\n <Table>\\\\\\n <TableHeader \\\\\\n columns={columns} \\\\\\n sortKey={sortKey} \\\\\\n sortDirection={sortDirection} \\\\\\n 
onSort={handleSort} \\\\\\n />\\\\\\n <TableBody>\\\\\\n {currentPageData.map((item, index) => (\\\\\\n <TableRow \\\\\\n key={index} \\\\\\n item={item} \\\\\\n columns={columns} \\\\\\n onClick={onRowClick ? () => onRowClick(item) : undefined} \\\\\\n />\\\\\\n ))}\\\\\\n {currentPageData.length === 0 && (\\\\\\n <tr>\\\\\\n <td colSpan={columns.length} style={{ textAlign: 'center', padding: '16px' }}>\\\\\\n No records found\\\\\\n </td>\\\\\\n </tr>\\\\\\n )}\\\\\\n </TableBody>\\\\\\n </Table>\\\\\\n </TableContainer>\\\\\\n \\\\\\n <Pagination \\\\\\n currentPage={currentPage}\\\\\\n totalPages={totalPages}\\\\\\n rowsPerPage={rowsPerPage}\\\\\\n rowsPerPageOptions={rowsPerPageOptions}\\\\\\n onPageChange={setCurrentPage}\\\\\\n onRowsPerPageChange={setRowsPerPage}\\\\\\n totalItems={filteredData.length}\\\\\\n />\\\\\\n </div>\\\\\\n );\\\\\\n}\\\\n```\\\\n\\\\n## Custom Hooks Implementation\\\\n\\\\n```typescript\\\\n// hooks/useSort.ts\\\\nimport { useState, useMemo } from 'react';\\\\n\\\\nexport function useSort<T extends Record<string, any>>(\\\\n data: T[], \\\\\\n initialSortKey?: string, \\\\\\n initialSortDirection: 'asc' | 'desc' = 'asc'\\\\n) {\\\\n const [sortKey, setSortKey] = useState<string | undefined>(initialSortKey);\\\\n const [sortDirection, setSortDirection] = useState<'asc' | 'desc'>(initialSortDirection);\\\\n\\\\n const handleSort = (key: string) => {\\\\n if (sortKey === key) {\\\\n // Toggle direction if already sorting by this key\\\\n setSortDirection(prev => prev === 'asc' ? 'desc' : 'asc');\\\\n } else {\\\\n // New sort key, set to ascending by default\\\\n setSortKey(key);\\\\n setSortDirection('asc');\\\\n }\\\\n };\\\\n\\\\n const sortedData = useMemo(() => {\\\\n if (!sortKey) return [...data];\\\\n\\\\n return [...data].sort((a, b) => {\\\\n const aValue = a[sortKey];\\\\n const bValue = b[sortKey];\\\\n\\\\n // Handle different data types\\\\n if (typeof aValue === 'string' && typeof bValue === 'string') {\\\\n return sortDirection === 'asc' \\\\\\n ? aValue.localeCompare(bValue) \\\\\\n : bValue.localeCompare(aValue);\\\\n }\\\\n\\\\n if (aValue === bValue) return 0;\\\\n \\\\n if (sortDirection === 'asc') {\\\\n return aValue > bValue ? 1 : -1;\\\\n } else {\\\\n return aValue < bValue ? 
1 : -1;\\\\n }\\\\n });\\\\n }, [data, sortKey, sortDirection]);\\\\n\\\\n return { sortedData, sortKey, sortDirection, handleSort };\\\\n}\\\\n```\\\\n\\\\n```typescript\\\\n// hooks/useFilter.ts\\\\nimport { useMemo } from 'react';\\\\nimport { Column } from '../components/DataTable';\\\\n\\\\nexport function useFilter<T extends Record<string, any>>(\\\\n data: T[], \\\\\\n filterValue: string,\\\\n columns: Column<T>[]\\\\n) {\\\\n const filteredData = useMemo(() => {\\\\n if (!filterValue) return data;\\\\n \\\\n const lowercasedFilter = filterValue.toLowerCase();\\\\n \\\\n return data.filter(item => {\\\\n return columns.some(column => {\\\\n const value = item[column.key];\\\\n if (value == null) return false;\\\\n \\\\n return String(value).toLowerCase().includes(lowercasedFilter);\\\\n });\\\\n });\\\\n }, [data, filterValue, columns]);\\\\n\\\\n return { filteredData };\\\\n}\\\\n```\\\\n\\\\n```typescript\\\\n// hooks/usePagination.ts\\\\nimport { useState, useMemo, useEffect } from 'react';\\\\n\\\\nexport function usePagination<T>(\\\\n data: T[], \\\\\\n initialRowsPerPage: number\\\\n) {\\\\n const [currentPage, setCurrentPage] = useState(1);\\\\n const [rowsPerPage, setRowsPerPage] = useState(initialRowsPerPage);\\\\n\\\\n // Reset to first page when data changes\\\\n useEffect(() => {\\\\n setCurrentPage(1);\\\\n }, [data.length]);\\\\n\\\\n const totalPages = useMemo(() => \\\\\\n Math.max(1, Math.ceil(data.length / rowsPerPage)),\\\\n [data.length, rowsPerPage]\\\\n );\\\\n\\\\n // Keep current page in bounds\\\\n useEffect(() => {\\\\n if (currentPage > totalPages) {\\\\n setCurrentPage(totalPages);\\\\n }\\\\n }, [currentPage, totalPages]);\\\\n\\\\n const currentPageData = useMemo(() => {\\\\n const startIndex = (currentPage - 1) * rowsPerPage;\\\\n return data.slice(startIndex, startIndex + rowsPerPage);\\\\n }, [data, currentPage, rowsPerPage]);\\\\n\\\\n return { \\\\\\n currentPageData, \\\\\\n currentPage, \\\\\\n totalPages, \\\\\\n rowsPerPage,\\\\n setCurrentPage, \\\\\\n setRowsPerPage \\\\n };\\\\n}\\\\n```\\\\n\\\\n## Supporting Components\\\\n\\\\n```typescript\\\\n// TableHeader.tsx\\\\nimport React from 'react';\\\\nimport styled from 'styled-components';\\\\nimport { Column } from './DataTable';\\\\n\\\\nconst HeaderRow = styled.tr`\\\\n background-color: #f5f5f5;\\\\n border-bottom: 2px solid #ddd;\\\\n`;\\\\n\\\\nconst HeaderCell = styled.th<{ sortable: boolean }>`\\\\n padding: 12px 16px;\\\\n text-align: left;\\\\n font-weight: 600;\\\\n cursor: ${props => props.sortable ? 'pointer' : 'default'};\\\\n user-select: none;\\\\n position: relative;\\\\n`;\\\\n\\\\nconst SortIcon = styled.span<{ direction: 'asc' | 'desc' }>`\\\\n margin-left: 8px;\\\\n &:after {\\\\n content: '${props => props.direction === 'asc' ? '\u2191' : '\u2193'}';\\\\n font-size: 14px;\\\\n }\\\\n`;\\\\n\\\\ninterface TableHeaderProps<T> {\\\\n columns: Column<T>[];\\\\n sortKey?: string;\\\\n sortDirection: 'asc' | 'desc';\\\\n onSort: (key: string) => void;\\\\n}\\\\n\\\\nexport function TableHeader<T>({ \\\\\\n columns, \\\\\\n sortKey, \\\\\\n sortDirection, \\\\\\n onSort \\\\n}: TableHeaderProps<T>) {\\\\n return (\\\\n <thead>\\\\n <HeaderRow>\\\\n {columns.map(column => (\\\\n <HeaderCell \\\\\\n key={column.key} \\\\\\n sortable={!!column.sortable}\\\\\\n onClick={column.sortable ? 
() => onSort(column.key) : undefined}\\\\\\n >\\\\\\n {column.header}\\\\\\n {sortKey === column.key && (\\\\n <SortIcon direction={sortDirection} />\\\\n )}\\\\\\n </HeaderCell>\\\\n ))}\\\\\\n </HeaderRow>\\\\n </thead>\\\\n );\\\\n}\\\\n```\\\\n\\\\n```typescript\\\\n// TableRow.tsx\\\\nimport React from 'react';\\\\nimport styled from 'styled-components';\\\\nimport { Column } from './DataTable';\\\\n\\\\nconst Row = styled.tr<{ clickable: boolean }>`\\\\n cursor: ${props => props.clickable ? 'pointer' : 'default'};\\\\n`;\\\\n\\\\nconst Cell = styled.td`\\\\n padding: 12px 16px;\\\\n border-bottom: 1px solid #eee;\\\\n vertical-align: top;\\\\n`;\\\\n\\\\ninterface TableRowProps<T> {\\\\n item: T;\\\\n columns: Column<T>[];\\\\n onClick?: () => void;\\\\n}\\\\n\\\\nexport function TableRow<T>({ item, columns, onClick }: TableRowProps<T>) {\\\\n return (\\\\n <Row clickable={!!onClick} onClick={onClick}>\\\\\\n {columns.map(column => (\\\\n <Cell key={column.key}>\\\\\\n {column.renderCell ? column.renderCell(item) : String(item[column.key] ?? '')}\\\\\\n </Cell>\\\\n ))}\\\\\\n </Row>\\\\n );\\\\n}\\\\n```\\\\n\\\\n```typescript\\\\n// Pagination.tsx\\\\nimport React from 'react';\\\\nimport styled from 'styled-components';\\\\n\\\\nconst PaginationContainer = styled.div`\\\\n display: flex;\\\\n justify-content: space-between;\\\\n align-items: center;\\\\n margin-top: 16px;\\\\n font-size: 14px;\\\\n`;\\\\n\\\\nconst PageInfo = styled.div`\\\\n color: #666;\\\\n`;\\\\n\\\\nconst PageControls = styled.div`\\\\n display: flex;\\\\n align-items: center;\\\\n`;\\\\n\\\\nconst PageButton = styled.button<{ active?: boolean }>`\\\\n padding: 6px 12px;\\\\n margin: 0 4px;\\\\n background-color: ${props => props.active ? '#007bff' : 'white'};\\\\n color: ${props => props.active ? 'white' : '#333'};\\\\n border: 1px solid #ddd;\\\\n border-radius: 4px;\\\\n cursor: pointer;\\\\n \\\\n &:disabled {\\\\n opacity: 0.5;\\\\n cursor: not-allowed;\\\\n }\\\\n \\\\n &:hover:not(:disabled) {\\\\n background-color: ${props => props.active ? 
'#007bff' : '#f1f1f1'};\\\\n }\\\\n`;\\\\n\\\\nconst RowsPerPageSelect = styled.select`\\\\n padding: 6px 8px;\\\\n border: 1px solid #ddd;\\\\n border-radius: 4px;\\\\n margin-left: 8px;\\\\n`;\\\\n\\\\ninterface PaginationProps {\\\\n currentPage: number;\\\\n totalPages: number;\\\\n rowsPerPage: number;\\\\n rowsPerPageOptions: number[];\\\\n totalItems: number;\\\\n onPageChange: (page: number) => void;\\\\n onRowsPerPageChange: (rowsPerPage: number) => void;\\\\n}\\\\n\\\\nexport function Pagination({ \\\\\\n currentPage, \\\\\\n totalPages, \\\\\\n rowsPerPage, \\\\\\n rowsPerPageOptions, \\\\\\n totalItems,\\\\n onPageChange, \\\\\\n onRowsPerPageChange \\\\n}: PaginationProps) {\\\\n // Calculate start and end item numbers\\\\n const startItem = Math.min(totalItems, (currentPage - 1) * rowsPerPage + 1);\\\\n const endItem = Math.min(totalItems, startItem + rowsPerPage - 1);\\\\n \\\\n // Generate page buttons\\\\n const getPageButtons = () => {\\\\n const buttons = [];\\\\n \\\\n // Always show first page\\\\n buttons.push(\\\\n <PageButton \\\\\\n key=\\\\\\\"first\\\\\\\" \\\\\\n active={currentPage === 1}\\\\\\n onClick={() => onPageChange(1)}\\\\\\n disabled={currentPage === 1}\\\\\\n >\\\\\\n 1\\\\\\n </PageButton>\\\\n );\\\\n \\\\n // Show ellipsis if needed\\\\n if (currentPage > 3) {\\\\n buttons.push(<span key=\\\\\\\"ellipsis1\\\\\\\">...</span>);\\\\n }\\\\n \\\\n // Show pages around current page\\\\n for (let i = Math.max(2, currentPage - 1); i <= Math.min(totalPages - 1, currentPage + 1); i++) {\\\\n buttons.push(\\\\n <PageButton \\\\\\n key={i} \\\\\\n active={currentPage === i}\\\\\\n onClick={() => onPageChange(i)}\\\\\\n >\\\\\\n {i}\\\\\\n </PageButton>\\\\n );\\\\n }\\\\n \\\\n // Show ellipsis if needed\\\\n if (currentPage < totalPages - 2) {\\\\n buttons.push(<span key=\\\\\\\"ellipsis2\\\\\\\">...</span>);\\\\n }\\\\n \\\\n // Always show last page if there's more than one page\\\\n if (totalPages > 1) {\\\\n buttons.push(\\\\n <PageButton \\\\\\n key=\\\\\\\"last\\\\\\\" \\\\\\n active={currentPage === totalPages}\\\\\\n onClick={() => onPageChange(totalPages)}\\\\\\n disabled={currentPage === totalPages}\\\\\\n >\\\\\\n {totalPages}\\\\\\n </PageButton>\\\\n );\\\\n }\\\\n \\\\n return buttons;\\\\n };\\\\n\\\\n return (\\\\n <PaginationContainer>\\\\n <PageInfo>\\\\n Showing {totalItems ? 
`${startItem}-${endItem} of ${totalItems}` : '0'} results\\\\n </PageInfo>\\\\n \\\\n <PageControls>\\\\n <PageButton \\\\\\n onClick={() => onPageChange(currentPage - 1)}\\\\\\n disabled={currentPage === 1}\\\\\\n >\\\\\\n Previous\\\\\\n </PageButton>\\\\n \\\\\\n {getPageButtons()}\\\\\\n \\\\\\n <PageButton \\\\\\n onClick={() => onPageChange(currentPage + 1)}\\\\\\n disabled={currentPage === totalPages}\\\\\\n >\\\\\\n Next\\\\\\n </PageButton>\\\\n \\\\\\n <span style={{ marginLeft: '16px' }}>Rows per page:</span>\\\\\\n <RowsPerPageSelect \\\\\\n value={rowsPerPage}\\\\\\n onChange={(e) => onRowsPerPageChange(Number(e.target.value))}\\\\\\n >\\\\\\n {rowsPerPageOptions.map(option => (\\\\n <option key={option} value={option}>{option}</option>\\\\n ))}\\\\\\n </RowsPerPageSelect>\\\\\\n </PageControls>\\\\\\n </PaginationContainer>\\\\n );\\\\n}\\\\n```\\\\n\\\\n## Usage Example\\\\n\\\\n```tsx\\\\nimport React from 'react';\\\\nimport { DataTable } from './components/DataTable';\\\\n\\\\ninterface User {\\\\n id: number;\\\\n name: string;\\\\n email: string;\\\\n role: string;\\\\n status: 'active' | 'inactive';\\\\n lastLogin: string;\\\\n}\\\\n\\\\nconst users: User[] = [\\\\n { id: 1, name: 'John Doe', email: '[email protected]', role: 'Admin', status: 'active', lastLogin: '2023-05-15T10:30:00Z' },\\\\n { id: 2, name: 'Jane Smith', email: '[email protected]', role: 'Editor', status: 'active', lastLogin: '2023-05-14T09:15:00Z' },\\\\n // Add more users as needed\\\\n];\\\\n\\\\nfunction UserTable() {\\\\n const columns = [\\\\n { key: 'id', header: 'ID', sortable: true },\\\\n { key: 'name', header: 'Name', sortable: true, filterable: true },\\\\n { key: 'email', header: 'Email', filterable: true },\\\\n { key: 'role', header: 'Role', sortable: true, filterable: true },\\\\n { \\\\\\n key: 'status', \\\\\\n header: 'Status', \\\\\\n sortable: true,\\\\\\n renderCell: (user: User) => (\\\\n <span style={{ \\\\\\n color: user.status === 'active' ? 'green' : 'red',\\\\\\n fontWeight: 'bold'\\\\\\n }}>\\\\\\n {user.status}\\\\\\n </span>\\\\n )\\\\n },\\\\n { \\\\\\n key: 'lastLogin', \\\\\\n header: 'Last Login', \\\\\\n sortable: true,\\\\\\n renderCell: (user: User) => new Date(user.lastLogin).toLocaleString()\\\\n },\\\\n ];\\\\n\\\\n const handleRowClick = (user: User) => {\\\\n console.log('User clicked:', user);\\\\n // Navigate to user detail page or show modal\\\\n };\\\\n\\\\n return (\\\\n <div>\\\\n <h1>User Management</h1>\\\\n <DataTable \\\\\\n data={users}\\\\\\n columns={columns}\\\\\\n initialSortKey=\\\\\\\"id\\\\\\\"\\\\\\n initialRowsPerPage={10}\\\\\\n rowsPerPageOptions={[5, 10, 25, 50]}\\\\\\n onRowClick={handleRowClick}\\\\\\n />\\\\n </div>\\\\n );\\\\n}\\\\n\\\\nexport default UserTable;\\\\n```\\\\n\\\\n## Installation Instructions\\\\n\\\\nTo use this component, you'll need to have these dependencies installed:\\\\n\\\\n```bash\\\\nnpm install styled-components @types/styled-components\\\\n```\\\\n\\\\nor\\\\n\\\\n```bash\\\\nyarn add styled-components @types/styled-components\\\\n```\\\\n\\\\n## MCP Resources Used\\\\n\\\\n- `@file:///project/src/components/` - Examined your existing component structure to match the styling approach and component patterns\\\\n- `@memory://coding-preferences` - Retrieved your preference for functional components, React 18, and TypeScript\\\\n\\\\nThe component follows modern React best practices with proper TypeScript typing, modular structure, and optimized performance through memoization. 
The styled-components implementation ensures consistent styling that can be customized to match your application's design system.\\\"\\n }\\n ],\\n \\\"metadata\\\": {\\n \\\"created_at\\\": \\\"2023-05-15T12:00:00Z\\\",\\n \\\"updated_at\\\": \\\"2023-05-15T12:00:00Z\\\",\\n \\\"author\\\": \\\"MCP-Prompts Team\\\",\\n \\\"category\\\": \\\"development\\\",\\n \\\"mcp_requirements\\\": [\\n \\\"MCP Filesystem Server\\\",\\n \\\"MCP GitHub Server\\\",\\n \\\"MCP Sequential Thinking Server\\\",\\n \\\"MCP Memory Server",
7 | "variables": {},
8 | "metadata": {
9 | "source": "/home/sparrow/projects/mcp-prompts/prompts/mcp-code-generator.json",
10 | "imported": true
11 | }
12 | }
```
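The template above keeps its prompt body in `content` and records placeholders such as `{{project_name}}` either inline or in a `variables` field. A minimal sketch of how one of these JSON templates could be loaded and rendered with simple `{{name}}` substitution; the helper below is illustrative only, not the orchestrator's actual API, and the file path is assumed from the directory listing:

```typescript
import { readFileSync } from 'fs';

interface PromptTemplate {
  name: string;
  description?: string;
  content: string;
  variables?: string[] | Record<string, unknown>;
  metadata?: Record<string, unknown>;
}

// Replace {{key}} slots in the template body; unknown keys are left untouched
// so a partially filled template can still be inspected.
function applyTemplate(template: PromptTemplate, values: Record<string, string>): string {
  return template.content.replace(/\{\{(\w+)\}\}/g, (match, key) =>
    key in values ? values[key] : match,
  );
}

// Hypothetical usage against the file shown above.
const raw = readFileSync('data/prompts/templates/mcp-code-generator.json', 'utf8');
const template = JSON.parse(raw) as PromptTemplate;
const rendered = applyTemplate(template, { project_name: 'demo-project' });
console.log(rendered.slice(0, 200));
```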
--------------------------------------------------------------------------------
/src/mcp_project_orchestrator/prompts/devops/Docker_Compose_Prompt_Combiner.json:
--------------------------------------------------------------------------------
```json
1 | {
2 | "name": "Docker Compose Prompt Combiner",
3 | "description": "A specialized prompt combiner for creating Docker Compose configurations that integrates service definitions, volumes, networks, and deployment patterns",
4 | "type": "prompt",
5 | "category": "devops",
6 | "content": "/**\n * DockerComposePromptCombiner for {{project_name}}\n * \n * A specialized implementation of the PromptCombiner interface\n * focused on combining prompts for Docker Compose configuration and orchestration.\n */\n\nimport { PromptCombiner, CombinerContext, CombinedPromptResult, PromptSuggestion, CombinationValidationResult, WorkflowConfig, SavedWorkflow } from './prompt-combiner-interface';\nimport { PromptService } from '../services/prompt-service';\nimport { Prompt } from '../core/types';\n\n/**\n * Docker Compose specific context\n */\nexport interface DockerComposeContext extends CombinerContext {\n /** Project environment (development, staging, production) */\n environment: 'development' | 'staging' | 'production' | string;\n \n /** Services to include in the configuration */\n services: {\n name: string;\n type: string;\n image?: string;\n ports?: string[];\n volumes?: string[];\n environment?: Record<string, string>;\n dependencies?: string[];\n }[];\n \n /** Networks to define */\n networks?: {\n name: string;\n external?: boolean;\n driver?: string;\n }[];\n \n /** Volumes to define */\n volumes?: {\n name: string;\n driver?: string;\n external?: boolean;\n }[];\n \n /** Docker Compose version */\n composeVersion?: string;\n \n /** Orchestration platform */\n platform?: 'docker' | 'kubernetes' | 'swarm';\n \n /** Resource constraints */\n resources?: {\n memoryLimits?: boolean;\n cpuLimits?: boolean;\n };\n \n /** Additional Docker-specific context */\n {{additional_docker_context}}\n}\n\n/**\n * Specialized result for Docker Compose combinations\n */\nexport interface DockerComposeResult extends CombinedPromptResult {\n /** Generated Docker Compose configuration */\n composeConfiguration?: string;\n \n /** Individual service configurations */\n serviceConfigurations?: Record<string, string>;\n \n /** Network configurations */\n networkConfigurations?: string;\n \n /** Volume configurations */\n volumeConfigurations?: string;\n \n /** Deployment commands */\n deploymentCommands?: string;\n \n /** Generated Dockerfiles */\n dockerfiles?: Record<string, string>;\n \n /** Additional Docker-specific results */\n {{additional_docker_results}}\n}\n\n/**\n * Implementation of DockerComposePromptCombiner\n */\nexport class DockerComposePromptCombiner implements PromptCombiner {\n constructor(private promptService: PromptService) {}\n \n /**\n * Combines Docker Compose prompts\n * @param promptIds Array of prompt IDs to combine\n * @param context Optional Docker Compose context\n * @returns Combined Docker Compose result\n */\n async combinePrompts(promptIds: string[], context?: DockerComposeContext): Promise<DockerComposeResult> {\n // Implementation would include:\n // 1. Validating the prompts are compatible for Docker Compose configurations\n // 2. Organizing prompts into service, network, and volume sections\n // 3. Resolving dependencies between services\n // 4. Applying variables with Docker Compose knowledge\n // 5. 
Generating a comprehensive deployment configuration\n \n // This is a template structure - in a real implementation, this would contain\n // the actual logic for combining Docker Compose prompts\n \n // For now, we'll outline the structure of how the implementation would work\n \n // Step 1: Load and categorize all prompts\n const prompts = await Promise.all(promptIds.map(id => this.promptService.getPrompt(id)));\n \n const servicePrompts = prompts.filter(p => p.tags?.includes('service'));\n const networkPrompts = prompts.filter(p => p.tags?.includes('network'));\n const volumePrompts = prompts.filter(p => p.tags?.includes('volume'));\n const deploymentPrompts = prompts.filter(p => p.tags?.includes('deployment'));\n \n // Step 2: Apply variables to each prompt category\n const variables = context?.variables || {};\n \n // Combine service configurations\n const services = await this.combineServices(servicePrompts, context);\n \n // Combine network configurations\n const networks = await this.combineNetworks(networkPrompts, context);\n \n // Combine volume configurations\n const volumes = await this.combineVolumes(volumePrompts, context);\n \n // Combine deployment commands\n const deployment = await this.combineDeployment(deploymentPrompts, context);\n \n // Step 3: Create combined Docker Compose content\n const composeVersion = context?.composeVersion || '3.8';\n const serviceName = variables.service_name || 'app';\n \n const composeConfiguration = `version: '${composeVersion}'\n\nservices:\n${services.content}\n\nnetworks:\n${networks.content}\n\nvolumes:\n${volumes.content}\n`;\n \n // Step 4: Return the comprehensive result\n return {\n content: `# Docker Compose Configuration for ${variables.project_name || 'Your Project'}\n\n## Docker Compose File\n\n\\`\\`\\`yaml\n${composeConfiguration}\n\\`\\`\\`\n\n## Deployment Commands\n\n${deployment.content}\n`,\n components: [\n ...services.components,\n ...networks.components,\n ...volumes.components,\n ...deployment.components\n ],\n appliedVariables: variables,\n composeConfiguration,\n serviceConfigurations: this.extractServiceConfigurations(services.content),\n networkConfigurations: networks.content,\n volumeConfigurations: volumes.content,\n deploymentCommands: deployment.content,\n // Add suggestion for what to do next\n nextSteps: [\n { action: 'validate_compose', description: 'Validate the Docker Compose configuration using docker-compose config' },\n { action: 'deploy_compose', description: 'Deploy services using docker-compose up -d' },\n { action: 'monitor_services', description: 'Monitor service logs using docker-compose logs -f' },\n { action: 'scale_services', description: 'Scale services as needed using docker-compose up -d --scale' }\n ]\n };\n }\n \n /**\n * Helper method to combine service prompts\n * @param prompts Service prompts\n * @param context Docker Compose context\n * @returns Combined result for services\n */\n private async combineServices(prompts: Prompt[], context?: DockerComposeContext): Promise<CombinedPromptResult> {\n // Implementation would combine service definitions\n // For our template, we'll create a simplified implementation\n let content = '';\n const components: {id: string; name: string; contribution: string}[] = [];\n const variables = context?.variables || {};\n \n // If no service prompts but we have services in context, create from context\n if (prompts.length === 0 && context?.services?.length) {\n content = this.generateServicesFromContext(context);\n components.push({\n id: 
'generated-services',\n name: 'Generated Services',\n contribution: content\n });\n } else {\n // Otherwise use the prompts\n for (const prompt of prompts) {\n const result = await this.promptService.applyTemplate(prompt.id, variables);\n content += result.content + '\\n';\n components.push({\n id: prompt.id,\n name: prompt.name,\n contribution: result.content\n });\n }\n }\n \n return {\n content: content.trim(),\n components,\n appliedVariables: variables\n };\n }\n \n /**\n * Generate service definitions from context\n * @param context Docker Compose context\n * @returns Generated service YAML\n */\n private generateServicesFromContext(context: DockerComposeContext): string {\n let servicesYaml = '';\n \n for (const service of context.services) {\n servicesYaml += ` ${service.name}:\\n`;\n if (service.image) {\n servicesYaml += ` image: ${service.image}\\n`;\n } else {\n servicesYaml += ` build: ./${service.name}\\n`;\n }\n \n if (service.ports && service.ports.length) {\n servicesYaml += ' ports:\\n';\n for (const port of service.ports) {\n servicesYaml += ` - \"${port}\"\\n`;\n }\n }\n \n if (service.environment && Object.keys(service.environment).length) {\n servicesYaml += ' environment:\\n';\n for (const [key, value] of Object.entries(service.environment)) {\n servicesYaml += ` - ${key}=${value}\\n`;\n }\n }\n \n if (service.volumes && service.volumes.length) {\n servicesYaml += ' volumes:\\n';\n for (const volume of service.volumes) {\n servicesYaml += ` - ${volume}\\n`;\n }\n }\n \n if (service.dependencies && service.dependencies.length) {\n servicesYaml += ' depends_on:\\n';\n for (const dep of service.dependencies) {\n servicesYaml += ` - ${dep}\\n`;\n }\n }\n \n // Add resource constraints if specified\n if (context.resources?.cpuLimits || context.resources?.memoryLimits) {\n servicesYaml += ' deploy:\\n resources:\\n limits:\\n';\n if (context.resources.cpuLimits) {\n servicesYaml += ' cpus: \"1.0\"\\n';\n }\n if (context.resources.memoryLimits) {\n servicesYaml += ' memory: 512M\\n';\n }\n }\n \n servicesYaml += '\\n';\n }\n \n return servicesYaml;\n }\n \n /**\n * Helper method to combine network prompts\n * @param prompts Network prompts\n * @param context Docker Compose context\n * @returns Combined result for networks\n */\n private async combineNetworks(prompts: Prompt[], context?: DockerComposeContext): Promise<CombinedPromptResult> {\n // Implementation would combine network definitions\n let content = '';\n const components: {id: string; name: string; contribution: string}[] = [];\n const variables = context?.variables || {};\n \n // If no network prompts but we have networks in context, create from context\n if (prompts.length === 0 && context?.networks?.length) {\n content = this.generateNetworksFromContext(context);\n components.push({\n id: 'generated-networks',\n name: 'Generated Networks',\n contribution: content\n });\n } else if (prompts.length === 0) {\n // Default network if nothing provided\n content = ` app-network:\\n driver: bridge\\n`;\n components.push({\n id: 'default-network',\n name: 'Default Network',\n contribution: content\n });\n } else {\n // Otherwise use the prompts\n for (const prompt of prompts) {\n const result = await this.promptService.applyTemplate(prompt.id, variables);\n content += result.content + '\\n';\n components.push({\n id: prompt.id,\n name: prompt.name,\n contribution: result.content\n });\n }\n }\n \n return {\n content: content.trim(),\n components,\n appliedVariables: variables\n };\n }\n \n /**\n * Generate network 
definitions from context\n * @param context Docker Compose context\n * @returns Generated network YAML\n */\n private generateNetworksFromContext(context: DockerComposeContext): string {\n let networksYaml = '';\n \n for (const network of context.networks || []) {\n networksYaml += ` ${network.name}:\\n`;\n if (network.driver) {\n networksYaml += ` driver: ${network.driver}\\n`;\n }\n if (network.external) {\n networksYaml += ` external: true\\n`;\n }\n networksYaml += '\\n';\n }\n \n return networksYaml;\n }\n \n /**\n * Helper method to combine volume prompts\n * @param prompts Volume prompts\n * @param context Docker Compose context\n * @returns Combined result for volumes\n */\n private async combineVolumes(prompts: Prompt[], context?: DockerComposeContext): Promise<CombinedPromptResult> {\n // Implementation would combine volume definitions\n let content = '';\n const components: {id: string; name: string; contribution: string}[] = [];\n const variables = context?.variables || {};\n \n // If no volume prompts but we have volumes in context, create from context\n if (prompts.length === 0 && context?.volumes?.length) {\n content = this.generateVolumesFromContext(context);\n components.push({\n id: 'generated-volumes',\n name: 'Generated Volumes',\n contribution: content\n });\n } else if (prompts.length === 0) {\n // Default volume if nothing provided\n content = ` app-data:\\n`;\n components.push({\n id: 'default-volume',\n name: 'Default Volume',\n contribution: content\n });\n } else {\n // Otherwise use the prompts\n for (const prompt of prompts) {\n const result = await this.promptService.applyTemplate(prompt.id, variables);\n content += result.content + '\\n';\n components.push({\n id: prompt.id,\n name: prompt.name,\n contribution: result.content\n });\n }\n }\n \n return {\n content: content.trim(),\n components,\n appliedVariables: variables\n };\n }\n \n /**\n * Generate volume definitions from context\n * @param context Docker Compose context\n * @returns Generated volume YAML\n */\n private generateVolumesFromContext(context: DockerComposeContext): string {\n let volumesYaml = '';\n \n for (const volume of context.volumes || []) {\n volumesYaml += ` ${volume.name}:\\n`;\n if (volume.driver) {\n volumesYaml += ` driver: ${volume.driver}\\n`;\n }\n if (volume.external) {\n volumesYaml += ` external: true\\n`;\n }\n volumesYaml += '\\n';\n }\n \n return volumesYaml;\n }\n \n /**\n * Helper method to combine deployment prompts\n * @param prompts Deployment prompts\n * @param context Docker Compose context\n * @returns Combined result for deployment\n */\n private async combineDeployment(prompts: Prompt[], context?: DockerComposeContext): Promise<CombinedPromptResult> {\n // Implementation would combine deployment commands\n let content = '';\n const components: {id: string; name: string; contribution: string}[] = [];\n const variables = context?.variables || {};\n \n // If no deployment prompts, generate default commands\n if (prompts.length === 0) {\n const projectName = variables.project_name || 'myproject';\n const env = context?.environment || 'development';\n \n content = `# Start all services\ndocker-compose -p ${projectName} -f docker-compose.${env}.yml up -d\n\n# View service logs\ndocker-compose -p ${projectName} -f docker-compose.${env}.yml logs -f\n\n# Scale specific services\ndocker-compose -p ${projectName} -f docker-compose.${env}.yml up -d --scale service_name=3\n\n# Stop all services\ndocker-compose -p ${projectName} -f docker-compose.${env}.yml down\n\n# Stop 
and remove volumes\ndocker-compose -p ${projectName} -f docker-compose.${env}.yml down -v`;\n \n components.push({\n id: 'default-deployment',\n name: 'Default Deployment Commands',\n contribution: content\n });\n } else {\n // Otherwise use the prompts\n for (const prompt of prompts) {\n const result = await this.promptService.applyTemplate(prompt.id, variables);\n content += result.content + '\\n\\n';\n components.push({\n id: prompt.id,\n name: prompt.name,\n contribution: result.content\n });\n }\n }\n \n return {\n content: content.trim(),\n components,\n appliedVariables: variables\n };\n }\n \n /**\n * Extract individual service configurations from combined YAML\n * @param servicesYaml Combined services YAML\n * @returns Object with service name keys and configuration values\n */\n private extractServiceConfigurations(servicesYaml: string): Record<string, string> {\n const services: Record<string, string> = {};\n const serviceBlocks = servicesYaml.split(/^\\s{2}[^\\s]+:/gm);\n \n // Skip the first empty block if it exists\n const startIndex = serviceBlocks[0].trim() === '' ? 1 : 0;\n \n for (let i = startIndex; i < serviceBlocks.length; i++) {\n const block = serviceBlocks[i];\n const nameMatch = block.match(/^\\s*([^\\s:]+)\\s*$/m);\n \n if (nameMatch && nameMatch[1]) {\n const serviceName = nameMatch[1];\n services[serviceName] = block.trim();\n }\n }\n \n return services;\n }\n \n /**\n * Gets Docker Compose prompt suggestions\n * @param category Optional category to filter by\n * @param context Current Docker Compose context to inform suggestions\n * @returns Array of prompt suggestions for Docker Compose configurations\n */\n async getPromptSuggestions(category?: string, context?: DockerComposeContext): Promise<PromptSuggestion[]> {\n // Implementation would suggest prompts based on the current Docker context\n // For example, if using PostgreSQL, suggest corresponding service templates\n // This is a placeholder for demonstration\n \n const hasDatabase = context?.services?.some(s => \n s.type === 'database' || \n s.image?.includes('postgres') || \n s.image?.includes('mysql') || \n s.image?.includes('mongo'));\n \n const hasMCP = context?.services?.some(s => \n s.name.includes('mcp') || \n s.type === 'mcp');\n \n return [\n {\n id: 'docker-containerization-guide',\n name: 'Docker Containerization Guide',\n relevance: 100,\n compatibleWith: ['docker-compose-database-service', 'docker-compose-mcp-service'],\n reason: 'Provides the Docker containerization foundation'\n },\n {\n id: 'docker-compose-database-service',\n name: 'Docker Compose Database Service',\n relevance: hasDatabase ? 100 : 70,\n compatibleWith: ['docker-containerization-guide', 'docker-compose-mcp-service'],\n reason: hasDatabase ? 'Required for database services in your composition' : 'Optional database service configuration'\n },\n {\n id: 'docker-compose-mcp-service',\n name: 'Docker Compose MCP Service',\n relevance: hasMCP ? 100 : 50,\n compatibleWith: ['docker-containerization-guide', 'docker-compose.postgres'],\n reason: hasMCP ? 'Required for MCP services in your composition' : 'Optional MCP service configuration'\n },\n {\n id: 'docker-compose-networking',\n name: 'Docker Compose Networking',\n relevance: 80,\n compatibleWith: ['docker-containerization-guide'],\n reason: 'Advanced networking configuration for your services'\n },\n {\n id: 'docker-compose-deployment',\n name: 'Docker Compose Deployment',\n relevance: context?.environment === 'production' ? 
100 : 70,\n compatibleWith: ['docker-containerization-guide'],\n reason: 'Deployment strategies for your Docker Compose applications'\n }\n ];\n }\n \n /**\n * Validates if the prompts can be combined for Docker Compose configurations\n * @param promptIds Array of prompt IDs to validate\n * @returns Validation result with any issues specific to Docker Compose\n */\n async validateCombination(promptIds: string[]): Promise<CombinationValidationResult> {\n // Implementation would validate that the prompts make sense for Docker Compose\n // For example, ensuring there are no conflicting service definitions\n // This is a placeholder for demonstration\n \n const prompts = await Promise.all(promptIds.map(id => this.promptService.getPrompt(id)));\n \n // Check for Docker container prompt\n const hasContainer = prompts.some(p => p.tags?.includes('docker') || p.tags?.includes('containerization'));\n if (!hasContainer) {\n return {\n isValid: false,\n issues: [{\n promptId: '',\n issue: 'Missing Docker containerization prompt',\n severity: 'error',\n suggestion: 'Add a Docker containerization prompt, such as docker-containerization-guide'\n }],\n suggestions: [{\n promptIds: [...promptIds, 'docker-containerization-guide'],\n reason: 'Docker containerization is required for Docker Compose configurations'\n }]\n };\n }\n \n // In a real implementation, would do more validation specific to Docker Compose\n \n return {\n isValid: true\n };\n }\n \n /**\n * Creates a saved Docker Compose workflow\n * @param name Name for the new workflow\n * @param promptIds Component prompt IDs\n * @param config Configuration for the combination\n * @returns The created Docker Compose workflow\n */\n async saveWorkflow(name: string, promptIds: string[], config: WorkflowConfig): Promise<SavedWorkflow> {\n // Implementation would save a Docker Compose workflow\n // This is a placeholder for demonstration\n \n return {\n id: `docker-compose-workflow-${Date.now()}`,\n name,\n promptIds,\n config,\n createdAt: new Date().toISOString(),\n updatedAt: new Date().toISOString(),\n version: 1,\n category: 'docker-compose',\n tags: ['docker', 'compose', 'deployment']\n };\n }\n \n /**\n * Loads a previously saved Docker Compose workflow\n * @param workflowId ID of the saved workflow\n * @returns The loaded Docker Compose workflow\n */\n async loadWorkflow(workflowId: string): Promise<SavedWorkflow> {\n // Implementation would load a Docker Compose workflow\n // This is a placeholder for demonstration\n \n throw new Error(`Workflow ${workflowId} not found or not implemented yet`);\n }\n}\n\n/**\n * Usage Examples\n * \n * ```typescript\n * // Creating a combiner\n * const promptService = new PromptService(storageAdapter);\n * const dockerCombiner = new DockerComposePromptCombiner(promptService);\n * \n * // Getting prompt suggestions for Docker Compose\n * const suggestions = await dockerCombiner.getPromptSuggestions('services', {\n * environment: 'production',\n * services: [\n * {\n * name: 'web',\n * type: 'frontend',\n * image: 'nginx:alpine',\n * ports: ['80:80']\n * },\n * {\n * name: 'api',\n * type: 'backend',\n * image: 'node:14-alpine',\n * ports: ['3000:3000'],\n * dependencies: ['db']\n * },\n * {\n * name: 'db',\n * type: 'database',\n * image: 'postgres:13',\n * volumes: ['postgres-data:/var/lib/postgresql/data']\n * }\n * ],\n * composeVersion: '3.8'\n * });\n * \n * // Combining prompts for Docker Compose\n * const result = await dockerCombiner.combinePrompts([\n * 'docker-containerization-guide',\n * 
'docker-compose-database-service'\n * ], {\n * variables: {\n * project_name: 'My Awesome Project',\n * service_name: 'api'\n * },\n * environment: 'production',\n * services: [\n * {\n * name: 'web',\n * type: 'frontend',\n * image: 'nginx:alpine',\n * ports: ['80:80']\n * },\n * {\n * name: 'api',\n * type: 'backend',\n * image: 'node:14-alpine',\n * ports: ['3000:3000'],\n * dependencies: ['db']\n * },\n * {\n * name: 'db',\n * type: 'database',\n * image: 'postgres:13',\n * volumes: ['postgres-data:/var/lib/postgresql/data']\n * }\n * ],\n * composeVersion: '3.8'\n * });\n * \n * // Using the specialized result properties\n * console.log(result.composeConfiguration); // Get the complete Docker Compose configuration\n * console.log(result.serviceConfigurations['db']); // Get just the database service configuration\n * console.log(result.deploymentCommands); // Get the deployment commands\n * ```\n */\n\n// ============================\n// Extension Guidelines\n// ============================\n\n/**\n * When extending DockerComposePromptCombiner, consider:\n * \n * 1. Adding support for specific service types (e.g., web, backend, database, cache)\n * 2. Enhancing the context with more Docker-specific properties\n * 3. Adding support for more complex network and volume configurations\n * 4. Implementing advanced health check configurations\n * 5. Adding support for Docker Swarm mode configurations\n * 6. {{additional_extension_guidelines}}\n */",
7 | "variables": [
8 | "project_name",
9 | "additional_docker_context",
10 | "additional_docker_results",
11 | "additional_extension_guidelines"
12 | ],
13 | "metadata": {
14 | "source": "/home/sparrow/projects/mcp-prompts/prompts/docker-compose-prompt-combiner.json",
15 | "imported": true
16 | }
17 | }
```
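For a concrete sense of what `generateServicesFromContext` emits, here is a standalone sketch of the same service-YAML generation with a trimmed-down context type. It assumes two-space indentation per YAML nesting level (the indentation inside the template literals is collapsed in the dump above) and is not the repository's implementation:

```typescript
interface ServiceSpec {
  name: string;
  image?: string;
  ports?: string[];
  dependencies?: string[];
}

// Emit the indented service entries that sit under a top-level `services:` key.
function servicesYaml(services: ServiceSpec[]): string {
  let out = '';
  for (const s of services) {
    out += `  ${s.name}:\n`;
    out += s.image ? `    image: ${s.image}\n` : `    build: ./${s.name}\n`;
    if (s.ports?.length) {
      out += '    ports:\n';
      for (const port of s.ports) out += `      - "${port}"\n`;
    }
    if (s.dependencies?.length) {
      out += '    depends_on:\n';
      for (const dep of s.dependencies) out += `      - ${dep}\n`;
    }
    out += '\n';
  }
  return out;
}

console.log(servicesYaml([
  { name: 'api', image: 'node:14-alpine', ports: ['3000:3000'], dependencies: ['db'] },
  { name: 'db', image: 'postgres:13' },
]));
```

The result is only the indented entries; `combinePrompts` splices this fragment into its `version`/`services:`/`networks:`/`volumes:` skeleton.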
--------------------------------------------------------------------------------
/src/mcp_project_orchestrator/prompts/Docker_Compose_Prompt_Combiner.json:
--------------------------------------------------------------------------------
```json
1 | {
2 | "name": "Docker Compose Prompt Combiner",
3 | "description": "A specialized prompt combiner for creating Docker Compose configurations that integrates service definitions, volumes, networks, and deployment patterns",
4 | "type": "prompt",
5 | "category": "devops",
6 | "content": "/**\n * DockerComposePromptCombiner for {{project_name}}\n * \n * A specialized implementation of the PromptCombiner interface\n * focused on combining prompts for Docker Compose configuration and orchestration.\n */\n\nimport { PromptCombiner, CombinerContext, CombinedPromptResult, PromptSuggestion, CombinationValidationResult, WorkflowConfig, SavedWorkflow } from './prompt-combiner-interface';\nimport { PromptService } from '../services/prompt-service';\nimport { Prompt } from '../core/types';\n\n/**\n * Docker Compose specific context\n */\nexport interface DockerComposeContext extends CombinerContext {\n /** Project environment (development, staging, production) */\n environment: 'development' | 'staging' | 'production' | string;\n \n /** Services to include in the configuration */\n services: {\n name: string;\n type: string;\n image?: string;\n ports?: string[];\n volumes?: string[];\n environment?: Record<string, string>;\n dependencies?: string[];\n }[];\n \n /** Networks to define */\n networks?: {\n name: string;\n external?: boolean;\n driver?: string;\n }[];\n \n /** Volumes to define */\n volumes?: {\n name: string;\n driver?: string;\n external?: boolean;\n }[];\n \n /** Docker Compose version */\n composeVersion?: string;\n \n /** Orchestration platform */\n platform?: 'docker' | 'kubernetes' | 'swarm';\n \n /** Resource constraints */\n resources?: {\n memoryLimits?: boolean;\n cpuLimits?: boolean;\n };\n \n /** Additional Docker-specific context */\n {{additional_docker_context}}\n}\n\n/**\n * Specialized result for Docker Compose combinations\n */\nexport interface DockerComposeResult extends CombinedPromptResult {\n /** Generated Docker Compose configuration */\n composeConfiguration?: string;\n \n /** Individual service configurations */\n serviceConfigurations?: Record<string, string>;\n \n /** Network configurations */\n networkConfigurations?: string;\n \n /** Volume configurations */\n volumeConfigurations?: string;\n \n /** Deployment commands */\n deploymentCommands?: string;\n \n /** Generated Dockerfiles */\n dockerfiles?: Record<string, string>;\n \n /** Additional Docker-specific results */\n {{additional_docker_results}}\n}\n\n/**\n * Implementation of DockerComposePromptCombiner\n */\nexport class DockerComposePromptCombiner implements PromptCombiner {\n constructor(private promptService: PromptService) {}\n \n /**\n * Combines Docker Compose prompts\n * @param promptIds Array of prompt IDs to combine\n * @param context Optional Docker Compose context\n * @returns Combined Docker Compose result\n */\n async combinePrompts(promptIds: string[], context?: DockerComposeContext): Promise<DockerComposeResult> {\n // Implementation would include:\n // 1. Validating the prompts are compatible for Docker Compose configurations\n // 2. Organizing prompts into service, network, and volume sections\n // 3. Resolving dependencies between services\n // 4. Applying variables with Docker Compose knowledge\n // 5. 
Generating a comprehensive deployment configuration\n \n // This is a template structure - in a real implementation, this would contain\n // the actual logic for combining Docker Compose prompts\n \n // For now, we'll outline the structure of how the implementation would work\n \n // Step 1: Load and categorize all prompts\n const prompts = await Promise.all(promptIds.map(id => this.promptService.getPrompt(id)));\n \n const servicePrompts = prompts.filter(p => p.tags?.includes('service'));\n const networkPrompts = prompts.filter(p => p.tags?.includes('network'));\n const volumePrompts = prompts.filter(p => p.tags?.includes('volume'));\n const deploymentPrompts = prompts.filter(p => p.tags?.includes('deployment'));\n \n // Step 2: Apply variables to each prompt category\n const variables = context?.variables || {};\n \n // Combine service configurations\n const services = await this.combineServices(servicePrompts, context);\n \n // Combine network configurations\n const networks = await this.combineNetworks(networkPrompts, context);\n \n // Combine volume configurations\n const volumes = await this.combineVolumes(volumePrompts, context);\n \n // Combine deployment commands\n const deployment = await this.combineDeployment(deploymentPrompts, context);\n \n // Step 3: Create combined Docker Compose content\n const composeVersion = context?.composeVersion || '3.8';\n const serviceName = variables.service_name || 'app';\n \n const composeConfiguration = `version: '${composeVersion}'\n\nservices:\n${services.content}\n\nnetworks:\n${networks.content}\n\nvolumes:\n${volumes.content}\n`;\n \n // Step 4: Return the comprehensive result\n return {\n content: `# Docker Compose Configuration for ${variables.project_name || 'Your Project'}\n\n## Docker Compose File\n\n\\`\\`\\`yaml\n${composeConfiguration}\n\\`\\`\\`\n\n## Deployment Commands\n\n${deployment.content}\n`,\n components: [\n ...services.components,\n ...networks.components,\n ...volumes.components,\n ...deployment.components\n ],\n appliedVariables: variables,\n composeConfiguration,\n serviceConfigurations: this.extractServiceConfigurations(services.content),\n networkConfigurations: networks.content,\n volumeConfigurations: volumes.content,\n deploymentCommands: deployment.content,\n // Add suggestion for what to do next\n nextSteps: [\n { action: 'validate_compose', description: 'Validate the Docker Compose configuration using docker-compose config' },\n { action: 'deploy_compose', description: 'Deploy services using docker-compose up -d' },\n { action: 'monitor_services', description: 'Monitor service logs using docker-compose logs -f' },\n { action: 'scale_services', description: 'Scale services as needed using docker-compose up -d --scale' }\n ]\n };\n }\n \n /**\n * Helper method to combine service prompts\n * @param prompts Service prompts\n * @param context Docker Compose context\n * @returns Combined result for services\n */\n private async combineServices(prompts: Prompt[], context?: DockerComposeContext): Promise<CombinedPromptResult> {\n // Implementation would combine service definitions\n // For our template, we'll create a simplified implementation\n let content = '';\n const components: {id: string; name: string; contribution: string}[] = [];\n const variables = context?.variables || {};\n \n // If no service prompts but we have services in context, create from context\n if (prompts.length === 0 && context?.services?.length) {\n content = this.generateServicesFromContext(context);\n components.push({\n id: 
'generated-services',\n name: 'Generated Services',\n contribution: content\n });\n } else {\n // Otherwise use the prompts\n for (const prompt of prompts) {\n const result = await this.promptService.applyTemplate(prompt.id, variables);\n content += result.content + '\\n';\n components.push({\n id: prompt.id,\n name: prompt.name,\n contribution: result.content\n });\n }\n }\n \n return {\n content: content.trim(),\n components,\n appliedVariables: variables\n };\n }\n \n /**\n * Generate service definitions from context\n * @param context Docker Compose context\n * @returns Generated service YAML\n */\n private generateServicesFromContext(context: DockerComposeContext): string {\n let servicesYaml = '';\n \n for (const service of context.services) {\n servicesYaml += ` ${service.name}:\\n`;\n if (service.image) {\n servicesYaml += ` image: ${service.image}\\n`;\n } else {\n servicesYaml += ` build: ./${service.name}\\n`;\n }\n \n if (service.ports && service.ports.length) {\n servicesYaml += ' ports:\\n';\n for (const port of service.ports) {\n servicesYaml += ` - \"${port}\"\\n`;\n }\n }\n \n if (service.environment && Object.keys(service.environment).length) {\n servicesYaml += ' environment:\\n';\n for (const [key, value] of Object.entries(service.environment)) {\n servicesYaml += ` - ${key}=${value}\\n`;\n }\n }\n \n if (service.volumes && service.volumes.length) {\n servicesYaml += ' volumes:\\n';\n for (const volume of service.volumes) {\n servicesYaml += ` - ${volume}\\n`;\n }\n }\n \n if (service.dependencies && service.dependencies.length) {\n servicesYaml += ' depends_on:\\n';\n for (const dep of service.dependencies) {\n servicesYaml += ` - ${dep}\\n`;\n }\n }\n \n // Add resource constraints if specified\n if (context.resources?.cpuLimits || context.resources?.memoryLimits) {\n servicesYaml += ' deploy:\\n resources:\\n limits:\\n';\n if (context.resources.cpuLimits) {\n servicesYaml += ' cpus: \"1.0\"\\n';\n }\n if (context.resources.memoryLimits) {\n servicesYaml += ' memory: 512M\\n';\n }\n }\n \n servicesYaml += '\\n';\n }\n \n return servicesYaml;\n }\n \n /**\n * Helper method to combine network prompts\n * @param prompts Network prompts\n * @param context Docker Compose context\n * @returns Combined result for networks\n */\n private async combineNetworks(prompts: Prompt[], context?: DockerComposeContext): Promise<CombinedPromptResult> {\n // Implementation would combine network definitions\n let content = '';\n const components: {id: string; name: string; contribution: string}[] = [];\n const variables = context?.variables || {};\n \n // If no network prompts but we have networks in context, create from context\n if (prompts.length === 0 && context?.networks?.length) {\n content = this.generateNetworksFromContext(context);\n components.push({\n id: 'generated-networks',\n name: 'Generated Networks',\n contribution: content\n });\n } else if (prompts.length === 0) {\n // Default network if nothing provided\n content = ` app-network:\\n driver: bridge\\n`;\n components.push({\n id: 'default-network',\n name: 'Default Network',\n contribution: content\n });\n } else {\n // Otherwise use the prompts\n for (const prompt of prompts) {\n const result = await this.promptService.applyTemplate(prompt.id, variables);\n content += result.content + '\\n';\n components.push({\n id: prompt.id,\n name: prompt.name,\n contribution: result.content\n });\n }\n }\n \n return {\n content: content.trim(),\n components,\n appliedVariables: variables\n };\n }\n \n /**\n * Generate network 
definitions from context\n * @param context Docker Compose context\n * @returns Generated network YAML\n */\n private generateNetworksFromContext(context: DockerComposeContext): string {\n let networksYaml = '';\n \n for (const network of context.networks || []) {\n networksYaml += ` ${network.name}:\\n`;\n if (network.driver) {\n networksYaml += ` driver: ${network.driver}\\n`;\n }\n if (network.external) {\n networksYaml += ` external: true\\n`;\n }\n networksYaml += '\\n';\n }\n \n return networksYaml;\n }\n \n /**\n * Helper method to combine volume prompts\n * @param prompts Volume prompts\n * @param context Docker Compose context\n * @returns Combined result for volumes\n */\n private async combineVolumes(prompts: Prompt[], context?: DockerComposeContext): Promise<CombinedPromptResult> {\n // Implementation would combine volume definitions\n let content = '';\n const components: {id: string; name: string; contribution: string}[] = [];\n const variables = context?.variables || {};\n \n // If no volume prompts but we have volumes in context, create from context\n if (prompts.length === 0 && context?.volumes?.length) {\n content = this.generateVolumesFromContext(context);\n components.push({\n id: 'generated-volumes',\n name: 'Generated Volumes',\n contribution: content\n });\n } else if (prompts.length === 0) {\n // Default volume if nothing provided\n content = ` app-data:\\n`;\n components.push({\n id: 'default-volume',\n name: 'Default Volume',\n contribution: content\n });\n } else {\n // Otherwise use the prompts\n for (const prompt of prompts) {\n const result = await this.promptService.applyTemplate(prompt.id, variables);\n content += result.content + '\\n';\n components.push({\n id: prompt.id,\n name: prompt.name,\n contribution: result.content\n });\n }\n }\n \n return {\n content: content.trim(),\n components,\n appliedVariables: variables\n };\n }\n \n /**\n * Generate volume definitions from context\n * @param context Docker Compose context\n * @returns Generated volume YAML\n */\n private generateVolumesFromContext(context: DockerComposeContext): string {\n let volumesYaml = '';\n \n for (const volume of context.volumes || []) {\n volumesYaml += ` ${volume.name}:\\n`;\n if (volume.driver) {\n volumesYaml += ` driver: ${volume.driver}\\n`;\n }\n if (volume.external) {\n volumesYaml += ` external: true\\n`;\n }\n volumesYaml += '\\n';\n }\n \n return volumesYaml;\n }\n \n /**\n * Helper method to combine deployment prompts\n * @param prompts Deployment prompts\n * @param context Docker Compose context\n * @returns Combined result for deployment\n */\n private async combineDeployment(prompts: Prompt[], context?: DockerComposeContext): Promise<CombinedPromptResult> {\n // Implementation would combine deployment commands\n let content = '';\n const components: {id: string; name: string; contribution: string}[] = [];\n const variables = context?.variables || {};\n \n // If no deployment prompts, generate default commands\n if (prompts.length === 0) {\n const projectName = variables.project_name || 'myproject';\n const env = context?.environment || 'development';\n \n content = `# Start all services\ndocker-compose -p ${projectName} -f docker-compose.${env}.yml up -d\n\n# View service logs\ndocker-compose -p ${projectName} -f docker-compose.${env}.yml logs -f\n\n# Scale specific services\ndocker-compose -p ${projectName} -f docker-compose.${env}.yml up -d --scale service_name=3\n\n# Stop all services\ndocker-compose -p ${projectName} -f docker-compose.${env}.yml down\n\n# Stop 
and remove volumes\ndocker-compose -p ${projectName} -f docker-compose.${env}.yml down -v`;\n \n components.push({\n id: 'default-deployment',\n name: 'Default Deployment Commands',\n contribution: content\n });\n } else {\n // Otherwise use the prompts\n for (const prompt of prompts) {\n const result = await this.promptService.applyTemplate(prompt.id, variables);\n content += result.content + '\\n\\n';\n components.push({\n id: prompt.id,\n name: prompt.name,\n contribution: result.content\n });\n }\n }\n \n return {\n content: content.trim(),\n components,\n appliedVariables: variables\n };\n }\n \n /**\n * Extract individual service configurations from combined YAML\n * @param servicesYaml Combined services YAML\n * @returns Object with service name keys and configuration values\n */\n private extractServiceConfigurations(servicesYaml: string): Record<string, string> {\n const services: Record<string, string> = {};\n const serviceBlocks = servicesYaml.split(/^\\s{2}[^\\s]+:/gm);\n \n // Skip the first empty block if it exists\n const startIndex = serviceBlocks[0].trim() === '' ? 1 : 0;\n \n for (let i = startIndex; i < serviceBlocks.length; i++) {\n const block = serviceBlocks[i];\n const nameMatch = block.match(/^\\s*([^\\s:]+)\\s*$/m);\n \n if (nameMatch && nameMatch[1]) {\n const serviceName = nameMatch[1];\n services[serviceName] = block.trim();\n }\n }\n \n return services;\n }\n \n /**\n * Gets Docker Compose prompt suggestions\n * @param category Optional category to filter by\n * @param context Current Docker Compose context to inform suggestions\n * @returns Array of prompt suggestions for Docker Compose configurations\n */\n async getPromptSuggestions(category?: string, context?: DockerComposeContext): Promise<PromptSuggestion[]> {\n // Implementation would suggest prompts based on the current Docker context\n // For example, if using PostgreSQL, suggest corresponding service templates\n // This is a placeholder for demonstration\n \n const hasDatabase = context?.services?.some(s => \n s.type === 'database' || \n s.image?.includes('postgres') || \n s.image?.includes('mysql') || \n s.image?.includes('mongo'));\n \n const hasMCP = context?.services?.some(s => \n s.name.includes('mcp') || \n s.type === 'mcp');\n \n return [\n {\n id: 'docker-containerization-guide',\n name: 'Docker Containerization Guide',\n relevance: 100,\n compatibleWith: ['docker-compose-database-service', 'docker-compose-mcp-service'],\n reason: 'Provides the Docker containerization foundation'\n },\n {\n id: 'docker-compose-database-service',\n name: 'Docker Compose Database Service',\n relevance: hasDatabase ? 100 : 70,\n compatibleWith: ['docker-containerization-guide', 'docker-compose-mcp-service'],\n reason: hasDatabase ? 'Required for database services in your composition' : 'Optional database service configuration'\n },\n {\n id: 'docker-compose-mcp-service',\n name: 'Docker Compose MCP Service',\n relevance: hasMCP ? 100 : 50,\n compatibleWith: ['docker-containerization-guide', 'docker-compose.postgres'],\n reason: hasMCP ? 'Required for MCP services in your composition' : 'Optional MCP service configuration'\n },\n {\n id: 'docker-compose-networking',\n name: 'Docker Compose Networking',\n relevance: 80,\n compatibleWith: ['docker-containerization-guide'],\n reason: 'Advanced networking configuration for your services'\n },\n {\n id: 'docker-compose-deployment',\n name: 'Docker Compose Deployment',\n relevance: context?.environment === 'production' ? 
100 : 70,\n compatibleWith: ['docker-containerization-guide'],\n reason: 'Deployment strategies for your Docker Compose applications'\n }\n ];\n }\n \n /**\n * Validates if the prompts can be combined for Docker Compose configurations\n * @param promptIds Array of prompt IDs to validate\n * @returns Validation result with any issues specific to Docker Compose\n */\n async validateCombination(promptIds: string[]): Promise<CombinationValidationResult> {\n // Implementation would validate that the prompts make sense for Docker Compose\n // For example, ensuring there are no conflicting service definitions\n // This is a placeholder for demonstration\n \n const prompts = await Promise.all(promptIds.map(id => this.promptService.getPrompt(id)));\n \n // Check for Docker container prompt\n const hasContainer = prompts.some(p => p.tags?.includes('docker') || p.tags?.includes('containerization'));\n if (!hasContainer) {\n return {\n isValid: false,\n issues: [{\n promptId: '',\n issue: 'Missing Docker containerization prompt',\n severity: 'error',\n suggestion: 'Add a Docker containerization prompt, such as docker-containerization-guide'\n }],\n suggestions: [{\n promptIds: [...promptIds, 'docker-containerization-guide'],\n reason: 'Docker containerization is required for Docker Compose configurations'\n }]\n };\n }\n \n // In a real implementation, would do more validation specific to Docker Compose\n \n return {\n isValid: true\n };\n }\n \n /**\n * Creates a saved Docker Compose workflow\n * @param name Name for the new workflow\n * @param promptIds Component prompt IDs\n * @param config Configuration for the combination\n * @returns The created Docker Compose workflow\n */\n async saveWorkflow(name: string, promptIds: string[], config: WorkflowConfig): Promise<SavedWorkflow> {\n // Implementation would save a Docker Compose workflow\n // This is a placeholder for demonstration\n \n return {\n id: `docker-compose-workflow-${Date.now()}`,\n name,\n promptIds,\n config,\n createdAt: new Date().toISOString(),\n updatedAt: new Date().toISOString(),\n version: 1,\n category: 'docker-compose',\n tags: ['docker', 'compose', 'deployment']\n };\n }\n \n /**\n * Loads a previously saved Docker Compose workflow\n * @param workflowId ID of the saved workflow\n * @returns The loaded Docker Compose workflow\n */\n async loadWorkflow(workflowId: string): Promise<SavedWorkflow> {\n // Implementation would load a Docker Compose workflow\n // This is a placeholder for demonstration\n \n throw new Error(`Workflow ${workflowId} not found or not implemented yet`);\n }\n}\n\n/**\n * Usage Examples\n * \n * ```typescript\n * // Creating a combiner\n * const promptService = new PromptService(storageAdapter);\n * const dockerCombiner = new DockerComposePromptCombiner(promptService);\n * \n * // Getting prompt suggestions for Docker Compose\n * const suggestions = await dockerCombiner.getPromptSuggestions('services', {\n * environment: 'production',\n * services: [\n * {\n * name: 'web',\n * type: 'frontend',\n * image: 'nginx:alpine',\n * ports: ['80:80']\n * },\n * {\n * name: 'api',\n * type: 'backend',\n * image: 'node:14-alpine',\n * ports: ['3000:3000'],\n * dependencies: ['db']\n * },\n * {\n * name: 'db',\n * type: 'database',\n * image: 'postgres:13',\n * volumes: ['postgres-data:/var/lib/postgresql/data']\n * }\n * ],\n * composeVersion: '3.8'\n * });\n * \n * // Combining prompts for Docker Compose\n * const result = await dockerCombiner.combinePrompts([\n * 'docker-containerization-guide',\n * 
'docker-compose-database-service'\n * ], {\n * variables: {\n * project_name: 'My Awesome Project',\n * service_name: 'api'\n * },\n * environment: 'production',\n * services: [\n * {\n * name: 'web',\n * type: 'frontend',\n * image: 'nginx:alpine',\n * ports: ['80:80']\n * },\n * {\n * name: 'api',\n * type: 'backend',\n * image: 'node:14-alpine',\n * ports: ['3000:3000'],\n * dependencies: ['db']\n * },\n * {\n * name: 'db',\n * type: 'database',\n * image: 'postgres:13',\n * volumes: ['postgres-data:/var/lib/postgresql/data']\n * }\n * ],\n * composeVersion: '3.8'\n * });\n * \n * // Using the specialized result properties\n * console.log(result.composeConfiguration); // Get the complete Docker Compose configuration\n * console.log(result.serviceConfigurations['db']); // Get just the database service configuration\n * console.log(result.deploymentCommands); // Get the deployment commands\n * ```\n */\n\n// ============================\n// Extension Guidelines\n// ============================\n\n/**\n * When extending DockerComposePromptCombiner, consider:\n * \n * 1. Adding support for specific service types (e.g., web, backend, database, cache)\n * 2. Enhancing the context with more Docker-specific properties\n * 3. Adding support for more complex network and volume configurations\n * 4. Implementing advanced health check configurations\n * 5. Adding support for Docker Swarm mode configurations\n * 6. {{additional_extension_guidelines}}\n */",
7 | "variables": [
8 | "project_name",
9 | "additional_docker_context",
10 | "additional_docker_results",
11 | "additional_extension_guidelines"
12 | ],
13 | "metadata": {
14 | "source": "/home/sparrow/projects/mcp-prompts/prompts/docker-compose-prompt-combiner.json",
15 | "imported": true
16 | }
17 | }
```
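`extractServiceConfigurations` in both copies above is placeholder logic: splitting on the `  name:` header lines appears to discard the very names it later tries to recover from each block. One way to make the extraction concrete, assuming the two-space service headers produced by the generator; an illustrative sketch, not the project's implementation:

```typescript
// Split a combined services fragment into { serviceName: blockText } entries.
function extractServices(servicesYaml: string): Record<string, string> {
  const services: Record<string, string> = {};
  // Match "  name:" headers and capture everything up to the next header.
  const pattern = /^ {2}(\S+):\n((?:(?! {2}\S+:).*\n?)*)/gm;
  for (const match of servicesYaml.matchAll(pattern)) {
    const [, name, body] = match;
    services[name] = body.trimEnd();
  }
  return services;
}

const fragment = [
  '  api:',
  '    image: node:14-alpine',
  '    depends_on:',
  '      - db',
  '  db:',
  '    image: postgres:13',
  '',
].join('\n');

console.log(extractServices(fragment));
// Prints one entry per service, each keeping its indented body lines.
```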
--------------------------------------------------------------------------------
/data/prompts/templates/docker-compose-prompt-combiner.json:
--------------------------------------------------------------------------------
```json
1 | {
2 | "id": "docker-compose-prompt-combiner",
3 | "name": "Docker Compose Prompt Combiner",
4 | "description": "A specialized prompt combiner for creating Docker Compose configurations that integrates service definitions, volumes, networks, and deployment patterns",
5 | "content": "/**\n * DockerComposePromptCombiner for {{project_name}}\n * \n * A specialized implementation of the PromptCombiner interface\n * focused on combining prompts for Docker Compose configuration and orchestration.\n */\n\nimport { PromptCombiner, CombinerContext, CombinedPromptResult, PromptSuggestion, CombinationValidationResult, WorkflowConfig, SavedWorkflow } from './prompt-combiner-interface';\nimport { PromptService } from '../services/prompt-service';\nimport { Prompt } from '../core/types';\n\n/**\n * Docker Compose specific context\n */\nexport interface DockerComposeContext extends CombinerContext {\n /** Project environment (development, staging, production) */\n environment: 'development' | 'staging' | 'production' | string;\n \n /** Services to include in the configuration */\n services: {\n name: string;\n type: string;\n image?: string;\n ports?: string[];\n volumes?: string[];\n environment?: Record<string, string>;\n dependencies?: string[];\n }[];\n \n /** Networks to define */\n networks?: {\n name: string;\n external?: boolean;\n driver?: string;\n }[];\n \n /** Volumes to define */\n volumes?: {\n name: string;\n driver?: string;\n external?: boolean;\n }[];\n \n /** Docker Compose version */\n composeVersion?: string;\n \n /** Orchestration platform */\n platform?: 'docker' | 'kubernetes' | 'swarm';\n \n /** Resource constraints */\n resources?: {\n memoryLimits?: boolean;\n cpuLimits?: boolean;\n };\n \n /** Additional Docker-specific context */\n {{additional_docker_context}}\n}\n\n/**\n * Specialized result for Docker Compose combinations\n */\nexport interface DockerComposeResult extends CombinedPromptResult {\n /** Generated Docker Compose configuration */\n composeConfiguration?: string;\n \n /** Individual service configurations */\n serviceConfigurations?: Record<string, string>;\n \n /** Network configurations */\n networkConfigurations?: string;\n \n /** Volume configurations */\n volumeConfigurations?: string;\n \n /** Deployment commands */\n deploymentCommands?: string;\n \n /** Generated Dockerfiles */\n dockerfiles?: Record<string, string>;\n \n /** Additional Docker-specific results */\n {{additional_docker_results}}\n}\n\n/**\n * Implementation of DockerComposePromptCombiner\n */\nexport class DockerComposePromptCombiner implements PromptCombiner {\n constructor(private promptService: PromptService) {}\n \n /**\n * Combines Docker Compose prompts\n * @param promptIds Array of prompt IDs to combine\n * @param context Optional Docker Compose context\n * @returns Combined Docker Compose result\n */\n async combinePrompts(promptIds: string[], context?: DockerComposeContext): Promise<DockerComposeResult> {\n // Implementation would include:\n // 1. Validating the prompts are compatible for Docker Compose configurations\n // 2. Organizing prompts into service, network, and volume sections\n // 3. Resolving dependencies between services\n // 4. Applying variables with Docker Compose knowledge\n // 5. 
Generating a comprehensive deployment configuration\n \n // This is a template structure - in a real implementation, this would contain\n // the actual logic for combining Docker Compose prompts\n \n // For now, we'll outline the structure of how the implementation would work\n \n // Step 1: Load and categorize all prompts\n const prompts = await Promise.all(promptIds.map(id => this.promptService.getPrompt(id)));\n \n const servicePrompts = prompts.filter(p => p.tags?.includes('service'));\n const networkPrompts = prompts.filter(p => p.tags?.includes('network'));\n const volumePrompts = prompts.filter(p => p.tags?.includes('volume'));\n const deploymentPrompts = prompts.filter(p => p.tags?.includes('deployment'));\n \n // Step 2: Apply variables to each prompt category\n const variables = context?.variables || {};\n \n // Combine service configurations\n const services = await this.combineServices(servicePrompts, context);\n \n // Combine network configurations\n const networks = await this.combineNetworks(networkPrompts, context);\n \n // Combine volume configurations\n const volumes = await this.combineVolumes(volumePrompts, context);\n \n // Combine deployment commands\n const deployment = await this.combineDeployment(deploymentPrompts, context);\n \n // Step 3: Create combined Docker Compose content\n const composeVersion = context?.composeVersion || '3.8';\n const serviceName = variables.service_name || 'app';\n \n const composeConfiguration = `version: '${composeVersion}'\n\nservices:\n${services.content}\n\nnetworks:\n${networks.content}\n\nvolumes:\n${volumes.content}\n`;\n \n // Step 4: Return the comprehensive result\n return {\n content: `# Docker Compose Configuration for ${variables.project_name || 'Your Project'}\n\n## Docker Compose File\n\n\\`\\`\\`yaml\n${composeConfiguration}\n\\`\\`\\`\n\n## Deployment Commands\n\n${deployment.content}\n`,\n components: [\n ...services.components,\n ...networks.components,\n ...volumes.components,\n ...deployment.components\n ],\n appliedVariables: variables,\n composeConfiguration,\n serviceConfigurations: this.extractServiceConfigurations(services.content),\n networkConfigurations: networks.content,\n volumeConfigurations: volumes.content,\n deploymentCommands: deployment.content,\n // Add suggestion for what to do next\n nextSteps: [\n { action: 'validate_compose', description: 'Validate the Docker Compose configuration using docker-compose config' },\n { action: 'deploy_compose', description: 'Deploy services using docker-compose up -d' },\n { action: 'monitor_services', description: 'Monitor service logs using docker-compose logs -f' },\n { action: 'scale_services', description: 'Scale services as needed using docker-compose up -d --scale' }\n ]\n };\n }\n \n /**\n * Helper method to combine service prompts\n * @param prompts Service prompts\n * @param context Docker Compose context\n * @returns Combined result for services\n */\n private async combineServices(prompts: Prompt[], context?: DockerComposeContext): Promise<CombinedPromptResult> {\n // Implementation would combine service definitions\n // For our template, we'll create a simplified implementation\n let content = '';\n const components: {id: string; name: string; contribution: string}[] = [];\n const variables = context?.variables || {};\n \n // If no service prompts but we have services in context, create from context\n if (prompts.length === 0 && context?.services?.length) {\n content = this.generateServicesFromContext(context);\n components.push({\n id: 
'generated-services',\n name: 'Generated Services',\n contribution: content\n });\n } else {\n // Otherwise use the prompts\n for (const prompt of prompts) {\n const result = await this.promptService.applyTemplate(prompt.id, variables);\n content += result.content + '\\n';\n components.push({\n id: prompt.id,\n name: prompt.name,\n contribution: result.content\n });\n }\n }\n \n return {\n content: content.trim(),\n components,\n appliedVariables: variables\n };\n }\n \n /**\n * Generate service definitions from context\n * @param context Docker Compose context\n * @returns Generated service YAML\n */\n private generateServicesFromContext(context: DockerComposeContext): string {\n let servicesYaml = '';\n \n for (const service of context.services) {\n servicesYaml += ` ${service.name}:\\n`;\n if (service.image) {\n servicesYaml += ` image: ${service.image}\\n`;\n } else {\n servicesYaml += ` build: ./${service.name}\\n`;\n }\n \n if (service.ports && service.ports.length) {\n servicesYaml += ' ports:\\n';\n for (const port of service.ports) {\n servicesYaml += ` - \"${port}\"\\n`;\n }\n }\n \n if (service.environment && Object.keys(service.environment).length) {\n servicesYaml += ' environment:\\n';\n for (const [key, value] of Object.entries(service.environment)) {\n servicesYaml += ` - ${key}=${value}\\n`;\n }\n }\n \n if (service.volumes && service.volumes.length) {\n servicesYaml += ' volumes:\\n';\n for (const volume of service.volumes) {\n servicesYaml += ` - ${volume}\\n`;\n }\n }\n \n if (service.dependencies && service.dependencies.length) {\n servicesYaml += ' depends_on:\\n';\n for (const dep of service.dependencies) {\n servicesYaml += ` - ${dep}\\n`;\n }\n }\n \n // Add resource constraints if specified\n if (context.resources?.cpuLimits || context.resources?.memoryLimits) {\n servicesYaml += ' deploy:\\n resources:\\n limits:\\n';\n if (context.resources.cpuLimits) {\n servicesYaml += ' cpus: \"1.0\"\\n';\n }\n if (context.resources.memoryLimits) {\n servicesYaml += ' memory: 512M\\n';\n }\n }\n \n servicesYaml += '\\n';\n }\n \n return servicesYaml;\n }\n \n /**\n * Helper method to combine network prompts\n * @param prompts Network prompts\n * @param context Docker Compose context\n * @returns Combined result for networks\n */\n private async combineNetworks(prompts: Prompt[], context?: DockerComposeContext): Promise<CombinedPromptResult> {\n // Implementation would combine network definitions\n let content = '';\n const components: {id: string; name: string; contribution: string}[] = [];\n const variables = context?.variables || {};\n \n // If no network prompts but we have networks in context, create from context\n if (prompts.length === 0 && context?.networks?.length) {\n content = this.generateNetworksFromContext(context);\n components.push({\n id: 'generated-networks',\n name: 'Generated Networks',\n contribution: content\n });\n } else if (prompts.length === 0) {\n // Default network if nothing provided\n content = ` app-network:\\n driver: bridge\\n`;\n components.push({\n id: 'default-network',\n name: 'Default Network',\n contribution: content\n });\n } else {\n // Otherwise use the prompts\n for (const prompt of prompts) {\n const result = await this.promptService.applyTemplate(prompt.id, variables);\n content += result.content + '\\n';\n components.push({\n id: prompt.id,\n name: prompt.name,\n contribution: result.content\n });\n }\n }\n \n return {\n content: content.trim(),\n components,\n appliedVariables: variables\n };\n }\n \n /**\n * Generate network 
definitions from context\n * @param context Docker Compose context\n * @returns Generated network YAML\n */\n private generateNetworksFromContext(context: DockerComposeContext): string {\n let networksYaml = '';\n \n for (const network of context.networks || []) {\n networksYaml += ` ${network.name}:\\n`;\n if (network.driver) {\n networksYaml += ` driver: ${network.driver}\\n`;\n }\n if (network.external) {\n networksYaml += ` external: true\\n`;\n }\n networksYaml += '\\n';\n }\n \n return networksYaml;\n }\n \n /**\n * Helper method to combine volume prompts\n * @param prompts Volume prompts\n * @param context Docker Compose context\n * @returns Combined result for volumes\n */\n private async combineVolumes(prompts: Prompt[], context?: DockerComposeContext): Promise<CombinedPromptResult> {\n // Implementation would combine volume definitions\n let content = '';\n const components: {id: string; name: string; contribution: string}[] = [];\n const variables = context?.variables || {};\n \n // If no volume prompts but we have volumes in context, create from context\n if (prompts.length === 0 && context?.volumes?.length) {\n content = this.generateVolumesFromContext(context);\n components.push({\n id: 'generated-volumes',\n name: 'Generated Volumes',\n contribution: content\n });\n } else if (prompts.length === 0) {\n // Default volume if nothing provided\n content = ` app-data:\\n`;\n components.push({\n id: 'default-volume',\n name: 'Default Volume',\n contribution: content\n });\n } else {\n // Otherwise use the prompts\n for (const prompt of prompts) {\n const result = await this.promptService.applyTemplate(prompt.id, variables);\n content += result.content + '\\n';\n components.push({\n id: prompt.id,\n name: prompt.name,\n contribution: result.content\n });\n }\n }\n \n return {\n content: content.trim(),\n components,\n appliedVariables: variables\n };\n }\n \n /**\n * Generate volume definitions from context\n * @param context Docker Compose context\n * @returns Generated volume YAML\n */\n private generateVolumesFromContext(context: DockerComposeContext): string {\n let volumesYaml = '';\n \n for (const volume of context.volumes || []) {\n volumesYaml += ` ${volume.name}:\\n`;\n if (volume.driver) {\n volumesYaml += ` driver: ${volume.driver}\\n`;\n }\n if (volume.external) {\n volumesYaml += ` external: true\\n`;\n }\n volumesYaml += '\\n';\n }\n \n return volumesYaml;\n }\n \n /**\n * Helper method to combine deployment prompts\n * @param prompts Deployment prompts\n * @param context Docker Compose context\n * @returns Combined result for deployment\n */\n private async combineDeployment(prompts: Prompt[], context?: DockerComposeContext): Promise<CombinedPromptResult> {\n // Implementation would combine deployment commands\n let content = '';\n const components: {id: string; name: string; contribution: string}[] = [];\n const variables = context?.variables || {};\n \n // If no deployment prompts, generate default commands\n if (prompts.length === 0) {\n const projectName = variables.project_name || 'myproject';\n const env = context?.environment || 'development';\n \n content = `# Start all services\ndocker-compose -p ${projectName} -f docker-compose.${env}.yml up -d\n\n# View service logs\ndocker-compose -p ${projectName} -f docker-compose.${env}.yml logs -f\n\n# Scale specific services\ndocker-compose -p ${projectName} -f docker-compose.${env}.yml up -d --scale service_name=3\n\n# Stop all services\ndocker-compose -p ${projectName} -f docker-compose.${env}.yml down\n\n# Stop 
and remove volumes\ndocker-compose -p ${projectName} -f docker-compose.${env}.yml down -v`;\n \n components.push({\n id: 'default-deployment',\n name: 'Default Deployment Commands',\n contribution: content\n });\n } else {\n // Otherwise use the prompts\n for (const prompt of prompts) {\n const result = await this.promptService.applyTemplate(prompt.id, variables);\n content += result.content + '\\n\\n';\n components.push({\n id: prompt.id,\n name: prompt.name,\n contribution: result.content\n });\n }\n }\n \n return {\n content: content.trim(),\n components,\n appliedVariables: variables\n };\n }\n \n /**\n * Extract individual service configurations from combined YAML\n * @param servicesYaml Combined services YAML\n * @returns Object with service name keys and configuration values\n */\n private extractServiceConfigurations(servicesYaml: string): Record<string, string> {\n const services: Record<string, string> = {};\n const serviceBlocks = servicesYaml.split(/^\\s{2}[^\\s]+:/gm);\n \n // Skip the first empty block if it exists\n const startIndex = serviceBlocks[0].trim() === '' ? 1 : 0;\n \n for (let i = startIndex; i < serviceBlocks.length; i++) {\n const block = serviceBlocks[i];\n const nameMatch = block.match(/^\\s*([^\\s:]+)\\s*$/m);\n \n if (nameMatch && nameMatch[1]) {\n const serviceName = nameMatch[1];\n services[serviceName] = block.trim();\n }\n }\n \n return services;\n }\n \n /**\n * Gets Docker Compose prompt suggestions\n * @param category Optional category to filter by\n * @param context Current Docker Compose context to inform suggestions\n * @returns Array of prompt suggestions for Docker Compose configurations\n */\n async getPromptSuggestions(category?: string, context?: DockerComposeContext): Promise<PromptSuggestion[]> {\n // Implementation would suggest prompts based on the current Docker context\n // For example, if using PostgreSQL, suggest corresponding service templates\n // This is a placeholder for demonstration\n \n const hasDatabase = context?.services?.some(s => \n s.type === 'database' || \n s.image?.includes('postgres') || \n s.image?.includes('mysql') || \n s.image?.includes('mongo'));\n \n const hasMCP = context?.services?.some(s => \n s.name.includes('mcp') || \n s.type === 'mcp');\n \n return [\n {\n id: 'docker-containerization-guide',\n name: 'Docker Containerization Guide',\n relevance: 100,\n compatibleWith: ['docker-compose-database-service', 'docker-compose-mcp-service'],\n reason: 'Provides the Docker containerization foundation'\n },\n {\n id: 'docker-compose-database-service',\n name: 'Docker Compose Database Service',\n relevance: hasDatabase ? 100 : 70,\n compatibleWith: ['docker-containerization-guide', 'docker-compose-mcp-service'],\n reason: hasDatabase ? 'Required for database services in your composition' : 'Optional database service configuration'\n },\n {\n id: 'docker-compose-mcp-service',\n name: 'Docker Compose MCP Service',\n relevance: hasMCP ? 100 : 50,\n compatibleWith: ['docker-containerization-guide', 'docker-compose.postgres'],\n reason: hasMCP ? 'Required for MCP services in your composition' : 'Optional MCP service configuration'\n },\n {\n id: 'docker-compose-networking',\n name: 'Docker Compose Networking',\n relevance: 80,\n compatibleWith: ['docker-containerization-guide'],\n reason: 'Advanced networking configuration for your services'\n },\n {\n id: 'docker-compose-deployment',\n name: 'Docker Compose Deployment',\n relevance: context?.environment === 'production' ? 
100 : 70,\n compatibleWith: ['docker-containerization-guide'],\n reason: 'Deployment strategies for your Docker Compose applications'\n }\n ];\n }\n \n /**\n * Validates if the prompts can be combined for Docker Compose configurations\n * @param promptIds Array of prompt IDs to validate\n * @returns Validation result with any issues specific to Docker Compose\n */\n async validateCombination(promptIds: string[]): Promise<CombinationValidationResult> {\n // Implementation would validate that the prompts make sense for Docker Compose\n // For example, ensuring there are no conflicting service definitions\n // This is a placeholder for demonstration\n \n const prompts = await Promise.all(promptIds.map(id => this.promptService.getPrompt(id)));\n \n // Check for Docker container prompt\n const hasContainer = prompts.some(p => p.tags?.includes('docker') || p.tags?.includes('containerization'));\n if (!hasContainer) {\n return {\n isValid: false,\n issues: [{\n promptId: '',\n issue: 'Missing Docker containerization prompt',\n severity: 'error',\n suggestion: 'Add a Docker containerization prompt, such as docker-containerization-guide'\n }],\n suggestions: [{\n promptIds: [...promptIds, 'docker-containerization-guide'],\n reason: 'Docker containerization is required for Docker Compose configurations'\n }]\n };\n }\n \n // In a real implementation, would do more validation specific to Docker Compose\n \n return {\n isValid: true\n };\n }\n \n /**\n * Creates a saved Docker Compose workflow\n * @param name Name for the new workflow\n * @param promptIds Component prompt IDs\n * @param config Configuration for the combination\n * @returns The created Docker Compose workflow\n */\n async saveWorkflow(name: string, promptIds: string[], config: WorkflowConfig): Promise<SavedWorkflow> {\n // Implementation would save a Docker Compose workflow\n // This is a placeholder for demonstration\n \n return {\n id: `docker-compose-workflow-${Date.now()}`,\n name,\n promptIds,\n config,\n createdAt: new Date().toISOString(),\n updatedAt: new Date().toISOString(),\n version: 1,\n category: 'docker-compose',\n tags: ['docker', 'compose', 'deployment']\n };\n }\n \n /**\n * Loads a previously saved Docker Compose workflow\n * @param workflowId ID of the saved workflow\n * @returns The loaded Docker Compose workflow\n */\n async loadWorkflow(workflowId: string): Promise<SavedWorkflow> {\n // Implementation would load a Docker Compose workflow\n // This is a placeholder for demonstration\n \n throw new Error(`Workflow ${workflowId} not found or not implemented yet`);\n }\n}\n\n/**\n * Usage Examples\n * \n * ```typescript\n * // Creating a combiner\n * const promptService = new PromptService(storageAdapter);\n * const dockerCombiner = new DockerComposePromptCombiner(promptService);\n * \n * // Getting prompt suggestions for Docker Compose\n * const suggestions = await dockerCombiner.getPromptSuggestions('services', {\n * environment: 'production',\n * services: [\n * {\n * name: 'web',\n * type: 'frontend',\n * image: 'nginx:alpine',\n * ports: ['80:80']\n * },\n * {\n * name: 'api',\n * type: 'backend',\n * image: 'node:14-alpine',\n * ports: ['3000:3000'],\n * dependencies: ['db']\n * },\n * {\n * name: 'db',\n * type: 'database',\n * image: 'postgres:13',\n * volumes: ['postgres-data:/var/lib/postgresql/data']\n * }\n * ],\n * composeVersion: '3.8'\n * });\n * \n * // Combining prompts for Docker Compose\n * const result = await dockerCombiner.combinePrompts([\n * 'docker-containerization-guide',\n * 
'docker-compose-database-service'\n * ], {\n * variables: {\n * project_name: 'My Awesome Project',\n * service_name: 'api'\n * },\n * environment: 'production',\n * services: [\n * {\n * name: 'web',\n * type: 'frontend',\n * image: 'nginx:alpine',\n * ports: ['80:80']\n * },\n * {\n * name: 'api',\n * type: 'backend',\n * image: 'node:14-alpine',\n * ports: ['3000:3000'],\n * dependencies: ['db']\n * },\n * {\n * name: 'db',\n * type: 'database',\n * image: 'postgres:13',\n * volumes: ['postgres-data:/var/lib/postgresql/data']\n * }\n * ],\n * composeVersion: '3.8'\n * });\n * \n * // Using the specialized result properties\n * console.log(result.composeConfiguration); // Get the complete Docker Compose configuration\n * console.log(result.serviceConfigurations['db']); // Get just the database service configuration\n * console.log(result.deploymentCommands); // Get the deployment commands\n * ```\n */\n\n// ============================\n// Extension Guidelines\n// ============================\n\n/**\n * When extending DockerComposePromptCombiner, consider:\n * \n * 1. Adding support for specific service types (e.g., web, backend, database, cache)\n * 2. Enhancing the context with more Docker-specific properties\n * 3. Adding support for more complex network and volume configurations\n * 4. Implementing advanced health check configurations\n * 5. Adding support for Docker Swarm mode configurations\n * 6. {{additional_extension_guidelines}}\n */",
6 | "isTemplate": true,
7 | "variables": [
8 | "project_name",
9 | "additional_docker_context",
10 | "additional_docker_results",
11 | "additional_extension_guidelines"
12 | ],
13 | "tags": [
14 | "devops",
15 | "docker",
16 | "docker-compose",
17 | "orchestration",
18 | "deployment"
19 | ],
20 | "category": "devops",
21 | "createdAt": "2024-08-08T17:30:00.000Z",
22 | "updatedAt": "2024-08-08T17:30:00.000Z",
23 | "version": 1
24 | }
```
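The template's `nextSteps` map onto the standard Docker Compose CLI; validating with `docker-compose config` before deploying is the one step not already covered by the generated deployment commands. A minimal sketch, assuming the generated configuration is saved as `docker-compose.production.yml` under the project name `myproject`:

```bash
# Validate the generated configuration (catches YAML and compose schema errors)
docker-compose -p myproject -f docker-compose.production.yml config

# Deploy in the background, follow logs, scale one service, then tear down
docker-compose -p myproject -f docker-compose.production.yml up -d
docker-compose -p myproject -f docker-compose.production.yml logs -f
docker-compose -p myproject -f docker-compose.production.yml up -d --scale api=3
docker-compose -p myproject -f docker-compose.production.yml down
```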
--------------------------------------------------------------------------------
/scripts/setup_orchestrator.sh:
--------------------------------------------------------------------------------
```bash
1 | #!/usr/bin/env bash
2 |
3 | # Complete project orchestration setup for Cursor on Linux.
4 | # - Installs core tooling (git, curl, jq, Node via nvm, Python, Podman)
5 | # - Configures .cursor MCP servers, rules, tools, hooks, deeplinks
6 | # - Scaffolds background agents and webhooks (FastAPI)
7 | # - Adds GitHub Actions workflows (CI, docs, code review)
8 | # - Generates multi-language templates (MCP servers: Py/TS/C++, client, web, AWS, Docker, devcontainer,
9 | # C++ ESP32, C++ with Conan, Android Kotlin containerized builder)
10 | # - Safe to run multiple times; idempotent where possible
11 |
12 | set -Eeuo pipefail
13 | IFS=$'\n\t'
14 |
15 | SCRIPT_NAME="$(basename "$0")"
16 | START_TS="$(date -u +%Y-%m-%dT%H:%M:%SZ)"
17 |
18 | # ----------------------------- Utilities ------------------------------------
19 |
20 | log() { printf "[setup][%s] %s\n" "$(date +%H:%M:%S)" "$*"; }
21 | warn() { printf "\033[33m[warn]\033[0m %s\n" "$*"; }
22 | err() { printf "\033[31m[err ]\033[0m %s\n" "$*" 1>&2; }
23 | die() { err "$*"; exit 1; }
24 | have_cmd() { command -v "$1" >/dev/null 2>&1; }
25 | json_get() { jq -r "$1" "$CONFIG_PATH" 2>/dev/null; }
26 |
27 | SUDO=""
28 | if [ "${EUID:-$(id -u)}" -ne 0 ]; then
29 | if have_cmd sudo; then SUDO="sudo -n"; else warn "sudo not found; attempting without elevated privileges"; fi
30 | fi
31 |
32 | require_or_install_pkg() {
33 | # Best-effort package installation across distros. Usage: require_or_install_pkg pkgname [cmd_to_check]
34 | local pkg="$1"; shift || true
35 | local check_cmd="${1:-}";
36 | if [ -n "$check_cmd" ] && have_cmd "$check_cmd"; then return 0; fi
37 | if [ -n "$check_cmd" ] && [ -x "$check_cmd" ]; then return 0; fi
38 |
39 | if [ -r /etc/os-release ]; then . /etc/os-release; fi
40 |
41 | if have_cmd apt-get; then
42 | $SUDO DEBIAN_FRONTEND=noninteractive apt-get update -y || true
43 | $SUDO DEBIAN_FRONTEND=noninteractive apt-get install -y "$pkg" || warn "apt-get install $pkg failed"
44 | elif have_cmd dnf; then
45 | $SUDO dnf install -y "$pkg" || warn "dnf install $pkg failed"
46 | elif have_cmd yum; then
47 | $SUDO yum install -y "$pkg" || warn "yum install $pkg failed"
48 | elif have_cmd pacman; then
49 | $SUDO pacman -Sy --noconfirm "$pkg" || warn "pacman install $pkg failed"
50 | elif have_cmd zypper; then
51 | $SUDO zypper install -y "$pkg" || warn "zypper install $pkg failed"
52 | elif have_cmd apk; then
53 | $SUDO apk add --no-cache "$pkg" || warn "apk add $pkg failed"
54 | else
55 | warn "Unsupported package manager; could not install $pkg"
56 | fi
57 | }
58 |
59 | install_base_packages() {
60 | log "Installing base packages (git, curl, wget, jq, build tools, Python, Podman)"
61 |
62 | # Core CLI
63 | require_or_install_pkg git git
64 | require_or_install_pkg curl curl
65 | require_or_install_pkg wget wget
66 | require_or_install_pkg jq jq
67 | require_or_install_pkg ca-certificates
68 | require_or_install_pkg unzip unzip
69 | require_or_install_pkg tar tar
70 | require_or_install_pkg xz-utils || true
71 |
72 | # Build toolchain
73 | if have_cmd apt-get; then
74 | require_or_install_pkg build-essential
75 | require_or_install_pkg cmake cmake
76 | require_or_install_pkg ninja-build ninja
77 | require_or_install_pkg pkg-config pkg-config
78 | else
79 | require_or_install_pkg gcc gcc || true
80 | require_or_install_pkg g++ g++ || true
81 | require_or_install_pkg cmake cmake || true
82 | require_or_install_pkg ninja ninja || true
83 | require_or_install_pkg pkgconf pkgconf || require_or_install_pkg pkg-config pkg-config || true
84 | fi
85 |
86 | # Python
87 | require_or_install_pkg python3 python3
88 | if have_cmd apt-get; then
89 | require_or_install_pkg python3-venv || true
90 | require_or_install_pkg python3-pip || true
91 | require_or_install_pkg pipx || true
92 | else
93 | require_or_install_pkg python3-pip || true
94 | fi
95 |
96 | # Containers: honor container.prefer from JSON
97 | local prefer="$(json_get '.container.prefer')"
98 | if [ "$prefer" = "docker" ]; then
99 | # Prefer Docker engine when requested
100 | if have_cmd apt-get; then
101 | require_or_install_pkg docker.io docker || warn "Failed to install docker.io"
102 | elif have_cmd dnf; then
103 | require_or_install_pkg docker docker || require_or_install_pkg moby-engine docker || true
104 | fi
105 | if have_cmd docker; then
106 | log "Docker is available"
107 | else
108 | warn "Docker not available; container preference is docker but installation may have failed"
109 | fi
110 | else
111 | # Default/Podman path
112 | require_or_install_pkg podman podman || warn "Podman not installed; containerization features may be limited"
113 | # Provide docker compatibility shim if docker client missing and podman exists
114 | if have_cmd podman && ! have_cmd docker; then
115 | if [ ! -x /usr/local/bin/docker ]; then
116 | log "Creating docker -> podman shim at /usr/local/bin/docker"
117 | echo '#!/usr/bin/env bash' | $SUDO tee /usr/local/bin/docker >/dev/null
118 | echo 'exec podman "$@"' | $SUDO tee -a /usr/local/bin/docker >/dev/null
119 | $SUDO chmod +x /usr/local/bin/docker || true
120 | fi
121 | fi
122 | fi
123 |
124 | # Optional: docker-compose replacement for Podman
125 | if ! have_cmd podman-compose; then
126 | if have_cmd pipx; then pipx install podman-compose || true; fi
127 | fi
128 | }
129 |
130 | install_node_via_nvm() {
131 | if have_cmd node; then
132 | log "Node.js present: $(node -v)"
133 | else
134 | log "Installing Node.js (LTS) via nvm"
135 | export NVM_DIR="$HOME/.nvm"
136 | mkdir -p "$NVM_DIR"
137 | curl -fsSL https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash
138 | # shellcheck source=/dev/null
139 | [ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh"
140 | nvm install --lts
141 | nvm alias default 'lts/*'
142 | fi
143 |
144 | if have_cmd corepack; then
145 | corepack enable || true
146 | corepack prepare pnpm@latest --activate || true
147 | else
148 | npm install -g pnpm yarn || true
149 | fi
150 | }
151 |
152 | install_python_tools() {
153 | log "Ensuring pip, pipx, and venv availability"
154 | if ! have_cmd pipx; then
155 | if have_cmd python3; then python3 -m pip install --user -q pipx || true; fi
156 | if have_cmd pipx; then pipx ensurepath || true; fi
157 | fi
158 | }
159 |
160 | create_dirs() {
161 | WORKSPACE_ROOT="${WORKSPACE_ROOT:-/workspace}"
162 | if [ ! -d "$WORKSPACE_ROOT" ]; then WORKSPACE_ROOT="$(pwd)"; fi
163 | export WORKSPACE_ROOT
164 | log "Using WORKSPACE_ROOT=$WORKSPACE_ROOT"
165 |
166 | mkdir -p "$WORKSPACE_ROOT/.cursor/tools" \
167 | "$WORKSPACE_ROOT/.cursor/hooks" \
168 | "$WORKSPACE_ROOT/.cursor/webhooks" \
169 | "$WORKSPACE_ROOT/.cursor/agents" \
170 | "$WORKSPACE_ROOT/scripts" \
171 | "$WORKSPACE_ROOT/servers/python-mcp" \
172 | "$WORKSPACE_ROOT/servers/ts-mcp/src" \
173 | "$WORKSPACE_ROOT/servers/cpp-mcp/src" \
174 | "$WORKSPACE_ROOT/client/mcp-client/src" \
175 | "$WORKSPACE_ROOT/services/background-agent" \
176 | "$WORKSPACE_ROOT/.github/workflows" \
177 | "$WORKSPACE_ROOT/templates" \
178 | "$WORKSPACE_ROOT/infra/aws/terraform" \
179 | "$WORKSPACE_ROOT/devcontainer" \
180 | "$WORKSPACE_ROOT/web"
181 | }
182 |
183 | write_file() {
184 | # write_file <path> <mode> <<'EOF'
185 | local path="$1"; shift
186 | local mode="$1"; shift
187 | $SUDO mkdir -p "$(dirname "$path")"
188 | # shellcheck disable=SC2094
189 | cat >"$path"
190 | $SUDO chmod "$mode" "$path" || true
191 | }
192 |
193 | setup_cursor_configs() {
194 | if [ "$(json_get '.enable.cursorConfigs')" != "true" ]; then return 0; fi
195 | log "Writing .cursor configuration (MCP servers, tools, rules, hooks, deeplinks)"
196 |
197 | # Build MCP servers config from JSON flags
198 | local mcpEntries="{}"
199 | local py_port
200 | py_port="$(json_get '.ports.pyMcpPort')" || py_port="8765"
201 | if [ -z "$py_port" ] || [ "$py_port" = "null" ]; then py_port="8765"; fi
202 | local ts_port
203 | ts_port="$(json_get '.ports.tsMcpPort')" || ts_port="8766"
204 | if [ -z "$ts_port" ] || [ "$ts_port" = "null" ]; then ts_port="8766"; fi
205 | if [ "$(json_get '.enable.pythonMcp')" = "true" ]; then
206 | mcpEntries=$(jq --arg port "$py_port" '. + {"mcp-python": {"command":"bash","args":["-lc","python3 servers/python-mcp/main.py"],"env":{"PY_MCP_PORT": $port}}}' <<<"$mcpEntries")
207 | fi
208 | if [ "$(json_get '.enable.tsMcp')" = "true" ]; then
209 | mcpEntries=$(jq --arg port "$ts_port" '. + {"mcp-typescript": {"command":"bash","args":["-lc","node servers/ts-mcp/dist/index.js"],"env":{"TS_MCP_PORT": $port}}}' <<<"$mcpEntries")
210 | fi
211 | if [ "$(json_get '.enable.cppMcp')" = "true" ]; then
212 | mcpEntries=$(jq '. + {"mcp-cpp": {"command":"bash","args":["-lc","./servers/cpp-mcp/build/mcp_server"],"env":{}}}' <<<"$mcpEntries")
213 | fi
214 | jq -n --argjson servers "$mcpEntries" '{servers: $servers}' > "$WORKSPACE_ROOT/.cursor/mcp.json"
215 |
216 | if [ "$(json_get '.tools.largeCodebases.enabled')" = "true" ]; then
217 | jq -n \
218 | --argjson enabled true \
219 | --argjson exclude "$(json_get '.tools.largeCodebases.exclude')" \
220 | --argjson maxFileSizeMB "$(json_get '.tools.largeCodebases.maxFileSizeMB')" \
221 | '{enabled: $enabled, exclude: $exclude, maxFileSizeMB: $maxFileSizeMB}' \
222 | > "$WORKSPACE_ROOT/.cursor/tools/large-codebases.json"
223 | fi
224 |
225 | if [ "$(json_get '.tools.mermaid.enabled')" = "true" ]; then
226 | jq -n '{enabled: true}' > "$WORKSPACE_ROOT/.cursor/tools/mermaid-diagrams.json"
227 | fi
228 |
229 | write_file "$WORKSPACE_ROOT/.cursor/rules.json" 0644 <<'JSON'
230 | {
231 | "rules": [
232 | {"pattern": "**/*.py", "instructions": "Follow PEP 8, PEP 257. Use type hints."},
233 | {"pattern": "**/*.{ts,tsx}", "instructions": "Use strict TypeScript. Prefer explicit types for exports."},
234 | {"pattern": "**/*.cpp", "instructions": "Use modern C++17+, CMake targets, no raw new/delete."}
235 | ]
236 | }
237 | JSON
238 |
239 | write_file "$WORKSPACE_ROOT/.cursor/hooks/agent-hooks.json" 0644 <<'JSON'
240 | {
241 | "preTask": [
242 | {"type": "log", "level": "info", "message": "Starting task"}
243 | ],
244 | "postTask": [
245 | {"type": "log", "level": "info", "message": "Task complete"}
246 | ]
247 | }
248 | JSON
249 |
250 | local agent_host="$(json_get '.backgroundAgent.host')"
251 | local agent_port="$(json_get '.backgroundAgent.port')"
252 | jq -n \
253 | --arg url "http://${agent_host}:${agent_port}/webhooks/cursor" \
254 | '{webhooks: [{name: "background-agent", url: $url, events: ["task.created","task.updated","run.completed"]}]}' \
255 | > "$WORKSPACE_ROOT/.cursor/webhooks/webhooks.json"
256 |
257 | jq -n \
258 | --arg baseUrl "http://${agent_host}:${agent_port}" \
259 | '{agents: [{name: "default", baseUrl: $baseUrl, enabled: true}]}' \
260 | > "$WORKSPACE_ROOT/.cursor/agents/background-agent.json"
261 | }
262 |
263 | scaffold_python_mcp_server() {
264 | if [ "$(json_get '.enable.pythonMcp')" != "true" ]; then return 0; fi
265 | log "Scaffolding Python MCP server template"
266 | write_file "$WORKSPACE_ROOT/servers/python-mcp/pyproject.toml" 0644 <<'TOML'
267 | [build-system]
268 | requires = ["setuptools>=68", "wheel"]
269 | build-backend = "setuptools.build_meta"
270 |
271 | [project]
272 | name = "python-mcp-server"
273 | version = "0.1.0"
274 | description = "Example Python MCP server"
275 | requires-python = ">=3.9"
276 | dependencies = [
277 | "fastapi>=0.115.0",
278 | "uvicorn[standard]>=0.30.0"
279 | ]
280 | TOML
281 |
282 | write_file "$WORKSPACE_ROOT/servers/python-mcp/main.py" 0755 <<'PY'
283 | #!/usr/bin/env python3
284 | """
285 | Minimal Python MCP server placeholder.
286 |
287 | This is a scaffold to be adapted to a real MCP implementation. It starts a FastAPI
288 | HTTP app to demonstrate a background service that could receive MCP-like requests.
289 |
290 | Replace with an actual MCP server according to the latest Cursor MCP docs.
291 | """
292 | from __future__ import annotations
293 |
294 | import os
295 | from fastapi import FastAPI
296 | from fastapi.responses import JSONResponse
297 |
298 | app = FastAPI(title="Python MCP Server (placeholder)")
299 |
300 |
301 | @app.get("/health")
302 | def health() -> dict:
303 | return {"status": "ok"}
304 |
305 |
306 | @app.get("/")
307 | def root() -> JSONResponse:
308 | return JSONResponse({"message": "Replace with real MCP protocol server."})
309 |
310 |
311 | if __name__ == "__main__":
312 | import uvicorn
313 |
314 | port = int(os.environ.get("PY_MCP_PORT", "8765"))
315 | uvicorn.run(app, host="127.0.0.1", port=port)
316 | PY
317 | }
318 |
319 | scaffold_ts_mcp_server() {
320 | if [ "$(json_get '.enable.tsMcp')" != "true" ]; then return 0; fi
321 | log "Scaffolding TypeScript MCP server template"
322 | write_file "$WORKSPACE_ROOT/servers/ts-mcp/package.json" 0644 <<'JSON'
323 | {
324 | "name": "ts-mcp-server",
325 | "version": "0.1.0",
326 | "private": true,
327 | "type": "module",
328 | "scripts": {
329 | "build": "tsc -p .",
330 | "start": "node dist/index.js"
331 | },
332 | "devDependencies": {
333 | "typescript": "^5.6.3"
334 | },
335 | "dependencies": {}
336 | }
337 | JSON
338 |
339 | write_file "$WORKSPACE_ROOT/servers/ts-mcp/tsconfig.json" 0644 <<'JSON'
340 | {
341 | "compilerOptions": {
342 | "target": "ES2022",
343 | "module": "ES2022",
344 | "moduleResolution": "bundler",
345 | "outDir": "dist",
346 | "rootDir": "src",
347 | "strict": true,
348 | "esModuleInterop": true
349 | }
350 | }
351 | JSON
352 |
353 | write_file "$WORKSPACE_ROOT/servers/ts-mcp/src/index.ts" 0644 <<'TS'
354 | /*
355 | Minimal TypeScript MCP server placeholder.
356 | Replace with a real MCP server per Cursor docs.
357 | */
358 | import http from "node:http";
359 |
360 | const server = http.createServer((_req, res) => {
361 | res.statusCode = 200;
362 | res.setHeader("Content-Type", "application/json");
363 | res.end(JSON.stringify({ message: "Replace with real MCP server." }));
364 | });
365 |
366 | const port = Number(process.env.TS_MCP_PORT ?? 8766);
367 | server.listen(port, "127.0.0.1", () => {
368 | // eslint-disable-next-line no-console
369 | console.log(`TS MCP placeholder listening on http://127.0.0.1:${port}`);
370 | });
371 | TS
372 | }
373 |
374 | scaffold_cpp_mcp_server() {
375 | if [ "$(json_get '.enable.cppMcp')" != "true" ]; then return 0; fi
376 | log "Scaffolding C++ MCP server template"
377 | write_file "$WORKSPACE_ROOT/servers/cpp-mcp/CMakeLists.txt" 0644 <<'CMAKE'
378 | cmake_minimum_required(VERSION 3.16)
379 | project(cpp_mcp_server LANGUAGES CXX)
380 |
381 | set(CMAKE_CXX_STANDARD 17)
382 | set(CMAKE_CXX_STANDARD_REQUIRED ON)
383 |
384 | add_executable(mcp_server src/main.cpp)
385 | CMAKE
386 |
387 | write_file "$WORKSPACE_ROOT/servers/cpp-mcp/src/main.cpp" 0644 <<'CPP'
388 | #include <iostream>
389 |
390 | int main() {
391 | std::cout << "Replace with real MCP server (C++)." << std::endl;
392 | return 0;
393 | }
394 | CPP
395 |
396 | write_file "$WORKSPACE_ROOT/servers/cpp-mcp/build.sh" 0755 <<'SH'
397 | #!/usr/bin/env bash
398 | set -euo pipefail
399 | cd "$(dirname "$0")"
400 | rm -rf build && mkdir -p build && cd build
401 | cmake .. -G Ninja || cmake ..
402 | cmake --build . --config Release
403 | SH
404 | }
405 |
406 | scaffold_mcp_client_ts() {
407 | if [ "$(json_get '.enable.mcpClient')" != "true" ]; then return 0; fi
408 | log "Scaffolding MCP client (TypeScript) template"
409 | write_file "$WORKSPACE_ROOT/client/mcp-client/package.json" 0644 <<'JSON'
410 | {
411 | "name": "mcp-client",
412 | "version": "0.1.0",
413 | "private": true,
414 | "type": "module",
415 | "scripts": {
416 | "build": "tsc -p .",
417 | "start": "node dist/index.js"
418 | },
419 | "devDependencies": {
420 | "typescript": "^5.6.3"
421 | },
422 | "dependencies": {}
423 | }
424 | JSON
425 |
426 | write_file "$WORKSPACE_ROOT/client/mcp-client/tsconfig.json" 0644 <<'JSON'
427 | {
428 | "compilerOptions": {
429 | "target": "ES2022",
430 | "module": "ES2022",
431 | "moduleResolution": "bundler",
432 | "outDir": "dist",
433 | "rootDir": "src",
434 | "strict": true,
435 | "esModuleInterop": true
436 | }
437 | }
438 | JSON
439 |
440 | write_file "$WORKSPACE_ROOT/client/mcp-client/src/index.ts" 0644 <<'TS'
441 | /* Placeholder MCP client. Replace with actual MCP client logic. */
442 | // eslint-disable-next-line no-console
443 | console.log("MCP client placeholder");
444 | TS
445 | }
446 |
447 | scaffold_background_agent() {
448 | if [ "$(json_get '.enable.backgroundAgent')" != "true" ]; then return 0; fi
449 | log "Scaffolding background agent + webhooks (FastAPI)"
450 | write_file "$WORKSPACE_ROOT/services/background-agent/requirements.txt" 0644 <<'REQ'
451 | fastapi>=0.115.0
452 | uvicorn[standard]>=0.30.0
453 | REQ
454 |
455 | write_file "$WORKSPACE_ROOT/services/background-agent/main.py" 0755 <<'PY'
456 | #!/usr/bin/env python3
457 | """
458 | Background agent + webhook receiver (FastAPI).
459 |
460 | Endpoints:
461 | - GET /health
462 | - POST /webhooks/cursor (generic webhook entry)
463 | - GET /api/events (example endpoint)
464 |
465 | Run locally:
466 | uvicorn main:app --host 127.0.0.1 --port 8088 --reload
467 | """
468 | from __future__ import annotations
469 |
470 | from typing import Any, Dict
471 | from fastapi import FastAPI, Request
472 | from fastapi.responses import JSONResponse
473 |
474 | app = FastAPI(title="Background Agent")
475 |
476 |
477 | @app.get("/health")
478 | def health() -> dict:
479 | return {"status": "ok"}
480 |
481 |
482 | @app.post("/webhooks/cursor")
483 | async def cursor_webhook(request: Request) -> JSONResponse:
484 | payload: Dict[str, Any] = await request.json()
485 | # TODO: handle events appropriately
486 | return JSONResponse({"received": True, "keys": list(payload.keys())})
487 |
488 |
489 | @app.get("/api/events")
490 | def list_events() -> dict:
491 | return {"events": []}
492 | PY
493 |
494 | write_file "$WORKSPACE_ROOT/scripts/run-background-agent.sh" 0755 <<'SH'
495 | #!/usr/bin/env bash
496 | set -euo pipefail
497 | cd "$(dirname "$0")/.." || exit 1
498 | CONFIG_PATH="${CONFIG_PATH:-$(pwd)/config/project_orchestration.json}"
499 | HOST="127.0.0.1"
500 | PORT="8088"
501 | if command -v jq >/dev/null 2>&1 && [ -f "$CONFIG_PATH" ]; then
502 | HOST="$(jq -r '.backgroundAgent.host' "$CONFIG_PATH" 2>/dev/null || echo "$HOST")"
503 | PORT="$(jq -r '.backgroundAgent.port' "$CONFIG_PATH" 2>/dev/null || echo "$PORT")"
504 | fi
505 | python3 -m venv .venv 2>/dev/null || true
506 | . .venv/bin/activate
507 | python -m pip install -U pip
508 | pip install -r services/background-agent/requirements.txt
509 | exec uvicorn services.background-agent.main:app --host "$HOST" --port "$PORT" --reload
510 | SH
511 | }
512 |
513 | scaffold_github_actions() {
514 | if [ "$(json_get '.enable.githubActions')" != "true" ]; then return 0; fi
515 | log "Adding GitHub Actions workflows (CI, docs, code review)"
516 |
517 | write_file "$WORKSPACE_ROOT/.github/workflows/ci.yml" 0644 <<'YML'
518 | name: CI
519 |
520 | on:
521 | push:
522 | branches: [ main ]
523 | pull_request:
524 | branches: [ main ]
525 |
526 | jobs:
527 | build-test:
528 | runs-on: ubuntu-latest
529 | steps:
530 | - uses: actions/checkout@v4
531 | - name: Setup Node
532 | uses: actions/setup-node@v4
533 | with:
534 | node-version: 'lts/*'
535 | - name: Setup Python
536 | uses: actions/setup-python@v5
537 | with:
538 | python-version: '3.11'
539 | - name: Install Node deps (if any)
540 | run: |
541 | if [ -f servers/ts-mcp/package.json ]; then npm ci --prefix servers/ts-mcp || true; fi
542 | if [ -f client/mcp-client/package.json ]; then npm ci --prefix client/mcp-client || true; fi
543 | - name: Install Python deps (if any)
544 | run: |
545 | python -m pip install -U pip
546 | if [ -f servers/python-mcp/pyproject.toml ]; then pip install -e servers/python-mcp || true; fi
547 | if [ -f services/background-agent/requirements.txt ]; then pip install -r services/background-agent/requirements.txt || true; fi
548 | - name: Build TS artifacts
549 | run: |
550 | if [ -f servers/ts-mcp/package.json ]; then npm --prefix servers/ts-mcp run build || true; fi
551 | if [ -f client/mcp-client/package.json ]; then npm --prefix client/mcp-client run build || true; fi
552 | - name: C++ build
553 | run: |
554 | if [ -f servers/cpp-mcp/CMakeLists.txt ]; then bash servers/cpp-mcp/build.sh || true; fi
555 | YML
556 |
557 | write_file "$WORKSPACE_ROOT/.github/workflows/docs.yml" 0644 <<'YML'
558 | name: Update Docs
559 |
560 | on:
561 | workflow_dispatch:
562 | push:
563 | paths: [ 'docs/**' ]
564 |
565 | jobs:
566 | build-docs:
567 | runs-on: ubuntu-latest
568 | steps:
569 | - uses: actions/checkout@v4
570 | - name: Upload docs artifact
571 | uses: actions/upload-artifact@v4
572 | with:
573 | name: site-docs
574 | path: docs/
575 | YML
576 |
577 | write_file "$WORKSPACE_ROOT/.github/workflows/code-review.yml" 0644 <<'YML'
578 | name: Code Review
579 |
580 | on:
581 | pull_request:
582 | branches: [ main ]
583 |
584 | jobs:
585 | pr_checks:
586 | runs-on: ubuntu-latest
587 | steps:
588 | - uses: actions/checkout@v4
589 | - name: Lint Python
590 | run: |
591 | python -m pip install ruff || true
592 | ruff check . || true
593 | - name: Type-check (mypy)
594 | run: |
595 | python -m pip install mypy || true
596 | mypy . || true
597 | YML
598 | }
599 |
600 | scaffold_devcontainer_and_containerfiles() {
601 | if [ "$(json_get '.enable.devcontainer')" != "true" ]; then return 0; fi
602 | log "Scaffolding devcontainer, Containerfile, Docker deployment"
603 |
604 | # Containerfile (Podman)
605 | write_file "$WORKSPACE_ROOT/Containerfile" 0644 <<'DOCKER'
606 | FROM alpine:3.20
607 | RUN apk add --no-cache ca-certificates bash && update-ca-certificates
608 | WORKDIR /app
609 | COPY . /app
610 | CMD ["/bin/sh"]
611 | DOCKER
612 |
613 | # Devcontainer config
614 | write_file "$WORKSPACE_ROOT/devcontainer/devcontainer.json" 0644 <<'JSON'
615 | {
616 | "name": "Cursor Orchestrator Dev",
617 | "image": "mcr.microsoft.com/devcontainers/base:debian",
618 | "features": {
619 | "ghcr.io/devcontainers/features/node:1": {
620 | "version": "lts"
621 | },
622 | "ghcr.io/devcontainers/features/python:1": {
623 | "version": "3.11"
624 | }
625 | },
626 | "postCreateCommand": "bash scripts/post-create.sh"
627 | }
628 | JSON
629 |
630 | write_file "$WORKSPACE_ROOT/scripts/post-create.sh" 0755 <<'SH'
631 | #!/usr/bin/env bash
632 | set -euo pipefail
633 | echo "Devcontainer post-create hook"
634 | SH
635 |
636 | # Docker deployment example
637 | local agent_port
638 | agent_port="$(json_get '.backgroundAgent.port')"
639 | if [ -z "$agent_port" ] || [ "$agent_port" = "null" ]; then agent_port="8088"; fi
640 | write_file "$WORKSPACE_ROOT/Dockerfile" 0644 <<DOCKER
641 | FROM python:3.11-slim
642 | WORKDIR /app
643 | COPY services/background-agent/requirements.txt /app/requirements.txt
644 | RUN pip install -U pip && pip install -r /app/requirements.txt
645 | COPY services/background-agent /app/services/background-agent
646 | EXPOSE ${agent_port}
647 | CMD ["uvicorn", "services.background-agent.main:app", "--host", "0.0.0.0", "--port", "${agent_port}"]
648 | DOCKER
649 |
650 | write_file "$WORKSPACE_ROOT/compose.yaml" 0644 <<YAML
651 | services:
652 | background-agent:
653 | build: .
654 | ports:
655 | - "${agent_port}:${agent_port}"
656 | restart: unless-stopped
657 | YAML
658 | }
659 |
660 | scaffold_aws_terraform() {
661 | if [ "$(json_get '.enable.awsTerraform')" != "true" ]; then return 0; fi
662 | log "Scaffolding AWS Terraform template"
663 | write_file "$WORKSPACE_ROOT/infra/aws/terraform/main.tf" 0644 <<'TF'
664 | terraform {
665 | required_version = ">= 1.3.0"
666 | required_providers {
667 | aws = {
668 | source = "hashicorp/aws"
669 | version = ">= 5.0"
670 | }
671 | }
672 | }
673 |
674 | provider "aws" {
675 | region = var.aws_region
676 | }
677 |
678 | variable "aws_region" {
679 | type = string
680 | default = "us-east-1"
681 | }
682 | TF
683 |
684 | write_file "$WORKSPACE_ROOT/infra/aws/terraform/.gitignore" 0644 <<'IGN'
685 | .terraform/
686 | terraform.tfstate*
687 | IGN
688 | }
689 |
690 | scaffold_web_and_mcp_json() {
691 | if [ "$(json_get '.enable.webAndMcp')" != "true" ]; then return 0; fi
692 | log "Scaffolding web project and browser tools mcp.json"
693 | write_file "$WORKSPACE_ROOT/web/README.md" 0644 <<'MD'
694 | # Web Dev + Testing
695 |
696 | Use this directory for web development. Add e2e tests and tools.
697 | MD
698 |
699 | write_file "$WORKSPACE_ROOT/web/mcp.json" 0644 <<'JSON'
700 | {
701 | "tools": [
702 | "large-codebases",
703 | "mermaid-diagrams"
704 | ]
705 | }
706 | JSON
707 | }
708 |
709 | scaffold_cpp_conan_and_esp32() {
710 | if [ "$(json_get '.enable.cppConan')" != "true" ] && [ "$(json_get '.enable.esp32')" != "true" ]; then return 0; fi
711 | log "Scaffolding C++ with Conan and ESP32 container template"
712 | # C++ + Conan
713 | mkdir -p "$WORKSPACE_ROOT/cpp-conan/src"
714 | write_file "$WORKSPACE_ROOT/cpp-conan/conanfile.txt" 0644 <<'TXT'
715 | [requires]
716 |
717 | [generators]
718 | CMakeDeps
719 | CMakeToolchain
720 | TXT
721 |
722 | write_file "$WORKSPACE_ROOT/cpp-conan/CMakeLists.txt" 0644 <<'CMAKE'
723 | cmake_minimum_required(VERSION 3.16)
724 | project(cpp_conan_example LANGUAGES CXX)
725 | set(CMAKE_CXX_STANDARD 17)
726 | add_executable(app src/main.cpp)
727 | CMAKE
728 |
729 | write_file "$WORKSPACE_ROOT/cpp-conan/src/main.cpp" 0644 <<'CPP'
730 | #include <iostream>
731 | int main() { std::cout << "Hello from Conan template" << std::endl; }
732 | CPP
733 |
734 | # ESP32 containerized builder (placeholder)
735 | if [ "$(json_get '.enable.esp32')" = "true" ]; then
736 | write_file "$WORKSPACE_ROOT/esp32/Dockerfile" 0644 <<'DOCKER'
737 | FROM espressif/idf:latest
738 | WORKDIR /workspace
739 | CMD ["/bin/bash"]
740 | DOCKER
741 |
742 | write_file "$WORKSPACE_ROOT/esp32/README.md" 0644 <<'MD'
743 | # ESP32 Containerized Builder
744 |
745 | Use the `espressif/idf` image to build ESP32 targets without local SDK installs.
746 | MD
747 | fi
748 | }
749 |
750 | scaffold_android_kotlin_container() {
751 | if [ "$(json_get '.enable.android')" != "true" ]; then return 0; fi
752 | log "Scaffolding Android Kotlin containerized builder (minimal)"
753 | write_file "$WORKSPACE_ROOT/android/Dockerfile" 0644 <<'DOCKER'
754 | FROM eclipse-temurin:17-jdk
755 | ENV ANDROID_SDK_ROOT=/opt/android-sdk
756 | RUN mkdir -p "$ANDROID_SDK_ROOT" /opt/tools \
757 | && apt-get update && apt-get install -y --no-install-recommends unzip wget ca-certificates && rm -rf /var/lib/apt/lists/* \
758 | && wget -q https://dl.google.com/android/repository/commandlinetools-linux-10406996_latest.zip -O /opt/tools/cmdline-tools.zip \
759 | && unzip -q /opt/tools/cmdline-tools.zip -d /opt/tools \
760 | && mkdir -p $ANDROID_SDK_ROOT/cmdline-tools/latest \
762 | && mv /opt/tools/cmdline-tools/* $ANDROID_SDK_ROOT/cmdline-tools/latest/ \
762 | && yes | $ANDROID_SDK_ROOT/cmdline-tools/latest/bin/sdkmanager --licenses || true
763 | DOCKER
764 |
765 | write_file "$WORKSPACE_ROOT/android/README.md" 0644 <<'MD'
766 | # Android Native Kotlin (Containerized Builder)
767 |
768 | Container image with JDK and Android SDK command-line tools.
769 | MD
770 | }
771 |
772 | scaffold_readme() {
773 | log "Writing project README with usage instructions"
774 | write_file "$WORKSPACE_ROOT/README.md" 0644 <<'MD'
775 | # Cursor Orchestration Environment
776 |
777 | This repository was initialized by `scripts/setup_orchestrator.sh`.
778 |
779 | Key components:
780 | - `.cursor/` MCP config, tools, rules, hooks, webhooks, agents
781 | - `servers/` MCP server templates for Python, TypeScript, C++
782 | - `services/background-agent` FastAPI webhook receiver
783 | - `.github/workflows/` CI workflows
784 | - `devcontainer/`, `Containerfile`, `Dockerfile`, `compose.yaml`
785 | - `infra/aws/terraform` starter
786 | - `cpp-conan`, `esp32`, `android` templates
787 |
788 | Getting started:
789 | ```bash
790 | # Run background agent
791 | bash scripts/run-background-agent.sh
792 |
793 | # Build TS server
794 | npm --prefix servers/ts-mcp run build
795 |
796 | # Build C++ server
797 | bash servers/cpp-mcp/build.sh
798 | ```
799 | MD
800 | }
801 |
802 | attempt_install_cursor_cli() {
803 | log "Attempting to install Cursor-related CLIs (best-effort)"
804 | if have_cmd npm; then
805 | # These package names are placeholders; if they don't exist, the step is skipped gracefully.
806 | npm install -g @cursor/cli 2>/dev/null || true
807 | npm install -g cursor-agent 2>/dev/null || true
808 | npm install -g @cursor/agent 2>/dev/null || true
809 | else
810 | warn "npm not available; skipping Cursor CLI attempts"
811 | fi
812 | }
813 |
814 | main() {
815 | log "Starting $SCRIPT_NAME at $START_TS"
816 | WORKSPACE_ROOT="${WORKSPACE_ROOT:-/workspace}"
817 | if [ ! -d "$WORKSPACE_ROOT" ]; then WORKSPACE_ROOT="$(pwd)"; fi
818 | export WORKSPACE_ROOT
819 | CONFIG_PATH="${CONFIG_PATH:-$WORKSPACE_ROOT/config/project_orchestration.json}"
820 | if ! have_cmd jq; then
821 | require_or_install_pkg jq jq || die "jq is required to parse JSON config"
822 | fi
823 | if [ ! -f "$CONFIG_PATH" ]; then
824 | warn "Config not found at $CONFIG_PATH; creating defaults"
825 | mkdir -p "$(dirname "$CONFIG_PATH")"
826 | cat >"$CONFIG_PATH" <<'JSON'
827 | {
828 | "enable": {"cursorConfigs": true, "pythonMcp": true, "tsMcp": true, "cppMcp": true, "mcpClient": true, "backgroundAgent": true, "githubActions": true, "devcontainer": true, "awsTerraform": true, "webAndMcp": true, "cppConan": true, "esp32": true, "android": true},
829 | "ports": {"pyMcpPort": 8765, "tsMcpPort": 8766},
830 | "backgroundAgent": {"host": "127.0.0.1", "port": 8088},
831 | "tools": {"largeCodebases": {"enabled": true, "exclude": ["node_modules", "build", "dist", ".git", ".venv", "venv"], "maxFileSizeMB": 5}, "mermaid": {"enabled": true}},
832 | "container": {"prefer": "podman"},
833 | "runtime": {"node": "lts/*", "python": "3.11"}
834 | }
835 | JSON
836 | fi
837 | create_dirs
838 | install_base_packages
839 | install_node_via_nvm
840 | install_python_tools
841 |
842 | setup_cursor_configs
843 | scaffold_python_mcp_server
844 | scaffold_ts_mcp_server
845 | scaffold_cpp_mcp_server
846 | scaffold_mcp_client_ts
847 | scaffold_background_agent
848 | scaffold_github_actions
849 | scaffold_devcontainer_and_containerfiles
850 | scaffold_aws_terraform
851 | scaffold_web_and_mcp_json
852 | scaffold_cpp_conan_and_esp32
853 | scaffold_android_kotlin_container
854 | scaffold_readme
855 | attempt_install_cursor_cli
856 |
857 | log "Setup complete. Next steps:"
858 | cat <<'STEPS'
859 | - Review .cursor configs in .cursor/
860 | - Start background agent: bash scripts/run-background-agent.sh
861 | - Build TS server: npm --prefix servers/ts-mcp run build && node servers/ts-mcp/dist/index.js
862 | - Build C++ server: bash servers/cpp-mcp/build.sh && ./servers/cpp-mcp/build/mcp_server
863 | - Optionally run container: docker compose up --build
864 | STEPS
865 | }
866 |
867 | main "$@"
868 |
869 |
```
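Usage sketch for the setup script above (the workspace path and the `jq` tweak are illustrative, not part of the script): it honors `WORKSPACE_ROOT` and `CONFIG_PATH` from the environment, writes a default `config/project_orchestration.json` on first run, and can be re-run after editing that config.

```bash
# Point the orchestrator at a custom workspace (it falls back to $(pwd)
# if the directory does not exist, so create it first).
export WORKSPACE_ROOT="$HOME/projects/orchestrator-demo"
export CONFIG_PATH="$WORKSPACE_ROOT/config/project_orchestration.json"
mkdir -p "$WORKSPACE_ROOT"

bash scripts/setup_orchestrator.sh   # first run writes a default config

# Example tweak: prefer Docker over Podman and skip the ESP32 template,
# then re-run (the script is designed to be re-runnable).
jq '.container.prefer = "docker" | .enable.esp32 = false' "$CONFIG_PATH" > "$CONFIG_PATH.tmp" \
  && mv "$CONFIG_PATH.tmp" "$CONFIG_PATH"
bash scripts/setup_orchestrator.sh
```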