#
tokens: 49556/50000 12/1179 files (page 11/21)
lines: off (toggle) GitHub
raw markdown copy
This is page 11 of 21. Use http://codebase.md/sparesparrow/mcp-project-orchestrator?lines=false&page={x} to view the full context.

# Directory Structure

```
├── .cursorrules
├── .env.example
├── .github
│   └── workflows
│       ├── build.yml
│       ├── ci-cd.yml
│       ├── ci.yml
│       ├── deploy.yml
│       ├── ecosystem-monitor.yml
│       ├── fan-out-orchestrator.yml
│       └── release.yml
├── .gitignore
├── .pre-commit-config.yaml
├── AUTOMOTIVE_CAMERA_SYSTEM_SUMMARY.md
├── automotive-camera-system
│   ├── docs
│   │   └── IMPLEMENTACE_CS.md
│   └── README.md
├── AWS_MCP_IMPLEMENTATION_SUMMARY.md
├── AWS_MCP_QUICKSTART.md
├── AWS_SIP_TRUNK_DEPLOYMENT_COMPLETE.md
├── aws-sip-trunk
│   ├── .gitignore
│   ├── config
│   │   ├── extensions.conf.j2
│   │   └── pjsip.conf.j2
│   ├── DEPLOYMENT_SUMMARY.md
│   ├── docs
│   │   ├── DEPLOYMENT.md
│   │   └── TROUBLESHOOTING.md
│   ├── PROJECT_INDEX.md
│   ├── pyproject.toml
│   ├── QUICKSTART.md
│   ├── README.md
│   ├── scripts
│   │   ├── deploy-asterisk-aws.sh
│   │   └── user-data.sh
│   ├── terraform
│   │   ├── ec2.tf
│   │   ├── main.tf
│   │   ├── monitoring.tf
│   │   ├── networking.tf
│   │   ├── outputs.tf
│   │   ├── storage.tf
│   │   ├── terraform.tfvars.example
│   │   └── variables.tf
│   ├── tests
│   │   └── test_sip_connectivity.py
│   └── VERIFICATION_CHECKLIST.md
├── CLAUDE.md
├── component_templates.json
├── conanfile.py
├── config
│   ├── default.json
│   └── project_orchestration.json
├── Containerfile
├── cursor-templates
│   └── openssl
│       ├── linux-dev.mdc.jinja2
│       └── shared.mdc.jinja2
├── data
│   └── prompts
│       └── templates
│           ├── advanced-multi-server-template.json
│           ├── analysis-assistant.json
│           ├── analyze-mermaid-diagram.json
│           ├── architecture-design-assistant.json
│           ├── code-diagram-documentation-creator.json
│           ├── code-refactoring-assistant.json
│           ├── code-review-assistant.json
│           ├── collaborative-development.json
│           ├── consolidated-interfaces-template.json
│           ├── could-you-interpret-the-assumed-applicat.json
│           ├── data-analysis-template.json
│           ├── database-query-assistant.json
│           ├── debugging-assistant.json
│           ├── development-system-prompt-zcna0.json
│           ├── development-system-prompt.json
│           ├── development-workflow.json
│           ├── docker-compose-prompt-combiner.json
│           ├── docker-containerization-guide.json
│           ├── docker-mcp-servers-orchestration.json
│           ├── foresight-assistant.json
│           ├── generate-different-types-of-questions-ab.json
│           ├── generate-mermaid-diagram.json
│           ├── image-1-describe-the-icon-in-one-sen.json
│           ├── initialize-project-setup-for-a-new-micro.json
│           ├── install-dependencies-build-run-test.json
│           ├── mcp-code-generator.json
│           ├── mcp-integration-assistant.json
│           ├── mcp-resources-explorer.json
│           ├── mcp-resources-integration.json
│           ├── mcp-server-configurator.json
│           ├── mcp-server-dev-prompt-combiner.json
│           ├── mcp-server-integration-template.json
│           ├── mcp-template-system.json
│           ├── mermaid-analysis-expert.json
│           ├── mermaid-class-diagram-generator.json
│           ├── mermaid-diagram-generator.json
│           ├── mermaid-diagram-modifier.json
│           ├── modify-mermaid-diagram.json
│           ├── monorepo-migration-guide.json
│           ├── multi-resource-context.json
│           ├── project-analysis-assistant.json
│           ├── prompt-combiner-interface.json
│           ├── prompt-templates.json
│           ├── repository-explorer.json
│           ├── research-assistant.json
│           ├── sequential-data-analysis.json
│           ├── solid-code-analysis-visualizer.json
│           ├── task-list-helper-8ithy.json
│           ├── template-based-mcp-integration.json
│           ├── templates.json
│           ├── test-prompt.json
│           └── you-are-limited-to-respond-yes-or-no-onl.json
├── docs
│   ├── AWS_MCP.md
│   ├── AWS.md
│   ├── CONAN.md
│   └── integration.md
├── elevenlabs-agents
│   ├── agent-prompts.json
│   └── README.md
├── IMPLEMENTATION_STATUS.md
├── integration_plan.md
├── LICENSE
├── MANIFEST.in
├── mcp-project-orchestrator
│   └── openssl
│       ├── .github
│       │   └── workflows
│       │       └── validate-cursor-config.yml
│       ├── conanfile.py
│       ├── CURSOR_DEPLOYMENT_POLISH.md
│       ├── cursor-rules
│       │   ├── mcp.json.jinja2
│       │   ├── prompts
│       │   │   ├── fips-compliance.md.jinja2
│       │   │   ├── openssl-coding-standards.md.jinja2
│       │   │   └── pr-review.md.jinja2
│       │   └── rules
│       │       ├── ci-linux.mdc.jinja2
│       │       ├── linux-dev.mdc.jinja2
│       │       ├── macos-dev.mdc.jinja2
│       │       ├── shared.mdc.jinja2
│       │       └── windows-dev.mdc.jinja2
│       ├── docs
│       │   └── cursor-configuration-management.md
│       ├── examples
│       │   └── example-workspace
│       │       ├── .cursor
│       │       │   ├── mcp.json
│       │       │   └── rules
│       │       │       ├── linux-dev.mdc
│       │       │       └── shared.mdc
│       │       ├── .gitignore
│       │       ├── CMakeLists.txt
│       │       ├── conanfile.py
│       │       ├── profiles
│       │       │   ├── linux-gcc-debug.profile
│       │       │   └── linux-gcc-release.profile
│       │       ├── README.md
│       │       └── src
│       │           ├── crypto_utils.cpp
│       │           ├── crypto_utils.h
│       │           └── main.cpp
│       ├── IMPLEMENTATION_SUMMARY.md
│       ├── mcp_orchestrator
│       │   ├── __init__.py
│       │   ├── cli.py
│       │   ├── conan_integration.py
│       │   ├── cursor_config.py
│       │   ├── cursor_deployer.py
│       │   ├── deploy_cursor.py
│       │   ├── env_config.py
│       │   ├── platform_detector.py
│       │   └── yaml_validator.py
│       ├── openssl-cursor-example-workspace-20251014_121133.zip
│       ├── pyproject.toml
│       ├── README.md
│       ├── requirements.txt
│       ├── scripts
│       │   └── create_example_workspace.py
│       ├── setup.py
│       ├── test_deployment.py
│       └── tests
│           ├── __init__.py
│           ├── test_cursor_deployer.py
│           └── test_template_validation.py
├── printcast-agent
│   ├── .env.example
│   ├── config
│   │   └── asterisk
│   │       └── extensions.conf
│   ├── Containerfile
│   ├── docker-compose.yml
│   ├── pyproject.toml
│   ├── README.md
│   ├── scripts
│   │   └── docker-entrypoint.sh
│   ├── src
│   │   ├── integrations
│   │   │   ├── __init__.py
│   │   │   ├── asterisk.py
│   │   │   ├── content.py
│   │   │   ├── delivery.py
│   │   │   ├── elevenlabs.py
│   │   │   └── printing.py
│   │   ├── mcp_server
│   │   │   ├── __init__.py
│   │   │   ├── main.py
│   │   │   └── server.py
│   │   └── orchestration
│   │       ├── __init__.py
│   │       └── workflow.py
│   └── tests
│       └── test_mcp_server.py
├── project_orchestration.json
├── project_templates.json
├── pyproject.toml
├── README.md
├── REFACTORING_COMPLETED.md
├── REFACTORING_RECOMMENDATIONS.md
├── requirements.txt
├── scripts
│   ├── archive
│   │   ├── init_claude_test.sh
│   │   ├── init_postgres.sh
│   │   ├── start_mcp_servers.sh
│   │   └── test_claude_desktop.sh
│   ├── consolidate_mermaid.py
│   ├── consolidate_prompts.py
│   ├── consolidate_resources.py
│   ├── consolidate_templates.py
│   ├── INSTRUCTIONS.md
│   ├── README.md
│   ├── setup_aws_mcp.sh
│   ├── setup_mcp.sh
│   ├── setup_orchestrator.sh
│   ├── setup_project.py
│   └── test_mcp.sh
├── src
│   └── mcp_project_orchestrator
│       ├── __init__.py
│       ├── __main__.py
│       ├── aws_mcp.py
│       ├── cli
│       │   └── __init__.py
│       ├── cli.py
│       ├── commands
│       │   └── openssl_cli.py
│       ├── core
│       │   ├── __init__.py
│       │   ├── base.py
│       │   ├── config.py
│       │   ├── exceptions.py
│       │   ├── fastmcp.py
│       │   ├── logging.py
│       │   └── managers.py
│       ├── cursor_deployer.py
│       ├── ecosystem_monitor.py
│       ├── fan_out_orchestrator.py
│       ├── fastmcp.py
│       ├── mcp-py
│       │   ├── AggregateVersions.py
│       │   ├── CustomBashTool.py
│       │   ├── FileAnnotator.py
│       │   ├── mcp-client.py
│       │   ├── mcp-server.py
│       │   ├── MermaidDiagramGenerator.py
│       │   ├── NamingAgent.py
│       │   └── solid-analyzer-agent.py
│       ├── mermaid
│       │   ├── __init__.py
│       │   ├── generator.py
│       │   ├── mermaid_orchestrator.py
│       │   ├── renderer.py
│       │   ├── templates
│       │   │   ├── AbstractFactory-diagram.json
│       │   │   ├── Adapter-diagram.json
│       │   │   ├── Analyze_Mermaid_Diagram.json
│       │   │   ├── Builder-diagram.json
│       │   │   ├── Chain-diagram.json
│       │   │   ├── Code_Diagram_Documentation_Creator.json
│       │   │   ├── Command-diagram.json
│       │   │   ├── Decorator-diagram.json
│       │   │   ├── Facade-diagram.json
│       │   │   ├── Factory-diagram.json
│       │   │   ├── flowchart
│       │   │   │   ├── AbstractFactory-diagram.json
│       │   │   │   ├── Adapter-diagram.json
│       │   │   │   ├── Analyze_Mermaid_Diagram.json
│       │   │   │   ├── Builder-diagram.json
│       │   │   │   ├── Chain-diagram.json
│       │   │   │   ├── Code_Diagram_Documentation_Creator.json
│       │   │   │   ├── Command-diagram.json
│       │   │   │   ├── Decorator-diagram.json
│       │   │   │   ├── Facade-diagram.json
│       │   │   │   ├── Factory-diagram.json
│       │   │   │   ├── Generate_Mermaid_Diagram.json
│       │   │   │   ├── generated_diagram.json
│       │   │   │   ├── integration.json
│       │   │   │   ├── Iterator-diagram.json
│       │   │   │   ├── Mediator-diagram.json
│       │   │   │   ├── Memento-diagram.json
│       │   │   │   ├── Mermaid_Analysis_Expert.json
│       │   │   │   ├── Mermaid_Class_Diagram_Generator.json
│       │   │   │   ├── Mermaid_Diagram_Generator.json
│       │   │   │   ├── Mermaid_Diagram_Modifier.json
│       │   │   │   ├── Modify_Mermaid_Diagram.json
│       │   │   │   ├── Observer-diagram.json
│       │   │   │   ├── Prototype-diagram.json
│       │   │   │   ├── Proxy-diagram.json
│       │   │   │   ├── README.json
│       │   │   │   ├── Singleton-diagram.json
│       │   │   │   ├── State-diagram.json
│       │   │   │   ├── Strategy-diagram.json
│       │   │   │   ├── TemplateMethod-diagram.json
│       │   │   │   ├── theme_dark.json
│       │   │   │   ├── theme_default.json
│       │   │   │   ├── theme_pastel.json
│       │   │   │   ├── theme_vibrant.json
│       │   │   │   └── Visitor-diagram.json
│       │   │   ├── Generate_Mermaid_Diagram.json
│       │   │   ├── generated_diagram.json
│       │   │   ├── index.json
│       │   │   ├── integration.json
│       │   │   ├── Iterator-diagram.json
│       │   │   ├── Mediator-diagram.json
│       │   │   ├── Memento-diagram.json
│       │   │   ├── Mermaid_Analysis_Expert.json
│       │   │   ├── Mermaid_Class_Diagram_Generator.json
│       │   │   ├── Mermaid_Diagram_Generator.json
│       │   │   ├── Mermaid_Diagram_Modifier.json
│       │   │   ├── Modify_Mermaid_Diagram.json
│       │   │   ├── Observer-diagram.json
│       │   │   ├── Prototype-diagram.json
│       │   │   ├── Proxy-diagram.json
│       │   │   ├── README.json
│       │   │   ├── Singleton-diagram.json
│       │   │   ├── State-diagram.json
│       │   │   ├── Strategy-diagram.json
│       │   │   ├── TemplateMethod-diagram.json
│       │   │   ├── theme_dark.json
│       │   │   ├── theme_default.json
│       │   │   ├── theme_pastel.json
│       │   │   ├── theme_vibrant.json
│       │   │   └── Visitor-diagram.json
│       │   └── types.py
│       ├── project_orchestration.py
│       ├── prompt_manager
│       │   ├── __init__.py
│       │   ├── loader.py
│       │   ├── manager.py
│       │   └── template.py
│       ├── prompts
│       │   ├── __dirname.json
│       │   ├── __image_1___describe_the_icon_in_one_sen___.json
│       │   ├── __init__.py
│       │   ├── __type.json
│       │   ├── _.json
│       │   ├── _DEFAULT_OPEN_DELIMITER.json
│       │   ├── _emojiRegex.json
│       │   ├── _UUID_CHARS.json
│       │   ├── a.json
│       │   ├── A.json
│       │   ├── Aa.json
│       │   ├── aAnnotationPadding.json
│       │   ├── absoluteThresholdGroup.json
│       │   ├── add.json
│       │   ├── ADDITIONAL_PROPERTY_FLAG.json
│       │   ├── Advanced_Multi-Server_Integration_Template.json
│       │   ├── allOptionsList.json
│       │   ├── analysis
│       │   │   ├── Data_Analysis_Template.json
│       │   │   ├── index.json
│       │   │   ├── Mermaid_Analysis_Expert.json
│       │   │   ├── Sequential_Data_Analysis_with_MCP_Integration.json
│       │   │   └── SOLID_Code_Analysis_Visualizer.json
│       │   ├── Analysis_Assistant.json
│       │   ├── Analyze_Mermaid_Diagram.json
│       │   ├── ANDROID_EVERGREEN_FIRST.json
│       │   ├── ANSI_ESCAPE_BELL.json
│       │   ├── architecture
│       │   │   ├── index.json
│       │   │   └── PromptCombiner_Interface.json
│       │   ├── Architecture_Design_Assistant.json
│       │   ├── argsTag.json
│       │   ├── ARROW.json
│       │   ├── assistant
│       │   │   ├── Analysis_Assistant.json
│       │   │   ├── Architecture_Design_Assistant.json
│       │   │   ├── Code_Refactoring_Assistant.json
│       │   │   ├── Code_Review_Assistant.json
│       │   │   ├── Database_Query_Assistant.json
│       │   │   ├── Debugging_Assistant.json
│       │   │   ├── Foresight_Assistant.json
│       │   │   ├── index.json
│       │   │   ├── MCP_Integration_Assistant.json
│       │   │   ├── Project_Analysis_Assistant.json
│       │   │   └── Research_Assistant.json
│       │   ├── astralRange.json
│       │   ├── at.json
│       │   ├── authorization_endpoint.json
│       │   ├── b.json
│       │   ├── BABELIGNORE_FILENAME.json
│       │   ├── BACKSLASH.json
│       │   ├── backupId.json
│       │   ├── BANG.json
│       │   ├── BASE64_MAP.json
│       │   ├── baseFlags.json
│       │   ├── Basic_Template.json
│       │   ├── bgModel.json
│       │   ├── bignum.json
│       │   ├── blockKeywordsStr.json
│       │   ├── BOMChar.json
│       │   ├── boundary.json
│       │   ├── brackets.json
│       │   ├── BROWSER_VAR.json
│       │   ├── bt.json
│       │   ├── BUILTIN.json
│       │   ├── BULLET.json
│       │   ├── c.json
│       │   ├── C.json
│       │   ├── CACHE_VERSION.json
│       │   ├── cacheControl.json
│       │   ├── cacheProp.json
│       │   ├── category.py
│       │   ├── CHANGE_EVENT.json
│       │   ├── CHAR_CODE_0.json
│       │   ├── chars.json
│       │   ├── cjsPattern.json
│       │   ├── cKeywords.json
│       │   ├── classForPercent.json
│       │   ├── classStr.json
│       │   ├── clientFirstMessageBare.json
│       │   ├── cmd.json
│       │   ├── Code_Diagram_Documentation_Creator.json
│       │   ├── Code_Refactoring_Assistant.json
│       │   ├── Code_Review_Assistant.json
│       │   ├── code.json
│       │   ├── coding
│       │   │   ├── __dirname.json
│       │   │   ├── _.json
│       │   │   ├── _DEFAULT_OPEN_DELIMITER.json
│       │   │   ├── _emojiRegex.json
│       │   │   ├── _UUID_CHARS.json
│       │   │   ├── a.json
│       │   │   ├── A.json
│       │   │   ├── aAnnotationPadding.json
│       │   │   ├── absoluteThresholdGroup.json
│       │   │   ├── add.json
│       │   │   ├── ADDITIONAL_PROPERTY_FLAG.json
│       │   │   ├── allOptionsList.json
│       │   │   ├── ANDROID_EVERGREEN_FIRST.json
│       │   │   ├── ANSI_ESCAPE_BELL.json
│       │   │   ├── argsTag.json
│       │   │   ├── ARROW.json
│       │   │   ├── astralRange.json
│       │   │   ├── at.json
│       │   │   ├── authorization_endpoint.json
│       │   │   ├── BABELIGNORE_FILENAME.json
│       │   │   ├── BACKSLASH.json
│       │   │   ├── BANG.json
│       │   │   ├── BASE64_MAP.json
│       │   │   ├── baseFlags.json
│       │   │   ├── bgModel.json
│       │   │   ├── bignum.json
│       │   │   ├── blockKeywordsStr.json
│       │   │   ├── BOMChar.json
│       │   │   ├── boundary.json
│       │   │   ├── brackets.json
│       │   │   ├── BROWSER_VAR.json
│       │   │   ├── bt.json
│       │   │   ├── BUILTIN.json
│       │   │   ├── BULLET.json
│       │   │   ├── c.json
│       │   │   ├── C.json
│       │   │   ├── CACHE_VERSION.json
│       │   │   ├── cacheControl.json
│       │   │   ├── cacheProp.json
│       │   │   ├── CHANGE_EVENT.json
│       │   │   ├── CHAR_CODE_0.json
│       │   │   ├── chars.json
│       │   │   ├── cjsPattern.json
│       │   │   ├── cKeywords.json
│       │   │   ├── classForPercent.json
│       │   │   ├── classStr.json
│       │   │   ├── clientFirstMessageBare.json
│       │   │   ├── cmd.json
│       │   │   ├── code.json
│       │   │   ├── colorCode.json
│       │   │   ├── comma.json
│       │   │   ├── command.json
│       │   │   ├── configJsContent.json
│       │   │   ├── connectionString.json
│       │   │   ├── cssClassStr.json
│       │   │   ├── currentBoundaryParse.json
│       │   │   ├── d.json
│       │   │   ├── data.json
│       │   │   ├── DATA.json
│       │   │   ├── dataWebpackPrefix.json
│       │   │   ├── debug.json
│       │   │   ├── decodeStateVectorV2.json
│       │   │   ├── DEFAULT_DELIMITER.json
│       │   │   ├── DEFAULT_DIAGRAM_DIRECTION.json
│       │   │   ├── DEFAULT_JS_PATTERN.json
│       │   │   ├── DEFAULT_LOG_TARGET.json
│       │   │   ├── defaultHelpOpt.json
│       │   │   ├── defaultHost.json
│       │   │   ├── deferY18nLookupPrefix.json
│       │   │   ├── DELIM.json
│       │   │   ├── delimiter.json
│       │   │   ├── DEPRECATION.json
│       │   │   ├── destMain.json
│       │   │   ├── DID_NOT_THROW.json
│       │   │   ├── direction.json
│       │   │   ├── displayValue.json
│       │   │   ├── DNS.json
│       │   │   ├── doc.json
│       │   │   ├── DOCUMENTATION_NOTE.json
│       │   │   ├── DOT.json
│       │   │   ├── DOTS.json
│       │   │   ├── dummyCompoundId.json
│       │   │   ├── e.json
│       │   │   ├── E.json
│       │   │   ├── earlyHintsLink.json
│       │   │   ├── elide.json
│       │   │   ├── EMPTY.json
│       │   │   ├── end.json
│       │   │   ├── endpoint.json
│       │   │   ├── environment.json
│       │   │   ├── ERR_CODE.json
│       │   │   ├── errMessage.json
│       │   │   ├── errMsg.json
│       │   │   ├── ERROR_MESSAGE.json
│       │   │   ├── error.json
│       │   │   ├── ERROR.json
│       │   │   ├── ERRORCLASS.json
│       │   │   ├── errorMessage.json
│       │   │   ├── es6Default.json
│       │   │   ├── ESC.json
│       │   │   ├── Escapable.json
│       │   │   ├── escapedChar.json
│       │   │   ├── escapeFuncStr.json
│       │   │   ├── escSlash.json
│       │   │   ├── ev.json
│       │   │   ├── event.json
│       │   │   ├── execaMessage.json
│       │   │   ├── EXPECTED_LABEL.json
│       │   │   ├── expected.json
│       │   │   ├── expectedString.json
│       │   │   ├── expression1.json
│       │   │   ├── EXTENSION.json
│       │   │   ├── f.json
│       │   │   ├── FAIL_TEXT.json
│       │   │   ├── FILE_BROWSER_FACTORY.json
│       │   │   ├── fill.json
│       │   │   ├── findPackageJson.json
│       │   │   ├── fnKey.json
│       │   │   ├── FORMAT.json
│       │   │   ├── formatted.json
│       │   │   ├── from.json
│       │   │   ├── fullpaths.json
│       │   │   ├── FUNC_ERROR_TEXT.json
│       │   │   ├── GenStateSuspendedStart.json
│       │   │   ├── GENSYNC_EXPECTED_START.json
│       │   │   ├── gutter.json
│       │   │   ├── h.json
│       │   │   ├── handlerFuncName.json
│       │   │   ├── HASH_UNDEFINED.json
│       │   │   ├── head.json
│       │   │   ├── helpMessage.json
│       │   │   ├── HINT_ARG.json
│       │   │   ├── HOOK_RETURNED_NOTHING_ERROR_MESSAGE.json
│       │   │   ├── i.json
│       │   │   ├── id.json
│       │   │   ├── identifier.json
│       │   │   ├── Identifier.json
│       │   │   ├── INDENT.json
│       │   │   ├── indentation.json
│       │   │   ├── index.json
│       │   │   ├── INDIRECTION_FRAGMENT.json
│       │   │   ├── input.json
│       │   │   ├── inputText.json
│       │   │   ├── insert.json
│       │   │   ├── insertPromptQuery.json
│       │   │   ├── INSPECT_MAX_BYTES.json
│       │   │   ├── intToCharMap.json
│       │   │   ├── IS_ITERABLE_SENTINEL.json
│       │   │   ├── IS_KEYED_SENTINEL.json
│       │   │   ├── isConfigType.json
│       │   │   ├── isoSentinel.json
│       │   │   ├── isSourceNode.json
│       │   │   ├── j.json
│       │   │   ├── JAKE_CMD.json
│       │   │   ├── JEST_GLOBAL_NAME.json
│       │   │   ├── JEST_GLOBALS_MODULE_NAME.json
│       │   │   ├── JSON_SYNTAX_CHAR.json
│       │   │   ├── json.json
│       │   │   ├── jsonType.json
│       │   │   ├── jupyter_namespaceObject.json
│       │   │   ├── JUPYTERLAB_DOCMANAGER_PLUGIN_ID.json
│       │   │   ├── k.json
│       │   │   ├── KERNEL_STATUS_ERROR_CLASS.json
│       │   │   ├── key.json
│       │   │   ├── l.json
│       │   │   ├── labelId.json
│       │   │   ├── LATEST_PROTOCOL_VERSION.json
│       │   │   ├── LETTERDASHNUMBER.json
│       │   │   ├── LF.json
│       │   │   ├── LIMIT_REPLACE_NODE.json
│       │   │   ├── logTime.json
│       │   │   ├── lstatkey.json
│       │   │   ├── lt.json
│       │   │   ├── m.json
│       │   │   ├── maliciousPayload.json
│       │   │   ├── mask.json
│       │   │   ├── match.json
│       │   │   ├── matchingDelim.json
│       │   │   ├── MAXIMUM_MESSAGE_SIZE.json
│       │   │   ├── mdcContent.json
│       │   │   ├── MERMAID_DOM_ID_PREFIX.json
│       │   │   ├── message.json
│       │   │   ├── messages.json
│       │   │   ├── meth.json
│       │   │   ├── minimatch.json
│       │   │   ├── MOCK_CONSTRUCTOR_NAME.json
│       │   │   ├── MOCKS_PATTERN.json
│       │   │   ├── moduleDirectory.json
│       │   │   ├── msg.json
│       │   │   ├── mtr.json
│       │   │   ├── multipartType.json
│       │   │   ├── n.json
│       │   │   ├── N.json
│       │   │   ├── name.json
│       │   │   ├── NATIVE_PLATFORM.json
│       │   │   ├── newUrl.json
│       │   │   ├── NM.json
│       │   │   ├── NO_ARGUMENTS.json
│       │   │   ├── NO_DIFF_MESSAGE.json
│       │   │   ├── NODE_MODULES.json
│       │   │   ├── nodeInternalPrefix.json
│       │   │   ├── nonASCIIidentifierStartChars.json
│       │   │   ├── nonKey.json
│       │   │   ├── NOT_A_DOT.json
│       │   │   ├── notCharacterOrDash.json
│       │   │   ├── notebookURL.json
│       │   │   ├── notSelector.json
│       │   │   ├── nullTag.json
│       │   │   ├── num.json
│       │   │   ├── NUMBER.json
│       │   │   ├── o.json
│       │   │   ├── O.json
│       │   │   ├── octChar.json
│       │   │   ├── octetStreamType.json
│       │   │   ├── operators.json
│       │   │   ├── out.json
│       │   │   ├── OUTSIDE_JEST_VM_PROTOCOL.json
│       │   │   ├── override.json
│       │   │   ├── p.json
│       │   │   ├── PACKAGE_FILENAME.json
│       │   │   ├── PACKAGE_JSON.json
│       │   │   ├── packageVersion.json
│       │   │   ├── paddedNumber.json
│       │   │   ├── page.json
│       │   │   ├── parseClass.json
│       │   │   ├── path.json
│       │   │   ├── pathExt.json
│       │   │   ├── pattern.json
│       │   │   ├── PatternBoolean.json
│       │   │   ├── pBuiltins.json
│       │   │   ├── pFloatForm.json
│       │   │   ├── pkg.json
│       │   │   ├── PLUGIN_ID_DOC_MANAGER.json
│       │   │   ├── plusChar.json
│       │   │   ├── PN_CHARS.json
│       │   │   ├── point.json
│       │   │   ├── prefix.json
│       │   │   ├── PRETTY_PLACEHOLDER.json
│       │   │   ├── property_prefix.json
│       │   │   ├── pubkey256.json
│       │   │   ├── Q.json
│       │   │   ├── qmark.json
│       │   │   ├── QO.json
│       │   │   ├── query.json
│       │   │   ├── querystringType.json
│       │   │   ├── queryText.json
│       │   │   ├── r.json
│       │   │   ├── R.json
│       │   │   ├── rangeStart.json
│       │   │   ├── re.json
│       │   │   ├── reI.json
│       │   │   ├── REQUIRED_FIELD_SYMBOL.json
│       │   │   ├── reserve.json
│       │   │   ├── resolvedDestination.json
│       │   │   ├── resolverDir.json
│       │   │   ├── responseType.json
│       │   │   ├── result.json
│       │   │   ├── ROOT_DESCRIBE_BLOCK_NAME.json
│       │   │   ├── ROOT_NAMESPACE_NAME.json
│       │   │   ├── ROOT_TASK_NAME.json
│       │   │   ├── route.json
│       │   │   ├── RUNNING_TEXT.json
│       │   │   ├── s.json
│       │   │   ├── SCHEMA_PATH.json
│       │   │   ├── se.json
│       │   │   ├── SEARCHABLE_CLASS.json
│       │   │   ├── secret.json
│       │   │   ├── selector.json
│       │   │   ├── SEMVER_SPEC_VERSION.json
│       │   │   ├── sensitiveHeaders.json
│       │   │   ├── sep.json
│       │   │   ├── separator.json
│       │   │   ├── SHAPE_STATE.json
│       │   │   ├── shape.json
│       │   │   ├── SHARED.json
│       │   │   ├── short.json
│       │   │   ├── side.json
│       │   │   ├── SNAPSHOT_VERSION.json
│       │   │   ├── SOURCE_MAPPING_PREFIX.json
│       │   │   ├── source.json
│       │   │   ├── sourceMapContent.json
│       │   │   ├── SPACE_SYMBOL.json
│       │   │   ├── SPACE.json
│       │   │   ├── sqlKeywords.json
│       │   │   ├── sranges.json
│       │   │   ├── st.json
│       │   │   ├── ST.json
│       │   │   ├── stack.json
│       │   │   ├── START_HIDING.json
│       │   │   ├── START_OF_LINE.json
│       │   │   ├── startNoTraversal.json
│       │   │   ├── STATES.json
│       │   │   ├── stats.json
│       │   │   ├── statSync.json
│       │   │   ├── storageStatus.json
│       │   │   ├── storageType.json
│       │   │   ├── str.json
│       │   │   ├── stringifiedObject.json
│       │   │   ├── stringPath.json
│       │   │   ├── stringResult.json
│       │   │   ├── stringTag.json
│       │   │   ├── strValue.json
│       │   │   ├── style.json
│       │   │   ├── SUB_NAME.json
│       │   │   ├── subkey.json
│       │   │   ├── SUBPROTOCOL.json
│       │   │   ├── SUITE_NAME.json
│       │   │   ├── symbolPattern.json
│       │   │   ├── symbolTag.json
│       │   │   ├── t.json
│       │   │   ├── T.json
│       │   │   ├── templateDir.json
│       │   │   ├── tempName.json
│       │   │   ├── text.json
│       │   │   ├── time.json
│       │   │   ├── titleSeparator.json
│       │   │   ├── tmpl.json
│       │   │   ├── tn.json
│       │   │   ├── toValue.json
│       │   │   ├── transform.json
│       │   │   ├── trustProxyDefaultSymbol.json
│       │   │   ├── typeArgumentsKey.json
│       │   │   ├── typeKey.json
│       │   │   ├── typeMessage.json
│       │   │   ├── typesRegistryPackageName.json
│       │   │   ├── u.json
│       │   │   ├── UNDEFINED.json
│       │   │   ├── unit.json
│       │   │   ├── UNMATCHED_SURROGATE_PAIR_REPLACE.json
│       │   │   ├── ur.json
│       │   │   ├── USAGE.json
│       │   │   ├── value.json
│       │   │   ├── Vr.json
│       │   │   ├── watchmanURL.json
│       │   │   ├── webkit.json
│       │   │   ├── xhtml.json
│       │   │   ├── XP_DEFAULT_PATHEXT.json
│       │   │   └── y.json
│       │   ├── Collaborative_Development_with_MCP_Integration.json
│       │   ├── colorCode.json
│       │   ├── comma.json
│       │   ├── command.json
│       │   ├── completionShTemplate.json
│       │   ├── configJsContent.json
│       │   ├── connectionString.json
│       │   ├── Consolidated_TypeScript_Interfaces_Template.json
│       │   ├── Could_you_interpret_the_assumed_applicat___.json
│       │   ├── cssClassStr.json
│       │   ├── currentBoundaryParse.json
│       │   ├── d.json
│       │   ├── Data_Analysis_Template.json
│       │   ├── data.json
│       │   ├── DATA.json
│       │   ├── Database_Query_Assistant.json
│       │   ├── dataWebpackPrefix.json
│       │   ├── debug.json
│       │   ├── Debugging_Assistant.json
│       │   ├── decodeStateVectorV2.json
│       │   ├── DEFAULT_DELIMITER.json
│       │   ├── DEFAULT_DIAGRAM_DIRECTION.json
│       │   ├── DEFAULT_INDENT.json
│       │   ├── DEFAULT_JS_PATTERN.json
│       │   ├── DEFAULT_LOG_TARGET.json
│       │   ├── defaultHelpOpt.json
│       │   ├── defaultHost.json
│       │   ├── deferY18nLookupPrefix.json
│       │   ├── DELIM.json
│       │   ├── delimiter.json
│       │   ├── DEPRECATION.json
│       │   ├── DESCENDING.json
│       │   ├── destMain.json
│       │   ├── development
│       │   │   ├── Collaborative_Development_with_MCP_Integration.json
│       │   │   ├── Consolidated_TypeScript_Interfaces_Template.json
│       │   │   ├── Development_Workflow.json
│       │   │   ├── index.json
│       │   │   ├── MCP_Server_Development_Prompt_Combiner.json
│       │   │   └── Monorepo_Migration_and_Code_Organization_Guide.json
│       │   ├── Development_System_Prompt.json
│       │   ├── Development_Workflow.json
│       │   ├── devops
│       │   │   ├── Docker_Compose_Prompt_Combiner.json
│       │   │   ├── Docker_Containerization_Guide.json
│       │   │   └── index.json
│       │   ├── DID_NOT_THROW.json
│       │   ├── direction.json
│       │   ├── displayValue.json
│       │   ├── DNS.json
│       │   ├── doc.json
│       │   ├── Docker_Compose_Prompt_Combiner.json
│       │   ├── Docker_Containerization_Guide.json
│       │   ├── Docker_MCP_Servers_Orchestration_Guide.json
│       │   ├── DOCUMENTATION_NOTE.json
│       │   ├── DOT.json
│       │   ├── DOTS.json
│       │   ├── dummyCompoundId.json
│       │   ├── e.json
│       │   ├── E.json
│       │   ├── earlyHintsLink.json
│       │   ├── elide.json
│       │   ├── EMPTY.json
│       │   ├── encoded.json
│       │   ├── end.json
│       │   ├── endpoint.json
│       │   ├── environment.json
│       │   ├── ERR_CODE.json
│       │   ├── errMessage.json
│       │   ├── errMsg.json
│       │   ├── ERROR_MESSAGE.json
│       │   ├── error.json
│       │   ├── ERROR.json
│       │   ├── ERRORCLASS.json
│       │   ├── errorMessage.json
│       │   ├── es6Default.json
│       │   ├── ESC.json
│       │   ├── Escapable.json
│       │   ├── escapedChar.json
│       │   ├── escapeFuncStr.json
│       │   ├── escSlash.json
│       │   ├── ev.json
│       │   ├── event.json
│       │   ├── execaMessage.json
│       │   ├── EXPECTED_LABEL.json
│       │   ├── expected.json
│       │   ├── expectedString.json
│       │   ├── expression1.json
│       │   ├── EXTENSION.json
│       │   ├── f.json
│       │   ├── FAIL_TEXT.json
│       │   ├── FILE_BROWSER_FACTORY.json
│       │   ├── fill.json
│       │   ├── findPackageJson.json
│       │   ├── fnKey.json
│       │   ├── Foresight_Assistant.json
│       │   ├── FORMAT.json
│       │   ├── formatted.json
│       │   ├── from.json
│       │   ├── fullpaths.json
│       │   ├── FUNC_ERROR_TEXT.json
│       │   ├── general
│       │   │   └── index.json
│       │   ├── Generate_different_types_of_questions_ab___.json
│       │   ├── Generate_Mermaid_Diagram.json
│       │   ├── GenStateSuspendedStart.json
│       │   ├── GENSYNC_EXPECTED_START.json
│       │   ├── GitHub_Repository_Explorer.json
│       │   ├── gutter.json
│       │   ├── h.json
│       │   ├── handlerFuncName.json
│       │   ├── HASH_UNDEFINED.json
│       │   ├── head.json
│       │   ├── helpMessage.json
│       │   ├── HINT_ARG.json
│       │   ├── HOOK_RETURNED_NOTHING_ERROR_MESSAGE.json
│       │   ├── i.json
│       │   ├── id.json
│       │   ├── identifier.json
│       │   ├── Identifier.json
│       │   ├── INDENT.json
│       │   ├── indentation.json
│       │   ├── index.json
│       │   ├── INDIRECTION_FRAGMENT.json
│       │   ├── Initialize_project_setup_for_a_new_micro___.json
│       │   ├── input.json
│       │   ├── inputText.json
│       │   ├── insert.json
│       │   ├── insertPromptQuery.json
│       │   ├── INSPECT_MAX_BYTES.json
│       │   ├── install_dependencies__build__run__test____.json
│       │   ├── intToCharMap.json
│       │   ├── IS_ITERABLE_SENTINEL.json
│       │   ├── IS_KEYED_SENTINEL.json
│       │   ├── isConfigType.json
│       │   ├── isoSentinel.json
│       │   ├── isSourceNode.json
│       │   ├── j.json
│       │   ├── J.json
│       │   ├── JAKE_CMD.json
│       │   ├── JEST_GLOBAL_NAME.json
│       │   ├── JEST_GLOBALS_MODULE_NAME.json
│       │   ├── JSON_SYNTAX_CHAR.json
│       │   ├── json.json
│       │   ├── jsonType.json
│       │   ├── jupyter_namespaceObject.json
│       │   ├── JUPYTERLAB_DOCMANAGER_PLUGIN_ID.json
│       │   ├── k.json
│       │   ├── KERNEL_STATUS_ERROR_CLASS.json
│       │   ├── key.json
│       │   ├── l.json
│       │   ├── labelId.json
│       │   ├── LATEST_PROTOCOL_VERSION.json
│       │   ├── LETTERDASHNUMBER.json
│       │   ├── LF.json
│       │   ├── LIMIT_REPLACE_NODE.json
│       │   ├── LINE_FEED.json
│       │   ├── logTime.json
│       │   ├── lstatkey.json
│       │   ├── lt.json
│       │   ├── m.json
│       │   ├── maliciousPayload.json
│       │   ├── manager.py
│       │   ├── marker.json
│       │   ├── mask.json
│       │   ├── match.json
│       │   ├── matchingDelim.json
│       │   ├── MAXIMUM_MESSAGE_SIZE.json
│       │   ├── MCP_Integration_Assistant.json
│       │   ├── MCP_Resources_Explorer.json
│       │   ├── MCP_Resources_Integration_Guide.json
│       │   ├── MCP_Server_Development_Prompt_Combiner.json
│       │   ├── MCP_Server_Integration_Guide.json
│       │   ├── mcp-code-generator.json
│       │   ├── mdcContent.json
│       │   ├── Mermaid_Analysis_Expert.json
│       │   ├── Mermaid_Class_Diagram_Generator.json
│       │   ├── Mermaid_Diagram_Generator.json
│       │   ├── Mermaid_Diagram_Modifier.json
│       │   ├── MERMAID_DOM_ID_PREFIX.json
│       │   ├── message.json
│       │   ├── messages.json
│       │   ├── meth.json
│       │   ├── minimatch.json
│       │   ├── MOBILE_QUERY.json
│       │   ├── MOCK_CONSTRUCTOR_NAME.json
│       │   ├── MOCKS_PATTERN.json
│       │   ├── Modify_Mermaid_Diagram.json
│       │   ├── moduleDirectory.json
│       │   ├── Monorepo_Migration_and_Code_Organization_Guide.json
│       │   ├── msg.json
│       │   ├── mtr.json
│       │   ├── Multi-Resource_Context_Assistant.json
│       │   ├── multipartType.json
│       │   ├── n.json
│       │   ├── N.json
│       │   ├── name.json
│       │   ├── NATIVE_PLATFORM.json
│       │   ├── newUrl.json
│       │   ├── NM.json
│       │   ├── NO_ARGUMENTS.json
│       │   ├── NO_DIFF_MESSAGE.json
│       │   ├── NODE_MODULES.json
│       │   ├── nodeInternalPrefix.json
│       │   ├── nonASCIIidentifierStartChars.json
│       │   ├── nonKey.json
│       │   ├── NOT_A_DOT.json
│       │   ├── notCharacterOrDash.json
│       │   ├── notebookURL.json
│       │   ├── notSelector.json
│       │   ├── nullTag.json
│       │   ├── num.json
│       │   ├── NUMBER.json
│       │   ├── o.json
│       │   ├── O.json
│       │   ├── octChar.json
│       │   ├── octetStreamType.json
│       │   ├── operators.json
│       │   ├── other
│       │   │   ├── __image_1___describe_the_icon_in_one_sen___.json
│       │   │   ├── __type.json
│       │   │   ├── Advanced_Multi-Server_Integration_Template.json
│       │   │   ├── Analyze_Mermaid_Diagram.json
│       │   │   ├── Basic_Template.json
│       │   │   ├── Code_Diagram_Documentation_Creator.json
│       │   │   ├── Collaborative_Development_with_MCP_Integration.json
│       │   │   ├── completionShTemplate.json
│       │   │   ├── Could_you_interpret_the_assumed_applicat___.json
│       │   │   ├── DEFAULT_INDENT.json
│       │   │   ├── Docker_MCP_Servers_Orchestration_Guide.json
│       │   │   ├── Generate_different_types_of_questions_ab___.json
│       │   │   ├── Generate_Mermaid_Diagram.json
│       │   │   ├── GitHub_Repository_Explorer.json
│       │   │   ├── index.json
│       │   │   ├── Initialize_project_setup_for_a_new_micro___.json
│       │   │   ├── install_dependencies__build__run__test____.json
│       │   │   ├── LINE_FEED.json
│       │   │   ├── MCP_Resources_Explorer.json
│       │   │   ├── MCP_Resources_Integration_Guide.json
│       │   │   ├── MCP_Server_Integration_Guide.json
│       │   │   ├── mcp-code-generator.json
│       │   │   ├── Mermaid_Class_Diagram_Generator.json
│       │   │   ├── Mermaid_Diagram_Generator.json
│       │   │   ├── Mermaid_Diagram_Modifier.json
│       │   │   ├── Modify_Mermaid_Diagram.json
│       │   │   ├── Multi-Resource_Context_Assistant.json
│       │   │   ├── output.json
│       │   │   ├── sseUrl.json
│       │   │   ├── string.json
│       │   │   ├── Task_List_Helper.json
│       │   │   ├── Template-Based_MCP_Integration.json
│       │   │   ├── Test_Prompt.json
│       │   │   ├── type.json
│       │   │   ├── VERSION.json
│       │   │   ├── WIN_SLASH.json
│       │   │   └── You_are_limited_to_respond_Yes_or_No_onl___.json
│       │   ├── out.json
│       │   ├── output.json
│       │   ├── OUTSIDE_JEST_VM_PROTOCOL.json
│       │   ├── override.json
│       │   ├── p.json
│       │   ├── PACKAGE_FILENAME.json
│       │   ├── PACKAGE_JSON.json
│       │   ├── packageVersion.json
│       │   ├── paddedNumber.json
│       │   ├── page.json
│       │   ├── parseClass.json
│       │   ├── PATH_NODE_MODULES.json
│       │   ├── path.json
│       │   ├── pathExt.json
│       │   ├── pattern.json
│       │   ├── PatternBoolean.json
│       │   ├── pBuiltins.json
│       │   ├── pFloatForm.json
│       │   ├── pkg.json
│       │   ├── PLUGIN_ID_DOC_MANAGER.json
│       │   ├── plusChar.json
│       │   ├── PN_CHARS.json
│       │   ├── point.json
│       │   ├── prefix.json
│       │   ├── PRETTY_PLACEHOLDER.json
│       │   ├── Project_Analysis_Assistant.json
│       │   ├── ProjectsUpdatedInBackgroundEvent.json
│       │   ├── PromptCombiner_Interface.json
│       │   ├── promptId.json
│       │   ├── property_prefix.json
│       │   ├── pubkey256.json
│       │   ├── Q.json
│       │   ├── qmark.json
│       │   ├── QO.json
│       │   ├── query.json
│       │   ├── querystringType.json
│       │   ├── queryText.json
│       │   ├── r.json
│       │   ├── R.json
│       │   ├── rangeStart.json
│       │   ├── re.json
│       │   ├── reI.json
│       │   ├── REQUIRED_FIELD_SYMBOL.json
│       │   ├── Research_Assistant.json
│       │   ├── reserve.json
│       │   ├── resolvedDestination.json
│       │   ├── resolverDir.json
│       │   ├── responseType.json
│       │   ├── result.json
│       │   ├── ROOT_DESCRIBE_BLOCK_NAME.json
│       │   ├── ROOT_NAMESPACE_NAME.json
│       │   ├── ROOT_TASK_NAME.json
│       │   ├── route.json
│       │   ├── RUNNING_TEXT.json
│       │   ├── RXstyle.json
│       │   ├── s.json
│       │   ├── SCHEMA_PATH.json
│       │   ├── schemaQuery.json
│       │   ├── se.json
│       │   ├── SEARCHABLE_CLASS.json
│       │   ├── secret.json
│       │   ├── selector.json
│       │   ├── SEMVER_SPEC_VERSION.json
│       │   ├── sensitiveHeaders.json
│       │   ├── sep.json
│       │   ├── separator.json
│       │   ├── Sequential_Data_Analysis_with_MCP_Integration.json
│       │   ├── SHAPE_STATE.json
│       │   ├── shape.json
│       │   ├── SHARED.json
│       │   ├── short.json
│       │   ├── side.json
│       │   ├── SNAPSHOT_VERSION.json
│       │   ├── SOLID_Code_Analysis_Visualizer.json
│       │   ├── SOURCE_MAPPING_PREFIX.json
│       │   ├── source.json
│       │   ├── sourceMapContent.json
│       │   ├── SPACE_SYMBOL.json
│       │   ├── SPACE.json
│       │   ├── sqlKeywords.json
│       │   ├── sranges.json
│       │   ├── sseUrl.json
│       │   ├── st.json
│       │   ├── ST.json
│       │   ├── stack.json
│       │   ├── START_HIDING.json
│       │   ├── START_OF_LINE.json
│       │   ├── startNoTraversal.json
│       │   ├── STATES.json
│       │   ├── stats.json
│       │   ├── statSync.json
│       │   ├── status.json
│       │   ├── storageStatus.json
│       │   ├── storageType.json
│       │   ├── str.json
│       │   ├── string.json
│       │   ├── stringifiedObject.json
│       │   ├── stringPath.json
│       │   ├── stringResult.json
│       │   ├── stringTag.json
│       │   ├── strValue.json
│       │   ├── style.json
│       │   ├── SUB_NAME.json
│       │   ├── subkey.json
│       │   ├── SUBPROTOCOL.json
│       │   ├── SUITE_NAME.json
│       │   ├── symbolPattern.json
│       │   ├── symbolTag.json
│       │   ├── system
│       │   │   ├── Aa.json
│       │   │   ├── b.json
│       │   │   ├── Development_System_Prompt.json
│       │   │   ├── index.json
│       │   │   ├── marker.json
│       │   │   ├── PATH_NODE_MODULES.json
│       │   │   ├── ProjectsUpdatedInBackgroundEvent.json
│       │   │   ├── RXstyle.json
│       │   │   ├── status.json
│       │   │   └── versionMajorMinor.json
│       │   ├── t.json
│       │   ├── T.json
│       │   ├── Task_List_Helper.json
│       │   ├── Template-Based_MCP_Integration.json
│       │   ├── template.py
│       │   ├── templateDir.json
│       │   ├── tempName.json
│       │   ├── Test_Prompt.json
│       │   ├── text.json
│       │   ├── time.json
│       │   ├── titleSeparator.json
│       │   ├── tmpl.json
│       │   ├── tn.json
│       │   ├── TOPBAR_FACTORY.json
│       │   ├── toValue.json
│       │   ├── transform.json
│       │   ├── trustProxyDefaultSymbol.json
│       │   ├── txt.json
│       │   ├── type.json
│       │   ├── typeArgumentsKey.json
│       │   ├── typeKey.json
│       │   ├── typeMessage.json
│       │   ├── typesRegistryPackageName.json
│       │   ├── u.json
│       │   ├── UNDEFINED.json
│       │   ├── unit.json
│       │   ├── UNMATCHED_SURROGATE_PAIR_REPLACE.json
│       │   ├── ur.json
│       │   ├── usage.json
│       │   ├── USAGE.json
│       │   ├── user
│       │   │   ├── backupId.json
│       │   │   ├── DESCENDING.json
│       │   │   ├── encoded.json
│       │   │   ├── index.json
│       │   │   ├── J.json
│       │   │   ├── MOBILE_QUERY.json
│       │   │   ├── promptId.json
│       │   │   ├── schemaQuery.json
│       │   │   ├── TOPBAR_FACTORY.json
│       │   │   ├── txt.json
│       │   │   └── usage.json
│       │   ├── value.json
│       │   ├── VERSION.json
│       │   ├── version.py
│       │   ├── versionMajorMinor.json
│       │   ├── Vr.json
│       │   ├── watchmanURL.json
│       │   ├── webkit.json
│       │   ├── WIN_SLASH.json
│       │   ├── xhtml.json
│       │   ├── XP_DEFAULT_PATHEXT.json
│       │   ├── y.json
│       │   └── You_are_limited_to_respond_Yes_or_No_onl___.json
│       ├── resources
│       │   ├── __init__.py
│       │   ├── code_examples
│       │   │   └── index.json
│       │   ├── config
│       │   │   └── index.json
│       │   ├── documentation
│       │   │   └── index.json
│       │   ├── images
│       │   │   └── index.json
│       │   ├── index.json
│       │   └── other
│       │       └── index.json
│       ├── server.py
│       ├── templates
│       │   ├── __init__.py
│       │   ├── AbstractFactory.json
│       │   ├── Adapter.json
│       │   ├── base.py
│       │   ├── Builder.json
│       │   ├── Chain.json
│       │   ├── Command.json
│       │   ├── component
│       │   │   ├── AbstractFactory.json
│       │   │   ├── Adapter.json
│       │   │   ├── Builder.json
│       │   │   ├── Chain.json
│       │   │   ├── Command.json
│       │   │   ├── Decorator.json
│       │   │   ├── Facade.json
│       │   │   ├── Factory.json
│       │   │   ├── Iterator.json
│       │   │   ├── Mediator.json
│       │   │   ├── Memento.json
│       │   │   ├── Observer.json
│       │   │   ├── Prototype.json
│       │   │   ├── Proxy.json
│       │   │   ├── Singleton.json
│       │   │   ├── State.json
│       │   │   ├── Strategy.json
│       │   │   ├── TemplateMethod.json
│       │   │   └── Visitor.json
│       │   ├── component.py
│       │   ├── Decorator.json
│       │   ├── Facade.json
│       │   ├── Factory.json
│       │   ├── index.json
│       │   ├── Iterator.json
│       │   ├── manager.py
│       │   ├── Mediator.json
│       │   ├── Memento.json
│       │   ├── Observer.json
│       │   ├── project.py
│       │   ├── Prototype.json
│       │   ├── Proxy.json
│       │   ├── renderer.py
│       │   ├── Singleton.json
│       │   ├── State.json
│       │   ├── Strategy.json
│       │   ├── template_manager.py
│       │   ├── TemplateMethod.json
│       │   ├── types.py
│       │   └── Visitor.json
│       └── utils
│           └── __init__.py
├── SUMMARY.md
├── TASK_COMPLETION_SUMMARY.md
├── templates
│   └── openssl
│       ├── files
│       │   ├── CMakeLists.txt.jinja2
│       │   ├── conanfile.py.jinja2
│       │   ├── main.cpp.jinja2
│       │   └── README.md.jinja2
│       ├── openssl-consumer.json
│       └── template.json
├── test_openssl_integration.sh
├── test_package
│   └── conanfile.py
└── tests
    ├── __init__.py
    ├── conftest.py
    ├── integration
    │   ├── test_core_integration.py
    │   ├── test_mermaid_integration.py
    │   ├── test_prompt_manager_integration.py
    │   └── test_server_integration.py
    ├── test_aws_mcp.py
    ├── test_base_classes.py
    ├── test_config.py
    ├── test_exceptions.py
    ├── test_mermaid.py
    ├── test_prompts.py
    └── test_templates.py
```

# Files

--------------------------------------------------------------------------------
/tests/integration/test_server_integration.py:
--------------------------------------------------------------------------------

```python
"""
Integration tests for the MCP Project Orchestrator server.

These tests verify that all components work together correctly in the server.
"""

import os
import pytest
import tempfile
import asyncio
import json
from pathlib import Path
from unittest.mock import patch, AsyncMock

from mcp_project_orchestrator.core import MCPConfig
from mcp_project_orchestrator.server import ProjectOrchestratorServer


class TestServerIntegration:
    """Integration tests for the MCP Project Orchestrator server.

    Each test builds a fully wired ProjectOrchestratorServer from a temporary
    on-disk configuration, then exercises the public surface end to end:
    tool registration, tool invocation, and JSON-RPC message handling.
    """
    
    @pytest.fixture
    def temp_server_dir(self):
        """Create a temporary server directory with all required subdirectories.

        Yields:
            Path: root of a throwaway directory tree mirroring the layout the
            server expects; removed automatically when the test finishes.
        """
        with tempfile.TemporaryDirectory() as temp_dir:
            server_dir = Path(temp_dir)
            
            # Create required directories
            # The layout must match the "paths" section written by the
            # config fixture below.
            (server_dir / "prompts").mkdir(exist_ok=True)
            (server_dir / "templates").mkdir(exist_ok=True)
            (server_dir / "mermaid").mkdir(exist_ok=True)
            (server_dir / "mermaid" / "templates").mkdir(exist_ok=True)
            (server_dir / "mermaid" / "output").mkdir(exist_ok=True)
            (server_dir / "resources").mkdir(exist_ok=True)
            
            yield server_dir
    
    @pytest.fixture
    def config(self, temp_server_dir):
        """Create a test configuration.

        Writes a config.json into the temp directory pointing every path at
        the subdirectories created by temp_server_dir, then loads it through
        MCPConfig so tests use the same code path as production startup.
        """
        config_data = {
            "name": "test-orchestrator",
            "version": "0.1.0",
            "description": "Test Project Orchestrator",
            "server": {
                "host": "127.0.0.1",
                "port": 8080
            },
            "paths": {
                "prompts": str(temp_server_dir / "prompts"),
                "templates": str(temp_server_dir / "templates"),
                "mermaid_templates": str(temp_server_dir / "mermaid" / "templates"),
                "mermaid_output": str(temp_server_dir / "mermaid" / "output"),
                "resources": str(temp_server_dir / "resources")
            }
        }
        
        config_file = temp_server_dir / "config.json"
        with open(config_file, "w") as f:
            json.dump(config_data, f)
            
        return MCPConfig(config_file=config_file)
    
    @pytest.fixture
    def sample_prompt_template(self, temp_server_dir):
        """Create a sample prompt template.

        Writes a Jinja-style template file into the prompts directory so the
        prompt manager discovers it during server initialization.

        Returns:
            dict: the template definition that was written to disk.
        """
        template = {
            "name": "project-description",
            "description": "A template for describing projects",
            "template": "# {{ project_name }}\n\n{{ project_description }}\n\n## Features\n\n{{ features }}",
            "variables": {
                "project_name": {
                    "type": "string",
                    "description": "The name of the project"
                },
                "project_description": {
                    "type": "string",
                    "description": "A brief description of the project"
                },
                "features": {
                    "type": "string",
                    "description": "Key features of the project"
                }
            },
            "category": "documentation",
            "tags": ["project", "documentation"]
        }
        
        template_file = temp_server_dir / "prompts" / "project-description.json"
        with open(template_file, "w") as f:
            json.dump(template, f)
            
        return template
    
    @pytest.fixture
    def sample_mermaid_template(self, temp_server_dir):
        """Create a sample mermaid template.

        Note the placeholder syntax here is single-brace ({start}), unlike the
        double-brace Jinja syntax used by the prompt template fixture.

        Returns:
            dict: the diagram template definition that was written to disk.
        """
        template = {
            "name": "simple-flowchart",
            "type": "flowchart",
            "content": "flowchart TD\n    A[{start}] --> B[{process}]\n    B --> C[{end}]",
            "variables": {
                "start": "Start",
                "process": "Process",
                "end": "End"
            }
        }
        
        template_file = temp_server_dir / "mermaid" / "templates" / "simple-flowchart.json"
        with open(template_file, "w") as f:
            json.dump(template, f)
            
        return template
    
    @pytest.mark.asyncio
    async def test_server_initialization(self, config, sample_prompt_template, sample_mermaid_template):
        """Test that the server initializes properly with all components."""
        # Mock the CLI path check for mermaid
        # NOTE(review): patching pathlib.Path.exists globally makes EVERY
        # existence check in the process return True for the duration of the
        # block, not just the mermaid CLI probe.
        with patch("pathlib.Path.exists", return_value=True):
            server = ProjectOrchestratorServer(config=config)
            await server.initialize()
            
            # Check if the components were initialized
            assert server.prompt_manager is not None
            assert server.mermaid_service is not None
            assert server.template_manager is not None
            
    @pytest.mark.asyncio
    async def test_prompt_rendering_tool(self, config, sample_prompt_template):
        """Test that the prompt rendering tool works."""
        with patch("pathlib.Path.exists", return_value=True):
            server = ProjectOrchestratorServer(config=config)
            await server.initialize()
            
            # Get the registered tool
            # Relies on server.mcp exposing a `tools` mapping of
            # name -> {"handler": ..., ...} entries.
            render_prompt_tool = server.mcp.tools.get("renderPrompt")
            assert render_prompt_tool is not None
            
            # Call the tool
            params = {
                "template_name": "project-description",
                "variables": {
                    "project_name": "Test Project",
                    "project_description": "A project for testing",
                    "features": "- Feature 1\n- Feature 2"
                }
            }
            
            result = await render_prompt_tool["handler"](params)
            
            # Check the result
            # Substring checks verify each variable was substituted into the
            # rendered markdown.
            assert result is not None
            assert "# Test Project" in result["content"]
            assert "A project for testing" in result["content"]
            assert "- Feature 1" in result["content"]
            assert "- Feature 2" in result["content"]
    
    @pytest.mark.asyncio
    async def test_mermaid_generation_tool(self, config, sample_mermaid_template):
        """Test that the mermaid generation tool works."""
        with patch("pathlib.Path.exists", return_value=True):
            # Mock the renderer to avoid actual CLI calls
            async def mock_render(*args, **kwargs):
                return Path(config.mermaid_output_dir) / "test-diagram.svg"
                
            # AsyncMock with side_effect delegates to mock_render, so the
            # awaited call returns the fake output path without invoking the
            # mermaid CLI.
            with patch("mcp_project_orchestrator.mermaid.MermaidRenderer.render_to_file", 
                       AsyncMock(side_effect=mock_render)):
                server = ProjectOrchestratorServer(config=config)
                await server.initialize()
                
                # Get the registered tool
                generate_diagram_tool = server.mcp.tools.get("generateDiagram")
                assert generate_diagram_tool is not None
                
                # Call the tool
                params = {
                    "template_name": "simple-flowchart",
                    "variables": {
                        "start": "Begin",
                        "process": "Transform",
                        "end": "Finish"
                    },
                    "output_format": "svg"
                }
                
                result = await generate_diagram_tool["handler"](params)
                
                # Check the result
                assert result is not None
                assert "diagram_url" in result
                
    @pytest.mark.asyncio
    async def test_client_message_handling(self, config, sample_prompt_template, sample_mermaid_template):
        """Test that the server handles client messages properly."""
        with patch("pathlib.Path.exists", return_value=True):
            server = ProjectOrchestratorServer(config=config)
            await server.initialize()
            
            # Create a mock initialize message
            initialize_msg = {
                "jsonrpc": "2.0",
                "id": 1,
                "method": "mcp/initialize",
                "params": {
                    "capabilities": {}
                }
            }
            
            # Handle the message
            response = await server.handle_client_message(initialize_msg)
            
            # Check the response
            # The response must be a JSON-RPC 2.0 result echoing the request id
            # and advertising server capabilities.
            assert response["jsonrpc"] == "2.0"
            assert response["id"] == 1
            assert "result" in response
            assert "capabilities" in response["result"]
            
            # Create a mock listTools message
            list_tools_msg = {
                "jsonrpc": "2.0",
                "id": 2,
                "method": "mcp/listTools"
            }
            
            # Handle the message
            response = await server.handle_client_message(list_tools_msg)
            
            # Check the response
            assert response["jsonrpc"] == "2.0"
            assert response["id"] == 2
            assert "result" in response
            assert "tools" in response["result"]
            
            # Check if our tools are in the list
            tool_names = [tool["name"] for tool in response["result"]["tools"]]
            assert "renderPrompt" in tool_names
            assert "generateDiagram" in tool_names
            
    @pytest.mark.asyncio
    async def test_error_handling(self, config):
        """Test that the server handles errors properly."""
        with patch("pathlib.Path.exists", return_value=True):
            server = ProjectOrchestratorServer(config=config)
            await server.initialize()
            
            # Create an invalid message
            invalid_msg = {
                "jsonrpc": "2.0",
                "id": 1,
                "method": "invalid/method"
            }
            
            # Handle the message
            response = await server.handle_client_message(invalid_msg)
            
            # Check the error response
            assert response["jsonrpc"] == "2.0"
            assert response["id"] == 1
            assert "error" in response
            assert response["error"]["code"] == -32601  # Method not found
            
            # Create a valid method but with invalid params
            # The tool exists, but the referenced template does not, so the
            # handler should surface an error object rather than raising.
            invalid_params_msg = {
                "jsonrpc": "2.0",
                "id": 2,
                "method": "mcp/callTool",
                "params": {
                    "name": "renderPrompt",
                    "params": {
                        "template_name": "non-existent-template",
                        "variables": {}
                    }
                }
            }
            
            # Handle the message
            response = await server.handle_client_message(invalid_params_msg)
            
            # Check the error response
            assert response["jsonrpc"] == "2.0"
            assert response["id"] == 2
            assert "error" in response
```

--------------------------------------------------------------------------------
/src/mcp_project_orchestrator/server.py:
--------------------------------------------------------------------------------

```python
"""
MCP Project Orchestrator Server.

This is the main entry point for the MCP Project Orchestrator server.
"""

from typing import Dict, Any, Optional

from .core import FastMCPServer, MCPConfig, setup_logging
from .prompt_manager import PromptManager
from .mermaid import MermaidGenerator, MermaidRenderer
from .templates import ProjectTemplateManager, ComponentTemplateManager


class ProjectOrchestratorServer:
    """
    MCP Project Orchestrator Server.
    
    This server integrates prompt management, diagram generation, and project
    templating capabilities into a unified MCP server.

    Lifecycle: construct with an MCPConfig, await initialize() to wire up the
    component managers and register tools, then await start() / stop().
    Tool handlers raise RuntimeError if invoked before initialize() completes.
    """
    
    def __init__(self, config: MCPConfig):
        """
        Initialize the server with configuration.
        
        Args:
            config: The server configuration
        """
        self.config = config
        self.mcp = FastMCPServer(config=config)
        # Component managers are wired up in initialize(); until then they are
        # None and _require_initialized() rejects tool calls with a clear error.
        self.prompt_manager = None
        self.mermaid_service = None
        self.template_manager = None
        self.logger = setup_logging(log_file=config.log_file)

    def _require_initialized(self) -> None:
        """
        Ensure initialize() has been awaited before a tool handler runs.

        Raises:
            RuntimeError: If any component manager has not been set up yet.
                Without this guard, an early tool call would fail with an
                opaque ``AttributeError`` on a ``None`` manager.
        """
        if (
            self.prompt_manager is None
            or self.mermaid_service is None
            or self.template_manager is None
        ):
            raise RuntimeError(
                "ProjectOrchestratorServer.initialize() must be awaited "
                "before handling tool calls"
            )
        
    async def initialize(self) -> None:
        """Initialize all components and register tools."""
        self.logger.info("Initializing Project Orchestrator Server")
        
        # Initialize prompt manager
        self.prompt_manager = PromptManager(self.config)
        await self.prompt_manager.initialize()
        
        # Initialize mermaid service
        self.mermaid_service = MermaidGenerator(self.config)
        await self.mermaid_service.initialize()
        
        # Initialize template manager — a dict keyed by template kind so the
        # generateProject/generateComponent handlers can dispatch by key.
        self.template_manager = {
            "project": ProjectTemplateManager(self.config),
            "component": ComponentTemplateManager(self.config)
        }
        await self.template_manager["project"].initialize()
        await self.template_manager["component"].initialize()
        
        # Register tools
        self._register_tools()
        
        # Initialize MCP server
        await self.mcp.initialize()
        
        self.logger.info("Project Orchestrator Server initialized successfully")
        
    def _register_tools(self) -> None:
        """Register all tools with the MCP server.

        Each registration pairs a JSON-schema parameter description with an
        async handler method on this instance.
        """
        self.logger.info("Registering tools")
        
        # Register prompt rendering tool
        self.mcp.register_tool(
            name="renderPrompt",
            description="Render a prompt template with variables",
            parameters={
                "type": "object",
                "properties": {
                    "template_name": {
                        "type": "string",
                        "description": "Name of the template to render"
                    },
                    "variables": {
                        "type": "object",
                        "description": "Variables to use for rendering"
                    }
                },
                "required": ["template_name"]
            },
            handler=self._handle_render_prompt
        )
        
        # Register diagram generation tool
        self.mcp.register_tool(
            name="generateDiagram",
            description="Generate a Mermaid diagram",
            parameters={
                "type": "object",
                "properties": {
                    "template_name": {
                        "type": "string",
                        "description": "Name of the diagram template"
                    },
                    "variables": {
                        "type": "object",
                        "description": "Variables to use for rendering"
                    },
                    "output_format": {
                        "type": "string",
                        "enum": ["svg", "png", "pdf"],
                        "default": "svg",
                        "description": "Output format for the diagram"
                    }
                },
                "required": ["template_name"]
            },
            handler=self._handle_generate_diagram
        )
        
        # Register project generation tool
        self.mcp.register_tool(
            name="generateProject",
            description="Generate a project from a template",
            parameters={
                "type": "object",
                "properties": {
                    "template_name": {
                        "type": "string",
                        "description": "Name of the project template"
                    },
                    "variables": {
                        "type": "object",
                        "description": "Variables to use for generation"
                    },
                    "output_dir": {
                        "type": "string",
                        "description": "Output directory for the project"
                    }
                },
                "required": ["template_name", "output_dir"]
            },
            handler=self._handle_generate_project
        )
        
        # Register component generation tool
        self.mcp.register_tool(
            name="generateComponent",
            description="Generate a component from a template",
            parameters={
                "type": "object",
                "properties": {
                    "template_name": {
                        "type": "string",
                        "description": "Name of the component template"
                    },
                    "variables": {
                        "type": "object",
                        "description": "Variables to use for generation"
                    },
                    "output_dir": {
                        "type": "string",
                        "description": "Output directory for the component"
                    }
                },
                "required": ["template_name", "output_dir"]
            },
            handler=self._handle_generate_component
        )
        
    async def _handle_render_prompt(self, params: Dict[str, Any]) -> Dict[str, Any]:
        """
        Handle the renderPrompt tool call.
        
        Args:
            params: Tool parameters ("template_name" required, "variables"
                optional mapping of substitution values)
            
        Returns:
            Dict with rendered content under the "content" key
            
        Raises:
            RuntimeError: If the server has not been initialized.
            Exception: Any error from the prompt manager is logged and
                re-raised for the MCP layer to convert into an error response.
        """
        self._require_initialized()
        template_name = params["template_name"]
        variables = params.get("variables", {})
        
        try:
            rendered = await self.prompt_manager.render_template(template_name, variables)
            return {"content": rendered}
        except Exception as e:
            self.logger.error(f"Error rendering prompt template: {str(e)}")
            raise
    
    async def _handle_generate_diagram(self, params: Dict[str, Any]) -> Dict[str, Any]:
        """
        Handle the generateDiagram tool call.
        
        Args:
            params: Tool parameters ("template_name" required; optional
                "variables" and "output_format", defaulting to "svg")
            
        Returns:
            Dict with "diagram_url" (server-relative URL) and "diagram_path"
            (absolute filesystem path of the rendered file)
            
        Raises:
            RuntimeError: If the server has not been initialized.
            Exception: Any generation/rendering error is logged and re-raised.
        """
        self._require_initialized()
        template_name = params["template_name"]
        variables = params.get("variables", {})
        output_format = params.get("output_format", "svg")
        
        try:
            # Generate diagram content
            diagram = self.mermaid_service.generate_from_template(template_name, variables)
            
            # Render to file — a fresh renderer per call; it performs its own
            # async setup before rendering.
            renderer = MermaidRenderer(self.config)
            await renderer.initialize()
            
            output_file = await renderer.render_to_file(
                diagram,
                template_name,
                output_format=output_format
            )
            
            # Create a relative URL
            url = f"/mermaid/{output_file.name}"
            
            return {
                "diagram_url": url,
                "diagram_path": str(output_file)
            }
        except Exception as e:
            self.logger.error(f"Error generating diagram: {str(e)}")
            raise
    
    async def _handle_generate_project(self, params: Dict[str, Any]) -> Dict[str, Any]:
        """
        Handle the generateProject tool call.
        
        Args:
            params: Tool parameters ("template_name" and "output_dir"
                required; "variables" optional)
            
        Returns:
            Dict with generation result as produced by the project
            template manager
            
        Raises:
            RuntimeError: If the server has not been initialized.
            Exception: Any generation error is logged and re-raised.
        """
        self._require_initialized()
        template_name = params["template_name"]
        variables = params.get("variables", {})
        output_dir = params["output_dir"]
        
        try:
            # Generate project
            result = await self.template_manager["project"].generate_project(
                template_name,
                variables,
                output_dir
            )
            
            return result
        except Exception as e:
            self.logger.error(f"Error generating project: {str(e)}")
            raise
    
    async def _handle_generate_component(self, params: Dict[str, Any]) -> Dict[str, Any]:
        """
        Handle the generateComponent tool call.
        
        Args:
            params: Tool parameters ("template_name" and "output_dir"
                required; "variables" optional)
            
        Returns:
            Dict with generation result as produced by the component
            template manager
            
        Raises:
            RuntimeError: If the server has not been initialized.
            Exception: Any generation error is logged and re-raised.
        """
        self._require_initialized()
        template_name = params["template_name"]
        variables = params.get("variables", {})
        output_dir = params["output_dir"]
        
        try:
            # Generate component
            result = await self.template_manager["component"].generate_component(
                template_name,
                variables,
                output_dir
            )
            
            return result
        except Exception as e:
            self.logger.error(f"Error generating component: {str(e)}")
            raise
    
    async def handle_client_message(self, message: Dict[str, Any]) -> Dict[str, Any]:
        """
        Handle client messages.
        
        Delegates to the underlying MCP server; any exception is converted
        into a JSON-RPC 2.0 internal-error response (code -32603) echoing the
        request id, so the transport never sees an unhandled exception.
        
        Args:
            message: The client message (JSON-RPC 2.0 request dict)
            
        Returns:
            Response message (JSON-RPC 2.0 result or error dict)
        """
        try:
            return await self.mcp.handle_message(message)
        except Exception as e:
            self.logger.error(f"Error handling client message: {str(e)}")
            
            # Create an error response; -32603 is the JSON-RPC "Internal
            # error" code.
            return {
                "jsonrpc": "2.0",
                "id": message.get("id"),
                "error": {
                    "code": -32603,
                    "message": f"Internal error: {str(e)}"
                }
            }
    
    async def start(self) -> None:
        """Start the server."""
        await self.mcp.start()
        
    async def stop(self) -> None:
        """Stop the server."""
        await self.mcp.stop()


# Convenience function for starting the server
async def start_server(config_path: Optional[str] = None) -> "ProjectOrchestratorServer":
    """
    Start the MCP Project Orchestrator server.
    
    Args:
        config_path: Path to configuration file (optional)

    Returns:
        The initialized and started ProjectOrchestratorServer instance.
    """
    # Load configuration
    config = MCPConfig(config_file=config_path)
    
    # Create and initialize the server
    server = ProjectOrchestratorServer(config)
    await server.initialize()
    
    # Start the server
    await server.start()
    
    return server

```

--------------------------------------------------------------------------------
/src/mcp_project_orchestrator/mcp-py/FileAnnotator.py:
--------------------------------------------------------------------------------

```python
import os

import anthropic

# Do not hard-code credentials: read the key from the environment.
# (The comment in the original said the SDK "defaults to
# os.environ.get('ANTHROPIC_API_KEY')" but then passed a placeholder
# string, which overrode that default and broke authentication.)
client = anthropic.Anthropic(
    api_key=os.environ.get("ANTHROPIC_API_KEY"),
)

# Replace placeholders like {{FILE_CONTENTS}} with real values,
# because the SDK does not support variables.
message = client.messages.create(
    model="claude-3-5-haiku-20241022",
    max_tokens=1000,
    temperature=0,
    messages=[
        {
            "role": "user",
            "content": [
                {
                    "type": "text",
                    "text": "<examples>\n<example>\n<FILE_CONTENTS>\nimport anthropic\n\nclient = anthropic.Anthropic(\n    # defaults to os.environ.get(\"ANTHROPIC_API_KEY\")\n    api_key=\"my_api_key\",\n)\n\n# Replace placeholders like {{SOURCE_CODE_VERSION_1}} with real values,\n# because the SDK does not support variables.\nmessage = client.messages.create(\n    model=\"claude-3-5-sonnet-20241022\",\n    max_tokens=8192,\n    temperature=0,\n    messages=[\n        {\n            \"role\": \"user\",\n            \"content\": [\n                {\n                    \"type\": \"text\",\n                    \"text\": \"You are tasked with selecting the best version of a source code file from multiple available versions. All versions are attempting to implement the same goal, but may differ in their approach, efficiency, readability, or other aspects. Your task is to analyze these versions and select the best one, or if appropriate, suggest an aggregated best result combining multiple versions.\\n\\nYou will be presented with different versions of the source code:\\n\\n<source_code_versions>\\nsource_code_version_1 = \\\"{{SOURCE_CODE_VERSION_1}}\\\"\\nsource_code_version_2 = \\\"{{SOURCE_CODE_VERSION_2}}\\\"\\nsource_code_version_3 = \\\"{{SOURCE_CODE_VERSION_3}}\\\"\\n</source_code_versions>\\n\\nTo evaluate and select the best version(s), follow these steps:\\n\\n1. Carefully read and understand the implementation goal.\\n2. Review each version of the source code, paying attention to:\\n   a. Correctness: Does the code accurately implement the stated goal?\\n   b. Efficiency: Is the code optimized for performance and resource usage?\\n   c. Readability: Is the code well-structured, properly commented, and easy to understand?\\n   d. Maintainability: Is the code modular and easy to modify or extend?\\n   e. Best practices: Does the code follow established coding standards and best practices for the language used?\\n\\n3. 
Compare the versions based on the above criteria. Consider the strengths and weaknesses of each approach.\\n\\n4. If one version clearly stands out as superior in most or all aspects, select it as the best version.\\n\\n5. If multiple versions have different strengths, consider whether an aggregated best result can be created by combining the best aspects of multiple versions. If so, describe how this aggregation could be done.\\n\\n6. Provide your analysis and selection in the following format:\\n\\n<thinking>\\n[Provide a detailed analysis of each version, discussing their strengths and weaknesses based on the evaluation criteria.]\\n[State your selection for the best version or describe the aggregated best result if applicable.]\\n[Explain your reasoning for the selection, referencing specific aspects of the chosen version(s) that make it/them superior.]\\n</thinking>\\n\\n<answer>\\n[Return only the content for the final source code version file.]\\n</answer>\\n\\nRemember to be thorough in your analysis because you have only this one chance to provide top-quality response to the user, because the user cannot message or respond you anymore.\"\n                }\n            ]\n        }\n    ]\n)\nprint(message.content)\n\n</FILE_CONTENTS>\n<ADDITIONAL_CONTEXT>\nfilename is AggregateVersions.py\n</ADDITIONAL_CONTEXT>\n<ideal_output>\n<file_analysis>\n1. File type: Python script (.py)\n2. Main components identified:\n   - Imports anthropic library\n   - Creates Anthropic client instance\n   - Configures and sends a message request to Claude-3\n3. Key features:\n   - Uses Claude-3-5-sonnet-20241022 model\n   - Contains a detailed prompt for code version analysis\n   - Sets specific parameters (max_tokens=8192, temperature=0)\n4. Structure analysis:\n   - Single API call implementation\n   - Structured message content with clear instructions\n   - Template placeholders for source code versions\n5. 
Security implications:\n   - Contains API key reference (though placeholder)\n   - Potential sensitive information in source code versions\n6. Notable patterns:\n   - Uses structured format tags (thinking, answer)\n   - Clear evaluation criteria structure\n   - Template variable placeholders\n</file_analysis>\n\n<brief_summary>\nThis Python script implements an interface to the Anthropic Claude-3 API for analyzing and selecting the best version of source code from multiple alternatives. It contains a comprehensive prompt that instructs the AI to evaluate different code versions based on various criteria including correctness, efficiency, readability, and maintainability, ultimately selecting or aggregating the best implementation.\n</brief_summary>\n\n{\n  \"file_type\": \"Python Script\",\n  \"main_purpose\": \"To facilitate automated source code version analysis and selection using the Anthropic Claude-3 API\",\n  \"key_features\": [\n    \"Anthropic API client initialization\",\n    \"Structured prompt for code analysis\",\n    \"Detailed evaluation criteria\",\n    \"Template system for source code input\",\n    \"Zero temperature setting for deterministic output\",\n    \"High token limit (8192) for comprehensive analysis\"\n  ],\n  \"patterns_and_recurring_elements\": [\n    \"XML-style tags for structure (<thinking>, <answer>)\",\n    \"Template variables ({{SOURCE_CODE_VERSION_X}})\",\n    \"Numbered evaluation steps\",\n    \"Hierarchical evaluation criteria\"\n  ],\n  \"detailed_description\": \"The script sets up an Anthropic API client and constructs a detailed prompt for analyzing multiple versions of source code. It uses the claude-3-5-sonnet-20241022 model with parameters optimized for analytical tasks. The prompt includes a comprehensive framework for evaluating code versions based on correctness, efficiency, readability, maintainability, and adherence to best practices. 
The script expects to receive multiple code versions as input and is designed to output either the best version or an aggregated solution.\",\n  \"potential_applications\": [\n    \"Automated code review processes\",\n    \"Code quality assessment\",\n    \"Best practices enforcement\",\n    \"Code optimization selection\",\n    \"Technical documentation generation\"\n  ],\n  \"security_considerations\": \"The script contains a placeholder for an API key which should be handled securely in production. The actual API key should be stored in environment variables rather than hardcoded. Care should be taken when handling potentially sensitive source code through the system.\",\n  \"additional_observations\": \"The script is designed for one-time analysis without follow-up interactions, as indicated by the final reminder in the prompt. The temperature setting of 0 suggests a focus on consistency and deterministic outputs rather than creative variations.\"\n}\n</ideal_output>\n</example>\n</examples>\n\n"
                },
                {
                    "type": "text",
                    "text": "You are FileAnnotator, an AI assistant designed to analyze and describe the contents of a single file. Your task is to carefully examine the provided file contents and any additional context, then provide both a brief summary and a detailed description of the file.\n\nHere are the file contents you need to analyze:\n\n<file_contents>\n{{FILE_CONTENTS}}\n</file_contents>\n\nIf available, here is additional context about the file:\n\n<additional_context>\n{{ADDITIONAL_CONTEXT}}\n</additional_context>\n\nPlease follow these steps to complete your task:\n\n1. Carefully read and analyze the file contents and additional context (if provided).\n\n2. Wrap your analysis process in <file_analysis> tags:\n   a. Identify the file type based on its contents and structure.\n   b. Determine the main purpose or function of the file.\n   c. List and count key elements, patterns, or important features within the file.\n   d. For code files: Identify the programming language and main components (e.g., functions, classes, imports).\n   e. For configuration files: Identify the system or application it's likely configuring.\n   f. For text documents: Summarize the main topics or themes.\n   g. Identify and list any patterns or recurring elements in the file.\n   h. Consider potential use cases or applications of the file.\n   i. Evaluate any potential security implications or sensitive information in the file.\n   j. Note any additional observations or insights.\n\n3. After your analysis, provide a brief summary (2-3 sentences) of the file in <brief_summary> tags.\n\n4. 
Then, provide a detailed description in JSON format using the following structure:\n\n{\n  \"file_type\": \"Identified file type\",\n  \"main_purpose\": \"Primary function or purpose of the file\",\n  \"key_features\": [\n    \"Notable element 1\",\n    \"Notable element 2\",\n    \"...\"\n  ],\n  \"patterns_and_recurring_elements\": [\n    \"Pattern 1\",\n    \"Pattern 2\",\n    \"...\"\n  ],\n  \"detailed_description\": \"In-depth analysis of the file contents\",\n  \"potential_applications\": [\n    \"Possible use case 1\",\n    \"Possible use case 2\",\n    \"...\"\n  ],\n  \"security_considerations\": \"Any security implications or sensitive information identified\",\n  \"additional_observations\": \"Any other relevant information or insights\"\n}\n\nEnsure that your JSON is properly formatted and valid. If certain aspects are unclear or cannot be determined from the available information, state this in the relevant fields of your JSON output.\n\nRemember to base your analysis solely on the provided file contents and additional context (if given). 
Do not make assumptions about information not present in the given input.\n\nHere's an example of the structure for your response (do not use this content, it's just to illustrate the format):\n\n<file_analysis>\n[Your detailed analysis process here]\n</file_analysis>\n\n<brief_summary>\n[2-3 sentence summary of the file]\n</brief_summary>\n\n{\n  \"file_type\": \"Example file type\",\n  \"main_purpose\": \"Example main purpose\",\n  \"key_features\": [\n    \"Example feature 1\",\n    \"Example feature 2\"\n  ],\n  \"patterns_and_recurring_elements\": [\n    \"Example pattern 1\",\n    \"Example pattern 2\"\n  ],\n  \"detailed_description\": \"Example detailed description\",\n  \"potential_applications\": [\n    \"Example application 1\",\n    \"Example application 2\"\n  ],\n  \"security_considerations\": \"Example security considerations\",\n  \"additional_observations\": \"Example additional observations\"\n}\n\nPlease proceed with your analysis and description of the file contents provided."
                }
            ]
        },
        {
            "role": "assistant",
            "content": [
                {
                    "type": "text",
                    "text": "<file_analysis>"
                }
            ]
        }
    ]
)
print(message.content)

```

--------------------------------------------------------------------------------
/aws-sip-trunk/scripts/deploy-asterisk-aws.sh:
--------------------------------------------------------------------------------

```bash
#!/bin/bash
#
# Manual Deployment Script for Asterisk SIP Trunk on AWS
# Alternative to Terraform - creates infrastructure using AWS CLI
#

set -euo pipefail

# Error handling
trap 'echo "Error on line $LINENO"; exit 1' ERR

echo "=== Asterisk SIP Trunk for ElevenLabs - AWS Deployment ==="
echo ""

# Check prerequisites. jq is needed later to parse the Elastic IP
# allocation response, so fail fast if either tool is missing.
command -v aws >/dev/null 2>&1 || { echo "AWS CLI is required but not installed. Aborting."; exit 1; }
command -v jq >/dev/null 2>&1 || { echo "jq is required but not installed. Aborting."; exit 1; }

# Required environment variables
: "${AWS_REGION:?Environment variable AWS_REGION is required}"
: "${ELEVENLABS_PHONE_E164:?Environment variable ELEVENLABS_PHONE_E164 is required}"
: "${ELEVENLABS_SIP_PASSWORD:?Environment variable ELEVENLABS_SIP_PASSWORD is required}"
: "${SSH_KEY_NAME:?Environment variable SSH_KEY_NAME is required}"

# Optional variables with defaults
PROJECT_NAME="${PROJECT_NAME:-asterisk-sip-trunk}"
INSTANCE_TYPE="${INSTANCE_TYPE:-t3.medium}"
VPC_CIDR="${VPC_CIDR:-10.0.0.0/16}"
SUBNET_CIDR="${SUBNET_CIDR:-10.0.1.0/24}"

echo "Configuration:"
echo "=============="
echo "Project Name: $PROJECT_NAME"
echo "AWS Region: $AWS_REGION"
echo "Instance Type: $INSTANCE_TYPE"
echo "VPC CIDR: $VPC_CIDR"
echo "ElevenLabs Phone: $ELEVENLABS_PHONE_E164"
echo ""

# Step 1: Create VPC
echo "[1/10] Creating VPC..."
VPC_ID=$(aws ec2 create-vpc \
    --cidr-block "$VPC_CIDR" \
    --region "$AWS_REGION" \
    --tag-specifications "ResourceType=vpc,Tags=[{Key=Name,Value=$PROJECT_NAME-vpc},{Key=Project,Value=$PROJECT_NAME}]" \
    --query 'Vpc.VpcId' \
    --output text)

echo "Created VPC: $VPC_ID"

# Enable DNS hostnames so instances in the VPC get resolvable names
aws ec2 modify-vpc-attribute \
    --vpc-id "$VPC_ID" \
    --enable-dns-hostnames \
    --region "$AWS_REGION"

# Step 2: Create Internet Gateway (required for the public subnet)
echo "[2/10] Creating Internet Gateway..."
IGW_ID=$(aws ec2 create-internet-gateway \
    --region "$AWS_REGION" \
    --tag-specifications "ResourceType=internet-gateway,Tags=[{Key=Name,Value=$PROJECT_NAME-igw},{Key=Project,Value=$PROJECT_NAME}]" \
    --query 'InternetGateway.InternetGatewayId' \
    --output text)

aws ec2 attach-internet-gateway \
    --internet-gateway-id "$IGW_ID" \
    --vpc-id "$VPC_ID" \
    --region "$AWS_REGION"

echo "Created Internet Gateway: $IGW_ID"

# Step 3: Create Subnet
echo "[3/10] Creating Public Subnet..."
SUBNET_ID=$(aws ec2 create-subnet \
    --vpc-id "$VPC_ID" \
    --cidr-block "$SUBNET_CIDR" \
    --region "$AWS_REGION" \
    --tag-specifications "ResourceType=subnet,Tags=[{Key=Name,Value=$PROJECT_NAME-public-subnet},{Key=Project,Value=$PROJECT_NAME}]" \
    --query 'Subnet.SubnetId' \
    --output text)

echo "Created Subnet: $SUBNET_ID"

# Step 4: Create Route Table with a default route to the IGW and
# associate it with the public subnet
echo "[4/10] Creating Route Table..."
ROUTE_TABLE_ID=$(aws ec2 create-route-table \
    --vpc-id "$VPC_ID" \
    --region "$AWS_REGION" \
    --tag-specifications "ResourceType=route-table,Tags=[{Key=Name,Value=$PROJECT_NAME-public-rt},{Key=Project,Value=$PROJECT_NAME}]" \
    --query 'RouteTable.RouteTableId' \
    --output text)

aws ec2 create-route \
    --route-table-id "$ROUTE_TABLE_ID" \
    --destination-cidr-block "0.0.0.0/0" \
    --gateway-id "$IGW_ID" \
    --region "$AWS_REGION"

aws ec2 associate-route-table \
    --subnet-id "$SUBNET_ID" \
    --route-table-id "$ROUTE_TABLE_ID" \
    --region "$AWS_REGION"

echo "Created Route Table: $ROUTE_TABLE_ID"

# Step 5: Create Security Group
echo "[5/10] Creating Security Group..."
SG_ID=$(aws ec2 create-security-group \
    --group-name "$PROJECT_NAME-asterisk-sg" \
    --description "Security group for Asterisk SIP trunk" \
    --vpc-id "$VPC_ID" \
    --region "$AWS_REGION" \
    --tag-specifications "ResourceType=security-group,Tags=[{Key=Name,Value=$PROJECT_NAME-asterisk-sg},{Key=Project,Value=$PROJECT_NAME}]" \
    --query 'GroupId' \
    --output text)

echo "Created Security Group: $SG_ID"

# Add security group rules
echo "Adding security group rules..."

# SSH (only opened when SSH_ALLOWED_CIDR is set; otherwise no SSH ingress)
if [ -n "${SSH_ALLOWED_CIDR:-}" ]; then
    aws ec2 authorize-security-group-ingress \
        --group-id "$SG_ID" \
        --protocol tcp \
        --port 22 \
        --cidr "$SSH_ALLOWED_CIDR" \
        --region "$AWS_REGION" \
        --group-rule-description "SSH access"
fi

# SIP TCP signaling, open to the world so the SIP provider can reach us
aws ec2 authorize-security-group-ingress \
    --group-id "$SG_ID" \
    --protocol tcp \
    --port 5060 \
    --cidr 0.0.0.0/0 \
    --region "$AWS_REGION" \
    --group-rule-description "SIP TCP signaling"

# SIP UDP signaling, same exposure as TCP
aws ec2 authorize-security-group-ingress \
    --group-id "$SG_ID" \
    --protocol udp \
    --port 5060 \
    --cidr 0.0.0.0/0 \
    --region "$AWS_REGION" \
    --group-rule-description "SIP UDP signaling"

# RTP media port range (uses --ip-permissions because a port range is needed)
aws ec2 authorize-security-group-ingress \
    --group-id "$SG_ID" \
    --ip-permissions \
    "IpProtocol=udp,FromPort=10000,ToPort=20000,IpRanges=[{CidrIp=0.0.0.0/0,Description='RTP media streams'}]" \
    --region "$AWS_REGION"

# Step 6: Allocate Elastic IP (keep the full JSON so both fields can be read)
echo "[6/10] Allocating Elastic IP..."
ELASTIC_IP_ALLOC=$(aws ec2 allocate-address \
    --domain vpc \
    --region "$AWS_REGION" \
    --tag-specifications "ResourceType=elastic-ip,Tags=[{Key=Name,Value=$PROJECT_NAME-eip},{Key=Project,Value=$PROJECT_NAME}]")

# jq is required here to extract both values from the single response
ELASTIC_IP=$(echo "$ELASTIC_IP_ALLOC" | jq -r '.PublicIp')
ALLOCATION_ID=$(echo "$ELASTIC_IP_ALLOC" | jq -r '.AllocationId')

echo "Allocated Elastic IP: $ELASTIC_IP (Allocation ID: $ALLOCATION_ID)"

# Step 7: Store credentials in Parameter Store.
# Errors are suppressed ("|| true") so re-runs against pre-existing
# parameters do not abort the whole deployment.
echo "[7/10] Storing credentials in Parameter Store..."
aws ssm put-parameter \
    --name "/$PROJECT_NAME/elevenlabs/phone_e164" \
    --value "$ELEVENLABS_PHONE_E164" \
    --type SecureString \
    --region "$AWS_REGION" \
    --overwrite 2>/dev/null || true

aws ssm put-parameter \
    --name "/$PROJECT_NAME/elevenlabs/sip_password" \
    --value "$ELEVENLABS_SIP_PASSWORD" \
    --type SecureString \
    --region "$AWS_REGION" \
    --overwrite 2>/dev/null || true

aws ssm put-parameter \
    --name "/$PROJECT_NAME/network/elastic_ip" \
    --value "$ELASTIC_IP" \
    --type String \
    --region "$AWS_REGION" \
    --overwrite 2>/dev/null || true

echo "Credentials stored in Parameter Store"

# Step 8: Create IAM Role for EC2
echo "[8/10] Creating IAM Role..."
ROLE_NAME="$PROJECT_NAME-asterisk-role"

# Trust policy allowing EC2 instances to assume the role.
cat > /tmp/trust-policy.json <<EOF
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Principal": {
        "Service": "ec2.amazonaws.com"
      },
      "Action": "sts:AssumeRole"
    }
  ]
}
EOF

aws iam create-role \
    --role-name "$ROLE_NAME" \
    --assume-role-policy-document file:///tmp/trust-policy.json \
    --region "$AWS_REGION" 2>/dev/null || echo "Role already exists"

# Inline policy: CloudWatch metrics/logs, SSM parameter reads, and the
# EIP association calls the instance performs at boot.
cat > /tmp/role-policy.json <<EOF
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": [
        "cloudwatch:PutMetricData",
        "logs:CreateLogGroup",
        "logs:CreateLogStream",
        "logs:PutLogEvents",
        "ssm:GetParameter",
        "ssm:GetParameters",
        "ec2:DescribeAddresses",
        "ec2:AssociateAddress"
      ],
      "Resource": "*"
    }
  ]
}
EOF

aws iam put-role-policy \
    --role-name "$ROLE_NAME" \
    --policy-name "$PROJECT_NAME-asterisk-policy" \
    --policy-document file:///tmp/role-policy.json \
    --region "$AWS_REGION"

aws iam create-instance-profile \
    --instance-profile-name "$ROLE_NAME" \
    --region "$AWS_REGION" 2>/dev/null || echo "Instance profile already exists"

aws iam add-role-to-instance-profile \
    --instance-profile-name "$ROLE_NAME" \
    --role-name "$ROLE_NAME" \
    --region "$AWS_REGION" 2>/dev/null || true

# Wait until the instance profile actually exists instead of relying
# on a blind sleep, then pause briefly for IAM's eventual consistency
# to propagate to EC2 before run-instances references the profile.
aws iam wait instance-profile-exists \
    --instance-profile-name "$ROLE_NAME"
sleep 10

echo "Created IAM Role: $ROLE_NAME"

# Step 9: Get Amazon Linux 2 AMI (newest available image in this region,
# selected by sorting on CreationDate and taking the last entry)
echo "[9/10] Finding Amazon Linux 2 AMI..."
AMI_ID=$(aws ec2 describe-images \
    --owners amazon \
    --filters \
        "Name=name,Values=amzn2-ami-hvm-*-x86_64-gp2" \
        "Name=state,Values=available" \
    --query 'sort_by(Images, &CreationDate)[-1].ImageId' \
    --output text \
    --region "$AWS_REGION")

echo "Using AMI: $AMI_ID"

# Step 10: Launch EC2 Instance
echo "[10/10] Launching EC2 Instance..."

# Create user data script. The heredoc delimiter is quoted, so nothing
# expands at write time; REPLACE_PROJECT_NAME is substituted via sed below.
# NOTE(review): the bootstrap pulls the full installer from
# "s3://$PROJECT_NAME-scripts/user-data.sh", but this script never creates
# that bucket, and the inline fallback below is only a stub -- confirm the
# bucket and script exist before relying on this path.
cat > /tmp/user-data.sh <<'USERDATA_EOF'
#!/bin/bash
set -euo pipefail

# Get instance metadata
INSTANCE_ID=$(ec2-metadata --instance-id | cut -d " " -f 2)
PRIVATE_IP=$(ec2-metadata --local-ipv4 | cut -d " " -f 2)

# Retrieve configuration from Parameter Store
AWS_REGION="$(ec2-metadata --availability-zone | cut -d " " -f 2 | sed 's/[a-z]$//')"
PROJECT_NAME="REPLACE_PROJECT_NAME"
ELASTIC_IP=$(aws ssm get-parameter --name "/$PROJECT_NAME/network/elastic_ip" --query 'Parameter.Value' --output text --region "$AWS_REGION")
ELEVENLABS_PHONE_E164=$(aws ssm get-parameter --name "/$PROJECT_NAME/elevenlabs/phone_e164" --with-decryption --query 'Parameter.Value' --output text --region "$AWS_REGION")
ELEVENLABS_PASSWORD=$(aws ssm get-parameter --name "/$PROJECT_NAME/elevenlabs/sip_password" --with-decryption --query 'Parameter.Value' --output text --region "$AWS_REGION")

# Download and run full installation script
aws s3 cp "s3://$PROJECT_NAME-scripts/user-data.sh" /tmp/install-asterisk.sh --region "$AWS_REGION" 2>/dev/null || {
    # If S3 script not available, use inline installation
    yum update -y
    yum groupinstall -y "Development Tools"
    # ... rest of installation continues inline ...
    echo "Installation complete"
}
USERDATA_EOF

sed -i "s/REPLACE_PROJECT_NAME/$PROJECT_NAME/g" /tmp/user-data.sh

INSTANCE_ID=$(aws ec2 run-instances \
    --image-id "$AMI_ID" \
    --instance-type "$INSTANCE_TYPE" \
    --key-name "$SSH_KEY_NAME" \
    --security-group-ids "$SG_ID" \
    --subnet-id "$SUBNET_ID" \
    --iam-instance-profile "Name=$ROLE_NAME" \
    --user-data "file:///tmp/user-data.sh" \
    --block-device-mappings '[{"DeviceName":"/dev/xvda","Ebs":{"VolumeSize":30,"VolumeType":"gp3","Encrypted":true}}]' \
    --tag-specifications "ResourceType=instance,Tags=[{Key=Name,Value=$PROJECT_NAME-asterisk},{Key=Project,Value=$PROJECT_NAME},{Key=Role,Value=Primary}]" \
    --region "$AWS_REGION" \
    --query 'Instances[0].InstanceId' \
    --output text)

echo "Launched EC2 Instance: $INSTANCE_ID"
echo "Waiting for instance to be running..."

aws ec2 wait instance-running \
    --instance-ids "$INSTANCE_ID" \
    --region "$AWS_REGION"

# Associate Elastic IP so the instance keeps a stable public address
echo "Associating Elastic IP..."
aws ec2 associate-address \
    --instance-id "$INSTANCE_ID" \
    --allocation-id "$ALLOCATION_ID" \
    --region "$AWS_REGION"

echo ""
echo "=== Deployment Complete ==="
echo ""
echo "Infrastructure Details:"
echo "======================="
echo "VPC ID: $VPC_ID"
echo "Subnet ID: $SUBNET_ID"
echo "Security Group ID: $SG_ID"
echo "Instance ID: $INSTANCE_ID"
echo "Elastic IP: $ELASTIC_IP"
echo "SIP Endpoint: sip:$ELASTIC_IP:5060"
echo ""
echo "Next Steps:"
echo "==========="
echo "1. Wait 10-15 minutes for Asterisk installation to complete"
echo "2. SSH into instance: ssh -i ~/.ssh/$SSH_KEY_NAME.pem ec2-user@$ELASTIC_IP"
echo "3. Check installation logs: tail -f /var/log/asterisk-setup.log"
echo "4. Verify Asterisk: sudo asterisk -rx 'pjsip show endpoints'"
echo ""
echo "Save these values for later:"
echo "export INSTANCE_ID=$INSTANCE_ID"
echo "export ELASTIC_IP=$ELASTIC_IP"
echo "export VPC_ID=$VPC_ID"
echo ""

# Cleanup temporary files written to /tmp earlier in the run
rm -f /tmp/trust-policy.json /tmp/role-policy.json /tmp/user-data.sh

echo "Deployment script finished successfully"

```

--------------------------------------------------------------------------------
/aws-sip-trunk/docs/DEPLOYMENT.md:
--------------------------------------------------------------------------------

```markdown
# AWS SIP Trunk Deployment Guide

Complete step-by-step guide for deploying Asterisk-based SIP trunk infrastructure on AWS for ElevenLabs integration.

## Prerequisites

### Required Tools
- AWS CLI v2.x configured with credentials
- Terraform >= 1.5.0 (for IaC deployment) OR Bash (for manual deployment)
- SSH client for server access
- jq (for JSON parsing in scripts)

### AWS Account Requirements
- Active AWS account with administrative access
- EC2, VPC, S3, CloudWatch, Systems Manager permissions
- Available Elastic IP quota (at least 1)
- SSH key pair created in target region

### ElevenLabs Requirements
- ElevenLabs account with SIP trunk capability
- Phone number registered in E.164 format
- SIP trunk credentials (username/password)

## Deployment Method 1: Terraform (Recommended)

### Step 1: Prepare Environment

```bash
# Clone or navigate to project directory
cd /workspace/aws-sip-trunk

# Export required variables
export AWS_REGION="us-east-1"
export TF_VAR_ssh_key_name="your-ssh-key-name"
export TF_VAR_elevenlabs_phone_e164="+12025551234"
export TF_VAR_elevenlabs_sip_password="your-sip-password"
export TF_VAR_alarm_email="ops@example.com"  # Optional

# Optional: Customize deployment
export TF_VAR_instance_type="t3.medium"
export TF_VAR_environment="prod"
export TF_VAR_enable_high_availability="false"
```

### Step 2: Initialize Terraform

```bash
cd terraform
terraform init
```

### Step 3: Review Planned Changes

```bash
terraform plan
```

Review the output to understand what resources will be created:
- VPC with public subnet
- EC2 instance (t3.medium by default)
- Elastic IP
- Security Groups with SIP/RTP rules
- S3 buckets for recordings and backups
- CloudWatch monitoring and alarms
- Systems Manager parameters for credentials

### Step 4: Deploy Infrastructure

```bash
terraform apply
```

Type `yes` when prompted. Deployment takes approximately 15-20 minutes:
- 2-3 minutes for infrastructure provisioning
- 12-15 minutes for Asterisk compilation and configuration

### Step 5: Verify Deployment

```bash
# Get deployment outputs
terraform output

# Save important values
INSTANCE_ID=$(terraform output -raw asterisk_instance_id)
ELASTIC_IP=$(terraform output -raw asterisk_public_ip)
SIP_ENDPOINT=$(terraform output -raw sip_endpoint)

echo "SIP Endpoint: $SIP_ENDPOINT"
```

### Step 6: Test SIP Connectivity

```bash
# SSH into instance
SSH_COMMAND=$(terraform output -raw ssh_command)
eval $SSH_COMMAND

# Once logged in, check Asterisk status
sudo asterisk -rx "core show version"
sudo asterisk -rx "pjsip show endpoints"
sudo asterisk -rx "pjsip show transports"

# Enable detailed logging for troubleshooting
sudo asterisk -rx "pjsip set logger on"

# Check logs
sudo tail -f /var/log/asterisk/full
```

### Step 7: Configure ElevenLabs

In your ElevenLabs dashboard:

1. Navigate to SIP Trunk configuration
2. Add new SIP trunk with these settings:
   - **SIP Server**: `sip:YOUR_ELASTIC_IP:5060`
   - **Transport**: TCP
   - **Username**: Your E.164 phone number (e.g., `+12025551234`)
   - **Password**: Your SIP trunk password
   - **Codec**: ulaw, alaw

3. Assign the SIP trunk to your ElevenLabs agent

### Step 8: Test Call Flow

```bash
# From Asterisk CLI, test outbound call to ElevenLabs
sudo asterisk -rx "channel originate PJSIP/YOUR_AGENT_NUMBER@elevenlabs extension s@from-elevenlabs"

# Monitor call progress
sudo asterisk -rx "core show channels"
sudo asterisk -rx "pjsip show channelstats"
```

## Deployment Method 2: Manual Script

Alternative deployment using AWS CLI commands directly.

### Step 1: Set Environment Variables

```bash
export AWS_REGION="us-east-1"
export PROJECT_NAME="asterisk-sip-trunk"
export ELEVENLABS_PHONE_E164="+12025551234"
export ELEVENLABS_SIP_PASSWORD="your-sip-password"
export SSH_KEY_NAME="your-ssh-key-name"
export SSH_ALLOWED_CIDR="YOUR_IP/32"  # Optional, for SSH access
```

### Step 2: Run Deployment Script

```bash
cd /workspace/aws-sip-trunk/scripts
./deploy-asterisk-aws.sh
```

The script will:
1. Create VPC and networking components
2. Configure security groups
3. Allocate Elastic IP
4. Store credentials in Parameter Store
5. Create IAM roles
6. Launch EC2 instance with Asterisk

### Step 3: Monitor Installation

```bash
# Wait for instance to be running
aws ec2 describe-instances \
  --instance-ids $INSTANCE_ID \
  --query 'Reservations[0].Instances[0].State.Name' \
  --output text

# SSH and monitor installation logs
ssh -i ~/.ssh/$SSH_KEY_NAME.pem ec2-user@$ELASTIC_IP
tail -f /var/log/asterisk-setup.log
```

Installation is complete when you see:
```
=== Asterisk SIP Trunk Installation Complete ===
```

## Post-Deployment Configuration

### Enable Call Recordings

Edit `/etc/asterisk/extensions.conf` on the server:

```asterisk
[from-elevenlabs]
exten => _X.,1,NoOp(Incoming call from ElevenLabs)
 same => n,Set(CALLFILENAME=rec_${STRFTIME(${EPOCH},,%Y%m%d-%H%M%S)}_${CALLERID(num)})
 same => n,MixMonitor(/var/spool/asterisk/recordings/${CALLFILENAME}.wav)
 same => n,Answer()
 ; ... rest of dialplan
```

Reload configuration:
```bash
sudo asterisk -rx "dialplan reload"
```

### Configure TLS (Optional but Recommended)

Generate self-signed certificate:
```bash
sudo openssl req -new -x509 -days 365 -nodes \
  -out /etc/asterisk/asterisk.pem \
  -keyout /etc/asterisk/asterisk.key
sudo chown asterisk:asterisk /etc/asterisk/asterisk.*
```

Update `/etc/asterisk/pjsip.conf`:
```ini
[transport-tls]
type=transport
protocol=tls
bind=0.0.0.0:5061
cert_file=/etc/asterisk/asterisk.pem
priv_key_file=/etc/asterisk/asterisk.key
external_media_address=YOUR_ELASTIC_IP
external_signaling_address=YOUR_ELASTIC_IP
```

Update Security Group to allow TCP 5061:
```bash
aws ec2 authorize-security-group-ingress \
  --group-id $SG_ID \
  --protocol tcp \
  --port 5061 \
  --cidr 0.0.0.0/0 \
  --region $AWS_REGION
```

Reload Asterisk:
```bash
sudo systemctl restart asterisk
```

### Configure DNS (Optional)

If using Route 53:

```bash
# Create A record
aws route53 change-resource-record-sets \
  --hosted-zone-id YOUR_ZONE_ID \
  --change-batch '{
    "Changes": [{
      "Action": "CREATE",
      "ResourceRecordSet": {
        "Name": "sip.yourdomain.com",
        "Type": "A",
        "TTL": 300,
        "ResourceRecords": [{"Value": "YOUR_ELASTIC_IP"}]
      }
    }]
  }'

# Create SRV record
aws route53 change-resource-record-sets \
  --hosted-zone-id YOUR_ZONE_ID \
  --change-batch '{
    "Changes": [{
      "Action": "CREATE",
      "ResourceRecordSet": {
        "Name": "_sip._tcp.yourdomain.com",
        "Type": "SRV",
        "TTL": 300,
        "ResourceRecords": [{"Value": "10 50 5060 sip.yourdomain.com"}]
      }
    }]
  }'
```

## Monitoring and Maintenance

### CloudWatch Dashboard

Access your deployment dashboard:
```
https://console.aws.amazon.com/cloudwatch/home?region=us-east-1#dashboards:name=asterisk-sip-trunk-dashboard
```

Key metrics to monitor:
- **CPU Utilization**: Should be < 30% under normal load
- **Memory Usage**: Should be < 70%
- **SIP Registration Failures**: Should be 0
- **Call Failures**: Should be < 5%
- **RTP Packet Loss**: Should be < 1%

### Log Analysis

View Asterisk logs:
```bash
# Full log
sudo tail -f /var/log/asterisk/full

# Filter for errors
sudo grep ERROR /var/log/asterisk/full | tail -20

# View specific call
sudo grep "Call-ID-HERE" /var/log/asterisk/full
```

CloudWatch Logs Insights queries:
```
# Count errors by type
fields @timestamp, @message
| filter @message like /ERROR/
| stats count() by @message
| sort count desc

# Call duration analysis
fields @timestamp, @message
| filter @message like /CDR/
| parse @message "duration=*," as duration
| stats avg(duration), max(duration), min(duration)
```

### Backup Configuration

Manual backup:
```bash
# Create backup archive
sudo tar -czf /tmp/asterisk-config-$(date +%Y%m%d).tar.gz \
  /etc/asterisk/

# Upload to S3
aws s3 cp /tmp/asterisk-config-*.tar.gz \
  s3://$PROJECT_NAME-backups-$ACCOUNT_ID/
```

Automated daily backup (already configured via cron):
```bash
# Check backup cron job
sudo crontab -l
```

### Restore from Backup

```bash
# Download backup
aws s3 cp s3://$PROJECT_NAME-backups-$ACCOUNT_ID/asterisk-config-YYYYMMDD.tar.gz /tmp/

# Extract
sudo tar -xzf /tmp/asterisk-config-YYYYMMDD.tar.gz -C /

# Reload Asterisk
sudo asterisk -rx "core reload"
```

## Scaling and High Availability

### Enable HA Mode

Update Terraform variables:
```bash
export TF_VAR_enable_high_availability="true"
terraform apply
```

This creates:
- Secondary EC2 instance in different AZ
- Secondary Elastic IP
- Automatic failover mechanism

### Manual Failover

```bash
# Disassociate EIP from primary
aws ec2 disassociate-address \
  --association-id $ASSOCIATION_ID

# Associate with standby
aws ec2 associate-address \
  --instance-id $STANDBY_INSTANCE_ID \
  --allocation-id $ALLOCATION_ID
```

### Horizontal Scaling

For high call volumes, deploy multiple Asterisk instances behind load balancer:
1. Create Network Load Balancer (NLB, TCP mode) — ALB is HTTP/HTTPS only and cannot carry SIP/TCP
2. Deploy multiple Asterisk instances
3. Use shared RDS database for CDR
4. Configure SIP registration sharing

## Troubleshooting

See [TROUBLESHOOTING.md](TROUBLESHOOTING.md) for detailed troubleshooting guide.

Common issues:
- One-way audio → Check Security Group RTP rules
- Registration failures → Verify credentials in Parameter Store
- High CPU → Check for SIP attacks, enable Fail2Ban
- No audio → Verify NAT configuration in pjsip.conf

## Cost Optimization

### Production Environment
- Use t3.medium for up to 50 concurrent calls
- Enable detailed CloudWatch monitoring
- Set S3 lifecycle policies for recordings
- Estimated cost: ~$50-60/month

### Development Environment
- Use t3.small for testing
- Disable CloudWatch detailed monitoring
- Shorter S3 retention periods
- Estimated cost: ~$25-30/month

### Cost Reduction Tips
1. Use Reserved Instances for 1-year savings (30-40% discount)
2. Enable S3 Intelligent-Tiering for recordings
3. Use VPC Flow Logs only when troubleshooting
4. Delete old CloudWatch logs regularly

## Security Best Practices

1. **Network Security**
   - Restrict SSH access to specific IP ranges
   - Consider VPN access instead of public SSH
   - Enable VPC Flow Logs for audit

2. **Credential Management**
   - Rotate SIP passwords quarterly
   - Use AWS Secrets Manager for production
   - Enable MFA for AWS console access

3. **SIP Security**
   - Enable Fail2Ban (already configured)
   - Monitor for brute-force attacks
   - Consider IP whitelisting for known endpoints

4. **System Security**
   - Enable automatic security updates
   - Regular AMI updates
   - Enable AWS Config for compliance

## Next Steps

1. **Production Readiness Checklist**
   - [ ] Enable TLS for SIP transport
   - [ ] Configure DNS with Route 53
   - [ ] Set up CloudWatch alarms
   - [ ] Test failover procedures
   - [ ] Document call flows
   - [ ] Create runbook for operations

2. **Integration Testing**
   - [ ] Test inbound calls from ElevenLabs
   - [ ] Test outbound calls to ElevenLabs
   - [ ] Verify call recordings
   - [ ] Test DTMF functionality
   - [ ] Load testing with multiple concurrent calls

3. **Monitoring Setup**
   - [ ] Configure SNS notifications
   - [ ] Set up PagerDuty/OpsGenie integration
   - [ ] Create custom CloudWatch dashboards
   - [ ] Enable AWS Cost Anomaly Detection

## Support and Resources

- **Asterisk Documentation**: https://docs.asterisk.org/
- **ElevenLabs SIP Trunk**: https://elevenlabs.io/docs/agents-platform/phone-numbers/sip-trunking
- **AWS VoIP Best Practices**: https://docs.aws.amazon.com/whitepapers/latest/real-time-communication-on-aws/
- **Project Repository**: /workspace/aws-sip-trunk/

```

--------------------------------------------------------------------------------
/aws-sip-trunk/PROJECT_INDEX.md:
--------------------------------------------------------------------------------

```markdown
# AWS SIP Trunk Project Index

Complete file reference for the AWS SIP trunk deployment project.

## 📋 Quick Navigation

- **Getting Started**: [QUICKSTART.md](QUICKSTART.md)
- **Full Deployment**: [docs/DEPLOYMENT.md](docs/DEPLOYMENT.md)
- **Troubleshooting**: [docs/TROUBLESHOOTING.md](docs/TROUBLESHOOTING.md)
- **Project Overview**: [DEPLOYMENT_SUMMARY.md](DEPLOYMENT_SUMMARY.md)

## 📁 File Structure

```
aws-sip-trunk/
├── Documentation Files
│   ├── README.md                     # Project overview and introduction
│   ├── QUICKSTART.md                 # 5-minute setup guide
│   ├── DEPLOYMENT_SUMMARY.md         # Architecture and design decisions
│   ├── PROJECT_INDEX.md              # This file
│   └── docs/
│       ├── DEPLOYMENT.md             # Detailed deployment instructions
│       └── TROUBLESHOOTING.md        # Issue resolution guide
│
├── Infrastructure as Code
│   └── terraform/
│       ├── main.tf                   # Core Terraform configuration
│       ├── variables.tf              # Input variable definitions
│       ├── outputs.tf                # Output value definitions
│       ├── networking.tf             # VPC, subnets, security groups
│       ├── ec2.tf                    # EC2 instances, IAM roles
│       ├── storage.tf                # S3 buckets, Parameter Store
│       ├── monitoring.tf             # CloudWatch alarms, dashboard
│       └── terraform.tfvars.example  # Example configuration
│
├── Deployment Scripts
│   └── scripts/
│       ├── deploy-asterisk-aws.sh    # Manual AWS CLI deployment
│       └── user-data.sh              # EC2 bootstrap script
│
├── Configuration Templates
│   └── config/
│       ├── pjsip.conf.j2             # PJSIP Jinja2 template
│       ├── extensions.conf.j2        # Dialplan Jinja2 template
│       └── rtp.conf                  # RTP configuration
│
├── Testing
│   └── tests/
│       └── test_sip_connectivity.py  # Integration tests
│
└── Project Configuration
    ├── pyproject.toml                # Python project metadata
    └── .gitignore                    # Git exclusions
```

## 📄 File Descriptions

### Documentation

#### [README.md](README.md)
- **Purpose**: Main project documentation and entry point
- **Audience**: All users
- **Contains**: 
  - Project overview and features
  - AWS services used
  - Design patterns
  - Known issues summary
  - Deployment script overview
  - Environment variables reference

#### [QUICKSTART.md](QUICKSTART.md)
- **Purpose**: Fast-track deployment guide
- **Audience**: Users who want to deploy quickly
- **Contains**:
  - Prerequisites checklist
  - 5-minute Terraform setup
  - ElevenLabs configuration steps
  - Quick troubleshooting
  - Production checklist
  - Cost estimates

#### [DEPLOYMENT_SUMMARY.md](DEPLOYMENT_SUMMARY.md)
- **Purpose**: Comprehensive project summary
- **Audience**: Technical decision makers, architects
- **Contains**:
  - Architecture diagrams
  - AWS resources detailed list
  - Configuration file explanations
  - Testing procedures
  - Security best practices
  - Maintenance procedures

#### [docs/DEPLOYMENT.md](docs/DEPLOYMENT.md)
- **Purpose**: Step-by-step deployment instructions
- **Audience**: DevOps engineers, system administrators
- **Contains**:
  - Detailed prerequisites
  - Two deployment methods (Terraform + Manual)
  - Post-deployment configuration
  - Monitoring setup
  - Backup/restore procedures
  - Scaling and HA configuration

#### [docs/TROUBLESHOOTING.md](docs/TROUBLESHOOTING.md)
- **Purpose**: Comprehensive issue resolution guide
- **Audience**: Operations team, support engineers
- **Contains**:
  - Quick diagnostic commands
  - 9 common issues with solutions
  - Emergency procedures
  - Performance tuning
  - Diagnostic report script

### Infrastructure as Code (Terraform)

#### [terraform/main.tf](terraform/main.tf)
- **Purpose**: Core Terraform configuration
- **Resources**: Provider, data sources, local variables
- **Key Components**:
  - AWS provider configuration
  - Default tags
  - Amazon Linux 2 AMI lookup
  - Version requirements

#### [terraform/variables.tf](terraform/variables.tf)
- **Purpose**: Input variable definitions with validation
- **Variables** (27 total):
  - AWS configuration (region, instance type)
  - Network settings (VPC CIDR, subnets)
  - ElevenLabs credentials
  - Feature flags (HA, TLS, recordings)
  - Monitoring configuration
  - DNS settings

#### [terraform/outputs.tf](terraform/outputs.tf)
- **Purpose**: Export deployment information
- **Outputs** (20 total):
  - Instance IDs and IPs
  - SIP endpoint URI
  - S3 bucket names
  - CloudWatch resources
  - SSH commands
  - Deployment summary

#### [terraform/networking.tf](terraform/networking.tf)
- **Purpose**: Network infrastructure
- **Resources**:
  - VPC with DNS enabled
  - Internet Gateway
  - Public subnets (2x for HA)
  - Route tables
  - Elastic IPs (1-2)
  - Security Groups with rules
  - Route 53 DNS records (optional)

#### [terraform/ec2.tf](terraform/ec2.tf)
- **Purpose**: Compute resources
- **Resources**:
  - IAM role and policies
  - Instance profile
  - EC2 instances (primary + optional standby)
  - EBS volumes
  - CloudWatch log groups
  - User data script integration

#### [terraform/storage.tf](terraform/storage.tf)
- **Purpose**: Storage and secrets management
- **Resources**:
  - S3 bucket for recordings (encrypted, versioned)
  - S3 bucket for backups (optional)
  - Lifecycle policies
  - Parameter Store parameters (3x)
  - Public access blocks

#### [terraform/monitoring.tf](terraform/monitoring.tf)
- **Purpose**: Observability infrastructure
- **Resources**:
  - SNS topic for alarms
  - CloudWatch alarms (6x):
    - Instance status check
    - CPU utilization
    - Memory utilization
    - Disk space
    - SIP registration failures
    - Call failure rate
  - Log metric filters
  - CloudWatch dashboard

#### [terraform/terraform.tfvars.example](terraform/terraform.tfvars.example)
- **Purpose**: Example configuration file
- **Usage**: Copy to `terraform.tfvars` and customize
- **Contains**: All configurable variables with examples

### Deployment Scripts

#### [scripts/deploy-asterisk-aws.sh](scripts/deploy-asterisk-aws.sh)
- **Purpose**: Manual deployment using AWS CLI
- **When to Use**: Alternative to Terraform, one-time deployments
- **Steps**:
  1. Create VPC and networking (10 steps)
  2. Configure Security Groups
  3. Allocate Elastic IP
  4. Store credentials in Parameter Store
  5. Create IAM roles
  6. Launch EC2 instance
  7. Associate Elastic IP
- **Requirements**: AWS CLI, environment variables set
- **Execution Time**: ~20-25 minutes

#### [scripts/user-data.sh](scripts/user-data.sh)
- **Purpose**: EC2 instance bootstrap script
- **Execution**: Runs on first boot via EC2 user data
- **Actions**:
  1. Update system packages
  2. Install build dependencies
  3. Download and compile Asterisk 21
  4. Configure PJSIP, RTP, dialplan
  5. Set up systemd service
  6. Configure Fail2Ban
  7. Install CloudWatch agent
  8. Create health check script
- **Execution Time**: ~15-20 minutes
- **Logs**: `/var/log/asterisk-setup.log`

### Configuration Templates

#### [config/pjsip.conf.j2](config/pjsip.conf.j2)
- **Purpose**: PJSIP configuration template
- **Format**: Jinja2 template
- **Sections**:
  - Global settings
  - Transport configuration (TCP/TLS)
  - Endpoint definition
  - AOR (Address of Record)
  - Authentication
  - Identify rules
  - ACL (optional)
  - Custom endpoints support
- **Template Variables**: 20+ customizable parameters

#### [config/extensions.conf.j2](config/extensions.conf.j2)
- **Purpose**: Asterisk dialplan template
- **Format**: Jinja2 template
- **Contexts**:
  - `from-elevenlabs`: Inbound calls
  - `outbound-to-elevenlabs`: Outbound calls
  - `default`: Unauthorized calls
  - `health-check`: Monitoring endpoint
  - Custom contexts support
- **Features**:
  - Call recording with S3 upload
  - IVR menu system
  - DTMF handling
  - Hangup handlers
  - CDR tracking

### Testing

#### [tests/test_sip_connectivity.py](tests/test_sip_connectivity.py)
- **Purpose**: Integration test suite
- **Framework**: pytest
- **Test Classes**:
  - `TestSIPConnectivity`: Infrastructure and connectivity (9 tests)
  - `TestSIPRegistration`: SIP endpoint configuration
  - `TestCallFlow`: Call establishment and audio
  - `TestMonitoring`: CloudWatch alarms and dashboards
- **Requirements**: boto3, AWS credentials, deployed infrastructure
- **Execution**: `pytest tests/test_sip_connectivity.py`

### Project Configuration

#### [pyproject.toml](pyproject.toml)
- **Purpose**: Python project metadata and dependencies
- **Build System**: setuptools >= 68
- **Dependencies**:
  - boto3, botocore (AWS SDK)
  - pyyaml, jinja2 (configuration)
  - python-dotenv (environment)
- **Dev Dependencies**:
  - pytest, pytest-cov (testing)
  - mypy, ruff, black (linting)
- **Scripts**: `aws-sip-deploy` CLI command

#### [.gitignore](.gitignore)
- **Purpose**: Git exclusions for sensitive and generated files
- **Excludes**:
  - Terraform state files
  - AWS credentials
  - SSH keys
  - Logs and temporary files
  - IDE configurations

## 🔗 File Relationships

### Deployment Flow
```
1. terraform.tfvars
   ↓
2. main.tf + variables.tf
   ↓
3. networking.tf → ec2.tf → storage.tf → monitoring.tf
   ↓
4. user-data.sh (runs on EC2)
   ↓
5. pjsip.conf.j2 + extensions.conf.j2 (generated)
   ↓
6. Asterisk running with SIP trunk
```

### Documentation Flow
```
README.md (overview)
   ↓
QUICKSTART.md (fast start)
   ↓
docs/DEPLOYMENT.md (detailed setup)
   ↓
docs/TROUBLESHOOTING.md (issue resolution)
```

## 🎯 Use Cases by Role

### DevOps Engineer
**Primary Files:**
1. `terraform/` - Infrastructure provisioning
2. `scripts/deploy-asterisk-aws.sh` - Manual deployment
3. `docs/DEPLOYMENT.md` - Deployment procedures
4. `docs/TROUBLESHOOTING.md` - Issue resolution

### System Administrator
**Primary Files:**
1. `QUICKSTART.md` - Fast deployment
2. `config/pjsip.conf.j2` - SIP configuration
3. `config/extensions.conf.j2` - Dialplan customization
4. `docs/TROUBLESHOOTING.md` - Operations guide

### Developer
**Primary Files:**
1. `pyproject.toml` - Project setup
2. `tests/test_sip_connectivity.py` - Integration tests
3. `config/*.j2` - Configuration templates
4. `DEPLOYMENT_SUMMARY.md` - Architecture understanding

### Technical Architect
**Primary Files:**
1. `DEPLOYMENT_SUMMARY.md` - Architecture overview
2. `README.md` - Design patterns
3. `terraform/*.tf` - Infrastructure design
4. `docs/DEPLOYMENT.md` - Deployment options

## 📊 File Statistics

- **Total Files**: 18 main files
- **Terraform Files**: 7 (main + 6 modules)
- **Documentation Files**: 6
- **Scripts**: 2
- **Configuration Templates**: 2
- **Tests**: 1

## 🔐 Security-Sensitive Files

**Never Commit to Git:**
- `terraform.tfvars` (contains credentials)
- `*.pem` (SSH keys)
- `*.key` (TLS private keys)
- `.env` (environment variables)

**Encrypted Storage:**
- ElevenLabs credentials → Parameter Store (AWS)
- SIP passwords → Parameter Store (AWS)
- TLS certificates → S3 with encryption

## 🔄 Update Frequency

| File | Update Frequency | Reason |
|------|-----------------|--------|
| `terraform/*.tf` | Quarterly | AWS provider updates |
| `scripts/user-data.sh` | As needed | Asterisk version updates |
| `config/*.j2` | As needed | Configuration changes |
| `docs/*.md` | Monthly | Documentation improvements |
| `tests/*.py` | As needed | New test scenarios |

## 📞 Support

For questions about specific files:
1. Check file header comments
2. Review related documentation
3. Consult [TROUBLESHOOTING.md](docs/TROUBLESHOOTING.md)
4. Create issue in project repository

---

**Last Updated**: 2025-10-01  
**Project Version**: 1.0.0  
**Maintained By**: MCP Project Orchestrator

```

--------------------------------------------------------------------------------
/scripts/consolidate_templates.py:
--------------------------------------------------------------------------------

```python
#!/usr/bin/env python3
"""
Template Consolidation Script for MCP Project Orchestrator.

This script consolidates project and component templates from various sources into a standardized format
and stores them in the target project's templates directory.

Sources:
1. /home/sparrow/projects/mcp-servers/src/templates (if exists)
2. /home/sparrow/projects/mcp-project-orchestrator/component_templates.json
3. /home/sparrow/projects/mcp-prompts-template/templates (if exists)
4. /home/sparrow/mcp/data/templates (if exists)

Target:
/home/sparrow/projects/mcp-project-orchestrator/src/mcp_project_orchestrator/templates
"""

import os
import sys
import json
import shutil
from pathlib import Path
from typing import Dict, Any, List, Optional


# Source directories and files to scan for templates; entries that do not
# exist on this machine are skipped at processing time.
SOURCES = [
    Path("/home/sparrow/projects/mcp-servers/src/templates"),
    Path("/home/sparrow/projects/mcp-project-orchestrator/component_templates.json"),
    Path("/home/sparrow/projects/mcp-prompts-template/templates"),
    Path("/home/sparrow/mcp/data/templates")
]

# Target directory where normalized templates (and index.json) are written
TARGET = Path("/home/sparrow/projects/mcp-project-orchestrator/src/mcp_project_orchestrator/templates")


def ensure_target_directory():
    """Create the target template directory plus its type subdirectories."""
    for directory in (TARGET, TARGET / "project", TARGET / "component"):
        directory.mkdir(parents=True, exist_ok=True)


def get_template_files(source_dir: Path) -> List[Path]:
    """Recursively collect JSON and YAML template files under source_dir.

    Returns an empty list (after printing a notice) when the directory does
    not exist. JSON files are listed before YAML/YML files, preserving the
    original ordering.
    """
    if not source_dir.exists():
        print(f"Source directory does not exist: {source_dir}")
        return []

    collected: List[Path] = []
    for pattern in ("**/*.json", "**/*.yaml", "**/*.yml"):
        collected.extend(source_dir.glob(pattern))
    return collected


def extract_component_templates(file_path: Path) -> List[Dict[str, Any]]:
    """Read component templates out of a component_templates.json file.

    Files not named component_templates.json, or lacking a top-level
    "component_templates" key, yield an empty list. Read/parse errors are
    printed rather than raised.
    """
    results: List[Dict[str, Any]] = []

    try:
        with open(file_path, 'r') as handle:
            payload = json.load(handle)

        is_component_file = (
            file_path.name == "component_templates.json"
            and "component_templates" in payload
        )
        if is_component_file:
            for entry in payload["component_templates"]:
                # Build the standardized component-template record
                record: Dict[str, Any] = {
                    "name": entry["name"],
                    "description": entry.get("description", ""),
                    "type": "component",
                    "files": entry.get("files", []),
                    "dependencies": entry.get("dependencies", []),
                    "variables": entry.get("variables", {}),
                    "metadata": {
                        "source": str(file_path),
                        "imported": True
                    }
                }

                # Carry over a non-empty mermaid diagram when present
                if "mermaid" in entry and entry["mermaid"]:
                    record["mermaid"] = entry["mermaid"]

                results.append(record)

    except Exception as exc:
        print(f"Error extracting component templates from {file_path}: {str(exc)}")

    return results


def extract_project_template(file_path: Path) -> Optional[Dict[str, Any]]:
    """Return a project-template dict parsed from file_path, or None.

    Data explicitly marked type == "project" is returned untouched; data
    containing both "name" and "structure" is wrapped into the standard
    project-template shape. Anything else (or a read/parse error, which is
    printed) yields None.
    """
    try:
        with open(file_path, 'r') as handle:
            # NOTE: .yaml/.yml files are currently also parsed with
            # json.load; real YAML support would need a YAML parser.
            payload = json.load(handle)

        # Explicitly tagged project template: pass through as-is
        if "type" in payload and payload["type"] == "project":
            return payload

        # Untyped data that looks like a project template: standardize it
        if "name" in payload and "structure" in payload:
            return {
                "name": payload["name"],
                "description": payload.get("description", ""),
                "type": "project",
                "structure": payload["structure"],
                "variables": payload.get("variables", {}),
                "scripts": payload.get("scripts", []),
                "metadata": {
                    "source": str(file_path),
                    "imported": True
                }
            }

    except Exception as exc:
        print(f"Error extracting project template from {file_path}: {str(exc)}")

    return None


def normalize_component_template(template: Dict[str, Any]) -> Dict[str, Any]:
    """Reshape a component template into the canonical standard form.

    Missing optional fields receive defaults; "name" is required and a
    KeyError propagates if it is absent.
    """
    default_version = {"major": 1, "minor": 0, "patch": 0}
    normalized: Dict[str, Any] = {
        "name": template["name"],
        "description": template.get("description", ""),
        "type": "component",
        "files": template.get("files", []),
        "dependencies": template.get("dependencies", []),
        "variables": template.get("variables", {}),
        "version": template.get("version", default_version),
        "metadata": template.get("metadata", {"imported": True}),
    }

    # Preserve an attached mermaid diagram when one exists
    if "mermaid" in template:
        normalized["mermaid"] = template["mermaid"]

    return normalized


def normalize_project_template(template: Dict[str, Any]) -> Dict[str, Any]:
    """Reshape a project template into the canonical standard form.

    Missing optional fields receive defaults; "name" is required and a
    KeyError propagates if it is absent.
    """
    default_version = {"major": 1, "minor": 0, "patch": 0}
    return {
        "name": template["name"],
        "description": template.get("description", ""),
        "type": "project",
        "structure": template.get("structure", []),
        "variables": template.get("variables", {}),
        "scripts": template.get("scripts", []),
        "version": template.get("version", default_version),
        "metadata": template.get("metadata", {"imported": True}),
    }


def save_template(template: Dict[str, Any]):
    """Write template into TARGET and TARGET/<type>; return the filename stem.

    The stem is derived from the template name with every character that is
    not alphanumeric, '-' or '_' replaced by '_'.
    """
    template_type = template["type"]
    safe_name = "".join(
        ch if ch.isalnum() or ch in "-_" else "_" for ch in template["name"]
    )

    destinations = (
        TARGET / f"{safe_name}.json",
        TARGET / template_type / f"{safe_name}.json",
    )
    for destination in destinations:
        with open(destination, 'w') as handle:
            json.dump(template, handle, indent=2)

    return safe_name


def process_all_sources():
    """Process every entry in SOURCES and consolidate templates into TARGET.

    Walks each configured source (either the single component_templates.json
    file or a directory of template files), normalizes what it finds, writes
    each template via save_template(), de-duplicates by template name
    (first occurrence wins), and finally writes an index.json summary into
    TARGET.
    """
    ensure_target_directory()
    
    # Track processed templates by name and type; maps template name ->
    # safe filename stem, and doubles as the duplicate filter.
    processed = {
        "component": {},
        "project": {}
    }
    
    # Process each source
    for source in SOURCES:
        print(f"Processing source: {source}")
        
        if source.is_file() and source.suffix == '.json':
            # Handle component templates JSON file
            if source.name == "component_templates.json":
                templates = extract_component_templates(source)
                for template in templates:
                    name = template["name"]
                    if name in processed["component"]:
                        print(f"  Skipping duplicate component template: {name}")
                        continue
                    
                    normalized = normalize_component_template(template)
                    safe_name = save_template(normalized)
                    processed["component"][name] = safe_name
                    print(f"  Processed component template: {name} -> {safe_name}.json")
        
        elif source.is_dir():
            # Handle directories
            template_files = get_template_files(source)
            
            for file_path in template_files:
                # Try to extract as project template first
                project_template = extract_project_template(file_path)
                
                if project_template:
                    name = project_template["name"]
                    if name in processed["project"]:
                        print(f"  Skipping duplicate project template: {name}")
                        continue
                    
                    normalized = normalize_project_template(project_template)
                    safe_name = save_template(normalized)
                    processed["project"][name] = safe_name
                    print(f"  Processed project template: {name} -> {safe_name}.json")
                
                # If not a project template, check if it has component templates
                elif file_path.suffix == '.json':
                    try:
                        with open(file_path, 'r') as f:
                            data = json.load(f)
                            
                        # Check if this contains component templates (same
                        # shape as component_templates.json, but found inside
                        # a source directory)
                        if "component_templates" in data:
                            for component in data["component_templates"]:
                                template = {
                                    "name": component["name"],
                                    "description": component.get("description", ""),
                                    "type": "component",
                                    "files": component.get("files", []),
                                    "dependencies": component.get("dependencies", []),
                                    "variables": component.get("variables", {}),
                                    "metadata": {
                                        "source": str(file_path),
                                        "imported": True
                                    }
                                }
                                
                                # Include mermaid diagram if present
                                if "mermaid" in component and component["mermaid"]:
                                    template["mermaid"] = component["mermaid"]
                                
                                name = template["name"]
                                if name in processed["component"]:
                                    print(f"  Skipping duplicate component template: {name}")
                                    continue
                                
                                normalized = normalize_component_template(template)
                                safe_name = save_template(normalized)
                                processed["component"][name] = safe_name
                                print(f"  Processed component template: {name} -> {safe_name}.json")
                    
                    except Exception as e:
                        print(f"Error processing file {file_path}: {str(e)}")
    
    # Generate an index file summarizing what was written
    index = {
        "component_templates": {
            "templates": list(processed["component"].keys()),
            "count": len(processed["component"])
        },
        "project_templates": {
            "templates": list(processed["project"].keys()),
            "count": len(processed["project"])
        }
    }
    
    # Save index file
    with open(TARGET / "index.json", 'w') as f:
        json.dump(index, f, indent=2)
    
    print(f"\nConsolidation complete.")
    print(f"Component templates: {len(processed['component'])}")
    print(f"Project templates: {len(processed['project'])}")


if __name__ == "__main__":
    # Run the full consolidation when executed as a script.
    process_all_sources()
```

--------------------------------------------------------------------------------
/.github/workflows/ci-cd.yml:
--------------------------------------------------------------------------------

```yaml
name: CI/CD Pipeline

on:
  push:
    branches: [ main ]
    tags:
      - 'v*'
  pull_request:
    branches: [ main ]
  workflow_dispatch:
    inputs:
      deploy:
        description: 'Deploy to production'
        required: false
        default: false
        type: boolean

jobs:
  lint:
    name: Lint Code
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.12'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install ruff mypy
          pip install -e .
      - name: Lint with ruff
        run: |
          ruff check .
      - name: Type check with mypy
        run: |
          mypy src/
      - name: Check for TODO markers
        run: |
          # Find TODO comments and report them
          TODOS=$(grep -r "TODO" --include="*.py" src/ || true)
          if [ -n "$TODOS" ]; then
            echo "::warning::Found TODO comments in code:"
            echo "$TODOS"
          fi

  test:
    name: Run Tests
    needs: lint
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.12'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install pytest pytest-cov
          pip install -e .
      - name: Test with pytest
        run: |
          pytest --cache-clear --cov=src/ --cov-report=xml
      - name: Upload coverage report
        uses: codecov/codecov-action@v3
        with:
          file: ./coverage.xml
      - name: Verify environment
        run: |
          # Run the environment verification script
          python scripts/verify_environment.py

  update-changelog:
    name: Update Changelog
    runs-on: ubuntu-latest
    if: github.event_name == 'push' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v'))
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0  # Fetch all history for changelog generation
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.12'
      - name: Update changelog
        run: |
          python scripts/update_changelog.py
      - name: Commit updated changelog
        if: github.ref == 'refs/heads/main'
        run: |
          if git diff --name-only | grep -q "CHANGELOG.md"; then
            git config --local user.email "[email protected]"
            git config --local user.name "GitHub Action"
            git add CHANGELOG.md
            git commit -m "Update CHANGELOG.md [skip ci]"
            git push
          else
            echo "No changes to CHANGELOG.md"
          fi

  build:
    name: Build Container
    needs: test
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Docker
        uses: docker/setup-buildx-action@v3
      - name: Cache Docker layers
        uses: actions/cache@v3
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-
      - name: Build container image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./Containerfile
          push: false
          tags: mcp-project-orchestrator:${{ github.sha }}
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache-new
          outputs: type=docker,dest=/tmp/mcp-image.tar
      - name: Move cache
        run: |
          rm -rf /tmp/.buildx-cache
          mv /tmp/.buildx-cache-new /tmp/.buildx-cache
      - name: Upload image artifact
        uses: actions/upload-artifact@v3
        with:
          name: container-image
          path: /tmp/mcp-image.tar
          retention-days: 1

  mcp-inspect:
    # Smoke-tests the freshly built container: loads the image, starts the MCP
    # server inside it, and probes it with the MCP Inspector CLI.
    name: Test MCP Server
    needs: build
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '18'
      - name: Set up Docker
        uses: docker/setup-buildx-action@v3
      # Retrieve the image tarball produced by the build job.
      - name: Download container image
        uses: actions/download-artifact@v3
        with:
          name: container-image
          path: /tmp
      - name: Load container image
        run: |
          docker load -i /tmp/mcp-image.tar
      # Run detached with the workspace bind-mounted so the module under test
      # comes from the checkout, not from the code baked into the image.
      - name: Run MCP Server
        run: |
          docker run -d -p 8080:8080 \
            -v ${{ github.workspace }}:/app \
            --workdir /app \
            --entrypoint python \
            --name mcp-server \
            mcp-project-orchestrator:${{ github.sha }} \
            -m mcp_project_orchestrator.fastmcp
      - name: Install MCP Inspector
        run: |
          npm install -g @modelcontextprotocol/inspector
      # NOTE(review): a fixed sleep is racy; polling the port until it answers
      # would be sturdier.
      - name: Wait for server to start
        run: sleep 5
      - name: Test with MCP Inspector
        run: |
          # Basic connectivity test
          npx @modelcontextprotocol/inspector http://localhost:8080
          
          # Validation test
          npx @modelcontextprotocol/inspector http://localhost:8080 --validate
          
          # Try interactive mode but exit immediately (for testing connectivity)
          echo "exit" | npx @modelcontextprotocol/inspector http://localhost:8080 --interactive || true
      - name: Stop MCP Server
        run: |
          docker stop mcp-server
          docker rm mcp-server

  publish:
    # Pushes the tested image to GHCR. Runs only for pushes to main or v* tags.
    name: Publish Container
    if: github.event_name == 'push' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v'))
    needs: [mcp-inspect, update-changelog]
    runs-on: ubuntu-latest
    steps:
      # Reuse the image built earlier instead of rebuilding.
      - name: Download container image
        uses: actions/download-artifact@v3
        with:
          name: container-image
          path: /tmp
      - name: Set up Docker
        uses: docker/setup-buildx-action@v3
      - name: Load container image
        run: |
          docker load -i /tmp/mcp-image.tar
      # Derive the publish tag: strip the leading `v` from tag refs, or fall
      # back to `latest` for branch pushes.
      - name: Extract version for tags
        id: get_version
        run: |
          if [[ $GITHUB_REF == refs/tags/v* ]]; then
            VERSION=${GITHUB_REF#refs/tags/v}
            echo "version=$VERSION" >> $GITHUB_OUTPUT
            echo "Using tag version: $VERSION"
          else
            echo "version=latest" >> $GITHUB_OUTPUT
            echo "Using latest version"
          fi
      # Branch pushes publish latest + commit SHA; tag pushes publish the
      # semantic version and also move `latest` forward.
      - name: Tag container image
        run: |
          if [[ "${{ steps.get_version.outputs.version }}" == "latest" ]]; then
            docker tag mcp-project-orchestrator:${{ github.sha }} ghcr.io/${{ github.repository_owner }}/mcp-project-orchestrator:latest
            docker tag mcp-project-orchestrator:${{ github.sha }} ghcr.io/${{ github.repository_owner }}/mcp-project-orchestrator:${{ github.sha }}
          else
            docker tag mcp-project-orchestrator:${{ github.sha }} ghcr.io/${{ github.repository_owner }}/mcp-project-orchestrator:${{ steps.get_version.outputs.version }}
            docker tag mcp-project-orchestrator:${{ github.sha }} ghcr.io/${{ github.repository_owner }}/mcp-project-orchestrator:latest
          fi
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Push container image
        run: |
          if [[ "${{ steps.get_version.outputs.version }}" == "latest" ]]; then
            docker push ghcr.io/${{ github.repository_owner }}/mcp-project-orchestrator:latest
            docker push ghcr.io/${{ github.repository_owner }}/mcp-project-orchestrator:${{ github.sha }}
          else
            docker push ghcr.io/${{ github.repository_owner }}/mcp-project-orchestrator:${{ steps.get_version.outputs.version }}
            docker push ghcr.io/${{ github.repository_owner }}/mcp-project-orchestrator:latest
          fi

  deploy:
    name: Deploy to Production
    # NOTE(review): `github.event.inputs.deploy` is only defined for
    # workflow_dispatch events, never for pushes, so the first half of this
    # condition can never be true — in practice this job only runs for v* tags.
    # Confirm whether a workflow_dispatch trigger was intended here.
    if: (github.event_name == 'push' && github.ref == 'refs/heads/main' && github.event.inputs.deploy == 'true') || startsWith(github.ref, 'refs/tags/v')
    needs: publish
    runs-on: ubuntu-latest
    steps:
      - name: Deploy to production
        run: |
          echo "Deploying to production environment"
          # Add deployment steps here, such as:
          # - SSH into server
          # - Pull latest container image
          # - Restart services
      # Fetch full history so CHANGELOG.md is available for release notes.
      # (This step was previously mislabeled "Create GitHub Release".)
      - name: Checkout repository
        if: startsWith(github.ref, 'refs/tags/v')
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      # Pull this version's section out of CHANGELOG.md and pass it to the
      # release step via a multi-line environment variable.
      # NOTE(review): VERSION keeps the leading `v` (only `refs/tags/` is
      # stripped); verify CHANGELOG.md headings include the `v` prefix too.
      - name: Generate Release Notes
        if: startsWith(github.ref, 'refs/tags/v')
        id: generate_notes
        run: |
          VERSION=${GITHUB_REF#refs/tags/}
          # Extract the relevant section from CHANGELOG.md
          NOTES=$(awk -v ver="$VERSION" 'BEGIN{p=0} $0 ~ "^## \\[" ver "\\]" {p=1;next} $0 ~ "^## \\[" {p=0} p' CHANGELOG.md)
          echo "RELEASE_NOTES<<EOF" >> $GITHUB_ENV
          echo "$NOTES" >> $GITHUB_ENV
          echo "EOF" >> $GITHUB_ENV
      - name: Create Release
        if: startsWith(github.ref, 'refs/tags/v')
        uses: softprops/action-gh-release@v1
        with:
          body: ${{ env.RELEASE_NOTES }}
          draft: false
          prerelease: false
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  # Add a new job for Podman testing (if CI system supports it)
  podman-test:
    # Mirrors the Docker smoke test using Podman. Kept behind `if: false`
    # until CI runners are known to provide Podman.
    name: Test with Podman
    if: false  # Disabled by default - enable if your CI runners support Podman
    needs: build
    runs-on: ubuntu-latest
    steps:
      # v4 for consistency with the other jobs in this workflow.
      - uses: actions/checkout@v4
      - name: Install Podman
        run: |
          sudo apt-get update
          sudo apt-get install -y podman
      # Podman cannot consume the buildx artifact directly, so rebuild locally.
      - name: Build with Podman
        run: |
          podman build -t mcp-project-orchestrator:${{ github.sha }} -f Containerfile .
      # `:Z` relabels the bind mount for SELinux hosts.
      - name: Run with Podman
        run: |
          podman run -d --rm -p 8080:8080 \
            -v ${{ github.workspace }}:/app:Z \
            --workdir /app \
            --entrypoint python \
            localhost/mcp-project-orchestrator:${{ github.sha }} \
            -m mcp_project_orchestrator.fastmcp
      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '18'
      - name: Install MCP Inspector
        run: |
          npm install -g @modelcontextprotocol/inspector
      - name: Wait for server to start
        run: sleep 5
      - name: Test with MCP Inspector under Podman
        run: |
          npx @modelcontextprotocol/inspector http://localhost:8080
      - name: Stop Podman containers
        run: |
          podman stop --all

  # Add a manual validation job that can be triggered separately
  manual-mcp-validate:
    # On-demand (workflow_dispatch) deep validation of the built MCP server.
    name: Manual MCP Validation
    if: github.event_name == 'workflow_dispatch'
    needs: build
    runs-on: ubuntu-latest
    steps:
      # Action versions aligned with the rest of the workflow (v4 / v3).
      - uses: actions/checkout@v4
      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '18'
      - name: Set up Docker
        uses: docker/setup-buildx-action@v3
      # Download into /tmp so the path matches the build job's output. The
      # artifact contains mcp-image.tar — the previous `docker load -i
      # mcp-project-orchestrator-image.tar` referenced a file that never exists.
      - name: Download container image
        uses: actions/download-artifact@v3
        with:
          name: container-image
          path: /tmp
      - name: Load container image
        run: |
          docker load -i /tmp/mcp-image.tar
      - name: Run MCP Server
        run: |
          docker run -d -p 8080:8080 \
            -v ${{ github.workspace }}:/app \
            --workdir /app \
            --entrypoint python \
            --name mcp-server \
            mcp-project-orchestrator:${{ github.sha }} \
            -m mcp_project_orchestrator.fastmcp
      - name: Install MCP Inspector
        run: |
          npm install -g @modelcontextprotocol/inspector
      - name: Wait for server to start
        run: sleep 5
      - name: Comprehensive MCP Validation
        run: |
          # Basic connection test
          npx @modelcontextprotocol/inspector http://localhost:8080
          
          # Validation test
          npx @modelcontextprotocol/inspector http://localhost:8080 --validate
          
          # Check server details with verbose output
          npx @modelcontextprotocol/inspector http://localhost:8080 --verbose
      - name: Stop MCP Server
        run: |
          docker stop mcp-server
          docker rm mcp-server
```

--------------------------------------------------------------------------------
/scripts/test_mcp.sh:
--------------------------------------------------------------------------------

```bash
#!/bin/bash
# MCP Project Orchestrator - Unified Testing Script
# Consolidates multiple testing scripts for easier testing

set -e

# Color definitions
COLOR_GREEN='\033[0;32m'
COLOR_YELLOW='\033[0;33m'
COLOR_RED='\033[0;31m'
COLOR_RESET='\033[0m'

# Get script and project directory
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
# Data/config locations. Overridable from the environment so the script works
# outside the original author's machine (the previous hard-coded /home/sparrow
# path broke for every other user); defaults are unchanged.
MCP_DATA_DIR="${MCP_DATA_DIR:-/home/sparrow/mcp/data}"
CLAUDE_CONFIG_DIR="${CLAUDE_CONFIG_DIR:-$HOME/.config/Claude}"
CLAUDE_LOG_DIR="$CLAUDE_CONFIG_DIR/logs"

# Default values (overridable via environment or the CLI flags parsed below)
CONTAINER_ENGINE="${CONTAINER_ENGINE:-docker}"
TEST_TYPE=""
INTERACTIVE=false
SKIP_DB_INIT=false
CLAUDE_DESKTOP_BIN="${CLAUDE_DESKTOP_BIN:-$HOME/bin/run-claude.sh}"

# Function to display help message
display_help() {
  # Emit the usage text with a single heredoc instead of a chain of echos.
  # The delimiter is unquoted so $0 expands to the script name.
  cat <<HELP
MCP Project Orchestrator - Unified Testing Script

Usage: $0 [options]

Options:
  --basic            Run basic MCP server tests
  --claude-desktop   Test Claude Desktop integration
  --docker           Use Docker for container operations (default)
  --podman           Use Podman for container operations
  --interactive      Run in interactive mode
  --skip-db-init     Skip database initialization
  --help             Display this help message

HELP
}

# Process command line arguments
# Flags may be combined; if both --docker and --podman are given, the last wins.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --basic)
      TEST_TYPE="basic"
      shift
      ;;
    --claude-desktop)
      TEST_TYPE="claude-desktop"
      shift
      ;;
    --docker)
      CONTAINER_ENGINE="docker"
      shift
      ;;
    --podman)
      CONTAINER_ENGINE="podman"
      shift
      ;;
    --interactive)
      INTERACTIVE=true
      shift
      ;;
    --skip-db-init)
      SKIP_DB_INIT=true
      shift
      ;;
    --help)
      display_help
      exit 0
      ;;
    *)
      # Any unrecognized argument aborts with usage text and exit code 1.
      echo -e "${COLOR_RED}Unknown option: $1${COLOR_RESET}"
      display_help
      exit 1
      ;;
  esac
done

# If no test type specified, ask the user
# (interactive fallback so the script is usable without remembering flags).
if [ -z "$TEST_TYPE" ]; then
  echo "Please select test type:"
  echo "1) Basic MCP server tests"
  echo "2) Claude Desktop integration tests"
  echo "3) Exit"
  
  read -p "Enter your choice (1-3): " choice
  
  case "$choice" in
    1)
      TEST_TYPE="basic"
      ;;
    2)
      TEST_TYPE="claude-desktop"
      ;;
    3)
      echo "Exiting..."
      exit 0
      ;;
    *)
      echo -e "${COLOR_RED}Invalid choice. Exiting.${COLOR_RESET}"
      exit 1
      ;;
  esac
fi

# Function to check if container is running
check_container_running() {
  # Report whether a container with the given name is currently up.
  # Prints a colored status line; returns 0 when running, 1 otherwise.
  local name="$1"
  if ! $CONTAINER_ENGINE ps --filter "name=$name" --format "{{.Names}}" | grep -q "$name"; then
    echo -e "${COLOR_RED}❌ Container '$name' is NOT running${COLOR_RESET}"
    return 1
  fi
  echo -e "${COLOR_GREEN}✅ Container '$name' is running${COLOR_RESET}"
  return 0
}

# Function to clean up existing containers
cleanup_containers() {
  # Stop and remove every container this script may have started previously.
  # Failures are ignored: a container that does not exist is already "clean".
  echo -e "${COLOR_GREEN}Cleaning up existing containers...${COLOR_RESET}"
  
  local container
  for container in \
    "mcp-postgres-db-container" \
    "pgai-vectorizer-worker" \
    "mcp-prompt-manager-py" \
    "mcp-prompts-sse" \
    "mcp-prompts-stdio" \
    "mcp-postgres-server" \
    "mcp-server"
  do
    echo "Stopping and removing container: $container"
    $CONTAINER_ENGINE stop "$container" 2>/dev/null || true
    $CONTAINER_ENGINE rm "$container" 2>/dev/null || true
  done
}

# Function to initialize PostgreSQL
initialize_postgres() {
  # Delegate database bootstrap to setup_mcp.sh unless the caller asked to
  # skip it via --skip-db-init (then this is a no-op returning 0).
  if [ "$SKIP_DB_INIT" != true ]; then
    echo -e "${COLOR_GREEN}Initializing PostgreSQL...${COLOR_RESET}"
    "$SCRIPT_DIR/setup_mcp.sh" --db-only
    return $?
  fi
  echo -e "${COLOR_YELLOW}Skipping PostgreSQL initialization as requested${COLOR_RESET}"
  return 0
}

# Function to check Claude Desktop logs
check_claude_logs() {
  # Scan the Claude Desktop log directory for ERROR entries and summarize
  # them. Always returns 0: log inspection is informational and must not
  # fail a test run.
  echo -e "${COLOR_GREEN}Checking Claude Desktop logs...${COLOR_RESET}"
  
  if [ ! -d "$CLAUDE_LOG_DIR" ]; then
    echo -e "${COLOR_YELLOW}No Claude Desktop logs found at $CLAUDE_LOG_DIR${COLOR_RESET}"
    return 0
  fi
  
  # Assign separately from `local` so the pipeline's status is not masked.
  local error_count
  error_count=$(grep -r "ERROR" "$CLAUDE_LOG_DIR" 2>/dev/null | wc -l)
  
  echo "Found $error_count ERROR entries in Claude Desktop logs"
  if [ "$error_count" -gt 0 ]; then
    echo -e "${COLOR_YELLOW}Most recent errors:${COLOR_RESET}"
    grep -r "ERROR" "$CLAUDE_LOG_DIR" 2>/dev/null | tail -10
  fi
  
  # Look for errors attributed to each known MCP server — one loop instead
  # of five copy-pasted grep pipelines.
  echo -e "${COLOR_GREEN}Checking for specific MCP server errors:${COLOR_RESET}"
  local service
  for service in prompt-manager-py prompts-sse prompts-stdio db project-orchestrator; do
    grep -r "$service" "$CLAUDE_LOG_DIR" 2>/dev/null | grep "ERROR" || echo "No $service errors"
  done
  
  return 0
}

# Function to run basic MCP server tests
# Builds the orchestrator image, runs it with the repo bind-mounted, and
# probes it with the MCP Inspector (connectivity + validation, plus an
# optional interactive session). Exits non-zero on the first failure after
# stopping the container.
run_basic_tests() {
  echo -e "${COLOR_GREEN}Running basic MCP server tests...${COLOR_RESET}"
  
  # Check if Node.js and npm are installed
  if ! command -v npm &> /dev/null; then
    echo -e "${COLOR_RED}Error: npm not found. Please install Node.js${COLOR_RESET}"
    exit 1
  fi
  
  # Build the container
  echo -e "${COLOR_GREEN}Building container image...${COLOR_RESET}"
  $CONTAINER_ENGINE build -t mcp-project-orchestrator:latest -f "$PROJECT_DIR/Containerfile" "$PROJECT_DIR"
  
  # Run the container with volume mounting
  echo -e "${COLOR_GREEN}Starting MCP server with volume mounting...${COLOR_RESET}"
  
  # Podman needs the :Z mount suffix (SELinux relabel) and prefixes local
  # image names with "localhost/".
  VOLUME_OPTION=""
  if [ "$CONTAINER_ENGINE" = "podman" ]; then
    VOLUME_OPTION=":Z"
    IMAGE_NAME="localhost/mcp-project-orchestrator:latest"
  else
    IMAGE_NAME="mcp-project-orchestrator:latest"
  fi
  
  $CONTAINER_ENGINE run -d --rm -p 8080:8080 \
    -v "$PROJECT_DIR:/app$VOLUME_OPTION" \
    --workdir /app \
    --entrypoint python \
    --name mcp-server \
    $IMAGE_NAME \
    -m mcp_project_orchestrator.fastmcp
  
  # Wait for server to start
  echo "Waiting for server to start..."
  sleep 5
  
  # Verify the server is running
  # NOTE(review): the container was started with --rm, so if it already
  # exited, the `logs` call below may fail because it has been auto-removed.
  if ! $CONTAINER_ENGINE ps | grep -q mcp-server; then
    echo -e "${COLOR_RED}Error: MCP server failed to start${COLOR_RESET}"
    $CONTAINER_ENGINE logs mcp-server
    exit 1
  fi
  
  # Install MCP Inspector if needed
  echo -e "${COLOR_GREEN}Setting up MCP Inspector...${COLOR_RESET}"
  npm list -g @modelcontextprotocol/inspector &>/dev/null || npm install -g @modelcontextprotocol/inspector
  
  # Run basic test
  echo -e "${COLOR_GREEN}Running basic connectivity test...${COLOR_RESET}"
  npx @modelcontextprotocol/inspector http://localhost:8080 || { 
    echo -e "${COLOR_RED}Basic test failed${COLOR_RESET}"; 
    $CONTAINER_ENGINE stop mcp-server; 
    exit 1; 
  }
  
  # Run validation
  echo -e "${COLOR_GREEN}Running validation test...${COLOR_RESET}"
  npx @modelcontextprotocol/inspector http://localhost:8080 --validate || { 
    echo -e "${COLOR_RED}Validation test failed${COLOR_RESET}"; 
    $CONTAINER_ENGINE stop mcp-server; 
    exit 1; 
  }
  
  # Run interactive test if requested
  if [ "$INTERACTIVE" = true ]; then
    echo -e "${COLOR_GREEN}Running interactive test...${COLOR_RESET}"
    npx @modelcontextprotocol/inspector http://localhost:8080 --interactive
  fi
  
  # Stop the container (--rm removes it automatically once stopped)
  echo -e "${COLOR_GREEN}Tests completed successfully. Stopping container...${COLOR_RESET}"
  $CONTAINER_ENGINE stop mcp-server
  
  echo -e "${COLOR_GREEN}All basic tests passed!${COLOR_RESET}"
}

# Function to run Claude Desktop integration tests
# End-to-end check of the Claude Desktop + MCP container stack: restarts
# Claude Desktop with fresh logs, optionally initializes PostgreSQL, starts
# the prompt-manager / prompts-sse containers when their images exist, then
# scans the Desktop logs and reports container health.
run_claude_desktop_tests() {
  echo -e "${COLOR_GREEN}Running Claude Desktop integration tests...${COLOR_RESET}"
  
  # Kill Claude Desktop if running
  echo "Killing Claude Desktop if running..."
  pkill -f "Claude Desktop" || true
  sleep 3  # Wait for Claude Desktop to fully terminate
  
  # Delete old logs so only errors from this run are reported below
  echo "Cleaning up old Claude Desktop logs..."
  mkdir -p "$CLAUDE_LOG_DIR"
  rm -rf "$CLAUDE_LOG_DIR"/*
  
  # Initialize PostgreSQL if not skipped
  if [ "$SKIP_DB_INIT" = false ]; then
    initialize_postgres
  fi
  
  # Create a sample prompt template if directory is empty
  # NOTE(review): `ls -A` runs unquoted and before the mkdir below, so a
  # missing prompts directory prints an ls error to stderr (the branch
  # still fires and the directory is then created).
  if [ ! "$(ls -A $MCP_DATA_DIR/prompts)" ]; then
    echo "Adding a sample prompt template..."
    mkdir -p "$MCP_DATA_DIR/prompts"
    cat > "$MCP_DATA_DIR/prompts/sample-template.json" << EOF
{
  "id": "sample-template",
  "name": "Sample Template",
  "description": "A sample prompt template",
  "content": "This is a sample template with a {{variable}}",
  "isTemplate": true,
  "variables": ["variable"],
  "tags": ["sample"],
  "createdAt": "$(date -Iseconds)",
  "updatedAt": "$(date -Iseconds)",
  "version": 1
}
EOF
  fi
  
  # Start prompt-manager-py container if available (skipped when the image
  # has not been built/pulled on this machine)
  echo "Checking for prompt-manager image..."
  if $CONTAINER_ENGINE images | grep -q "prompt-manager"; then
    echo "Starting prompt-manager-py container..."
    $CONTAINER_ENGINE run -d --restart=on-failure:5 --network=host \
      -v "$MCP_DATA_DIR/prompts:/data/prompts" \
      --name mcp-prompt-manager-py \
      -e MCP_PROMPT_MANAGER_NAME=prompt-manager-py \
      -e MCP_PROMPT_MANAGER_LOG_LEVEL=debug \
      -e MCP_PROMPT_MANAGER_TEMPLATE_DIR=/data/prompts/ \
      -e MCP_PROMPT_MANAGER_PERSISTENCE=true \
      -e MCP_PROMPT_MANAGER_PERSISTENCE_FILE=/data/prompts/prompt-templates.json \
      -e PYTHONPATH=. \
      -e MCP_PROMPT_MANAGER_STANDALONE=true \
      prompt-manager:latest --storage-dir /data/prompts --standalone
  else
    echo -e "${COLOR_YELLOW}Warning: prompt-manager image not found. Skipping prompt-manager container.${COLOR_RESET}"
  fi
  
  # Start prompts-sse container if available; it serves SSE on port 3003 and
  # persists prompts to the PostgreSQL instance started above
  echo "Checking for mcp-prompts image..."
  if $CONTAINER_ENGINE images | grep -q "sparesparrow/mcp-prompts" || $CONTAINER_ENGINE images | grep -q "mcp-prompts"; then
    echo "Starting prompts-sse container..."
    $CONTAINER_ENGINE run -d --restart=on-failure:5 --network=host \
      -v "$MCP_DATA_DIR/prompts:/app/prompts" \
      -v "$MCP_DATA_DIR/backups:/app/backups" \
      --name mcp-prompts-sse \
      -e STORAGE_TYPE=postgres \
      -e PROMPTS_DIR=/app/prompts \
      -e BACKUPS_DIR=/app/backups \
      -e HTTP_SERVER=true \
      -e PORT=3003 \
      -e HOST=0.0.0.0 \
      -e ENABLE_SSE=true \
      -e SSE_PORT=3003 \
      -e SSE_PATH=/sse \
      -e CORS_ORIGIN="*" \
      -e DEBUG="mcp:*" \
      -e POSTGRES_HOST=localhost \
      -e POSTGRES_PORT=5432 \
      -e POSTGRES_DATABASE=prompts \
      -e POSTGRES_USER=postgres \
      -e POSTGRES_PASSWORD=postgres \
      sparesparrow/mcp-prompts:latest --sse --port=3003 --path=/sse
  fi
  
  # Check if Claude Desktop launch script is available; fall back to asking
  # the user for a path, or skip the launch entirely
  if [ ! -f "$CLAUDE_DESKTOP_BIN" ]; then
    echo -e "${COLOR_YELLOW}Warning: Claude Desktop binary not found at $CLAUDE_DESKTOP_BIN"
    echo "Please provide the path to the Claude Desktop launch script:${COLOR_RESET}"
    read -p "Claude Desktop path (leave empty to skip launch): " CLAUDE_PATH
    
    if [ -n "$CLAUDE_PATH" ]; then
      CLAUDE_DESKTOP_BIN="$CLAUDE_PATH"
    else
      echo -e "${COLOR_YELLOW}Skipping Claude Desktop launch${COLOR_RESET}"
      CLAUDE_DESKTOP_BIN=""
    fi
  fi
  
  # Launch Claude Desktop if binary is available, give it time to connect to
  # the MCP servers, then shut it down so its logs can be inspected
  if [ -n "$CLAUDE_DESKTOP_BIN" ]; then
    echo "Launching Claude Desktop..."
    "$CLAUDE_DESKTOP_BIN" &
    
    echo "Waiting for Claude Desktop to initialize (60 seconds)..."
    sleep 60
    
    echo "Stopping Claude Desktop..."
    pkill -f "Claude Desktop" || true
    sleep 5
  fi
  
  # Check logs for errors
  check_claude_logs
  
  # Check if all containers are running (informational; failures do not
  # abort the run)
  echo -e "${COLOR_GREEN}Checking containers status...${COLOR_RESET}"
  check_container_running "mcp-postgres-db-container" || echo -e "${COLOR_YELLOW}PostgreSQL container not running${COLOR_RESET}"
  check_container_running "pgai-vectorizer-worker" || echo -e "${COLOR_YELLOW}Vectorizer worker not running (optional)${COLOR_RESET}"
  check_container_running "mcp-postgres-server" || echo -e "${COLOR_YELLOW}Postgres server not running${COLOR_RESET}"
  check_container_running "mcp-prompt-manager-py" || echo -e "${COLOR_YELLOW}Prompt manager not running${COLOR_RESET}"
  check_container_running "mcp-prompts-sse" || echo -e "${COLOR_YELLOW}Prompts SSE not running${COLOR_RESET}"
  
  echo -e "${COLOR_GREEN}Claude Desktop tests completed!${COLOR_RESET}"
}

# Main execution
# Order matters: stale containers are removed first, then (for basic tests)
# PostgreSQL is brought up before dispatching to the selected test suite.
echo -e "${COLOR_GREEN}=== MCP Project Orchestrator Testing Script ===${COLOR_RESET}"
echo "Using container engine: $CONTAINER_ENGINE"

# Clean up existing containers
cleanup_containers

# Initialize PostgreSQL if needed (for basic tests; the claude-desktop suite
# performs its own initialization internally)
if [ "$TEST_TYPE" = "basic" ] && [ "$SKIP_DB_INIT" = false ]; then
  initialize_postgres
fi

# Run selected tests
case "$TEST_TYPE" in
  "basic")
    run_basic_tests
    ;;
  "claude-desktop")
    run_claude_desktop_tests
    ;;
  *)
    # Unreachable via the CLI/menu above, but guards future edits.
    echo -e "${COLOR_RED}Invalid test type: $TEST_TYPE${COLOR_RESET}"
    exit 1
    ;;
esac

echo -e "${COLOR_GREEN}All tests completed successfully!${COLOR_RESET}" 
```

--------------------------------------------------------------------------------
/aws-sip-trunk/docs/TROUBLESHOOTING.md:
--------------------------------------------------------------------------------

```markdown
# AWS SIP Trunk Troubleshooting Guide

Comprehensive guide for diagnosing and resolving common issues with Asterisk SIP trunk deployment on AWS.

## Quick Diagnostic Commands

```bash
# Check Asterisk service status
sudo systemctl status asterisk

# Verify PJSIP endpoints
sudo asterisk -rx "pjsip show endpoints"

# Check transport configuration
sudo asterisk -rx "pjsip show transports"

# View active channels
sudo asterisk -rx "core show channels"

# Enable SIP debug logging
sudo asterisk -rx "pjsip set logger on"

# Check RTP configuration
sudo asterisk -rx "rtp show settings"

# View registration status
sudo asterisk -rx "pjsip show registrations"

# Network connectivity test
sudo tcpdump -i eth0 -n port 5060 or portrange 10000-20000
```

## Issue 1: One-Way Audio or No Audio

### Symptoms
- Call connects but no audio heard
- Audio works in one direction only
- Caller hears nothing, or callee hears nothing

### Root Causes
1. **Security Group blocking RTP ports**
2. **NAT configuration incorrect in pjsip.conf**
3. **Firewall blocking UDP traffic**
4. **Codec mismatch**

### Diagnostic Steps

```bash
# 1. Verify Security Group allows RTP
aws ec2 describe-security-groups \
  --group-ids $SG_ID \
  --query 'SecurityGroups[0].IpPermissions[?FromPort>=`10000` && ToPort<=`20000`]'

# 2. Check if RTP packets are flowing
sudo tcpdump -i eth0 -n udp portrange 10000-20000 -c 20

# 3. Verify NAT configuration
sudo asterisk -rx "pjsip show transport transport-tcp"

# 4. Check codec negotiation
sudo asterisk -rx "pjsip show endpoints" | grep -A 5 "elevenlabs"
```

### Solutions

**Solution A: Fix Security Group Rules**
```bash
# Add RTP rule if missing
aws ec2 authorize-security-group-ingress \
  --group-id $SG_ID \
  --protocol udp \
  --port 10000-20000 \
  --cidr 0.0.0.0/0 \
  --region $AWS_REGION
```

**Solution B: Fix NAT Configuration**

Edit `/etc/asterisk/pjsip.conf`:
```ini
[transport-tcp]
type=transport
protocol=tcp
bind=0.0.0.0:5060
external_media_address=YOUR_ELASTIC_IP      # Must be public IP
external_signaling_address=YOUR_ELASTIC_IP  # Must be public IP
local_net=PRIVATE_IP/16                     # Your VPC CIDR
```

Reload:
```bash
sudo asterisk -rx "pjsip reload"
```

**Solution C: Verify Codec Support**

Edit `/etc/asterisk/pjsip.conf`:
```ini
[elevenlabs]
type=endpoint
allow=!all,ulaw,alaw  # Ensure these codecs are allowed
```

## Issue 2: SIP Registration Failures

### Symptoms
- `pjsip show endpoints` shows "Unavailable"
- Logs show "401 Unauthorized" or "403 Forbidden"
- Cannot place calls

### Root Causes
1. **Incorrect credentials**
2. **E.164 format issue**
3. **Network connectivity problem**
4. **ElevenLabs server unreachable**

### Diagnostic Steps

```bash
# 1. Check endpoint status
sudo asterisk -rx "pjsip show endpoint elevenlabs"

# 2. Test DNS resolution
dig sip.elevenlabs.io

# 3. Test connectivity
telnet sip.elevenlabs.io 5060

# 4. Check logs for auth errors
sudo grep "401\|403\|Authentication" /var/log/asterisk/full | tail -20

# 5. Verify credentials in Parameter Store
aws ssm get-parameter \
  --name "/$PROJECT_NAME/elevenlabs/phone_e164" \
  --with-decryption \
  --query 'Parameter.Value' \
  --output text
```

### Solutions

**Solution A: Fix Credentials**

Update Parameter Store:
```bash
aws ssm put-parameter \
  --name "/$PROJECT_NAME/elevenlabs/sip_password" \
  --value "NEW_PASSWORD" \
  --type SecureString \
  --overwrite
```

Update pjsip.conf:
```bash
sudo vim /etc/asterisk/pjsip.conf
# Update password in [elevenlabs-auth] section
sudo asterisk -rx "pjsip reload"
```

**Solution B: Verify E.164 Format**

Ensure phone number is in correct format:
- ✅ Correct: `+12025551234`
- ❌ Wrong: `12025551234`, `+1 (202) 555-1234`

**Solution C: Enable Debug Logging**
```bash
sudo asterisk -rx "pjsip set logger on"
sudo tail -f /var/log/asterisk/full | grep -i "elevenlabs"
```

## Issue 3: TCP Transport Not Enabling

### Symptoms
- TCP port 5060 not listening
- Only UDP transport available
- Cannot connect to ElevenLabs TCP endpoint

### Root Cause
TCP binding requires full system reboot, not just Asterisk reload

### Diagnostic Steps

```bash
# Check listening ports
sudo netstat -tulnp | grep 5060

# Expected output:
# tcp  0  0.0.0.0:5060  0.0.0.0:*  LISTEN  asterisk
# udp  0  0.0.0.0:5060  0.0.0.0:*         asterisk
```

### Solution

**Full System Reboot Required**
```bash
# Save any work
sudo sync

# Reboot instance
sudo reboot

# After reboot, verify
sudo netstat -tulnp | grep 5060
```

**Alternative: Force Asterisk Restart**
```bash
sudo systemctl stop asterisk
sleep 5
sudo systemctl start asterisk
sudo netstat -tulnp | grep 5060
```

## Issue 4: High CPU Usage

### Symptoms
- CPU utilization > 80%
- CloudWatch alarm triggered
- Asterisk becomes unresponsive

### Root Causes
1. **SIP attack (brute force)**
2. **Too many concurrent calls**
3. **Codec transcoding overhead**
4. **Resource leak**

### Diagnostic Steps

```bash
# 1. Check CPU usage
top -b -n 1 | head -20

# 2. Check number of active channels
sudo asterisk -rx "core show channels"

# 3. Check for attack patterns
sudo grep "Failed to authenticate" /var/log/asterisk/full | wc -l

# 4. Check Fail2Ban status
sudo fail2ban-client status asterisk

# 5. Memory usage
free -h
```

### Solutions

**Solution A: Enable/Verify Fail2Ban**
```bash
# Check Fail2Ban status
sudo systemctl status fail2ban

# View banned IPs
sudo fail2ban-client status asterisk

# Manually ban IP
sudo fail2ban-client set asterisk banip ATTACKER_IP
```

**Solution B: Optimize Asterisk Configuration**

Reduce unnecessary logging:
```bash
sudo vim /etc/asterisk/logger.conf
```
```ini
[logfiles]
console => notice,warning,error  # Remove verbose,debug
messages => notice,warning,error
full => notice,warning,error     # Remove verbose
```

**Solution C: Limit Concurrent Calls**

Edit `/etc/asterisk/pjsip.conf`:
```ini
[elevenlabs]
type=endpoint
max_audio_streams=10  # Limit concurrent calls
```

**Solution D: Disable Direct Media**
```ini
[elevenlabs]
type=endpoint
direct_media=no  # Force RTP through Asterisk (better for NAT)
```

## Issue 5: RTP Port Exhaustion

### Symptoms
- Calls fail after specific number of concurrent calls
- "No RTP ports available" in logs
- New calls cannot establish audio

### Root Cause
Default RTP port range too small for concurrent call volume

### Solution

Edit `/etc/asterisk/rtp.conf`:
```ini
[general]
rtpstart=10000
rtpend=20000  # Increase from default 10000-10100
```

Update Security Group:
```bash
# Update to match new range
aws ec2 authorize-security-group-ingress \
  --group-id $SG_ID \
  --protocol udp \
  --port 10000-20000 \
  --cidr 0.0.0.0/0
```

Reload Asterisk:
```bash
sudo systemctl restart asterisk
```

## Issue 6: Elastic IP Not Persisting After Reboot

### Symptoms
- After instance reboot, Elastic IP not associated
- SIP endpoint unreachable
- Public IP changed

### Root Cause
Elastic IP association lost during stop/start cycle

### Solution

**Automatic Re-association Script**

Create `/usr/local/bin/associate-eip.sh`:
```bash
#!/bin/bash
INSTANCE_ID=$(ec2-metadata --instance-id | cut -d " " -f 2)
AWS_REGION=$(ec2-metadata --availability-zone | cut -d " " -f 2 | sed 's/[a-z]$//')

# Get Elastic IP allocation ID
ALLOCATION_ID=$(aws ec2 describe-addresses \
  --region $AWS_REGION \
  --filters "Name=tag:Project,Values=$PROJECT_NAME" \
  --query 'Addresses[0].AllocationId' \
  --output text)

# Associate EIP
aws ec2 associate-address \
  --instance-id $INSTANCE_ID \
  --allocation-id $ALLOCATION_ID \
  --region $AWS_REGION
```

Add to systemd:
```bash
sudo cat > /etc/systemd/system/associate-eip.service <<EOF
[Unit]
Description=Associate Elastic IP on boot
After=network.target

[Service]
Type=oneshot
ExecStart=/usr/local/bin/associate-eip.sh
RemainAfterExit=yes

[Install]
WantedBy=multi-user.target
EOF

sudo systemctl enable associate-eip.service
```

## Issue 7: CloudWatch Logs Not Appearing

### Symptoms
- Log group exists but no streams
- Metrics not visible in CloudWatch
- Alarms not triggering

### Diagnostic Steps

```bash
# 1. Check CloudWatch agent status
sudo systemctl status amazon-cloudwatch-agent

# 2. View agent logs
sudo cat /opt/aws/amazon-cloudwatch-agent/logs/amazon-cloudwatch-agent.log

# 3. Verify IAM permissions
aws iam get-role-policy \
  --role-name $PROJECT_NAME-asterisk-role \
  --policy-name $PROJECT_NAME-asterisk-policy
```

### Solution

**Restart CloudWatch Agent**
```bash
sudo systemctl restart amazon-cloudwatch-agent
sudo systemctl status amazon-cloudwatch-agent
```

**Verify Log Group Permissions**
```bash
# Check if log group exists
aws logs describe-log-groups \
  --log-group-name-prefix "/aws/ec2/$PROJECT_NAME"

# Create if missing
aws logs create-log-group \
  --log-group-name "/aws/ec2/$PROJECT_NAME/asterisk"
```

## Issue 8: Call Quality Issues (Choppy Audio, Dropouts)

### Symptoms
- Audio cuts in and out
- Robotic/choppy voice
- High latency

### Root Causes
1. **Network congestion**
2. **Insufficient bandwidth**
3. **Packet loss**
4. **Jitter**

### Diagnostic Steps

```bash
# 1. Check network statistics
sudo asterisk -rx "pjsip show channelstats"

# 2. Monitor RTP packets
sudo tcpdump -i eth0 -n udp portrange 10000-20000 -vv

# 3. Check system performance
vmstat 1 10
iostat -x 1 10

# 4. Verify codec
sudo asterisk -rx "core show channels verbose"
```

### Solutions

**Solution A: QoS Configuration**

Enable QoS on instance:
```bash
# Set TOS bits for RTP
sudo sysctl -w net.ipv4.ip_default_ttl=64
sudo sysctl -w net.ipv4.tcp_mtu_probing=1
```

**Solution B: Adjust Jitter Buffer**

Edit `/etc/asterisk/pjsip.conf`:
```ini
[elevenlabs]
type=endpoint
use_ptime=yes
allow=!all,ulaw  # Use only ulaw for consistency
```

**Solution C: Increase Instance Size**

If CPU > 60% during calls:
```bash
# Stop instance
aws ec2 stop-instances --instance-ids $INSTANCE_ID

# Change instance type
aws ec2 modify-instance-attribute \
  --instance-id $INSTANCE_ID \
  --instance-type t3.large

# Start instance
aws ec2 start-instances --instance-ids $INSTANCE_ID
```

## Issue 9: Asterisk Won't Start After Reboot

### Symptoms
- `systemctl status asterisk` shows failed
- No Asterisk processes running
- Cannot access Asterisk CLI

### Diagnostic Steps

```bash
# 1. Check service status
sudo systemctl status asterisk -l

# 2. View Asterisk logs
sudo cat /var/log/asterisk/full | tail -50

# 3. Check for configuration errors
sudo asterisk -cvvvvv
# Look for error messages during startup

# 4. Verify file permissions
ls -la /etc/asterisk/
ls -la /var/lib/asterisk/
```

### Solutions

**Solution A: Fix Configuration Errors**
```bash
# Test configuration
sudo asterisk -cvvvvv

# If syntax errors, fix them
sudo vim /etc/asterisk/pjsip.conf

# Restart
sudo systemctl restart asterisk
```

**Solution B: Fix Permissions**
```bash
sudo chown -R asterisk:asterisk /etc/asterisk
sudo chown -R asterisk:asterisk /var/{lib,log,spool}/asterisk
sudo systemctl restart asterisk
```

**Solution C: Rebuild Asterisk**
```bash
# If all else fails, reinstall
cd /usr/src/asterisk-*
sudo make uninstall
sudo make install
sudo systemctl restart asterisk
```

## Emergency Procedures

### Complete System Reset

```bash
# 1. Stop Asterisk
sudo systemctl stop asterisk

# 2. Backup current configuration
sudo tar -czf /tmp/asterisk-backup-$(date +%Y%m%d).tar.gz /etc/asterisk/

# 3. Restore known-good configuration
sudo tar -xzf /path/to/backup.tar.gz -C /

# 4. Start Asterisk
sudo systemctl start asterisk
```

### Quick Health Check Script

Save as `/usr/local/bin/asterisk-health.sh`:
```bash
#!/bin/bash
# Quick health check for the Asterisk SIP trunk host.
# Must run as root: `asterisk -rx` and netstat need elevated access.

if [ "$(id -u)" -ne 0 ]; then
  echo "Run as root (sudo $0)" >&2
  exit 1
fi

echo "=== Asterisk Health Check ==="
echo "Service Status:"
systemctl is-active asterisk

echo -e "\nEndpoint Status:"
asterisk -rx "pjsip show endpoints" | grep elevenlabs

echo -e "\nActive Channels:"
asterisk -rx "core show channels" | tail -1

echo -e "\nCPU/Memory:"
top -bn1 | grep "Cpu\|asterisk" | head -2

echo -e "\nRTP Ports:"
# Anchor on trailing whitespace so only the port field matches (10000-20000),
# not longer numbers or digits inside IPv6 addresses.
netstat -an | grep -cE ":(1[0-9]{4}|20000)\s"

echo -e "\nRecent Errors:"
# `|| true` keeps the script's exit code 0 when no errors are found.
tail -20 /var/log/asterisk/full | grep ERROR || true
```

## Getting Help

### Collect Diagnostic Information

Before seeking help, collect:
```bash
#!/bin/bash
# diagnostic-report.sh
# Collect system, Asterisk, log, configuration, and network state into a
# single tarball suitable for attaching to a support request.
# NOTE: pjsip.conf typically contains SIP credentials -- review and redact
# the archive before sharing it outside your organization.

REPORT_DIR="/tmp/asterisk-diagnostics-$(date +%Y%m%d-%H%M%S)"
mkdir -p "$REPORT_DIR"

# System info
uname -a > "$REPORT_DIR/system-info.txt"
free -h >> "$REPORT_DIR/system-info.txt"
df -h >> "$REPORT_DIR/system-info.txt"

# Asterisk status
sudo asterisk -rx "core show version" > "$REPORT_DIR/asterisk-version.txt"
sudo asterisk -rx "pjsip show endpoints" > "$REPORT_DIR/endpoints.txt"
sudo asterisk -rx "pjsip show transports" > "$REPORT_DIR/transports.txt"

# Logs
sudo tail -1000 /var/log/asterisk/full > "$REPORT_DIR/asterisk-full.log"
sudo tail -1000 /var/log/asterisk/messages > "$REPORT_DIR/asterisk-messages.log"

# Configuration (may include secrets -- redact before sharing)
sudo cp /etc/asterisk/pjsip.conf "$REPORT_DIR/"
sudo cp /etc/asterisk/extensions.conf "$REPORT_DIR/"
sudo cp /etc/asterisk/rtp.conf "$REPORT_DIR/"

# Network
sudo netstat -tulnp | grep asterisk > "$REPORT_DIR/network.txt"
ip addr > "$REPORT_DIR/ip-config.txt"

# Create archive; -C avoids storing absolute paths in the tarball.
tar -czf "$REPORT_DIR.tar.gz" -C "$(dirname "$REPORT_DIR")" "$(basename "$REPORT_DIR")"
echo "Diagnostic report created: $REPORT_DIR.tar.gz"
```

### Community Resources

- **Asterisk Community Forum**: https://community.asterisk.org/
- **ElevenLabs Support**: https://help.elevenlabs.io/
- **AWS Support**: https://aws.amazon.com/premiumsupport/
- **Project Issues**: File issue in project repository

## Performance Tuning

### Optimize for High Call Volume

```ini
# /etc/asterisk/pjsip.conf
[global]
max_forwards=20  ; reduce from default 70
timer_t1=500     ; SIP timer T1 (default)
timer_b=32000    ; transaction timeout

[transport-tcp]
type=transport
async_operations=10  ; increase for high concurrency
```

### Kernel Tuning

```bash
# /etc/sysctl.conf
# Keep ephemeral ports clear of the RTP range (10000-20000) to avoid port collisions
net.ipv4.ip_local_port_range = 20001 65535
net.core.rmem_default = 262144
net.core.rmem_max = 16777216
net.core.wmem_default = 262144
net.core.wmem_max = 16777216

# Apply
sudo sysctl -p
```

---

**Last Updated**: 2025-10-01
**Version**: 1.0.0

```

--------------------------------------------------------------------------------
/data/prompts/templates/mcp-resources-integration.json:
--------------------------------------------------------------------------------

```json
{
  "id": "mcp-resources-integration",
  "name": "MCP Resources Integration Guide",
  "description": "A comprehensive guide to working with and integrating resources across multiple MCP servers",
  "content": "# MCP Resources Integration Guide\\n\\nYou are an expert on the Model Context Protocol (MCP) ecosystem, specializing in resource integration across multiple MCP servers. Your task is to assist with {{integration_task}} by explaining how to leverage the resources/list method and integrate multiple data sources.\\n\\n## Understanding MCP Resources\\n\\nResources in the MCP ecosystem are named data objects that can be referenced and accessed across different MCP servers. They enable:\\n\\n1. **Cross-server data access**: Retrieving and using data from multiple specialized servers\\n2. **Contextual enrichment**: Adding relevant information to prompt templates\\n3. **Dynamic content generation**: Creating outputs based on up-to-date information\\n4. **Workflow orchestration**: Coordinating complex operations involving multiple data sources\\n\\n## The `resources/list` Method\\n\\nThe `resources/list` method is a powerful capability that enables discovery and exploration of available contextual data sources. 
It can be used to:\\n\\n- **Discover available resources**: List all accessible data sources across connected MCP servers\\n- **Filter resources by type**: Find specific kinds of resources (files, database records, API results)\\n- **Explore metadata**: View descriptions, timestamps, and other metadata about available resources\\n- **Support dynamic workflows**: Enable applications to adapt based on available context\\n\\n### Basic Usage\\n\\n```\\n// Example: Listing all available resources\\n{\\n  \\\\\\\"method\\\\\\\": \\\\\\\"resources/list\\\\\\\",\\n  \\\\\\\"params\\\\\\\": {}\\n}\\n\\n// Example: Filtering resources by prefix\\n{\\n  \\\\\\\"method\\\\\\\": \\\\\\\"resources/list\\\\\\\",\\n  \\\\\\\"params\\\\\\\": {\\n    \\\\\\\"prefix\\\\\\\": \\\\\\\"github://\\\\\\\"\\n  }\\n}\\n```\\n\\n## Integrating Resources from Different MCP Servers\\n\\n### Available Resource Types by Server\\n\\n| Server Type | Resource Prefix | Example URI | Description |\\n|-------------|----------------|-------------|-------------|\\n| GitHub | github:// | github://owner/repo/path/to/file | Repository files and metadata |\\n| Filesystem | file:// | file:///path/to/local/file | Local file system access |\\n| PostgreSQL | postgres:// | postgres://database/table/record | Database records and query results |\\n| Memory | memory:// | memory://session/key | Stored session context |\\n| Web | https:// | https://api.example.com/data | Web content and API responses |\\n| {{custom_server}} | {{custom_prefix}} | {{custom_example}} | {{custom_description}} |\\n\\n### Resource Integration Patterns\\n\\n#### 1. 
Chain of Resources Pattern\\nConnect multiple resources sequentially, where the output of one resource operation becomes the input for the next:\\n\\n```\\n// Step 1: Retrieve configuration from GitHub\\nconst config = await getResource('github://org/repo/config.json');\\n\\n// Step 2: Use config to query database\\nconst queryResults = await getResource(`postgres://database/table?query=${config.queryParams}`);\\n\\n// Step 3: Process results and store in memory\\nawait setResource('memory://session/processed_data', processData(queryResults));\\n```\\n\\n#### 2. Aggregation Pattern\\nCombine data from multiple resources to create a comprehensive context:\\n\\n```\\n// Collect data from multiple sources\\nconst codebase = await getResource('github://org/repo/src');\\nconst documentation = await getResource('file:///local/docs');\\nconst issueTracking = await getResource('https://issues.example.com/api/project');\\n\\n// Combine into unified context\\nconst projectContext = {\\n  code: codebase,\\n  docs: documentation,\\n  issues: issueTracking\\n};\\n```\\n\\n#### 3. 
Template Enrichment Pattern\\nUse resources to populate template variables dynamically:\\n\\n```\\n// Retrieve template\\nconst template = await getResource('prompts://templates/analysis');\\n\\n// Gather contextual data\\nconst repoStats = await getResource('github://org/repo/stats');\\nconst performanceData = await getResource('postgres://metrics/performance');\\n\\n// Apply template with resource data\\nconst enrichedPrompt = applyTemplate(template, {\\n  project_metrics: repoStats,\\n  performance_insights: performanceData\\n});\\n```\\n\\n## Implementation Guidelines for {{integration_task}}\\n\\n### Step 1: Resource Discovery\\nFirst, use the resources/list method to discover what data sources are available:\\n\\n```javascript\\n// Example resources/list implementation\\nasync function discoverResources() {\\n  const resources = await callMCP({\\n    method: 'resources/list',\\n    params: {}\\n  });\\n  \\n  console.log('Available resources:', resources);\\n  return resources;\\n}\\n```\\n\\n### Step 2: Resource Access Patterns\\nImplement standardized patterns for accessing different resource types:\\n\\n```javascript\\n// Example resource access function\\nasync function getResource(uri) {\\n  const serverType = getServerTypeFromUri(uri);\\n  \\n  const response = await callMCP({\\n    server: serverType,\\n    method: 'resources/get',\\n    params: { uri }\\n  });\\n  \\n  return response.data;\\n}\\n```\\n\\n### Step 3: Resource Integration\\nCombine resources using the appropriate integration pattern for your use case:\\n\\n{{integration_code}}\\n\\n### Step 4: Error Handling and Fallbacks\\nImplement robust error handling for cases where resources may be unavailable:\\n\\n```javascript\\ntry {\\n  const resource = await getResource('github://org/repo/file.json');\\n  // Process resource\\n} catch (error) {\\n  console.error('Error accessing resource:', error);\\n  // Use fallback resource or strategy\\n  const fallbackResource = await 
getResource('file:///local/fallback.json');\\n}\\n```\\n\\n## Best Practices for Resource Integration\\n\\n1. **Cache appropriately**: Some resources may be expensive to fetch repeatedly\\n2. **Handle failures gracefully**: Use fallbacks when resources are unavailable\\n3. **Consider resource formats**: Different servers may return different data structures\\n4. **Manage dependencies**: Be mindful of resource dependencies and potential circular references\\n5. **Document resource usage**: Make resource URIs and usage patterns explicit\\n6. **Security awareness**: Consider access control implications when sharing resources\\n{{additional_practices}}\\n\\n## Implementation Examples for Common Scenarios\\n\\n### Example 1: Project Analysis Dashboard\\nCombine code repository statistics, issue tracking, and documentation:\\n\\n```javascript\\nasync function buildProjectDashboard() {\\n  // Discover available resources\\n  const resources = await discoverResources();\\n  \\n  // Check if required resources are available\\n  const hasGitHub = resources.some(r => r.startsWith('github://'));\\n  const hasIssues = resources.some(r => r.startsWith('https://issues.'));\\n  \\n  // Gather data from available sources\\n  const repoData = hasGitHub ? 
\\n    await getResource('github://org/project/stats') : \\n    { error: 'GitHub data unavailable' };\\n    \\n  const issueData = hasIssues ?\\n    await getResource('https://issues.example.com/api/project/stats') :\\n    { error: 'Issue tracker unavailable' };\\n    \\n  // Combine into unified dashboard data\\n  return {\\n    code_metrics: repoData,\\n    issue_metrics: issueData,\\n    timestamp: new Date().toISOString()\\n  };\\n}\\n```\\n\\n### Example 2: Dynamic Document Generation\\nGenerate documentation by combining templates with real-time data:\\n\\n```javascript\\nasync function generateDocumentation() {\\n  // Get document template\\n  const template = await getResource('prompts://templates/documentation');\\n  \\n  // Gather data from multiple sources\\n  const apiSchema = await getResource('file:///api/schema.json');\\n  const usageStats = await getResource('postgres://analytics/api_usage');\\n  const exampleCode = await getResource('github://org/examples/api');\\n  \\n  // Generate documentation\\n  return applyTemplate(template, {\\n    schema: apiSchema,\\n    usage: usageStats,\\n    examples: exampleCode\\n  });\\n}\\n```\\n\\n### Example 3: {{custom_example_name}}\\n{{custom_example_description}}\\n\\n```javascript\\n{{custom_example_code}}\\n```\\n\\n## Resources/List Method in Action\\n\\nThe resources/list method serves multiple important functions:\\n\\n1. **Discovery and Exploration**: Clients can discover what contextual resources are available\\n2. **Workflow Orchestration**: Automated workflows can determine which resources to use\\n3. **Enhanced UI/UX**: User interfaces can show available resources for selection\\n4. 
**Integration with External Services**: Bridge between clients and external data sources\\n\\nExample implementation of a resource explorer using resources/list:\\n\\n```javascript\\nasync function exploreResources(prefix = '') {\\n  const resources = await callMCP({\\n    method: 'resources/list',\\n    params: { prefix }\\n  });\\n  \\n  // Group resources by type\\n  const resourcesByType = resources.reduce((groups, uri) => {\\n    const type = uri.split('://')[0];\\n    if (!groups[type]) groups[type] = [];\\n    groups[type].push(uri);\\n    return groups;\\n  }, {});\\n  \\n  // Display available resources by type\\n  for (const [type, uris] of Object.entries(resourcesByType)) {\\n    console.log(`${type} resources (${uris.length}):`);\\n    uris.forEach(uri => console.log(`  - ${uri}`));\\n  }\\n  \\n  return resourcesByType;\\n}\\n```\\n\\n## Conclusion\\n\\nEffective integration of resources across MCP servers is a powerful pattern that enables complex workflows, rich contextual awareness, and dynamic content generation. 
By understanding the resources/list method and implementing appropriate integration patterns, you can leverage the full potential of the MCP ecosystem for {{integration_task}}.\\n\\nWhat specific aspect of MCP resource integration would you like to explore further?\\\",\\n  \\\"isTemplate\\\": true,\\n  \\\"variables\\\": [\\n    \\\"integration_task\\\",\\n    \\\"custom_server\\\",\\n    \\\"custom_prefix\\\",\\n    \\\"custom_example\\\",\\n    \\\"custom_description\\\",\\n    \\\"integration_code\\\",\\n    \\\"additional_practices\\\",\\n    \\\"custom_example_name\\\",\\n    \\\"custom_example_description\\\",\\n    \\\"custom_example_code\\\"\\n  ],\\n  \\\"tags\\\": [\\n    \\\"mcp\\\",\\n    \\\"resources\\\",\\n    \\\"integration\\\",\\n    \\\"advanced\\\",\\n    \\\"multi-server\\\",\\n    \\\"template\\\"\\n  ],\\n  \\\"createdAt\\\": \\\"2025-03-15T16:00:00.000Z\\\",\\n  \\\"updatedAt\\\": \\\"2025-03-15T16:00:00.000Z\\\",\\n  \\\"version\\\": 1,\\n  \\\"metadata\\\": {\\n    \\\"recommended_servers\\\": [\\n      \\\"github\\\",\\n      \\\"filesystem\\\",\\n      \\\"postgres\\\",\\n      \\\"memory\\\",\\n      \\\"prompts\\\"\\n    ],\\n    \\\"example_variables\\\": {\\n      \\\"integration_task\\\": \\\"building a comprehensive project analysis tool\\\",\\n      \\\"custom_server\\\": \\\"TimeSeries\\\",\\n      \\\"custom_prefix\\\": \\\"timeseries://\\\",\\n      \\\"custom_example\\\": \\\"timeseries://metrics/cpu-usage/7d\\\",\\n      \\\"custom_description\\\": \\\"Historical time-series data for metrics and monitoring\\\",\\n      \\\"integration_code\\\": \\\"async function integrateProjectAnalysis() {\\\\n  // Get repository information\\\\n  const repoInfo = await getResource('github://org/repo/info');\\\\n  \\\\n  // Fetch relevant code files based on repo structure\\\\n  const codeFiles = await Promise.all(\\\\n    repoInfo.main_modules.map(module => \\\\n      getResource(`github://org/repo/src/${module}`)\\\\n    )\\\\n  );\\\\n  
\\\\n  // Get database schema information\\\\n  const dbSchema = await getResource('postgres://database/information_schema');\\\\n  \\\\n  // Combine everything into a unified context\\\\n  const projectContext = {\\\\n    repository: repoInfo,\\\\n    code_modules: codeFiles,\\\\n    database_structure: dbSchema,\\\\n    analysis_timestamp: new Date().toISOString()\\\\n  };\\\\n  \\\\n  // Store the combined context in memory for future reference\\\\n  await setResource('memory://session/project_context', projectContext);\\\\n  \\\\n  return projectContext;\\\\n}\\\",\\n      \\\"additional_practices\\\": \\\"7. **Version awareness**: Consider resource version compatibility\\\\n8. **Performance tracking**: Monitor resource access patterns and optimize frequent operations\\\\n9. **Scope limitation**: Only access resources directly relevant to the current task\\\\n10. **Progressive enhancement**: Design systems that work with minimal resources but enhance capabilities when more are available\\\",\\n      \\\"custom_example_name\\\": \\\"Cross-Server Data Validation\\\",\\n      \\\"custom_example_description\\\": \\\"Validate data consistency across different storage systems by comparing repositories, databases, and local files:\\\",\\n      \\\"custom_example_code\\\": \\\"async function validateDataConsistency() {\\\\n  // Get configuration schema from repository\\\\n  const configSchema = await getResource('github://org/repo/schema/config.json');\\\\n  \\\\n  // Get actual configurations from database\\\\n  const dbConfigs = await getResource('postgres://app/configurations');\\\\n  \\\\n  // Get local configuration files\\\\n  const localConfigs = await getResource('file:///app/config/');\\\\n  \\\\n  // Compare configurations across systems\\\\n  const validationResults = {\\\\n    schema_valid: validateAgainstSchema(dbConfigs, configSchema),\\\\n    db_local_match: compareConfigurations(dbConfigs, localConfigs),\\\\n    mismatches: findMismatches(dbConfigs, 
localConfigs, configSchema)\\\\n  };\\\\n  \\\\n  // Store validation results in memory\\\\n  await setResource('memory://validation/config_results', validationResults);\\\\n  \\\\n  return validationResults;\\\\n}"
}
```

--------------------------------------------------------------------------------
/src/mcp_project_orchestrator/prompts/MCP_Resources_Integration_Guide.json:
--------------------------------------------------------------------------------

```json
{
  "name": "MCP Resources Integration Guide",
  "description": "A comprehensive guide to working with and integrating resources across multiple MCP servers",
  "type": "prompt",
  "category": "other",
  "content": "# MCP Resources Integration Guide\\n\\nYou are an expert on the Model Context Protocol (MCP) ecosystem, specializing in resource integration across multiple MCP servers. Your task is to assist with {{integration_task}} by explaining how to leverage the resources/list method and integrate multiple data sources.\\n\\n## Understanding MCP Resources\\n\\nResources in the MCP ecosystem are named data objects that can be referenced and accessed across different MCP servers. They enable:\\n\\n1. **Cross-server data access**: Retrieving and using data from multiple specialized servers\\n2. **Contextual enrichment**: Adding relevant information to prompt templates\\n3. **Dynamic content generation**: Creating outputs based on up-to-date information\\n4. **Workflow orchestration**: Coordinating complex operations involving multiple data sources\\n\\n## The `resources/list` Method\\n\\nThe `resources/list` method is a powerful capability that enables discovery and exploration of available contextual data sources. 
It can be used to:\\n\\n- **Discover available resources**: List all accessible data sources across connected MCP servers\\n- **Filter resources by type**: Find specific kinds of resources (files, database records, API results)\\n- **Explore metadata**: View descriptions, timestamps, and other metadata about available resources\\n- **Support dynamic workflows**: Enable applications to adapt based on available context\\n\\n### Basic Usage\\n\\n```\\n// Example: Listing all available resources\\n{\\n  \\\\\\\"method\\\\\\\": \\\\\\\"resources/list\\\\\\\",\\n  \\\\\\\"params\\\\\\\": {}\\n}\\n\\n// Example: Filtering resources by prefix\\n{\\n  \\\\\\\"method\\\\\\\": \\\\\\\"resources/list\\\\\\\",\\n  \\\\\\\"params\\\\\\\": {\\n    \\\\\\\"prefix\\\\\\\": \\\\\\\"github://\\\\\\\"\\n  }\\n}\\n```\\n\\n## Integrating Resources from Different MCP Servers\\n\\n### Available Resource Types by Server\\n\\n| Server Type | Resource Prefix | Example URI | Description |\\n|-------------|----------------|-------------|-------------|\\n| GitHub | github:// | github://owner/repo/path/to/file | Repository files and metadata |\\n| Filesystem | file:// | file:///path/to/local/file | Local file system access |\\n| PostgreSQL | postgres:// | postgres://database/table/record | Database records and query results |\\n| Memory | memory:// | memory://session/key | Stored session context |\\n| Web | https:// | https://api.example.com/data | Web content and API responses |\\n| {{custom_server}} | {{custom_prefix}} | {{custom_example}} | {{custom_description}} |\\n\\n### Resource Integration Patterns\\n\\n#### 1. 
Chain of Resources Pattern\\nConnect multiple resources sequentially, where the output of one resource operation becomes the input for the next:\\n\\n```\\n// Step 1: Retrieve configuration from GitHub\\nconst config = await getResource('github://org/repo/config.json');\\n\\n// Step 2: Use config to query database\\nconst queryResults = await getResource(`postgres://database/table?query=${config.queryParams}`);\\n\\n// Step 3: Process results and store in memory\\nawait setResource('memory://session/processed_data', processData(queryResults));\\n```\\n\\n#### 2. Aggregation Pattern\\nCombine data from multiple resources to create a comprehensive context:\\n\\n```\\n// Collect data from multiple sources\\nconst codebase = await getResource('github://org/repo/src');\\nconst documentation = await getResource('file:///local/docs');\\nconst issueTracking = await getResource('https://issues.example.com/api/project');\\n\\n// Combine into unified context\\nconst projectContext = {\\n  code: codebase,\\n  docs: documentation,\\n  issues: issueTracking\\n};\\n```\\n\\n#### 3. 
Template Enrichment Pattern\\nUse resources to populate template variables dynamically:\\n\\n```\\n// Retrieve template\\nconst template = await getResource('prompts://templates/analysis');\\n\\n// Gather contextual data\\nconst repoStats = await getResource('github://org/repo/stats');\\nconst performanceData = await getResource('postgres://metrics/performance');\\n\\n// Apply template with resource data\\nconst enrichedPrompt = applyTemplate(template, {\\n  project_metrics: repoStats,\\n  performance_insights: performanceData\\n});\\n```\\n\\n## Implementation Guidelines for {{integration_task}}\\n\\n### Step 1: Resource Discovery\\nFirst, use the resources/list method to discover what data sources are available:\\n\\n```javascript\\n// Example resources/list implementation\\nasync function discoverResources() {\\n  const resources = await callMCP({\\n    method: 'resources/list',\\n    params: {}\\n  });\\n  \\n  console.log('Available resources:', resources);\\n  return resources;\\n}\\n```\\n\\n### Step 2: Resource Access Patterns\\nImplement standardized patterns for accessing different resource types:\\n\\n```javascript\\n// Example resource access function\\nasync function getResource(uri) {\\n  const serverType = getServerTypeFromUri(uri);\\n  \\n  const response = await callMCP({\\n    server: serverType,\\n    method: 'resources/get',\\n    params: { uri }\\n  });\\n  \\n  return response.data;\\n}\\n```\\n\\n### Step 3: Resource Integration\\nCombine resources using the appropriate integration pattern for your use case:\\n\\n{{integration_code}}\\n\\n### Step 4: Error Handling and Fallbacks\\nImplement robust error handling for cases where resources may be unavailable:\\n\\n```javascript\\ntry {\\n  const resource = await getResource('github://org/repo/file.json');\\n  // Process resource\\n} catch (error) {\\n  console.error('Error accessing resource:', error);\\n  // Use fallback resource or strategy\\n  const fallbackResource = await 
getResource('file:///local/fallback.json');\\n}\\n```\\n\\n## Best Practices for Resource Integration\\n\\n1. **Cache appropriately**: Some resources may be expensive to fetch repeatedly\\n2. **Handle failures gracefully**: Use fallbacks when resources are unavailable\\n3. **Consider resource formats**: Different servers may return different data structures\\n4. **Manage dependencies**: Be mindful of resource dependencies and potential circular references\\n5. **Document resource usage**: Make resource URIs and usage patterns explicit\\n6. **Security awareness**: Consider access control implications when sharing resources\\n{{additional_practices}}\\n\\n## Implementation Examples for Common Scenarios\\n\\n### Example 1: Project Analysis Dashboard\\nCombine code repository statistics, issue tracking, and documentation:\\n\\n```javascript\\nasync function buildProjectDashboard() {\\n  // Discover available resources\\n  const resources = await discoverResources();\\n  \\n  // Check if required resources are available\\n  const hasGitHub = resources.some(r => r.startsWith('github://'));\\n  const hasIssues = resources.some(r => r.startsWith('https://issues.'));\\n  \\n  // Gather data from available sources\\n  const repoData = hasGitHub ? 
\\n    await getResource('github://org/project/stats') : \\n    { error: 'GitHub data unavailable' };\\n    \\n  const issueData = hasIssues ?\\n    await getResource('https://issues.example.com/api/project/stats') :\\n    { error: 'Issue tracker unavailable' };\\n    \\n  // Combine into unified dashboard data\\n  return {\\n    code_metrics: repoData,\\n    issue_metrics: issueData,\\n    timestamp: new Date().toISOString()\\n  };\\n}\\n```\\n\\n### Example 2: Dynamic Document Generation\\nGenerate documentation by combining templates with real-time data:\\n\\n```javascript\\nasync function generateDocumentation() {\\n  // Get document template\\n  const template = await getResource('prompts://templates/documentation');\\n  \\n  // Gather data from multiple sources\\n  const apiSchema = await getResource('file:///api/schema.json');\\n  const usageStats = await getResource('postgres://analytics/api_usage');\\n  const exampleCode = await getResource('github://org/examples/api');\\n  \\n  // Generate documentation\\n  return applyTemplate(template, {\\n    schema: apiSchema,\\n    usage: usageStats,\\n    examples: exampleCode\\n  });\\n}\\n```\\n\\n### Example 3: {{custom_example_name}}\\n{{custom_example_description}}\\n\\n```javascript\\n{{custom_example_code}}\\n```\\n\\n## Resources/List Method in Action\\n\\nThe resources/list method serves multiple important functions:\\n\\n1. **Discovery and Exploration**: Clients can discover what contextual resources are available\\n2. **Workflow Orchestration**: Automated workflows can determine which resources to use\\n3. **Enhanced UI/UX**: User interfaces can show available resources for selection\\n4. 
**Integration with External Services**: Bridge between clients and external data sources\\n\\nExample implementation of a resource explorer using resources/list:\\n\\n```javascript\\nasync function exploreResources(prefix = '') {\\n  const resources = await callMCP({\\n    method: 'resources/list',\\n    params: { prefix }\\n  });\\n  \\n  // Group resources by type\\n  const resourcesByType = resources.reduce((groups, uri) => {\\n    const type = uri.split('://')[0];\\n    if (!groups[type]) groups[type] = [];\\n    groups[type].push(uri);\\n    return groups;\\n  }, {});\\n  \\n  // Display available resources by type\\n  for (const [type, uris] of Object.entries(resourcesByType)) {\\n    console.log(`${type} resources (${uris.length}):`);\\n    uris.forEach(uri => console.log(`  - ${uri}`));\\n  }\\n  \\n  return resourcesByType;\\n}\\n```\\n\\n## Conclusion\\n\\nEffective integration of resources across MCP servers is a powerful pattern that enables complex workflows, rich contextual awareness, and dynamic content generation. 
By understanding the resources/list method and implementing appropriate integration patterns, you can leverage the full potential of the MCP ecosystem for {{integration_task}}.\\n\\nWhat specific aspect of MCP resource integration would you like to explore further?\\\",\\n  \\\"isTemplate\\\": true,\\n  \\\"variables\\\": [\\n    \\\"integration_task\\\",\\n    \\\"custom_server\\\",\\n    \\\"custom_prefix\\\",\\n    \\\"custom_example\\\",\\n    \\\"custom_description\\\",\\n    \\\"integration_code\\\",\\n    \\\"additional_practices\\\",\\n    \\\"custom_example_name\\\",\\n    \\\"custom_example_description\\\",\\n    \\\"custom_example_code\\\"\\n  ],\\n  \\\"tags\\\": [\\n    \\\"mcp\\\",\\n    \\\"resources\\\",\\n    \\\"integration\\\",\\n    \\\"advanced\\\",\\n    \\\"multi-server\\\",\\n    \\\"template\\\"\\n  ],\\n  \\\"createdAt\\\": \\\"2025-03-15T16:00:00.000Z\\\",\\n  \\\"updatedAt\\\": \\\"2025-03-15T16:00:00.000Z\\\",\\n  \\\"version\\\": 1,\\n  \\\"metadata\\\": {\\n    \\\"recommended_servers\\\": [\\n      \\\"github\\\",\\n      \\\"filesystem\\\",\\n      \\\"postgres\\\",\\n      \\\"memory\\\",\\n      \\\"prompts\\\"\\n    ],\\n    \\\"example_variables\\\": {\\n      \\\"integration_task\\\": \\\"building a comprehensive project analysis tool\\\",\\n      \\\"custom_server\\\": \\\"TimeSeries\\\",\\n      \\\"custom_prefix\\\": \\\"timeseries://\\\",\\n      \\\"custom_example\\\": \\\"timeseries://metrics/cpu-usage/7d\\\",\\n      \\\"custom_description\\\": \\\"Historical time-series data for metrics and monitoring\\\",\\n      \\\"integration_code\\\": \\\"async function integrateProjectAnalysis() {\\\\n  // Get repository information\\\\n  const repoInfo = await getResource('github://org/repo/info');\\\\n  \\\\n  // Fetch relevant code files based on repo structure\\\\n  const codeFiles = await Promise.all(\\\\n    repoInfo.main_modules.map(module => \\\\n      getResource(`github://org/repo/src/${module}`)\\\\n    )\\\\n  );\\\\n  
\\\\n  // Get database schema information\\\\n  const dbSchema = await getResource('postgres://database/information_schema');\\\\n  \\\\n  // Combine everything into a unified context\\\\n  const projectContext = {\\\\n    repository: repoInfo,\\\\n    code_modules: codeFiles,\\\\n    database_structure: dbSchema,\\\\n    analysis_timestamp: new Date().toISOString()\\\\n  };\\\\n  \\\\n  // Store the combined context in memory for future reference\\\\n  await setResource('memory://session/project_context', projectContext);\\\\n  \\\\n  return projectContext;\\\\n}\\\",\\n      \\\"additional_practices\\\": \\\"7. **Version awareness**: Consider resource version compatibility\\\\n8. **Performance tracking**: Monitor resource access patterns and optimize frequent operations\\\\n9. **Scope limitation**: Only access resources directly relevant to the current task\\\\n10. **Progressive enhancement**: Design systems that work with minimal resources but enhance capabilities when more are available\\\",\\n      \\\"custom_example_name\\\": \\\"Cross-Server Data Validation\\\",\\n      \\\"custom_example_description\\\": \\\"Validate data consistency across different storage systems by comparing repositories, databases, and local files:\\\",\\n      \\\"custom_example_code\\\": \\\"async function validateDataConsistency() {\\\\n  // Get configuration schema from repository\\\\n  const configSchema = await getResource('github://org/repo/schema/config.json');\\\\n  \\\\n  // Get actual configurations from database\\\\n  const dbConfigs = await getResource('postgres://app/configurations');\\\\n  \\\\n  // Get local configuration files\\\\n  const localConfigs = await getResource('file:///app/config/');\\\\n  \\\\n  // Compare configurations across systems\\\\n  const validationResults = {\\\\n    schema_valid: validateAgainstSchema(dbConfigs, configSchema),\\\\n    db_local_match: compareConfigurations(dbConfigs, localConfigs),\\\\n    mismatches: findMismatches(dbConfigs, 
localConfigs, configSchema)\\\\n  };\\\\n  \\\\n  // Store validation results in memory\\\\n  await setResource('memory://validation/config_results', validationResults);\\\\n  \\\\n  return validationResults;\\\\n}",
  "variables": {},
  "metadata": {
    "source": "/home/sparrow/projects/mcp-prompts/prompts/mcp-resources-integration.json",
    "imported": true
  }
}
```
Page 11/21FirstPrevNextLast