# Directory Structure
```
├── .gitignore
├── .python-version
├── pyproject.toml
├── README.md
├── src
│   └── brev_mcp
│       ├── __init__.py
│       ├── api.py
│       ├── cli.py
│       ├── instance_types.py
│       ├── models.py
│       ├── server.py
│       ├── tools.py
│       └── workspace.py
├── tests
│   └── test.py
└── uv.lock
```

# Files

--------------------------------------------------------------------------------
/.python-version:
--------------------------------------------------------------------------------
```
3.10
```

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
```
# Python-generated files
__pycache__/
*.py[oc]
build/
dist/
wheels/
*.egg-info

# Virtual environments
.venv
```

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
```markdown
# Brev MCP server

This is an MCP server implementation for Brev.

## Configuration

The MCP server uses the Brev CLI's API access token and the currently selected org. Follow the [Brev documentation](https://www.brev.dev/docs/reference/brev-cli) to download the CLI and log in if you haven't already.

If you want to switch your Brev org, run `brev set <org-name>`.

The CLI access token expires every hour. If you hit 403 errors, run `brev ls` to refresh the access token.

## Quickstart

### Set up the repository locally

`git clone [email protected]:brevdev/brev-mcp.git`

### Install uv

Follow the [uv installation guide](https://docs.astral.sh/uv/getting-started/installation/).

### Configure Claude Desktop

On macOS: `~/Library/Application\ Support/Claude/claude_desktop_config.json`
On Windows: `%APPDATA%/Claude/claude_desktop_config.json`

Add the following to your `claude_desktop_config.json`:

<details>
  <summary>Development/Unpublished Servers Configuration</summary>

```json
"mcpServers": {
  "brev_mcp": {
    "command": "uv",
    "args": [
      "--directory",
      "<path-to-repo>",
      "run",
      "brev-mcp"
    ]
  }
}
```
</details>

## Development

### Building and Publishing

To prepare the package for distribution:

1. Sync dependencies and update lockfile:
```bash
uv sync
```

2. Build package distributions:
```bash
uv build
```

This will create source and wheel distributions in the `dist/` directory.

3. Publish to PyPI:
```bash
uv publish
```

Note: You'll need to set PyPI credentials via environment variables or command flags:
- Token: `--token` or `UV_PUBLISH_TOKEN`
- Or username/password: `--username`/`UV_PUBLISH_USERNAME` and `--password`/`UV_PUBLISH_PASSWORD`

### Debugging

Since MCP servers run over stdio, debugging can be challenging. For the best debugging experience, we strongly recommend using the [MCP Inspector](https://github.com/modelcontextprotocol/inspector).

You can launch the MCP Inspector via [`npm`](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm) with this command:

```bash
npx @modelcontextprotocol/inspector uv --directory <path-to-repo> run brev-mcp
```

Upon launching, the Inspector will display a URL that you can access in your browser to begin debugging.
```

--------------------------------------------------------------------------------
/src/brev_mcp/__init__.py:
--------------------------------------------------------------------------------
```python
from . 
import server import asyncio def main(): """Main entry point for the package.""" asyncio.run(server.main()) # Optionally expose other important items at package level __all__ = ['main', 'server'] ``` -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- ```toml [project] name = "brev-mcp" version = "0.1.0" description = "A MCP server project" readme = "README.md" requires-python = ">=3.10" dependencies = [ "httpx>=0.28.1", "mcp>=1.1.0", "protobuf>=5.29.1", ] [[project.authors]] name = "tmonty12" email = "[email protected]" [build-system] requires = [ "hatchling",] build-backend = "hatchling.build" [project.scripts] brev-mcp = "brev_mcp:main" ``` -------------------------------------------------------------------------------- /src/brev_mcp/workspace.py: -------------------------------------------------------------------------------- ```python import json from .api import create_workspace from .models import ( CloudProvider, CreateWorkspaceRequest ) async def create_provider_workspace(name: str, cloud_provider: CloudProvider, instance_type: str) -> str: req = CreateWorkspaceRequest( name=name, workspaceGroupId=cloud_provider.get_workspace_group_id(), instanceType=instance_type, ) workspace = await create_workspace(req) return json.dumps(workspace.model_dump(), indent=2) ``` -------------------------------------------------------------------------------- /tests/test.py: -------------------------------------------------------------------------------- ```python from brev_mcp import server async def test_read_resource(): instance_types = await server.read_resource("brev://instance-types/crusoe") print(instance_types) async def test_get_instance_types_tool(): tool_output = await server.call_tool("get_instance_types", {"cloud_provider": "crusoe"}) print(tool_output[0].text) async def test_create_workspace_tool(): tool_output = await server.call_tool("create_workspace", { "name": "test-workspace-2", "cloud_provider": "crusoe", "instance_type": "l40s-48gb.1x" }) print(tool_output[0].text) if __name__ == "__main__": import asyncio asyncio.run(test_get_instance_types_tool()) ``` -------------------------------------------------------------------------------- /src/brev_mcp/cli.py: -------------------------------------------------------------------------------- ```python import os import json from .models import CredentialsFile, ActiveOrgFile CREDENTIALS_FILEPATH = "~/.brev/credentials.json" ACTIVEORG_FILEPATH = "~/.brev/active_org.json" def get_acess_token() -> str: env_token = os.getenv("BREV_API_TOKEN") if env_token: return env_token credentials_path = os.path.expanduser(CREDENTIALS_FILEPATH) if not os.path.exists(credentials_path): raise RuntimeError(f"brev credentials file {CREDENTIALS_FILEPATH} not found") with open(credentials_path, "r") as f: credentials = json.load(f) credential_file = CredentialsFile.model_validate(credentials) return credential_file.access_token def get_active_org_id() -> str: env_org_id = os.getenv("BREV_ORG_ID") if env_org_id: return env_org_id activeorg_path = os.path.expanduser(ACTIVEORG_FILEPATH) if not os.path.exists(activeorg_path): raise RuntimeError(f"brev active org file {ACTIVEORG_FILEPATH} not found") with open(activeorg_path, "r") as f: active_org = json.load(f) active_org_file = ActiveOrgFile.model_validate(active_org) return active_org_file.id ``` -------------------------------------------------------------------------------- 
/src/brev_mcp/instance_types.py: -------------------------------------------------------------------------------- ```python import json import logging from typing import List from .models import ( CloudProvider, AllInstanceTypeObj, InstanceType, ) from .api import get_instance_types logging.basicConfig(level=logging.INFO) logger = logging.getLogger("instance-types") async def get_provider_instance_types(provider: CloudProvider)-> str: try: all_instance_types_obj = await get_instance_types() instance_types = filter_instance_types(all_instance_types_obj) for cloud_provider, instance_type_list in instance_types.items(): logger.info(f"Number of instance types for {cloud_provider.value}: {len(instance_type_list)}") if provider not in instance_types: raise ValueError(f"Provider {provider.value} not found in instance types") instance_type_dicts = [ instance_type.model_dump(exclude_none=True) for instance_type in instance_types[provider] ] return json.dumps(instance_type_dicts, indent=2) except Exception as e: logger.error(f"Error getting instance types: {str(e)}") raise RuntimeError(f"Failed to get instance types: {str(e)}") def filter_instance_types(all_instance_types: AllInstanceTypeObj) -> dict[CloudProvider, List[InstanceType]]: instance_types: dict[CloudProvider, List[InstanceType]]= {} for it_wg in all_instance_types.all_instance_types: if len(it_wg.workspace_groups) == 0: continue cloud_provider = CloudProvider(it_wg.workspace_groups[0].platform_type) instance_type_data = it_wg.model_dump(exclude={'workspace_groups'}) instance_type = InstanceType.model_validate(instance_type_data) if not instance_type: logger.warning(f"Instance type {it_wg.type} has no attributes") continue if cloud_provider not in instance_types: instance_types[cloud_provider] = [instance_type] else: instance_types[cloud_provider].append(instance_type) return instance_types ``` -------------------------------------------------------------------------------- /src/brev_mcp/api.py: -------------------------------------------------------------------------------- ```python import httpx from pydantic import ValidationError from .models import ( AllInstanceTypeObj, CreateWorkspaceRequest, Workspace ) from .cli import get_acess_token, get_active_org_id BASE_API_URL = "https://brevapi.us-west-2-prod.control-plane.brev.dev/api" async def get_instance_types() -> AllInstanceTypeObj: access_token = get_acess_token() org_id = get_active_org_id() try: async with httpx.AsyncClient(timeout=httpx.Timeout(25.0)) as client: response = await client.get( f"{BASE_API_URL}/instances/alltypesavailable/{org_id}", headers={ "Authorization": f"Bearer {access_token}", "Content-Type": "application/json" }, ) response.raise_for_status() data = response.json() all_instance_types_obj = AllInstanceTypeObj.model_validate(data) return all_instance_types_obj except ValidationError as e: raise RuntimeError(f"Failed to validate instance types: {str(e)}") except Exception as e: raise RuntimeError(f"Failed to get instance types: {str(e)}") async def create_workspace(request: CreateWorkspaceRequest) -> Workspace: access_token = get_acess_token() org_id = get_active_org_id() try: async with httpx.AsyncClient(timeout=httpx.Timeout(25.0)) as client: json = request.model_dump(by_alias=True) response = await client.post( f"{BASE_API_URL}/organizations/{org_id}/workspaces", headers={ "Authorization": f"Bearer {access_token}", "Content-Type": "application/json" }, json=json ) response.raise_for_status() data = response.json() workspace = Workspace.model_validate(data) 
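            # The response has already been validated into the Workspace model;
            # a schema mismatch raises ValidationError and is re-raised below as a RuntimeError.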
            return workspace
    except ValidationError as e:
        raise RuntimeError(f"Failed to validate workspace: {str(e)}")
    except Exception as e:
        raise RuntimeError(f"Failed to create workspace: {str(e)}")
```

--------------------------------------------------------------------------------
/src/brev_mcp/tools.py:
--------------------------------------------------------------------------------
```python
from mcp.types import (
    TextContent,
    Tool
)

from .models import CloudProvider, ToolModel
from .instance_types import get_provider_instance_types
from .workspace import create_provider_workspace

async def get_instance_types_tool(args: dict[str, str]) -> list[TextContent]:
    if "cloud_provider" not in args:
        raise ValueError("cloud_provider argument is required for get_instance_types tool")

    cloud_provider = CloudProvider(args["cloud_provider"])
    instance_types = await get_provider_instance_types(cloud_provider)

    return [
        TextContent(
            type="text",
            text=instance_types
        )
    ]

async def create_workspace_tool(args: dict[str, str]) -> list[TextContent]:
    if "name" not in args or "cloud_provider" not in args or "instance_type" not in args:
        raise ValueError("missing required arguments for create_workspace tool")

    cloud_provider = CloudProvider(args["cloud_provider"])
    workspace = await create_provider_workspace(args["name"], cloud_provider, args["instance_type"])

    return [
        TextContent(
            type="text",
            text=workspace
        )
    ]

tool_models = {
    "get_instance_types": ToolModel(
        tool=Tool(
            name="get_instance_types",
            description="Get available instance types for a cloud provider",
            inputSchema={
                "type": "object",
                "properties": {
                    "cloud_provider": {
                        "description": "The cloud provider to get instance types for",
                        "enum": [provider.value for provider in CloudProvider]
                    }
                },
                "required": ["cloud_provider"]
            }
        ),
        call_tool=get_instance_types_tool
    ),
    "create_workspace": ToolModel(
        tool=Tool(
            name="create_workspace",
            description="Create a workspace from an instance type and cloud provider",
            inputSchema={
                "type": "object",
                "properties": {
                    "name": {
                        "description": "The name of the workspace",
                        "type": "string",
                    },
                    "cloud_provider": {
                        "description": "The cloud provider for the workspace",
                        "enum": [provider.value for provider in CloudProvider]
                    },
                    "instance_type": {
                        "description": "The instance type of the workspace",
                        "type": "string",
                    }
                },
                "required": ["name", "cloud_provider", "instance_type"]
            }
        ),
        call_tool=create_workspace_tool
    )
}
```

--------------------------------------------------------------------------------
/src/brev_mcp/server.py:
--------------------------------------------------------------------------------
```python
import logging
from typing import Any
from collections.abc import Sequence

from .instance_types import get_provider_instance_types
from .models import CloudProvider
from .tools import tool_models

from mcp.server.models import InitializationOptions
from mcp.types import (
    Resource,
    Tool,
    TextContent,
    ImageContent,
    EmbeddedResource
)
from mcp.server import NotificationOptions, Server
from pydantic import AnyUrl
import mcp.server.stdio

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("brev-server")

app = Server("brev_mcp")

@app.list_resources()
async def list_resources() -> list[Resource]:
    """List available Brev resources."""
    return [
        Resource(
            uri=f"brev://instance-types/{provider.value}",
            name=f"{provider.value} Instance Types",
            mimeType="application/json",
            description=f"Available virtual machine instance types for Brev provider {provider.value}",
        )
        for provider in CloudProvider
    ]

@app.read_resource()
async def read_resource(uri: AnyUrl) -> str:
    """Read resource content."""
    if str(uri).startswith("brev://instance-types/"):
        provider = CloudProvider(str(uri).split("/")[-1])
    else:
        raise ValueError(f"Unknown resource: {uri}")

    instance_types = await get_provider_instance_types(provider)
    return instance_types

@app.list_tools()
async def list_tools() -> list[Tool]:
    """List available brev tools."""
    return [tool_model.tool for _, tool_model in tool_models.items()]

@app.call_tool()
async def call_tool(tool_name: str, arguments: Any) -> Sequence[TextContent | ImageContent | EmbeddedResource]:
    """Handle brev tool calls"""
    if tool_name not in tool_models:
        raise ValueError(f"Unknown tool: {tool_name}")

    if not isinstance(arguments, dict):
        raise ValueError(f"Invalid {tool_name} arguments")

    return await tool_models[tool_name].call_tool(arguments)

# TODO: aws instance types response is too long: result exceeds maximum length of 1048576
# TODO: should have a tool call that can filter based on providers -> doing a query like "I'll help you find all single H100 instances across the cloud providers."
# requires a bunch of api calls
# TODO: Error executing code: MCP error -2: Request timed out
# TODO: handle notifications

async def main():
    # Run the server using stdin/stdout streams
    async with mcp.server.stdio.stdio_server() as (read_stream, write_stream):
        await app.run(
            read_stream,
            write_stream,
            InitializationOptions(
                server_name="brev_mcp",
                server_version="0.1.0",
                capabilities=app.get_capabilities(
                    notification_options=NotificationOptions(),
                    experimental_capabilities={},
                ),
            ),
        )
```

--------------------------------------------------------------------------------
/src/brev_mcp/models.py:
--------------------------------------------------------------------------------
```python
from typing import Dict, List, Optional, Literal, Callable, Awaitable
from pydantic import BaseModel, Field
from datetime import datetime
from enum import Enum

from mcp.types import Tool, TextContent

class Quota(BaseModel):
    id: Optional[str] = None
    name: Optional[str] = None
    maximum: Optional[int] = None
    current: Optional[int] = None
    unit: Optional[str] = None

class InstanceTypeQuota(BaseModel):
    on_demand: Optional[Quota] = Field(None, alias="onDemand")
    spot: Optional[Quota] = None
    reserved: Optional[Quota] = None

    class Config:
        populate_by_name = True

class InstanceTypePrice(BaseModel):
    instance_type_price_id: Optional[str] = Field(None, alias="instanceTypePriceId")
    operating_system: Optional[str] = Field(None, alias="operatingSystem")
    instance_type: Optional[str] = Field(None, alias="instanceType")
    term_type: Optional[str] = Field(None, alias="termType")
    term_attributes: Optional[Dict[str, str]] = Field(None, alias="termAttributes")
    unit: Optional[str] = None
    price_usd: Optional[str] = Field(None, alias="priceUsd")
    usage_type: Optional[str] = Field(None, alias="usageType")

    class Config:
        populate_by_name = True

class CurrencyAmount(BaseModel):
    currency: Optional[str] = None
    amount: Optional[str] = None

class Storage(BaseModel):
    count: Optional[int] = None
    size: Optional[str] = None
    type: Optional[str] = None
    min_size: Optional[str] = Field(None, alias="minSize")
    max_size: Optional[str] = Field(None, alias="maxSize")
    price_per_gb_hr: Optional[CurrencyAmount] = Field(None, alias="pricePerGbHr")

    class Config:
        populate_by_name = True

class Gpu(BaseModel):
    count: Optional[int] = None
    memory: Optional[str] = None
    manufacturer: Optional[str] = None
    name: Optional[str] = None
    network_details: Optional[str] = Field(None, alias="networkDetails")
    memory_details: Optional[str] = Field(None,
alias="memoryDetails") class Config: populate_by_name = True class WorkspaceGroupPlatform(str, Enum): NOOP = "noop" AWS = "aws" DEV_PLANE = "dev-plane" AWS_EC2_SPOT = "aws:ec2:spot" class WorkspaceGroupStatus(str, Enum): DEPLOYING = "DEPLOYING" RUNNING = "RUNNING" DEPRECATED = "DEPRECATED" DELETING = "DELETING" FAILURE = "FAILURE" class TenantType(str, Enum): SHARED = "shared" ISOLATED = "isolated" class Metadata(BaseModel): created_at: Optional[datetime] = Field(None, alias="createdAt") updated_at: Optional[datetime] = Field(None, alias="updatedAt") deleted_at: Optional[datetime] = Field(None, alias="deletedAt") id: Optional[str] = None org_id: Optional[str] = Field(None, alias="orgId") class Config: populate_by_name = True class Workspace(BaseModel): # Add workspace fields based on your needs pass class WorkspaceGroup(BaseModel): metadata: Optional[Metadata] = None name: Optional[str] = None host: Optional[str] = None # Assuming uri.Host is a string platform: Optional[WorkspaceGroupPlatform] = None platform_id: Optional[str] = Field(None, alias="platformId") platform_region: Optional[str] = Field(None, alias="platformRegion") platform_type: Optional[str] = Field(None, alias="platformType") usable_regions: Optional[List[str]] = Field(None, alias="usableRegions") status: Optional[WorkspaceGroupStatus] = None workspaces: Optional[List[Workspace]] = None tenant_type: Optional[TenantType] = Field(None, alias="tenantType") version: Optional[str] = None tags: Optional[Dict[str, List[str]]] = None class Config: populate_by_name = True class Location(BaseModel): name: Optional[str] = None description: Optional[str] = None available: Optional[bool] = None endpoint: Optional[str] = None country: Optional[str] = None class WorkspaceGroupWithLocations(WorkspaceGroup): locations: Optional[List[Location]] = None class InstanceType(BaseModel): type: Optional[str] = None supported_gpus: Optional[List[Gpu]] = Field(None, alias="supportedGpus") supported_storage: Optional[List[Storage]] = Field(None, alias="supportedStorage") memory: Optional[str] = None maximum_network_interfaces: Optional[int] = Field(None, alias="maximumNetworkInterfaces") network_performance: Optional[str] = Field(None, alias="networkPerformance") supported_num_cores: Optional[List[int]] = Field(None, alias="supportedNumCores") default_cores: Optional[int] = Field(None, alias="defaultCores") vcpu: Optional[int] = None supported_architectures: Optional[List[str]] = Field(None, alias="supportedArchitectures") clock_speed_in_ghz: Optional[str] = Field(None, alias="clockSpeedInGhz") sub_location: Optional[str] = Field(None, alias="subLocation") prices: Optional[List[InstanceTypePrice]] = None default_price: Optional[str] = Field(None, alias="defaultPrice") elastic_root_volume: Optional[bool] = Field(None, alias="elasticRootVolume") supported_usage_classes: Optional[List[str]] = Field(None, alias="supportedUsageClasses") quota: Optional[InstanceTypeQuota] = None location: Optional[str] = None is_available: Optional[bool] = Field(None, alias="isAvailable") variable_price: Optional[bool] = Field(None, alias="variablePrice") rebootable: Optional[bool] = None preemptible: Optional[bool] = None base_price: Optional[CurrencyAmount] = Field(None, alias="basePrice") sub_location_type_changeable: Optional[bool] = Field(None, alias="subLocationTypeChangeable") estimated_deploy_time: Optional[str] = Field(None, alias="estimatedDeployTime") user_privilege_escalation_disabled: Optional[bool] = Field(None, alias="userPrivilegeEscalationDisabled") 
not_privileged: Optional[bool] = Field(None, alias="notPrivileged") is_container: Optional[bool] = Field(None, alias="isContainer") class Config: populate_by_name = True class InstanceTypeWorkspaceGroup(InstanceType): workspace_groups: Optional[List[WorkspaceGroup]] = Field(None, alias="workspaceGroups") class Config: populate_by_name = True class WorkspaceGroupError(BaseModel): workspace_group: Optional[WorkspaceGroup] = Field(None, alias="workspaceGroup") error_message: Optional[str] = Field(None, alias="errorMessage") class Config: populate_by_name = True class AllInstanceTypeObj(BaseModel): all_instance_types: Optional[List[InstanceTypeWorkspaceGroup]] = Field(None, alias="allInstanceTypes") workspace_group_errors: Optional[List[WorkspaceGroupError]] = Field(None, alias="workspaceGroupErrors") class Config: populate_by_name = True Workspace_Group_Ids: dict[str, str] = { "aws": "devplane-brev-1", "gcp": "GCP", "azure": "azure-dgxc-wg", "crusoe": "crusoe-brev-wg", "lambda-labs": "lambda-labs-test", "fluidstack": "FluidStack", "launchpad": "launchpad-test-wg", "akash": "akash-brev-wg", "gcpalpha": "dgxc-gcp", } class CloudProvider(str, Enum): AWS = "aws" GCP = "gcp" AZURE = "azure" CRUSOE = "crusoe" LAMBDA_LABS = "lambda-labs" FLUIDSTACK = "fluidstack" LAUNCHPAD = "launchpad" AKASH = "akash" GCPALPHA = "gcpalpha" def get_workspace_group_id(self): return Workspace_Group_Ids[self.value] DEFAULT_VERB_CONFIG = "build:\n system_packages: []\n python_version: '3.10'\n cuda: 12.0.1\n python_packages:\n - jupyterlab\n run:\n - sh -c \"$(curl -fsSL https://raw.githubusercontent.com/ohmyzsh/ohmyzsh/master/tools/install.sh)\" \"\" --unattended\nuser:\n shell: zsh\n authorized_keys_path: /home/ubuntu/.ssh/authorized_keys\nports:\n - '2222:22'\nservices:\n - name: jupyter\n entrypoint: jupyter-lab --ip=0.0.0.0 --no-browser --NotebookApp.token='' --NotebookApp.password=''\n ports:\n - '8888'" class CreateWorkspaceRequest(BaseModel): # version: Optional[str] = None name: str # description: Optional[str] = None workspace_group_id: Optional[str] = Field(None, alias="workspaceGroupId") # workspace_template_id: Optional[str] = Field(None, alias="workspaceTemplateId") # workspace_class: Optional[WorkspaceClassID] = Field(None, alias="workspaceClassId") # git_repo: Optional[str] = Field(None, alias="gitRepo") # is_stoppable: Optional[bool] = Field(None, alias="isStoppable") # tunnel: Optional[WorkspaceTunnel] = None # primary_application_id: Optional[str] = Field(None, alias="primaryApplicationId") # startup_script: Optional[str] = Field(None, alias="startupScript") # startup_script_path: Optional[str] = Field(None, alias="startupScriptPath") # ide_config: Optional[ClientConfig] = Field(None, alias="ideConfig") # dont_check_ssh_keys: bool = Field(False, alias="dontCheckSSHKeys") # repos: ReposV0 # execs: ExecsV0 # init_branch: Optional[str] = Field(None, alias="initBranch") # dot_brev_path: Optional[str] = Field(None, alias="dotBrevPath") # repos_v1: Optional[ReposV1] = Field(None, alias="reposV1") # execs_v1: Optional[ExecsV1] = Field(None, alias="execsV1") instance_type: Optional[str] = Field(None, alias="instanceType") # disk_storage: Optional[str] = Field(None, alias="diskStorage") # region: Optional[str] = None # image: Optional[str] = None # architecture: Optional[Architecture] = None # spot: bool = False # on_container: bool = Field(False, alias="onContainer") # initial_container_image: Optional[str] = Field(None, alias="containerImage") verb_yaml: Optional[str] = Field(DEFAULT_VERB_CONFIG, 
alias="verbYaml") # base_image: Optional[str] = Field(None, alias="baseImage") # custom_container: Optional[CustomContainer] = Field(None, alias="customContainer") # port_mappings: Optional[Dict[str, str]] = Field(None, alias="portMappings") workspace_version: Optional[Literal["v1", "v0"]] = Field("v1", alias="workspaceVersion") # retry_for: Optional[str] = Field(None, alias="retryFor") # vm_only_mode: bool = Field(False, alias="vmOnlyMode") # files: Optional[List[FileRequest]] = None # labels: Optional[Dict[str, str]] = None # launch_jupyter_on_start: bool = Field(False, alias="launchJupyterOnStart") class Config: populate_by_name = True class WorkspaceStatus(str, Enum): DEPLOYING = "DEPLOYING" STARTING = "STARTING" RUNNING = "RUNNING" STOPPING = "STOPPING" STOPPED = "STOPPED" DELETING = "DELETING" FAILURE = "FAILURE" class HealthStatus(str, Enum): UNSPECIFIED = "" HEALTHY = "HEALTHY" UNHEALTHY = "UNHEALTHY" UNAVAILABLE = "UNAVAILABLE" class ServiceType(str, Enum): SSH = "SSH" HTTP = "HTTP" HTTPS = "HTTPS" TCP = "TCP" RDP = "RDP" class WorkspaceCapability(str, Enum): STOP_START_INSTANCE = "stop-start-instance" AUTOSTOP = "autostop" EXPOSE_PUBLIC_PORTS = "expose-public-ports" CLONE = "clone" RETIRE_VOLUME = "retire-volume" MACHINE_IMAGE = "machine-image" MODIFY_FIREWALL = "modify-firewall" INSTANCE_USER_DATA = "instance-userdata" VPC_SUBNETS = "vpc-subnets" CONTAINER_CLOUD = "container-cloud" class VerbBuildStatus(str, Enum): UNSPECIFIED = "" CREATE_FAILED = "CREATE_FAILED" PENDING = "PENDING" BUILDING = "BUILDING" COMPLETED = "COMPLETED" class FileType(str, Enum): COLAB = "colab" NOTEBOOK = "notebook" GITHUB = "github" GITLAB = "gitlab" class WorkspaceStartStatus(str, Enum): UNSPECIFIED = "" STARTING = "STARTING" FAILURE = "FAILURE" STARTED = "STARTED" class WorkspaceVersion(str, Enum): UNSPECIFIED = "" V0 = "v0" V1 = "v1" class WorkspaceApplicationAPIKey(BaseModel): enabled: bool id: str client_id: str = Field(alias="clientID") client_secret: str = Field(alias="clientSecret") class WorkspaceApplicationPolicy(BaseModel): allowed_user_auth_ids: List[str] = Field(alias="allowedUserAuthIDs") allow_everyone: bool = Field(alias="allowEveryone") api_key: WorkspaceApplicationAPIKey = Field(alias="apiKey") allowed_user_provider_ids: List[str] = Field(alias="allowedUserProviderIDs") class WorkspaceApplication(BaseModel): cloudflare_application_id: str = Field(alias="cloudflareApplicationID") cloudflare_dns_record_id: str = Field(alias="cloudflareDnsRecordID") hostname: str name: str service_type: ServiceType = Field(alias="serviceType") port: int application_setup_bash: str = Field(alias="userApplicationSetupBash") policy: WorkspaceApplicationPolicy health_check_id: str = Field(alias="healthCheckID") class WorkspaceTunnel(BaseModel): tunnel_id: str = Field(alias="tunnelID") applications: List[WorkspaceApplication] tunnel_setup_bash: str = Field(alias="tunnelSetupBash") tunnel_status: HealthStatus = Field(alias="tunnelStatus") class Thresholds(BaseModel): failure_threshold: int = Field(alias="failureThreshold") success_threshold: int = Field(alias="successThreshold") class Timestamp(BaseModel): seconds: int nanos: int class HealthCheck(BaseModel): health_check_id: str = Field(alias="healthCheckId") create_time: Optional[Timestamp] = Field(alias="createTime") update_time: Optional[Timestamp] = Field(alias="updateTime") labels: Dict[str, str] status: str thresholds: Optional[Thresholds] = None class FileMetadata(BaseModel): type: FileType class FileObject(BaseModel): url: str path: str metadata: 
FileMetadata class ClientConfig(BaseModel): # Add fields based on data.ClientConfig pass class ReposV0(BaseModel): # Add fields based on data.ReposV0 pass class ExecsV0(BaseModel): # Add fields based on data.ExecsV0 pass class ReposV1(BaseModel): # Add fields based on data.ReposV1 pass class ExecsV1(BaseModel): # Add fields based on data.ExecsV1 pass class CustomContainer(BaseModel): # Add fields based on data.CustomContainer pass class WorkspaceTemplateJSON(BaseModel): # Add fields based on WorkspaceTemplateJSON pass class Workspace(BaseModel): id: str workspace_group_id: str = Field(alias="workspaceGroupId") organization_id: str = Field(alias="organizationId") name: str description: str created_by_user_id: str = Field(alias="createdByUserId") dns: Optional[str] = None password: Optional[str] = None workspace_class: str = Field(alias="workspaceClassId") git_repo: Optional[str] = Field(None, alias="gitRepo") workspace_template: WorkspaceTemplateJSON = Field(alias="workspaceTemplate") status: WorkspaceStatus status_message: str = Field(alias="statusMessage") health_status: HealthStatus = Field(alias="healthStatus") last_online_at: str = Field(alias="lastOnlineAt") created_at: str = Field(alias="createdAt") updated_at: str = Field(alias="updatedAt") version: str ssh_port: int = Field(alias="sshPort") ssh_user: str = Field(alias="sshUser") ssh_proxy_hostname: str = Field(alias="sshProxyHostname") host_ssh_port: int = Field(alias="hostSshPort") host_ssh_user: str = Field(alias="hostSshUser") host_ssh_proxy_hostname: str = Field(alias="hostSshProxyHostname") on_container: bool = Field(alias="onContainer") is_stoppable: bool = Field(alias="isStoppable") tunnel: Optional[WorkspaceTunnel] = None primary_application_id: Optional[str] = Field(None, alias="primaryApplicationId") startup_script: str = Field(alias="startupScript") startup_script_path: str = Field(alias="startupScriptPath") init_branch: str = Field(alias="initBranch") dot_brev_path: str = Field(alias="dotBrevPath") network_id: str = Field(alias="networkId") ide_config: ClientConfig = Field(alias="ideConfig") # repos: ReposV0 # execs: ExecsV0 repos_v1: Optional[ReposV1] = Field(None, alias="reposV1") execs_v1: Optional[ExecsV1] = Field(None, alias="execsV1") stop_timeout: Optional[int] = Field(None, alias="stopTimeout") instance_type: str = Field(alias="instanceType") disk_storage: str = Field(alias="diskStorage") image: str region: str exposed_ports: List[int] = Field(alias="exposedPorts") spot: bool workspace_capabilities: List[WorkspaceCapability] = Field(alias="workspaceCapabilities") workspace_image_uri: str = Field(alias="workspaceImageUri") verb_yaml: str = Field(None, alias="verbYaml") verb_build_status: VerbBuildStatus = Field(alias="verbBuildStatus") health_checks: Optional[List[HealthCheck]] = Field(alias="healthCheck") file_objects: Optional[Dict[str, FileObject]] = Field(None, alias="fileObjects") additional_users: Optional[List[str]] = Field(None, alias="additionalUsers") base_image: Optional[str] = Field(None, alias="baseImage") port_mappings: Optional[Dict[str, str]] = Field(None, alias="portMappings") last_start_status: WorkspaceStartStatus = Field(alias="lastStartStatus") last_start_status_message: str = Field(alias="lastStartStatusMessage") workspace_version: WorkspaceVersion = Field(alias="workspaceVersion") vm_only_mode: bool = Field(alias="vmOnlyMode") custom_container: Optional[CustomContainer] = Field(None, alias="customContainer") instance_type_info: Optional[InstanceType] = Field(None, alias="instanceTypeInfo") 
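    # Allow population by snake_case field names in addition to the camelCase API aliases.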
class Config: populate_by_name = True class CredentialsFile(BaseModel): access_token: str refresh_token: str class ActiveOrgFile(BaseModel): id: str name: str userNetworkId: str class ToolModel(BaseModel): tool: Tool call_tool: Callable[..., Awaitable[TextContent]] ```
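
# Usage sketch

The models above drive the payloads that `api.py` sends to the Brev API. As a rough illustration (not shipped with the package), the sketch below mirrors how `create_provider_workspace` in `workspace.py` builds and serializes a `CreateWorkspaceRequest`; the workspace name and instance type are placeholder values.

```python
import json

from brev_mcp.models import CloudProvider, CreateWorkspaceRequest

# Build the request the same way workspace.py does. "example-workspace" and
# "l40s-48gb.1x" are illustrative values, not guaranteed to exist in your org.
req = CreateWorkspaceRequest(
    name="example-workspace",
    workspaceGroupId=CloudProvider.CRUSOE.get_workspace_group_id(),
    instanceType="l40s-48gb.1x",
)

# model_dump(by_alias=True) emits the camelCase keys the API expects
# (workspaceGroupId, instanceType, verbYaml, workspaceVersion), matching
# the payload that api.create_workspace posts.
print(json.dumps(req.model_dump(by_alias=True), indent=2))
```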