This is page 2 of 6. Use http://codebase.md/datalayer/jupyter-mcp-server?lines=true&page={x} to view the full context.

# Directory Structure

```
├── .dockerignore
├── .github
│   ├── copilot-instructions.md
│   ├── dependabot.yml
│   └── workflows
│       ├── build.yml
│       ├── fix-license-header.yml
│       ├── lint.sh
│       ├── prep-release.yml
│       ├── publish-release.yml
│       └── test.yml
├── .gitignore
├── .licenserc.yaml
├── .pre-commit-config.yaml
├── .vscode
│   ├── mcp.json
│   └── settings.json
├── ARCHITECTURE.md
├── CHANGELOG.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── dev
│   ├── content
│   │   ├── new.ipynb
│   │   ├── notebook.ipynb
│   │   └── README.md
│   └── README.md
├── Dockerfile
├── docs
│   ├── .gitignore
│   ├── .yarnrc.yml
│   ├── babel.config.js
│   ├── docs
│   │   ├── _category_.yaml
│   │   ├── clients
│   │   │   ├── _category_.yaml
│   │   │   ├── claude_desktop
│   │   │   │   ├── _category_.yaml
│   │   │   │   └── index.mdx
│   │   │   ├── cline
│   │   │   │   ├── _category_.yaml
│   │   │   │   └── index.mdx
│   │   │   ├── cursor
│   │   │   │   ├── _category_.yaml
│   │   │   │   └── index.mdx
│   │   │   ├── index.mdx
│   │   │   ├── vscode
│   │   │   │   ├── _category_.yaml
│   │   │   │   └── index.mdx
│   │   │   └── windsurf
│   │   │       ├── _category_.yaml
│   │   │       └── index.mdx
│   │   ├── configure
│   │   │   ├── _category_.yaml
│   │   │   └── index.mdx
│   │   ├── contribute
│   │   │   ├── _category_.yaml
│   │   │   └── index.mdx
│   │   ├── deployment
│   │   │   ├── _category_.yaml
│   │   │   ├── datalayer
│   │   │   │   ├── _category_.yaml
│   │   │   │   └── streamable-http
│   │   │   │       └── index.mdx
│   │   │   ├── index.mdx
│   │   │   └── jupyter
│   │   │       ├── _category_.yaml
│   │   │       ├── index.mdx
│   │   │       ├── stdio
│   │   │       │   ├── _category_.yaml
│   │   │       │   └── index.mdx
│   │   │       └── streamable-http
│   │   │           ├── _category_.yaml
│   │   │           ├── jupyter-extension
│   │   │           │   └── index.mdx
│   │   │           └── standalone
│   │   │               └── index.mdx
│   │   ├── index.mdx
│   │   ├── releases
│   │   │   ├── _category_.yaml
│   │   │   └── index.mdx
│   │   ├── resources
│   │   │   ├── _category_.yaml
│   │   │   └── index.mdx
│   │   └── tools
│   │       ├── _category_.yaml
│   │       └── index.mdx
│   ├── docusaurus.config.js
│   ├── LICENSE
│   ├── Makefile
│   ├── package.json
│   ├── README.md
│   ├── sidebars.js
│   ├── src
│   │   ├── components
│   │   │   ├── HomepageFeatures.js
│   │   │   ├── HomepageFeatures.module.css
│   │   │   ├── HomepageProducts.js
│   │   │   └── HomepageProducts.module.css
│   │   ├── css
│   │   │   └── custom.css
│   │   ├── pages
│   │   │   ├── index.module.css
│   │   │   ├── markdown-page.md
│   │   │   └── testimonials.tsx
│   │   └── theme
│   │       └── CustomDocItem.tsx
│   └── static
│       └── img
│           ├── datalayer
│           │   ├── logo.png
│           │   └── logo.svg
│           ├── favicon.ico
│           ├── feature_1.svg
│           ├── feature_2.svg
│           ├── feature_3.svg
│           ├── product_1.svg
│           ├── product_2.svg
│           └── product_3.svg
├── examples
│   └── integration_example.py
├── jupyter_mcp_server
│   ├── __init__.py
│   ├── __main__.py
│   ├── __version__.py
│   ├── config.py
│   ├── enroll.py
│   ├── env.py
│   ├── jupyter_extension
│   │   ├── __init__.py
│   │   ├── backends
│   │   │   ├── __init__.py
│   │   │   ├── base.py
│   │   │   ├── local_backend.py
│   │   │   └── remote_backend.py
│   │   ├── context.py
│   │   ├── extension.py
│   │   ├── handlers.py
│   │   └── protocol
│   │       ├── __init__.py
│   │       └── messages.py
│   ├── models.py
│   ├── notebook_manager.py
│   ├── server_modes.py
│   ├── server.py
│   ├── tools
│   │   ├── __init__.py
│   │   ├── _base.py
│   │   ├── _registry.py
│   │   ├── assign_kernel_to_notebook_tool.py
│   │   ├── delete_cell_tool.py
│   │   ├── execute_cell_tool.py
│   │   ├── execute_ipython_tool.py
│   │   ├── insert_cell_tool.py
│   │   ├── insert_execute_code_cell_tool.py
│   │   ├── list_cells_tool.py
│   │   ├── list_files_tool.py
│   │   ├── list_kernels_tool.py
│   │   ├── list_notebooks_tool.py
│   │   ├── overwrite_cell_source_tool.py
│   │   ├── read_cell_tool.py
│   │   ├── read_cells_tool.py
│   │   ├── restart_notebook_tool.py
│   │   ├── unuse_notebook_tool.py
│   │   └── use_notebook_tool.py
│   └── utils.py
├── jupyter-config
│   ├── jupyter_notebook_config
│   │   └── jupyter_mcp_server.json
│   └── jupyter_server_config.d
│       └── jupyter_mcp_server.json
├── LICENSE
├── Makefile
├── pyproject.toml
├── pytest.ini
├── README.md
├── RELEASE.md
├── smithery.yaml
└── tests
    ├── __init__.py
    ├── conftest.py
    ├── test_common.py
    ├── test_config.py
    ├── test_jupyter_extension.py
    ├── test_list_kernels.py
    ├── test_tools.py
    └── test_use_notebook.py
```

# Files

--------------------------------------------------------------------------------
/tests/test_jupyter_extension.py:
--------------------------------------------------------------------------------

```python
  1 | # Copyright (c) 2023-2024 Datalayer, Inc.
  2 | #
  3 | # BSD 3-Clause License
  4 | 
  5 | """
  6 | Integration tests for Jupyter MCP Server in JUPYTER_SERVER mode (extension).
  7 | 
  8 | This test file validates the server when running as a Jupyter Server extension
  9 | with direct access to serverapp resources (contents_manager, kernel_manager).
 10 | 
 11 | Key differences from MCP_SERVER mode:
 12 | - Uses YDoc collaborative editing when notebooks are open
 13 | - Direct kernel_manager access for execute_ipython
 14 | - Local file operations without HTTP roundtrip
 15 | 
 16 | The tests connect to the extension's HTTP endpoints (not the standalone MCP server).
 17 | 
 18 | Launch the tests:
 19 | ```
 20 | $ pytest tests/test_jupyter_extension.py -v
 21 | ```
 22 | """
 23 | 
 24 | import logging
 25 | from http import HTTPStatus
 26 | 
 27 | import pytest
 28 | import requests
 29 | 
 30 | from .conftest import JUPYTER_TOKEN
 31 | 
 32 | 
 33 | ###############################################################################
 34 | # Unit Tests - Extension Components
 35 | ###############################################################################
 36 | 
 37 | def test_import():
 38 |     """Test that all extension imports work."""
 39 |     from jupyter_mcp_server.jupyter_extension import extension
 40 |     from jupyter_mcp_server.jupyter_extension import handlers
 41 |     from jupyter_mcp_server.jupyter_extension import context
 42 |     logging.info("✅ All imports successful")
 43 |     assert True
 44 | 
 45 | 
 46 | def test_extension_points():
 47 |     """Test extension discovery."""
 48 |     from jupyter_mcp_server import _jupyter_server_extension_points
 49 |     points = _jupyter_server_extension_points()
 50 |     logging.info(f"Extension points: {points}")
 51 |     assert len(points) > 0
 52 |     assert "jupyter_mcp_server" in points[0]["module"]
 53 | 
 54 | 
 55 | def test_handler_creation():
 56 |     """Test that handlers can be instantiated."""
 57 |     from jupyter_mcp_server.jupyter_extension.handlers import (
 58 |         MCPSSEHandler, 
 59 |         MCPHealthHandler, 
 60 |         MCPToolsListHandler
 61 |     )
 62 |     logging.info("✅ Handlers available")
 63 |     assert MCPSSEHandler is not None
 64 |     assert MCPHealthHandler is not None
 65 |     assert MCPToolsListHandler is not None
 66 | 
 67 | 
 68 | ###############################################################################
 69 | # Integration Tests - Extension Running in Jupyter
 70 | ###############################################################################
 71 | 
 72 | def test_extension_health(jupyter_server_with_extension):
 73 |     """Test that Jupyter server with MCP extension is healthy"""
 74 |     logging.info(f"Testing Jupyter+MCP extension health ({jupyter_server_with_extension})")
 75 |     
 76 |     # Test Jupyter API is accessible
 77 |     response = requests.get(
 78 |         f"{jupyter_server_with_extension}/api/status",
 79 |         headers={"Authorization": f"token {JUPYTER_TOKEN}"},
 80 |     )
 81 |     assert response.status_code == HTTPStatus.OK
 82 |     logging.info("✅ Jupyter API is accessible")
 83 | 
 84 | 
 85 | def test_mode_comparison_documentation(jupyter_server_with_extension, jupyter_server):
 86 |     """
 87 |     Document the differences between the two server modes for future reference.
 88 |     
 89 |     This test serves as living documentation of the architecture.
 90 |     """
 91 |     logging.info("\n" + "="*80)
 92 |     logging.info("SERVER MODE COMPARISON")
 93 |     logging.info("="*80)
 94 |     
 95 |     logging.info("\nMCP_SERVER Mode (Standalone):")
 96 |     logging.info(f"  - URL: {jupyter_server}")
 97 |     logging.info("  - Started via: python -m jupyter_mcp_server --transport streamable-http")
 98 |     logging.info("  - Tools use: JupyterServerClient + KernelClient (HTTP)")
 99 |     logging.info("  - File operations: HTTP API (contents API)")
100 |     logging.info("  - Cell operations: WebSocket messages")
101 |     logging.info("  - Execute IPython: WebSocket to kernel")
102 |     logging.info("  - Tests: test_mcp_server.py")
103 |     
104 |     logging.info("\nJUPYTER_SERVER Mode (Extension):")
105 |     logging.info(f"  - URL: {jupyter_server_with_extension}")
106 |     logging.info("  - Started via: jupyter lab --ServerApp.jpserver_extensions")
107 |     logging.info("  - Tools use: Direct Python APIs (contents_manager, kernel_manager)")
108 |     logging.info("  - File operations: Direct nbformat + YDoc collaborative")
109 |     logging.info("  - Cell operations: YDoc when available, nbformat fallback")
110 |     logging.info("  - Execute IPython: Direct kernel_manager.get_kernel() + ZMQ")
111 |     logging.info("  - Tests: test_jupyter_extension.py (this file)")
112 |     
113 |     logging.info("\nKey Benefits of JUPYTER_SERVER Mode:")
114 |     logging.info("  ✓ Real-time collaborative editing via YDoc")
115 |     logging.info("  ✓ Zero-latency local operations")
116 |     logging.info("  ✓ Direct ZMQ access to kernels")
117 |     logging.info("  ✓ Automatic sync with JupyterLab UI")
118 |     
119 |     logging.info("\nKey Benefits of MCP_SERVER Mode:")
120 |     logging.info("  ✓ Works with remote Jupyter servers")
121 |     logging.info("  ✓ No Jupyter extension installation required")
122 |     logging.info("  ✓ Can proxy to multiple Jupyter instances")
123 |     logging.info("  ✓ Standard MCP protocol compatibility")
124 |     
125 |     logging.info("="*80 + "\n")
126 |     
127 |     # Both servers should be running
128 |     assert jupyter_server is not None
129 |     assert jupyter_server_with_extension is not None
130 |     assert jupyter_server != jupyter_server_with_extension  # Different ports
131 | 
```
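The health check in `test_extension_health` boils down to a single authenticated request against the Jupyter REST API. The snippet below is a minimal standalone sketch of that probe; the base URL and token are placeholders, not values from this repository.

```python
import requests

# Placeholder connection details; substitute your own server URL and token.
BASE_URL = "http://localhost:8888"
TOKEN = "MY_TOKEN"

response = requests.get(
    f"{BASE_URL}/api/status",
    headers={"Authorization": f"token {TOKEN}"},
    timeout=10,
)
response.raise_for_status()
# The status payload includes fields such as 'started', 'connections', and 'kernels'.
print(response.json())
```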

--------------------------------------------------------------------------------
/jupyter_mcp_server/jupyter_extension/context.py:
--------------------------------------------------------------------------------

```python
  1 | # Copyright (c) 2023-2024 Datalayer, Inc.
  2 | #
  3 | # BSD 3-Clause License
  4 | 
  5 | """
  6 | Server Context Management
  7 | 
  8 | This module provides a singleton to track the execution context (MCP_SERVER vs JUPYTER_SERVER)
  9 | and provide access to Jupyter Server resources when running as an extension.
 10 | """
 11 | 
 12 | from typing import Optional, Literal, TYPE_CHECKING
 13 | import threading
 14 | 
 15 | if TYPE_CHECKING:
 16 |     from jupyter_server.serverapp import ServerApp
 17 | 
 18 | 
 19 | class ServerContext:
 20 |     """
 21 |     Singleton managing server execution context.
 22 |     
 23 |     This class tracks whether tools are running in standalone MCP_SERVER mode
 24 |     or embedded JUPYTER_SERVER mode, and provides access to server resources.
 25 |     """
 26 |     
 27 |     _instance: Optional['ServerContext'] = None
 28 |     _lock = threading.Lock()
 29 |     
 30 |     def __new__(cls):
 31 |         if cls._instance is None:
 32 |             with cls._lock:
 33 |                 if cls._instance is None:
 34 |                     cls._instance = super().__new__(cls)
 35 |                     cls._instance._initialized = False
 36 |         return cls._instance
 37 |     
 38 |     def __init__(self):
 39 |         if self._initialized:
 40 |             return
 41 |             
 42 |         self._initialized = True
 43 |         self._context_type: Literal["MCP_SERVER", "JUPYTER_SERVER"] = "MCP_SERVER"
 44 |         self._serverapp: Optional['ServerApp'] = None
 45 |         self._document_url: Optional[str] = None
 46 |         self._runtime_url: Optional[str] = None
 47 |     
 48 |     @property
 49 |     def context_type(self) -> Literal["MCP_SERVER", "JUPYTER_SERVER"]:
 50 |         """Get the current server context type."""
 51 |         return self._context_type
 52 |     
 53 |     @property
 54 |     def serverapp(self) -> Optional['ServerApp']:
 55 |         """Get the Jupyter ServerApp instance (only available in JUPYTER_SERVER mode)."""
 56 |         return self._serverapp
 57 |     
 58 |     @property
 59 |     def document_url(self) -> Optional[str]:
 60 |         """Get the configured document URL."""
 61 |         return self._document_url
 62 |     
 63 |     @property
 64 |     def runtime_url(self) -> Optional[str]:
 65 |         """Get the configured runtime URL."""
 66 |         return self._runtime_url
 67 |     
 68 |     def update(
 69 |         self,
 70 |         context_type: Literal["MCP_SERVER", "JUPYTER_SERVER"],
 71 |         serverapp: Optional['ServerApp'] = None,
 72 |         document_url: Optional[str] = None,
 73 |         runtime_url: Optional[str] = None
 74 |     ):
 75 |         """
 76 |         Update the server context.
 77 |         
 78 |         Args:
 79 |             context_type: The type of server context
 80 |             serverapp: Jupyter ServerApp instance (required for JUPYTER_SERVER mode)
 81 |             document_url: Document URL configuration
 82 |             runtime_url: Runtime URL configuration
 83 |         """
 84 |         with self._lock:
 85 |             # Validate before mutating state so a failed update leaves the context unchanged
 86 |             if context_type == "JUPYTER_SERVER" and serverapp is None:
 87 |                 raise ValueError("serverapp is required when context_type is JUPYTER_SERVER")
 88 |             self._context_type = context_type
 89 |             self._serverapp = serverapp
 90 |             self._document_url = document_url
 91 |             self._runtime_url = runtime_url
 92 |     
 93 |     def is_local_document(self) -> bool:
 94 |         """Check if document operations should use local serverapp."""
 95 |         return (
 96 |             self._context_type == "JUPYTER_SERVER" 
 97 |             and self._document_url == "local"
 98 |         )
 99 |     
100 |     def is_local_runtime(self) -> bool:
101 |         """Check if runtime operations should use local serverapp."""
102 |         return (
103 |             self._context_type == "JUPYTER_SERVER" 
104 |             and self._runtime_url == "local"
105 |         )
106 |     
107 |     def get_contents_manager(self):
108 |         """
109 |         Get the Jupyter contents manager (only available in JUPYTER_SERVER mode with local access).
110 |         
111 |         Returns:
112 |             ContentsManager instance or None
113 |         """
114 |         if self._serverapp is not None:
115 |             return self._serverapp.contents_manager
116 |         return None
117 |     
118 |     def get_kernel_manager(self):
119 |         """
120 |         Get the Jupyter kernel manager (only available in JUPYTER_SERVER mode with local access).
121 |         
122 |         Returns:
123 |             KernelManager instance or None
124 |         """
125 |         if self._serverapp is not None:
126 |             return self._serverapp.kernel_manager
127 |         return None
128 |     
129 |     def get_kernel_spec_manager(self):
130 |         """
131 |         Get the Jupyter kernel spec manager (only available in JUPYTER_SERVER mode with local access).
132 |         
133 |         Returns:
134 |             KernelSpecManager instance or None
135 |         """
136 |         if self._serverapp is not None:
137 |             return self._serverapp.kernel_spec_manager
138 |         return None
139 |     
140 |     def get_session_manager(self):
141 |         """
142 |         Get the Jupyter session manager (only available in JUPYTER_SERVER mode with local access).
143 |         
144 |         Returns:
145 |             SessionManager instance or None
146 |         """
147 |         if self._serverapp is not None:
148 |             return self._serverapp.session_manager
149 |         return None
150 |     
151 |     @property
152 |     def session_manager(self):
153 |         """
154 |         Get the Jupyter session manager as a property (only available in JUPYTER_SERVER mode with local access).
155 |         
156 |         Returns:
157 |             SessionManager instance or None
158 |         """
159 |         return self.get_session_manager()
160 |     
161 |     def reset(self):
162 |         """Reset to default MCP_SERVER mode."""
163 |         with self._lock:
164 |             self._context_type = "MCP_SERVER"
165 |             self._serverapp = None
166 |             self._document_url = None
167 |             self._runtime_url = None
168 | 
169 | 
170 | # Global accessor
171 | def get_server_context() -> ServerContext:
172 |     """Get the global ServerContext singleton instance."""
173 |     return ServerContext()
174 | 
```
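`ServerContext` is what tools consult to decide between direct in-process access and HTTP clients. The following is an illustrative sketch (not code from the repository) of how a caller might branch on it, using only the accessors defined above.

```python
from jupyter_mcp_server.jupyter_extension.context import get_server_context

ctx = get_server_context()

if ctx.is_local_document():
    # JUPYTER_SERVER mode with document_url == "local": use the in-process manager.
    contents_manager = ctx.get_contents_manager()
else:
    # MCP_SERVER mode (or a remote document URL): fall back to HTTP clients instead.
    contents_manager = None

print(ctx.context_type, contents_manager is not None)
```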

--------------------------------------------------------------------------------
/docs/docs/configure/index.mdx:
--------------------------------------------------------------------------------

```markdown
  1 | # Configure
  2 | 
  3 | ## Options
  4 | 
  5 | Check the help for the Jupyter MCP Server to see the available configuration options.
  6 | 
  7 | ```bash
  8 | jupyter-mcp-server start --help
  9 | 
 10 | Usage: jupyter-mcp-server start [OPTIONS]
 11 | 
 12 |   Start the Jupyter MCP server with a transport.
 13 | 
 14 | Options:
 15 |   --transport [stdio|streamable-http]
 16 |                                   The transport to use for the MCP server.
 17 |                                   Defaults to 'stdio'.
 18 |   --provider [jupyter|datalayer]  The provider to use for the document and
 19 |                                   runtime. Defaults to 'jupyter'.
 20 |   --runtime-url TEXT              The runtime URL to use. For the jupyter
 21 |                                   provider, this is the Jupyter server URL.
 22 |                                   For the datalayer provider, this is the
 23 |                                   Datalayer runtime URL.
 24 |   --start-new-runtime BOOLEAN     Start a new runtime or use an existing one.
 25 |   --runtime-id TEXT               The kernel ID to use. If not provided, a new
 26 |                                   kernel should be started.
 27 |   --runtime-token TEXT            The runtime token to use for authentication
 28 |                                   with the provider. If not provided, the
 29 |                                   provider should accept anonymous requests.
 30 |   --document-url TEXT             The document URL to use. For the jupyter
 31 |                                   provider, this is the Jupyter server URL.
 32 |                                   For the datalayer provider, this is the
 33 |                                   Datalayer document URL.
 34 |   --document-id TEXT              The document id to use. For the jupyter
 35 |                                   provider, this is the notebook path. For the
 36 |                                   datalayer provider, this is the notebook
 37 |                                   path.
 38 |   --document-token TEXT           The document token to use for authentication
 39 |                                   with the provider. If not provided, the
 40 |                                   provider should accept anonymous requests.
 41 |   --port INTEGER                  The port to use for the Streamable HTTP
 42 |                                   transport. Ignored for stdio transport.
 43 |   --help                          Show this message and exit
 44 | ```
 45 | 
 46 | ## Starting then Connecting to Existing Runtime
 47 | 
 48 | For example, you can start the MCP Server with the following command that will not create a new Runtime.
 49 | 
 50 | ```bash
 51 | jupyter-mcp-server start \
 52 |   --transport streamable-http \
 53 |   --runtime-token MY_TOKEN \
 54 |   --document-url http://localhost:8888 \
 55 |   --runtime-url http://localhost:8888 \
 56 |   --start-new-runtime false
 57 | ```
 58 | 
 59 | Only then can you start a local JupyterLab and open a Notebook with a Runtime.
 60 | 
 61 | ```bash
 62 | make jupyterlab
 63 | ```
 64 | 
 65 | Then, you can assign a document and runtime via the `/connect` endpoint by running this command.
 66 | 
 67 | ```bash
 68 | jupyter-mcp-server connect \
 69 |   --provider datalayer \
 70 |   --document-url <url> \
 71 |   --document-id <document> \
 72 |   --document-token <token> \
 73 |   --runtime-url <url> \
 74 |   --runtime-id <runtime-id> \
 75 |   --runtime-token <token> \
 76 |   --jupyter-mcp-server-url http://localhost:4040
 77 | ```
 78 | 
 79 | ## Multimodal Output Support
 80 | 
 81 | The server supports multimodal output, allowing AI agents to directly receive and analyze visual content such as images and charts generated by code execution.
 82 | 
 83 | ### Supported Output Types
 84 | 
 85 | - **Text Output**: Standard text output from code execution
 86 | - **Image Output**: PNG images generated by matplotlib, seaborn, plotly, and other visualization libraries
 87 | - **Error Output**: Error messages and tracebacks
 88 | 
 89 | ### Environment Variable Configuration
 90 | 
 91 | Control multimodal output behavior using environment variables:
 92 | 
 93 | #### `ALLOW_IMG_OUTPUT`
 94 | 
 95 | Controls whether to return actual image content or text placeholders.
 96 | 
 97 | - **Default**: `true`
 98 | - **Values**: `true`, `false`, `1`, `0`, `yes`, `no`, `on`, `off`, `enable`, `disable`, `enabled`, `disabled`
 99 | 
100 | **Example Docker Configuration:**
101 | 
102 | ```json
103 | {
104 |   "mcpServers": {
105 |     "jupyter": {
106 |       "command": "docker",
107 |       "args": [
108 |         "run", "-i", "--rm",
109 |         "-e", "DOCUMENT_URL",
110 |         "-e", "DOCUMENT_TOKEN",
111 |         "-e", "DOCUMENT_ID",
112 |         "-e", "RUNTIME_URL",
113 |         "-e", "RUNTIME_TOKEN",
114 |         "-e", "ALLOW_IMG_OUTPUT",
115 |         "datalayer/jupyter-mcp-server:latest"
116 |       ],
117 |       "env": {
118 |         "DOCUMENT_URL": "http://host.docker.internal:8888",
119 |         "DOCUMENT_TOKEN": "MY_TOKEN",
120 |         "DOCUMENT_ID": "notebook.ipynb",
121 |         "RUNTIME_URL": "http://host.docker.internal:8888",
122 |         "RUNTIME_TOKEN": "MY_TOKEN",
123 |         "ALLOW_IMG_OUTPUT": "true"
124 |       }
125 |     }
126 |   }
127 | }
128 | ```
129 | 
130 | ### Output Behavior
131 | 
132 | #### When `ALLOW_IMG_OUTPUT=true` (Default)
133 | - Images are returned as `ImageContent` objects with actual PNG data
134 | - AI agents can directly analyze visual content
135 | - Supports advanced multimodal reasoning
136 | 
137 | #### When `ALLOW_IMG_OUTPUT=false`
138 | - Images are returned as text placeholders: `"[Image Output (PNG) - Image display disabled]"`
139 | - Maintains backward compatibility with text-only LLMs
140 | - Reduces bandwidth and token usage
141 | 
142 | ### Use Cases
143 | 
144 | **Data Visualization Analysis:**
145 | ```python
146 | import matplotlib.pyplot as plt
147 | import pandas as pd
148 | 
149 | df = pd.read_csv('sales_data.csv')
150 | df.plot(kind='bar', x='month', y='revenue')
151 | plt.title('Monthly Revenue')
152 | plt.show()
153 | # AI can now "see" and analyze the chart content
154 | ```
155 | 
156 | **Machine Learning Model Visualization:**
157 | ```python
158 | import matplotlib.pyplot as plt
159 | 
160 | # Plot training curves
161 | plt.plot(epochs, train_loss, label='Training Loss')
162 | plt.plot(epochs, val_loss, label='Validation Loss')
163 | plt.legend()
164 | plt.show()
165 | # AI can evaluate training effectiveness from the visual curves
166 | ```
167 | 
```
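The accepted values listed for `ALLOW_IMG_OUTPUT` suggest a simple truthy/falsy string mapping. Below is a hypothetical parser mirroring that documented behaviour; the project's actual parsing lives in `jupyter_mcp_server/env.py` and may differ.

```python
import os

_TRUTHY = {"true", "1", "yes", "on", "enable", "enabled"}
_FALSY = {"false", "0", "no", "off", "disable", "disabled"}

def allow_img_output(default: bool = True) -> bool:
    """Hypothetical helper: interpret ALLOW_IMG_OUTPUT as documented (default: true)."""
    raw = os.environ.get("ALLOW_IMG_OUTPUT")
    if raw is None:
        return default
    value = raw.strip().lower()
    if value in _TRUTHY:
        return True
    if value in _FALSY:
        return False
    return default
```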

--------------------------------------------------------------------------------
/tests/test_list_kernels.py:
--------------------------------------------------------------------------------

```python
  1 | # Copyright (c) 2023-2024 Datalayer, Inc.
  2 | #
  3 | # BSD 3-Clause License
  4 | 
  5 | """
  6 | Tests for list_kernels tool in both MCP_SERVER and JUPYTER_SERVER modes.
  7 | """
  8 | 
  9 | import logging
 10 | import pytest
 11 | 
 12 | # Explicitly set pytest-asyncio mode for this module
 13 | pytestmark = pytest.mark.asyncio
 14 | 
 15 | from .test_common import MCPClient
 16 | 
 17 | 
 18 | @pytest.mark.asyncio
 19 | async def test_list_kernels(mcp_client_parametrized: MCPClient):
 20 |     """Test list_kernels functionality in both MCP_SERVER and JUPYTER_SERVER modes"""
 21 |     async with mcp_client_parametrized:
 22 |         # Call list_kernels
 23 |         kernel_list = await mcp_client_parametrized.list_kernels()
 24 |         logging.debug(f"Kernel list: {kernel_list}")
 25 |         
 26 |         # Verify result is a string
 27 |         assert isinstance(kernel_list, str), "list_kernels should return a string"
 28 |         
 29 |         # Check for either TSV header or "No kernels found" message
 30 |         has_header = "ID\tName\tDisplay_Name\tLanguage\tState\tConnections\tLast_Activity\tEnvironment" in kernel_list
 31 |         has_no_kernels_msg = "No kernels found" in kernel_list
 32 |         
 33 |         assert has_header or has_no_kernels_msg, \
 34 |             f"Kernel list should have TSV header or 'No kernels found' message, got: {kernel_list[:100]}"
 35 |         
 36 |         # Parse the output
 37 |         lines = kernel_list.strip().split('\n')
 38 |         
 39 |         # Should have at least one line (header or message)
 40 |         assert len(lines) >= 1, "Should have at least one line"
 41 |         
 42 |         # If there are running kernels (header present), verify the format
 43 |         if has_header and len(lines) > 1:
 44 |             # Check that data lines have the right number of columns
 45 |             header_cols = lines[0].split('\t')
 46 |             assert len(header_cols) == 8, f"Header should have 8 columns, got {len(header_cols)}"
 47 |             
 48 |             # Check first data line
 49 |             data_line = lines[1].split('\t')
 50 |             assert len(data_line) == 8, f"Data lines should have 8 columns, got {len(data_line)}"
 51 |             
 52 |             # Verify kernel ID is present (not empty or "unknown")
 53 |             kernel_id = data_line[0]
 54 |             assert kernel_id and kernel_id != "unknown", f"Kernel ID should not be empty or unknown, got '{kernel_id}'"
 55 |             
 56 |             # Verify kernel name is present
 57 |             kernel_name = data_line[1]
 58 |             assert kernel_name and kernel_name != "unknown", f"Kernel name should not be empty or unknown, got '{kernel_name}'"
 59 |             
 60 |             logging.info(f"Found {len(lines) - 1} running kernel(s)")
 61 |         else:
 62 |             # No kernels found - this is valid
 63 |             logging.info("No running kernels found")
 64 | 
 65 | 
 66 | @pytest.mark.asyncio
 67 | async def test_list_kernels_after_execution(mcp_client_parametrized: MCPClient):
 68 |     """Test that list_kernels shows kernel after code execution in both modes"""
 69 |     async with mcp_client_parametrized:
 70 |         # Get initial kernel list
 71 |         initial_list = await mcp_client_parametrized.list_kernels()
 72 |         logging.debug(f"Initial kernel list: {initial_list}")
 73 |         
 74 |         # Execute some code which should start a kernel
 75 |         await mcp_client_parametrized.insert_execute_code_cell(-1, "x = 1 + 1")
 76 |         
 77 |         # Now list kernels again - should have at least one
 78 |         kernel_list = await mcp_client_parametrized.list_kernels()
 79 |         logging.debug(f"Kernel list after execution: {kernel_list}")
 80 |         
 81 |         # Verify we have at least one kernel now
 82 |         lines = kernel_list.strip().split('\n')
 83 |         assert len(lines) >= 2, "Should have header and at least one kernel after code execution"
 84 |         
 85 |         # Verify kernel state is valid
 86 |         data_line = lines[1].split('\t')
 87 |         kernel_state = data_line[4]  # State is the 5th column (index 4)
 88 |         # State could be 'idle', 'busy', 'starting', etc.
 89 |         assert kernel_state != "unknown", f"Kernel state should be known, got '{kernel_state}'"
 90 |         
 91 |         # Clean up - delete the cell we created
 92 |         cell_count = await mcp_client_parametrized.get_cell_count()
 93 |         await mcp_client_parametrized.delete_cell(cell_count - 1)
 94 | 
 95 | 
 96 | @pytest.mark.asyncio
 97 | async def test_list_kernels_format(mcp_client_parametrized: MCPClient):
 98 |     """Test that list_kernels output format is consistent in both modes"""
 99 |     async with mcp_client_parametrized:
100 |         # Ensure we have a running kernel by executing code
101 |         initial_count = await mcp_client_parametrized.get_cell_count()
102 |         
103 |         await mcp_client_parametrized.insert_execute_code_cell(-1, "print('hello')")
104 |         
105 |         # Get kernel list
106 |         kernel_list = await mcp_client_parametrized.list_kernels()
107 |         
108 |         # Parse and validate structure
109 |         lines = kernel_list.strip().split('\n')
110 |         assert len(lines) >= 2, "Should have header and at least one kernel"
111 |         
112 |         # Verify header structure
113 |         header = lines[0]
114 |         expected_headers = ["ID", "Name", "Display_Name", "Language", "State", "Connections", "Last_Activity", "Environment"]
115 |         for expected_header in expected_headers:
116 |             assert expected_header in header, f"Header should contain '{expected_header}'"
117 |         
118 |         # Verify data structure
119 |         for i in range(1, len(lines)):
120 |             data_line = lines[i].split('\t')
121 |             assert len(data_line) == 8, f"Line {i} should have 8 columns"
122 |             
123 |             # ID should be a valid UUID-like string
124 |             kernel_id = data_line[0]
125 |             assert len(kernel_id) > 0, "Kernel ID should not be empty"
126 |             
127 |             # Name should not be empty
128 |             kernel_name = data_line[1]
129 |             assert len(kernel_name) > 0, "Kernel name should not be empty"
130 |         
131 |         # Clean up
132 |         cell_count = await mcp_client_parametrized.get_cell_count()
133 |         await mcp_client_parametrized.delete_cell(cell_count - 1)
134 | 
```
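The tests above pin down the `list_kernels` output format: a tab-separated table with the header `ID\tName\tDisplay_Name\tLanguage\tState\tConnections\tLast_Activity\tEnvironment`, or the literal message `No kernels found`. A small sketch of turning that text into dictionaries (illustrative, not part of the package):

```python
def parse_kernel_list(tsv: str) -> list[dict[str, str]]:
    """Parse the TSV produced by list_kernels into one dict per kernel (sketch)."""
    lines = tsv.strip().split("\n")
    if not lines or "No kernels found" in lines[0]:
        return []
    header = lines[0].split("\t")
    return [dict(zip(header, line.split("\t"))) for line in lines[1:]]
```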

--------------------------------------------------------------------------------
/docs/static/img/feature_2.svg:
--------------------------------------------------------------------------------

```
  1 | <?xml version="1.0" encoding="UTF-8" standalone="no"?>
  2 | <!--
  3 |   ~ Copyright (c) 2023-2024 Datalayer, Inc.
  4 |   ~
  5 |   ~ BSD 3-Clause License
  6 | -->
  7 | 
  8 | <svg
  9 |    xmlns:dc="http://purl.org/dc/elements/1.1/"
 10 |    xmlns:cc="http://creativecommons.org/ns#"
 11 |    xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
 12 |    xmlns:svg="http://www.w3.org/2000/svg"
 13 |    xmlns="http://www.w3.org/2000/svg"
 14 |    xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
 15 |    xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
 16 |    viewBox="0 0 215.52 220.8421"
 17 |    version="1.1"
 18 |    id="svg1242"
 19 |    sodipodi:docname="5.svg"
 20 |    inkscape:version="1.0.1 (c497b03c, 2020-09-10)"
 21 |    width="57.022999mm"
 22 |    height="58.431137mm">
 23 |   <metadata
 24 |      id="metadata1246">
 25 |     <rdf:RDF>
 26 |       <cc:Work
 27 |          rdf:about="">
 28 |         <dc:format>image/svg+xml</dc:format>
 29 |         <dc:type
 30 |            rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
 31 |         <dc:title>Marketing_strategy_SVG</dc:title>
 32 |       </cc:Work>
 33 |     </rdf:RDF>
 34 |   </metadata>
 35 |   <sodipodi:namedview
 36 |      pagecolor="#ffffff"
 37 |      bordercolor="#666666"
 38 |      borderopacity="1"
 39 |      objecttolerance="10"
 40 |      gridtolerance="10"
 41 |      guidetolerance="10"
 42 |      inkscape:pageopacity="0"
 43 |      inkscape:pageshadow="2"
 44 |      inkscape:window-width="1440"
 45 |      inkscape:window-height="635"
 46 |      id="namedview1244"
 47 |      showgrid="false"
 48 |      inkscape:zoom="0.49908293"
 49 |      inkscape:cx="-364.03258"
 50 |      inkscape:cy="111.25926"
 51 |      inkscape:window-x="0"
 52 |      inkscape:window-y="25"
 53 |      inkscape:window-maximized="0"
 54 |      inkscape:current-layer="Слой_1-2"
 55 |      inkscape:document-rotation="0"
 56 |      units="mm"
 57 |      fit-margin-top="0"
 58 |      fit-margin-left="0"
 59 |      fit-margin-right="0"
 60 |      fit-margin-bottom="0" />
 61 |   <defs
 62 |      id="defs835">
 63 |     <style
 64 |        id="style833">.cls-1,.cls-15,.cls-4{fill:#d6d8e5;}.cls-1{opacity:0.4;}.cls-2{fill:#b1b4c4;}.cls-3{fill:#9ea2b2;}.cls-5{fill:#f4f4f4;}.cls-6{fill:#9acc12;}.cls-7{fill:#e8bc05;}.cls-8{fill:#ef6848;}.cls-9{fill:#be4aed;}.cls-10{fill:#543526;}.cls-11{fill:#1b96ea;}.cls-12{fill:#ff5050;}.cls-13{fill:#32cec3;}.cls-14{fill:none;stroke:#d6d8e5;stroke-miterlimit:10;stroke-width:1.42px;}.cls-15{opacity:0.3;}.cls-16{fill:#dd990e;}.cls-17{fill:#f9cb07;}.cls-18{fill:#cc8b09;}.cls-19{fill:#e8a30a;}.cls-20{fill:#f9ca06;}.cls-21{fill:#3a2c6d;}.cls-22{fill:#ffcea9;}.cls-23{fill:#38226d;}.cls-24{fill:#9c73ff;}.cls-25{fill:#8c50ff;}.cls-26{fill:#ededed;}.cls-27{fill:#d33d3d;}.cls-28{fill:#ff4d4d;}.cls-29{fill:#2b303f;}</style>
 65 |   </defs>
 66 |   <title
 67 |      id="title837">Marketing_strategy_SVG</title>
 68 |   <g
 69 |      id="Слой_1-2"
 70 |      data-name="Слой 1"
 71 |      transform="translate(-88.126634,-152.59003)">
 72 |     <path
 73 |        class="cls-1"
 74 |        d="m 278.96663,372.54665 -113.6,-65.58 a 6.38,6.38 0 0 1 0,-11.05 v 0 a 6.38,6.38 0 0 1 6.38,0 l 113.6,65.63 a 6.38,6.38 0 0 1 -0.73,11.41 v 0 a 6.36,6.36 0 0 1 -5.65,-0.41 z"
 75 |        id="path1214" />
 76 |     <path
 77 |        class="cls-27"
 78 |        d="m 229.91663,332.87665 c -40.66,-23.47 -73.73,-80.76 -73.73,-127.7 0,-46.94 33.07,-66 73.73,-42.56 40.66,23.44 73.73,80.75 73.73,127.7 0,46.95 -33.08,66.03 -73.73,42.56 z m 0,-155 c -33.38,-19.27 -60.54,-3.59 -60.54,34.95 0,38.54 27.16,85.57 60.54,104.84 33.38,19.27 60.53,3.6 60.53,-34.94 0,-38.54 -27.15,-85.61 -60.53,-104.88 z"
 79 |        id="path1216" />
 80 |     <polygon
 81 |        class="cls-27"
 82 |        points="83.82,129.6 97.33,122.22 95.31,131.48 "
 83 |        id="polygon1218"
 84 |        transform="translate(83.866634,33.546654)" />
 85 |     <path
 86 |        class="cls-27"
 87 |        d="m 269.48663,344.82665 c 0.33,-0.15 14.77,-8.23 14.77,-8.23 l -11.16,-1.79 z"
 88 |        id="path1220" />
 89 |     <ellipse
 90 |        class="cls-5"
 91 |        cx="64.47393"
 92 |        cy="329.32858"
 93 |        rx="54.810001"
 94 |        ry="94.940002"
 95 |        transform="rotate(-30)"
 96 |        id="ellipse1222" />
 97 |     <path
 98 |        class="cls-28"
 99 |        d="m 220.50663,338.10665 c -40.64,-23.48 -73.73,-80.76 -73.73,-127.71 0,-46.95 33.08,-66 73.73,-42.56 40.65,23.44 73.73,80.76 73.73,127.7 0,46.94 -33.07,66.01 -73.73,42.57 z m 0,-155 c -33.37,-19.27 -60.53,-3.6 -60.53,34.94 0,38.54 27.16,85.58 60.53,104.85 33.37,19.27 60.54,3.59 60.54,-34.95 0,-38.54 -27.18,-85.6 -60.54,-104.87 z"
100 |        id="path1224" />
101 |     <path
102 |        class="cls-28"
103 |        d="m 220.50663,305.70665 c -25.18,-14.54 -45.64,-50.03 -45.64,-79.11 0,-29.08 20.49,-40.91 45.67,-26.37 25.18,14.54 45.68,50 45.68,79.11 0,29.11 -20.52,40.92 -45.71,26.37 z m 0,-90.24 c -17.91,-10.34 -32.48,-1.93 -32.48,18.75 0,20.68 14.57,45.92 32.48,56.26 17.91,10.34 32.48,1.92 32.48,-18.76 0,-20.68 -14.57,-45.91 -32.48,-56.25 z"
104 |        id="path1226" />
105 |     <path
106 |        class="cls-28"
107 |        d="m 220.50663,273.82665 c -10,-5.75 -18.06,-19.79 -18.06,-31.29 0,-11.5 8.1,-16.18 18.06,-10.43 9.96,5.75 18.07,19.79 18.07,31.29 0,11.5 -8.1,16.15 -18.07,10.43 z m 0,-26.48 c -2.68,-1.55 -4.87,-0.29 -4.87,2.81 a 10.79,10.79 0 0 0 4.87,8.43 c 2.69,1.55 4.87,0.29 4.87,-2.81 a 10.76,10.76 0 0 0 -4.87,-8.43 z"
108 |        id="path1228" />
109 |     <polygon
110 |        class="cls-29"
111 |        points="26.22,281.67 13.49,289.58 11.02,290.9 4.26,290.99 "
112 |        id="polygon1230"
113 |        transform="translate(83.866634,33.546654)" />
114 |     <ellipse
115 |        class="cls-19"
116 |        cx="-79.061852"
117 |        cy="330.15607"
118 |        rx="1.41"
119 |        ry="2.4400001"
120 |        transform="rotate(-30)"
121 |        id="ellipse1232" />
122 |     <ellipse
123 |        class="cls-19"
124 |        cx="64.443764"
125 |        cy="330.02509"
126 |        rx="1.41"
127 |        ry="2.4400001"
128 |        transform="rotate(-30)"
129 |        id="ellipse1234" />
130 |     <polygon
131 |        class="cls-29"
132 |        points="31.69,282.7 30.92,284.29 15.64,300.69 14.03,293.96 "
133 |        id="polygon1236"
134 |        transform="translate(83.866634,33.546654)" />
135 |     <path
136 |        class="cls-19"
137 |        d="m 97.866634,327.54665 c 0.15,-0.13 124.149996,-71.81 124.149996,-71.81 l -2.28,-4.3 -124.379996,72 z"
138 |        id="path1238" />
139 |   </g>
140 | </svg>
141 | 
```

--------------------------------------------------------------------------------
/jupyter_mcp_server/jupyter_extension/backends/remote_backend.py:
--------------------------------------------------------------------------------

```python
  1 | # Copyright (c) 2023-2024 Datalayer, Inc.
  2 | #
  3 | # BSD 3-Clause License
  4 | 
  5 | """
  6 | Remote Backend Implementation
  7 | 
  8 | This backend uses the existing jupyter_nbmodel_client, jupyter_kernel_client,
  9 | and jupyter_server_api packages to connect to remote Jupyter servers.
 10 | 
 11 | For MCP_SERVER mode, this maintains 100% backward compatibility with the existing implementation.
 12 | """
 13 | 
 14 | from typing import Optional, Any, Union, Literal
 15 | from mcp.types import ImageContent
 16 | from jupyter_mcp_server.jupyter_extension.backends.base import Backend
 17 | 
 18 | # Note: This is a placeholder that delegates to existing server.py logic
 19 | # The actual implementation will be refactored from server.py in a later step
 20 | # For now, this establishes the pattern
 21 | 
 22 | 
 23 | class RemoteBackend(Backend):
 24 |     """
 25 |     Backend that connects to remote Jupyter servers using HTTP/WebSocket APIs.
 26 |     
 27 |     Uses:
 28 |     - jupyter_nbmodel_client.NbModelClient for notebook operations
 29 |     - jupyter_kernel_client.KernelClient for kernel operations  
 30 |     - jupyter_server_api.JupyterServerClient for server operations
 31 |     """
 32 |     
 33 |     def __init__(self, document_url: str, document_token: str, runtime_url: str, runtime_token: str):
 34 |         """
 35 |         Initialize remote backend.
 36 |         
 37 |         Args:
 38 |             document_url: URL of Jupyter server for document operations
 39 |             document_token: Authentication token for document server
 40 |             runtime_url: URL of Jupyter server for runtime operations
 41 |             runtime_token: Authentication token for runtime server
 42 |         """
 43 |         self.document_url = document_url
 44 |         self.document_token = document_token
 45 |         self.runtime_url = runtime_url
 46 |         self.runtime_token = runtime_token
 47 |     
 48 |     # Notebook operations
 49 |     
 50 |     async def get_notebook_content(self, path: str) -> dict[str, Any]:
 51 |         """Get notebook content via remote API."""
 52 |         # TODO: Implement using jupyter_server_api
 53 |         raise NotImplementedError("To be refactored from server.py")
 54 |     
 55 |     async def list_notebooks(self, path: str = "") -> list[str]:
 56 |         """List notebooks via remote API."""
 57 |         # TODO: Implement using jupyter_server_api
 58 |         raise NotImplementedError("To be refactored from server.py")
 59 |     
 60 |     async def notebook_exists(self, path: str) -> bool:
 61 |         """Check if notebook exists via remote API."""
 62 |         # TODO: Implement using jupyter_server_api
 63 |         raise NotImplementedError("To be refactored from server.py")
 64 |     
 65 |     async def create_notebook(self, path: str) -> dict[str, Any]:
 66 |         """Create notebook via remote API."""
 67 |         # TODO: Implement using jupyter_server_api
 68 |         raise NotImplementedError("To be refactored from server.py")
 69 |     
 70 |     # Cell operations
 71 |     
 72 |     async def read_cells(
 73 |         self, 
 74 |         path: str, 
 75 |         start_index: Optional[int] = None,
 76 |         end_index: Optional[int] = None
 77 |     ) -> list[dict[str, Any]]:
 78 |         """Read cells via nbmodel_client."""
 79 |         # TODO: Implement using jupyter_nbmodel_client
 80 |         raise NotImplementedError("To be refactored from server.py")
 81 |     
 82 |     async def append_cell(
 83 |         self, 
 84 |         path: str, 
 85 |         cell_type: Literal["code", "markdown"],
 86 |         source: Union[str, list[str]]
 87 |     ) -> int:
 88 |         """Append cell via nbmodel_client."""
 89 |         # TODO: Implement using jupyter_nbmodel_client
 90 |         raise NotImplementedError("To be refactored from server.py")
 91 |     
 92 |     async def insert_cell(
 93 |         self,
 94 |         path: str,
 95 |         cell_index: int,
 96 |         cell_type: Literal["code", "markdown"],
 97 |         source: Union[str, list[str]]
 98 |     ) -> int:
 99 |         """Insert cell via nbmodel_client."""
100 |         # TODO: Implement using jupyter_nbmodel_client
101 |         raise NotImplementedError("To be refactored from server.py")
102 |     
103 |     async def delete_cell(self, path: str, cell_index: int) -> None:
104 |         """Delete cell via nbmodel_client."""
105 |         # TODO: Implement using jupyter_nbmodel_client
106 |         raise NotImplementedError("To be refactored from server.py")
107 |     
108 |     async def overwrite_cell(
109 |         self,
110 |         path: str,
111 |         cell_index: int,
112 |         new_source: Union[str, list[str]]
113 |     ) -> tuple[str, str]:
114 |         """Overwrite cell via nbmodel_client."""
115 |         # TODO: Implement using jupyter_nbmodel_client
116 |         raise NotImplementedError("To be refactored from server.py")
117 |     
118 |     # Kernel operations
119 |     
120 |     async def get_or_create_kernel(self, path: str, kernel_id: Optional[str] = None) -> str:
121 |         """Get or create kernel via kernel_client."""
122 |         # TODO: Implement using jupyter_kernel_client
123 |         raise NotImplementedError("To be refactored from server.py")
124 |     
125 |     async def execute_cell(
126 |         self,
127 |         path: str,
128 |         cell_index: int,
129 |         kernel_id: str,
130 |         timeout_seconds: int = 300
131 |     ) -> list[Union[str, ImageContent]]:
132 |         """Execute cell via kernel_client."""
133 |         # TODO: Implement using jupyter_kernel_client
134 |         raise NotImplementedError("To be refactored from server.py")
135 |     
136 |     async def interrupt_kernel(self, kernel_id: str) -> None:
137 |         """Interrupt kernel via kernel_client."""
138 |         # TODO: Implement using jupyter_kernel_client
139 |         raise NotImplementedError("To be refactored from server.py")
140 |     
141 |     async def restart_kernel(self, kernel_id: str) -> None:
142 |         """Restart kernel via kernel_client."""
143 |         # TODO: Implement using jupyter_kernel_client
144 |         raise NotImplementedError("To be refactored from server.py")
145 |     
146 |     async def shutdown_kernel(self, kernel_id: str) -> None:
147 |         """Shutdown kernel via kernel_client."""
148 |         # TODO: Implement using jupyter_kernel_client
149 |         raise NotImplementedError("To be refactored from server.py")
150 |     
151 |     async def list_kernels(self) -> list[dict[str, Any]]:
152 |         """List kernels via server API."""
153 |         # TODO: Implement using jupyter_server_api
154 |         raise NotImplementedError("To be refactored from server.py")
155 |     
156 |     async def kernel_exists(self, kernel_id: str) -> bool:
157 |         """Check if kernel exists via server API."""
158 |         # TODO: Implement using jupyter_server_api
159 |         raise NotImplementedError("To be refactored from server.py")
160 | 
```
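The stubs above will eventually delegate to the HTTP clients. As a rough idea of the shape such a delegation could take, here is a sketch of a directory walk built on the same `server_client.contents.list_directory(...)` usage that `list_notebooks_tool.py` (below) already relies on; the client is passed in because the exact `JupyterServerClient` constructor arguments are not shown on this page.

```python
from jupyter_server_api import JupyterServerClient

def walk_notebooks(server_client: JupyterServerClient, path: str = "") -> list[str]:
    """Sketch: recursively collect .ipynb paths via the contents API."""
    notebooks: list[str] = []
    for item in server_client.contents.list_directory(path):
        full_path = f"{path}/{item.name}" if path else item.name
        if item.type == "directory":
            notebooks.extend(walk_notebooks(server_client, full_path))
        elif item.type == "notebook" or item.name.endswith(".ipynb"):
            notebooks.append(full_path)
    return notebooks
```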

--------------------------------------------------------------------------------
/jupyter_mcp_server/jupyter_extension/backends/base.py:
--------------------------------------------------------------------------------

```python
  1 | # Copyright (c) 2023-2024 Datalayer, Inc.
  2 | #
  3 | # BSD 3-Clause License
  4 | 
  5 | """
  6 | Abstract Backend Interface
  7 | 
  8 | Defines the contract for all backend implementations (Remote and Local).
  9 | """
 10 | 
 11 | from abc import ABC, abstractmethod
 12 | from typing import Optional, Any, Union, Literal
 13 | from mcp.types import ImageContent
 14 | 
 15 | 
 16 | class Backend(ABC):
 17 |     """
 18 |     Abstract backend for notebook and kernel operations.
 19 |     
 20 |     Implementations:
 21 |     - RemoteBackend: Uses jupyter_nbmodel_client, jupyter_kernel_client, jupyter_server_api
 22 |     - LocalBackend: Uses local serverapp.contents_manager and serverapp.kernel_manager
 23 |     """
 24 |     
 25 |     # Notebook operations
 26 |     
 27 |     @abstractmethod
 28 |     async def get_notebook_content(self, path: str) -> dict[str, Any]:
 29 |         """
 30 |         Retrieve notebook content.
 31 |         
 32 |         Args:
 33 |             path: Path to the notebook file
 34 |             
 35 |         Returns:
 36 |             Dictionary with notebook content (cells, metadata)
 37 |         """
 38 |         pass
 39 |     
 40 |     @abstractmethod
 41 |     async def list_notebooks(self, path: str = "") -> list[str]:
 42 |         """
 43 |         List all notebooks in a directory.
 44 |         
 45 |         Args:
 46 |             path: Directory path (empty string for root)
 47 |             
 48 |         Returns:
 49 |             List of notebook paths
 50 |         """
 51 |         pass
 52 |     
 53 |     @abstractmethod
 54 |     async def notebook_exists(self, path: str) -> bool:
 55 |         """
 56 |         Check if a notebook exists.
 57 |         
 58 |         Args:
 59 |             path: Path to the notebook file
 60 |             
 61 |         Returns:
 62 |             True if notebook exists
 63 |         """
 64 |         pass
 65 |     
 66 |     @abstractmethod
 67 |     async def create_notebook(self, path: str) -> dict[str, Any]:
 68 |         """
 69 |         Create a new notebook.
 70 |         
 71 |         Args:
 72 |             path: Path for the new notebook
 73 |             
 74 |         Returns:
 75 |             Created notebook content
 76 |         """
 77 |         pass
 78 |     
 79 |     # Cell operations (via notebook connection)
 80 |     
 81 |     @abstractmethod
 82 |     async def read_cells(
 83 |         self, 
 84 |         path: str, 
 85 |         start_index: Optional[int] = None,
 86 |         end_index: Optional[int] = None
 87 |     ) -> list[dict[str, Any]]:
 88 |         """
 89 |         Read cells from a notebook.
 90 |         
 91 |         Args:
 92 |             path: Notebook path
 93 |             start_index: Start cell index (None for all)
 94 |             end_index: End cell index (None for all)
 95 |             
 96 |         Returns:
 97 |             List of cell dictionaries
 98 |         """
 99 |         pass
100 |     
101 |     @abstractmethod
102 |     async def append_cell(
103 |         self, 
104 |         path: str, 
105 |         cell_type: Literal["code", "markdown"],
106 |         source: Union[str, list[str]]
107 |     ) -> int:
108 |         """
109 |         Append a cell to notebook.
110 |         
111 |         Args:
112 |             path: Notebook path
113 |             cell_type: Type of cell
114 |             source: Cell source code/markdown
115 |             
116 |         Returns:
117 |             Index of appended cell
118 |         """
119 |         pass
120 |     
121 |     @abstractmethod
122 |     async def insert_cell(
123 |         self,
124 |         path: str,
125 |         cell_index: int,
126 |         cell_type: Literal["code", "markdown"],
127 |         source: Union[str, list[str]]
128 |     ) -> int:
129 |         """
130 |         Insert a cell at specific index.
131 |         
132 |         Args:
133 |             path: Notebook path
134 |             cell_index: Where to insert
135 |             cell_type: Type of cell
136 |             source: Cell source
137 |             
138 |         Returns:
139 |             Index of inserted cell
140 |         """
141 |         pass
142 |     
143 |     @abstractmethod
144 |     async def delete_cell(self, path: str, cell_index: int) -> None:
145 |         """
146 |         Delete a cell from notebook.
147 |         
148 |         Args:
149 |             path: Notebook path
150 |             cell_index: Index of cell to delete
151 |         """
152 |         pass
153 |     
154 |     @abstractmethod
155 |     async def overwrite_cell(
156 |         self,
157 |         path: str,
158 |         cell_index: int,
159 |         new_source: Union[str, list[str]]
160 |     ) -> tuple[str, str]:
161 |         """
162 |         Overwrite cell content.
163 |         
164 |         Args:
165 |             path: Notebook path
166 |             cell_index: Index of cell to overwrite
167 |             new_source: New source content
168 |             
169 |         Returns:
170 |             Tuple of (old_source, new_source) for diff generation
171 |         """
172 |         pass
173 |     
174 |     # Kernel operations
175 |     
176 |     @abstractmethod
177 |     async def get_or_create_kernel(self, path: str, kernel_id: Optional[str] = None) -> str:
178 |         """
179 |         Get existing kernel or create new one for a notebook.
180 |         
181 |         Args:
182 |             path: Notebook path
183 |             kernel_id: Specific kernel ID (None to create new)
184 |             
185 |         Returns:
186 |             Kernel ID
187 |         """
188 |         pass
189 |     
190 |     @abstractmethod
191 |     async def execute_cell(
192 |         self,
193 |         path: str,
194 |         cell_index: int,
195 |         kernel_id: str,
196 |         timeout_seconds: int = 300
197 |     ) -> list[Union[str, ImageContent]]:
198 |         """
199 |         Execute a cell and return outputs.
200 |         
201 |         Args:
202 |             path: Notebook path
203 |             cell_index: Index of cell to execute
204 |             kernel_id: Kernel to use
205 |             timeout_seconds: Execution timeout
206 |             
207 |         Returns:
208 |             List of cell outputs
209 |         """
210 |         pass
211 |     
212 |     @abstractmethod
213 |     async def interrupt_kernel(self, kernel_id: str) -> None:
214 |         """
215 |         Interrupt a running kernel.
216 |         
217 |         Args:
218 |             kernel_id: Kernel to interrupt
219 |         """
220 |         pass
221 |     
222 |     @abstractmethod
223 |     async def restart_kernel(self, kernel_id: str) -> None:
224 |         """
225 |         Restart a kernel.
226 |         
227 |         Args:
228 |             kernel_id: Kernel to restart
229 |         """
230 |         pass
231 |     
232 |     @abstractmethod
233 |     async def shutdown_kernel(self, kernel_id: str) -> None:
234 |         """
235 |         Shutdown a kernel.
236 |         
237 |         Args:
238 |             kernel_id: Kernel to shutdown
239 |         """
240 |         pass
241 |     
242 |     @abstractmethod
243 |     async def list_kernels(self) -> list[dict[str, Any]]:
244 |         """
245 |         List all running kernels.
246 |         
247 |         Returns:
248 |             List of kernel information dictionaries
249 |         """
250 |         pass
251 |     
252 |     @abstractmethod
253 |     async def kernel_exists(self, kernel_id: str) -> bool:
254 |         """
255 |         Check if a kernel exists.
256 |         
257 |         Args:
258 |             kernel_id: Kernel ID to check
259 |             
260 |         Returns:
261 |             True if kernel exists
262 |         """
263 |         pass
264 | 
```
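Because every backend implements the same coroutine interface, higher-level code can be written once against `Backend`. An illustrative (hypothetical) helper composing a few of the methods defined above:

```python
from mcp.types import ImageContent
from jupyter_mcp_server.jupyter_extension.backends.base import Backend

async def run_first_cell(backend: Backend, path: str) -> list[str | ImageContent]:
    """Sketch: ensure a notebook exists, attach a kernel, and execute its first cell."""
    if not await backend.notebook_exists(path):
        await backend.create_notebook(path)
    kernel_id = await backend.get_or_create_kernel(path)
    return await backend.execute_cell(path, cell_index=0, kernel_id=kernel_id, timeout_seconds=300)
```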

--------------------------------------------------------------------------------
/jupyter_mcp_server/tools/list_notebooks_tool.py:
--------------------------------------------------------------------------------

```python
  1 | # Copyright (c) 2023-2024 Datalayer, Inc.
  2 | #
  3 | # BSD 3-Clause License
  4 | 
  5 | """List notebooks tool implementation."""
  6 | 
  7 | from typing import Any, Optional, List
  8 | from jupyter_server_api import JupyterServerClient
  9 | from jupyter_mcp_server.tools._base import BaseTool, ServerMode
 10 | from jupyter_mcp_server.notebook_manager import NotebookManager
 11 | from jupyter_mcp_server.utils import format_TSV
 12 | 
 13 | 
 14 | class ListNotebooksTool(BaseTool):
 15 |     """Tool to list all notebooks in the Jupyter server."""
 16 |     
 17 |     @property
 18 |     def name(self) -> str:
 19 |         return "list_notebooks"
 20 |     
 21 |     @property
 22 |     def description(self) -> str:
 23 |         return """List all notebooks in the Jupyter server (including subdirectories) and show which ones are managed.
 24 |     
 25 | To interact with a notebook, it has to be "managed". If a notebook is not managed, you can connect to it using the `use_notebook` tool.
 26 | 
 27 | Returns:
 28 |     str: TSV formatted table with notebook information including management status"""
 29 |     
 30 |     def _list_notebooks_http(self, server_client: JupyterServerClient, path: str = "", notebooks: Optional[List[str]] = None) -> List[str]:
 31 |         """List notebooks using HTTP API (MCP_SERVER mode)."""
 32 |         if notebooks is None:
 33 |             notebooks = []
 34 |         
 35 |         try:
 36 |             contents = server_client.contents.list_directory(path)
 37 |             for item in contents:
 38 |                 full_path = f"{path}/{item.name}" if path else item.name
 39 |                 if item.type == "directory":
 40 |                     # Recursively search subdirectories
 41 |                     self._list_notebooks_http(server_client, full_path, notebooks)
 42 |                 elif item.type == "notebook" or (item.type == "file" and item.name.endswith('.ipynb')):
 43 |                     # Add notebook to list without any prefix
 44 |                     notebooks.append(full_path)
 45 |         except Exception as e:
 46 |             # If we can't access a directory, just skip it
 47 |             pass
 48 |         
 49 |         return notebooks
 50 |     
 51 |     async def _list_notebooks_local(self, contents_manager: Any, path: str = "", notebooks: Optional[List[str]] = None) -> List[str]:
 52 |         """List notebooks using local contents_manager API (JUPYTER_SERVER mode)."""
 53 |         if notebooks is None:
 54 |             notebooks = []
 55 |         
 56 |         try:
 57 |             model = await contents_manager.get(path, content=True, type='directory')
 58 |             for item in model.get('content', []):
 59 |                 full_path = f"{path}/{item['name']}" if path else item['name']
 60 |                 if item['type'] == "directory":
 61 |                     # Recursively search subdirectories
 62 |                     await self._list_notebooks_local(contents_manager, full_path, notebooks)
 63 |                 elif item['type'] == "notebook" or (item['type'] == "file" and item['name'].endswith('.ipynb')):
 64 |                     # Add notebook to list
 65 |                     notebooks.append(full_path)
 66 |         except Exception:
 67 |             # If we can't access a directory, just skip it
 68 |             pass
 69 |         
 70 |         return notebooks
 71 |     
 72 |     async def execute(
 73 |         self,
 74 |         mode: ServerMode,
 75 |         server_client: Optional[JupyterServerClient] = None,
 76 |         kernel_client: Optional[Any] = None,
 77 |         contents_manager: Optional[Any] = None,
 78 |         kernel_manager: Optional[Any] = None,
 79 |         kernel_spec_manager: Optional[Any] = None,
 80 |         notebook_manager: Optional[NotebookManager] = None,
 81 |         **kwargs
 82 |     ) -> str:
 83 |         """Execute the list_notebooks tool.
 84 |         
 85 |         Args:
 86 |             mode: Server mode (MCP_SERVER or JUPYTER_SERVER)
 87 |             server_client: HTTP client for MCP_SERVER mode
 88 |             contents_manager: Direct API access for JUPYTER_SERVER mode
 89 |             notebook_manager: Notebook manager instance
 90 |             **kwargs: Additional parameters (unused)
 91 |             
 92 |         Returns:
 93 |             TSV formatted table with notebook information
 94 |         """
 95 |         # Get all notebooks based on mode
 96 |         if mode == ServerMode.JUPYTER_SERVER and contents_manager is not None:
 97 |             all_notebooks = await self._list_notebooks_local(contents_manager)
 98 |         elif mode == ServerMode.MCP_SERVER and server_client is not None:
 99 |             all_notebooks = self._list_notebooks_http(server_client)
100 |         else:
101 |             raise ValueError(f"Invalid mode or missing required clients: mode={mode}")
102 |         
103 |         # Get managed notebooks info
104 |         managed_notebooks = notebook_manager.list_all_notebooks() if notebook_manager else {}
105 |         
106 |         if not all_notebooks and not managed_notebooks:
107 |             return "No notebooks found in the Jupyter server."
108 |         
109 |         # Create TSV formatted output
110 |         headers = ["Path", "Managed", "Name", "Status", "Current"]
111 |         rows = []
112 |         
113 |         # Create a set of managed notebook paths for quick lookup
114 |         managed_paths = {info["path"] for info in managed_notebooks.values()}
115 |         
116 |         # Add all notebooks found in the server
117 |         for notebook_path in sorted(all_notebooks):
118 |             is_managed = notebook_path in managed_paths
119 |             
120 |             if is_managed:
121 |                 # Find the managed notebook entry
122 |                 managed_info = None
123 |                 managed_name = None
124 |                 for name, info in managed_notebooks.items():
125 |                     if info["path"] == notebook_path:
126 |                         managed_info = info
127 |                         managed_name = name
128 |                         break
129 |                 
130 |                 if managed_info:
131 |                     current_marker = "✓" if managed_info["is_current"] else ""
132 |                     rows.append([notebook_path, "Yes", managed_name, managed_info['kernel_status'], current_marker])
133 |                 else:
134 |                     rows.append([notebook_path, "Yes", "-", "-", ""])
135 |             else:
136 |                 rows.append([notebook_path, "No", "-", "-", ""])
137 |         
138 |         # Add any managed notebooks that weren't found in the server (edge case)
139 |         for name, info in managed_notebooks.items():
140 |             if info["path"] not in all_notebooks:
141 |                 current_marker = "✓" if info["is_current"] else ""
142 |                 rows.append([info['path'], "Yes (not found)", name, info['kernel_status'], current_marker])
143 |         
144 |         return format_TSV(headers, rows)
145 | 
```
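
As a quick usage sketch, the tool above can be invoked directly in MCP_SERVER mode. The server URL and token below are placeholders, and omitting the `notebook_manager` simply makes every notebook report as unmanaged.

```python
# Hypothetical driver for ListNotebooksTool in MCP_SERVER mode (URL and token are placeholders).
import asyncio

from jupyter_server_api import JupyterServerClient
from jupyter_mcp_server.tools._base import ServerMode
from jupyter_mcp_server.tools.list_notebooks_tool import ListNotebooksTool


async def main() -> None:
    client = JupyterServerClient(base_url="http://localhost:8888", token="MY_TOKEN")
    tool = ListNotebooksTool()
    # Without a NotebookManager the tool still works; all notebooks show as unmanaged.
    print(await tool.execute(mode=ServerMode.MCP_SERVER, server_client=client))


if __name__ == "__main__":
    asyncio.run(main())
```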

--------------------------------------------------------------------------------
/jupyter_mcp_server/tools/list_cells_tool.py:
--------------------------------------------------------------------------------

```python
  1 | # Copyright (c) 2023-2024 Datalayer, Inc.
  2 | #
  3 | # BSD 3-Clause License
  4 | 
  5 | """List cells tool implementation."""
  6 | 
  7 | from typing import Any, Optional
  8 | from jupyter_server_api import JupyterServerClient
  9 | from jupyter_mcp_server.tools._base import BaseTool, ServerMode
 10 | from jupyter_mcp_server.notebook_manager import NotebookManager
 11 | from jupyter_mcp_server.config import get_config
 12 | from jupyter_nbmodel_client import NbModelClient
 13 | from jupyter_mcp_server.utils import normalize_cell_source, format_TSV
 14 | 
 15 | 
 16 | class ListCellsTool(BaseTool):
 17 |     """Tool to list basic information of all cells."""
 18 |     
 19 |     @property
 20 |     def name(self) -> str:
 21 |         return "list_cells"
 22 |     
 23 |     @property
 24 |     def description(self) -> str:
 25 |         return """List the basic information of all cells in the notebook.
 26 |     
 27 | Returns a formatted table showing the index, type, execution count (for code cells),
 28 | and first line of each cell. This provides a quick overview of the notebook structure
 29 | and is useful for locating specific cells for operations like delete or insert.
 30 | 
 31 | Returns:
 32 |     str: Formatted table with cell information (Index, Type, Count, First Line)"""
 33 |     
 34 |     async def _list_cells_local(self, contents_manager: Any, path: str) -> str:
 35 |         """List cells using local contents_manager (JUPYTER_SERVER mode)."""
 36 |         # Read the notebook file directly
 37 |         model = await contents_manager.get(path, content=True, type='notebook')
 38 |         
 39 |         if 'content' not in model:
 40 |             raise ValueError(f"Could not read notebook content from {path}")
 41 |         
 42 |         notebook_content = model['content']
 43 |         cells = notebook_content.get('cells', [])
 44 |         
 45 |         # Format the cells into a table
 46 |         headers = ["Index", "Type", "Count", "First Line"]
 47 |         rows = []
 48 |         
 49 |         for idx, cell in enumerate(cells):
 50 |             cell_type = cell.get('cell_type', 'unknown')
 51 |             execution_count = cell.get('execution_count', '-') if cell_type == 'code' else '-'
 52 |             
 53 |             # Get the first line of source
 54 |             source = cell.get('source', '')
 55 |             if isinstance(source, list):
 56 |                 first_line = source[0].rstrip('\n') if source else ''  # strip trailing newline for display
 57 |                 lines = len(source)
 58 |             else:
 59 |                 first_line = source.split('\n')[0] if source else ''
 60 |                 lines = len(source.split('\n'))
 61 |             
 62 |             if lines > 1:
 63 |                 first_line += f"...({lines - 1} lines hidden)"
 64 |             
 65 |             rows.append([idx, cell_type, execution_count, first_line])
 66 |         
 67 |         return format_TSV(headers, rows)
 68 |     
 69 |     def _list_cells_websocket(self, notebook: NbModelClient) -> str:
 70 |         """List cells using WebSocket connection (MCP_SERVER mode)."""
 71 |         total_cells = len(notebook)
 72 |         
 73 |         if total_cells == 0:
 74 |             return "Notebook is empty, no cells found."
 75 |         
 76 |         # Create header
 77 |         headers = ["Index", "Type", "Count", "First Line"]
 78 |         rows = []
 79 |         
 80 |         # Process each cell
 81 |         for i in range(total_cells):
 82 |             cell_data = notebook[i]
 83 |             cell_type = cell_data.get("cell_type", "unknown")
 84 |             
 85 |             # Get execution count for code cells
 86 |             execution_count = (cell_data.get("execution_count") or "None") if cell_type == "code" else "N/A"
 87 |             # Get first line of source
 88 |             source_lines = normalize_cell_source(cell_data.get("source", ""))
 89 |             first_line = source_lines[0] if source_lines else ""
 90 |             if len(source_lines) > 1:
 91 |                 first_line += f"...({len(source_lines) - 1} lines hidden)"
 92 |             
 93 |             # Add to table
 94 |             rows.append([i, cell_type, execution_count, first_line])
 95 |         
 96 |         return format_TSV(headers, rows)
 97 |     
 98 |     async def execute(
 99 |         self,
100 |         mode: ServerMode,
101 |         server_client: Optional[JupyterServerClient] = None,
102 |         kernel_client: Optional[Any] = None,
103 |         contents_manager: Optional[Any] = None,
104 |         kernel_manager: Optional[Any] = None,
105 |         kernel_spec_manager: Optional[Any] = None,
106 |         notebook_manager: Optional[NotebookManager] = None,
107 |         **kwargs
108 |     ) -> str:
109 |         """Execute the list_cells tool.
110 |         
111 |         Args:
112 |             mode: Server mode (MCP_SERVER or JUPYTER_SERVER)
113 |             contents_manager: Direct API access for JUPYTER_SERVER mode
114 |             notebook_manager: Notebook manager instance
115 |             **kwargs: Additional parameters
116 |             
117 |         Returns:
118 |             Formatted table with cell information
119 |         """
120 |         if mode == ServerMode.JUPYTER_SERVER and contents_manager is not None:
121 |             # Local mode: read notebook directly from file system
122 |             from jupyter_mcp_server.jupyter_extension.context import get_server_context
123 |             from pathlib import Path
124 |             
125 |             context = get_server_context()
126 |             serverapp = context.serverapp
127 |             
128 |             # Get current notebook path from notebook_manager if available, else use config
129 |             notebook_path = None
130 |             if notebook_manager:
131 |                 notebook_path = notebook_manager.get_current_notebook_path()
132 |             if not notebook_path:
133 |                 config = get_config()
134 |                 notebook_path = config.document_id
135 |             
136 |             # contents_manager expects path relative to serverapp.root_dir
137 |             # If we have an absolute path, convert it to relative
138 |             if serverapp and Path(notebook_path).is_absolute():
139 |                 root_dir = Path(serverapp.root_dir)
140 |                 abs_path = Path(notebook_path)
141 |                 try:
142 |                     notebook_path = str(abs_path.relative_to(root_dir))
143 |                 except ValueError:
144 |                     # Path is not under root_dir, use as-is
145 |                     pass
146 |             
147 |             return await self._list_cells_local(contents_manager, notebook_path)
148 |         elif mode == ServerMode.MCP_SERVER and notebook_manager is not None:
149 |             # Remote mode: use WebSocket connection to Y.js document
150 |             async with notebook_manager.get_current_connection() as notebook:
151 |                 return self._list_cells_websocket(notebook)
152 |         else:
153 |             raise ValueError(f"Invalid mode or missing required clients: mode={mode}")
154 | 
```
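
Assuming `format_TSV` joins the headers and each row with tab characters, the output of `list_cells` for a small two-cell notebook would look roughly like the following (the notebook contents are invented for illustration):

```
Index	Type	Count	First Line
0	markdown	-	# Data exploration
1	code	2	import pandas as pd...(3 lines hidden)
```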

--------------------------------------------------------------------------------
/jupyter_mcp_server/tools/assign_kernel_to_notebook_tool.py:
--------------------------------------------------------------------------------

```python
  1 | # Copyright (c) 2023-2024 Datalayer, Inc.
  2 | #
  3 | # BSD 3-Clause License
  4 | 
  5 | """Assign kernel to notebook tool implementation."""
  6 | 
  7 | from typing import Any, Optional
  8 | from jupyter_server_api import JupyterServerClient, NotFoundError
  9 | from jupyter_mcp_server.tools._base import BaseTool, ServerMode
 10 | 
 11 | 
 12 | class AssignKernelToNotebookTool(BaseTool):
 13 |     """Tool to assign a kernel to a notebook by creating a Jupyter session."""
 14 |     
 15 |     @property
 16 |     def name(self) -> str:
 17 |         return "assign_kernel_to_notebook"
 18 |     
 19 |     @property
 20 |     def description(self) -> str:
 21 |         return """Assign a kernel to a notebook by creating a Jupyter session.
 22 |     
 23 | This creates a Jupyter server session that connects a notebook file to a kernel,
 24 | enabling code execution in the notebook. Sessions are the mechanism Jupyter uses
 25 | to maintain the relationship between notebooks and their kernels.
 26 | 
 27 | Args:
 28 |     notebook_path: Path to the notebook file, relative to the Jupyter server root (e.g. "notebook.ipynb")
 29 |     kernel_id: ID of the kernel to assign to the notebook
 30 |     session_name: Optional name for the session (defaults to notebook path)
 31 |     
 32 | Returns:
 33 |     str: Success message with session information including session ID"""
 34 |     
 35 |     async def execute(
 36 |         self,
 37 |         mode: ServerMode,
 38 |         server_client: Optional[JupyterServerClient] = None,
 39 |         contents_manager: Optional[Any] = None,
 40 |         session_manager: Optional[Any] = None,
 41 |         kernel_manager: Optional[Any] = None,
 42 |         # Tool-specific parameters
 43 |         notebook_path: str = None,
 44 |         kernel_id: str = None,
 45 |         session_name: Optional[str] = None,
 46 |         **kwargs
 47 |     ) -> str:
 48 |         """Execute the assign_kernel_to_notebook tool.
 49 |         
 50 |         Args:
 51 |             mode: Server mode (MCP_SERVER or JUPYTER_SERVER)
 52 |             server_client: HTTP client for MCP_SERVER mode
 53 |             contents_manager: Direct API access for JUPYTER_SERVER mode
 54 |             session_manager: Session manager for JUPYTER_SERVER mode
 55 |             kernel_manager: Kernel manager for validation
 56 |             notebook_path: Path to the notebook file
 57 |             kernel_id: ID of the kernel to assign
 58 |             session_name: Optional session name
 59 |             **kwargs: Additional parameters
 60 |             
 61 |         Returns:
 62 |             Success message with session information
 63 |         """
 64 |         if not notebook_path:
 65 |             return "Error: notebook_path is required"
 66 |         
 67 |         if not kernel_id:
 68 |             return "Error: kernel_id is required"
 69 |         
 70 |         # Use notebook_path as session name if not provided
 71 |         if not session_name:
 72 |             session_name = notebook_path
 73 |         
 74 |         # Verify notebook exists
 75 |         try:
 76 |             if mode == ServerMode.MCP_SERVER and server_client is not None:
 77 |                 # Check notebook exists using HTTP API
 78 |                 try:
 79 |                     # contents.get raises NotFoundError if the notebook is missing
 80 |                     server_client.contents.get(notebook_path)
 81 |                 except NotFoundError:
 82 |                     return f"Error: Notebook '{notebook_path}' not found on Jupyter server"
 83 |             elif mode == ServerMode.JUPYTER_SERVER and contents_manager is not None:
 84 |                 # Check notebook exists using local API
 85 |                 try:
 86 |                     await contents_manager.get(notebook_path, content=False)
 87 |                 except Exception as e:
 88 |                     return f"Error: Notebook '{notebook_path}' not found: {e}"
 89 |             else:
 90 |                 return f"Error: Invalid mode or missing required clients: mode={mode}"
 91 |         except Exception as e:
 92 |             return f"Error checking notebook: {e}"
 93 |         
 94 |         # Verify kernel exists
 95 |         try:
 96 |             if mode == ServerMode.MCP_SERVER and server_client is not None:
 97 |                 # Check kernel exists using HTTP API
 98 |                 kernels = server_client.kernels.list_kernels()
 99 |                 kernel_exists = any(kernel.id == kernel_id for kernel in kernels)
100 |                 if not kernel_exists:
101 |                     return f"Error: Kernel '{kernel_id}' not found on Jupyter server"
102 |             elif mode == ServerMode.JUPYTER_SERVER and kernel_manager is not None:
103 |                 # Check kernel exists using local API
104 |                 if kernel_id not in kernel_manager:
105 |                     return f"Error: Kernel '{kernel_id}' not found in local kernel manager"
106 |             else:
107 |                 return f"Error: Invalid mode or missing kernel manager: mode={mode}"
108 |         except Exception as e:
109 |             return f"Error checking kernel: {e}"
110 |         
111 |         # Create the session
112 |         try:
113 |             if mode == ServerMode.MCP_SERVER and server_client is not None:
114 |                 # Create session using HTTP API
115 |                 session = server_client.sessions.create_session(
116 |                     path=notebook_path,
117 |                     kernel={"id": kernel_id},
118 |                     session_type="notebook",
119 |                     name=session_name
120 |                 )
121 |                 return (
122 |                     f"Successfully created session '{session.id}' for notebook '{notebook_path}' "
123 |                     f"with kernel '{kernel_id}'. The notebook is now connected to the kernel."
124 |                 )
125 |             elif mode == ServerMode.JUPYTER_SERVER and session_manager is not None:
126 |                 # Create session using local API
127 |                 # jupyter_server's SessionManager.create_session is a coroutine and
128 |                 # returns a dict describing the new session, so it is awaited
129 |                 # directly rather than offloaded to a thread
130 | 
131 |                 session_dict = await session_manager.create_session(
132 |                     path=notebook_path,
133 |                     kernel_id=kernel_id,
134 |                     type="notebook",
135 |                     name=session_name
136 |                 )
137 | 
138 |                 # Report the new session's ID back to the caller
139 |                 session_id = session_dict.get("id", "unknown")
140 |                 return (
141 |                     f"Successfully created session '{session_id}' for notebook '{notebook_path}' "
142 |                     f"with kernel '{kernel_id}'. The notebook is now connected to the kernel."
143 |                 )
144 |             else:
145 |                 return f"Error: Invalid mode or missing session manager: mode={mode}"
146 |         except Exception as e:
147 |             return f"Error creating session: {e}"
148 | 
```
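
For reference, the MCP_SERVER branch above is essentially a thin wrapper around Jupyter Server's `/api/sessions` endpoint. A rough REST-level equivalent using plain `requests` is sketched below; the URL, token, and kernel ID are placeholders, and the response fields should be checked against your Jupyter Server version.

```python
# Rough REST-level equivalent of the session creation performed above (placeholders throughout).
import requests

BASE_URL = "http://localhost:8888"
TOKEN = "MY_TOKEN"

resp = requests.post(
    f"{BASE_URL}/api/sessions",
    headers={"Authorization": f"token {TOKEN}"},
    json={
        "path": "notebook.ipynb",        # notebook_path
        "type": "notebook",              # session type
        "name": "notebook.ipynb",        # session_name (defaults to the path)
        "kernel": {"id": "KERNEL_ID"},   # kernel_id to assign
    },
    timeout=30,
)
resp.raise_for_status()
print(resp.json().get("id"))  # ID of the newly created session
```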

--------------------------------------------------------------------------------
/jupyter_mcp_server/jupyter_extension/protocol/messages.py:
--------------------------------------------------------------------------------

```python
  1 | # Copyright (c) 2023-2024 Datalayer, Inc.
  2 | #
  3 | # BSD 3-Clause License
  4 | 
  5 | """
  6 | MCP Protocol Messages
  7 | 
  8 | Pydantic models for MCP protocol requests and responses to ensure consistent
  9 | API across both MCP_SERVER and JUPYTER_SERVER modes.
 10 | """
 11 | 
 12 | from typing import Any, Optional, Union, Literal
 13 | from pydantic import BaseModel, Field
 14 | from mcp.types import ImageContent
 15 | 
 16 | 
 17 | # Tool execution models
 18 | class ToolRequest(BaseModel):
 19 |     """Request to execute a tool"""
 20 |     tool_name: str = Field(..., description="Name of the tool to execute")
 21 |     arguments: dict[str, Any] = Field(default_factory=dict, description="Tool arguments")
 22 |     context: Optional[dict[str, Any]] = Field(None, description="Execution context")
 23 | 
 24 | 
 25 | class ToolResponse(BaseModel):
 26 |     """Response from tool execution"""
 27 |     success: bool = Field(..., description="Whether execution was successful")
 28 |     result: Any = Field(None, description="Tool execution result")
 29 |     error: Optional[str] = Field(None, description="Error message if execution failed")
 30 | 
 31 | 
 32 | # Notebook operation models
 33 | class NotebookContentRequest(BaseModel):
 34 |     """Request to retrieve notebook content"""
 35 |     path: str = Field(..., description="Path to the notebook file")
 36 |     include_outputs: bool = Field(True, description="Include cell outputs")
 37 | 
 38 | 
 39 | class NotebookContentResponse(BaseModel):
 40 |     """Response containing notebook content"""
 41 |     path: str = Field(..., description="Notebook path")
 42 |     cells: list[dict[str, Any]] = Field(..., description="List of cells")
 43 |     metadata: dict[str, Any] = Field(default_factory=dict, description="Notebook metadata")
 44 | 
 45 | 
 46 | class NotebookListRequest(BaseModel):
 47 |     """Request to list notebooks"""
 48 |     path: Optional[str] = Field("", description="Directory path to search")
 49 |     recursive: bool = Field(True, description="Search recursively")
 50 | 
 51 | 
 52 | class NotebookListResponse(BaseModel):
 53 |     """Response containing list of notebooks"""
 54 |     notebooks: list[str] = Field(..., description="List of notebook paths")
 55 | 
 56 | 
 57 | # Cell operation models
 58 | class ReadCellsRequest(BaseModel):
 59 |     """Request to read cells from a notebook"""
 60 |     path: Optional[str] = Field(None, description="Notebook path (uses current if not specified)")
 61 |     start_index: Optional[int] = Field(None, description="Start cell index")
 62 |     end_index: Optional[int] = Field(None, description="End cell index")
 63 | 
 64 | 
 65 | class ReadCellsResponse(BaseModel):
 66 |     """Response containing cell information"""
 67 |     cells: list[dict[str, Any]] = Field(..., description="List of cell information")
 68 | 
 69 | 
 70 | class AppendCellRequest(BaseModel):
 71 |     """Request to append a cell"""
 72 |     path: Optional[str] = Field(None, description="Notebook path")
 73 |     cell_type: Literal["code", "markdown"] = Field(..., description="Cell type")
 74 |     source: Union[str, list[str]] = Field(..., description="Cell source")
 75 | 
 76 | 
 77 | class AppendCellResponse(BaseModel):
 78 |     """Response after appending a cell"""
 79 |     cell_index: int = Field(..., description="Index of the appended cell")
 80 |     message: str = Field(..., description="Success message")
 81 | 
 82 | 
 83 | class InsertCellRequest(BaseModel):
 84 |     """Request to insert a cell"""
 85 |     path: Optional[str] = Field(None, description="Notebook path")
 86 |     cell_index: int = Field(..., description="Index where to insert")
 87 |     cell_type: Literal["code", "markdown"] = Field(..., description="Cell type")
 88 |     source: Union[str, list[str]] = Field(..., description="Cell source")
 89 | 
 90 | 
 91 | class InsertCellResponse(BaseModel):
 92 |     """Response after inserting a cell"""
 93 |     cell_index: int = Field(..., description="Index of the inserted cell")
 94 |     message: str = Field(..., description="Success message")
 95 | 
 96 | 
 97 | class DeleteCellRequest(BaseModel):
 98 |     """Request to delete a cell"""
 99 |     path: Optional[str] = Field(None, description="Notebook path")
100 |     cell_index: int = Field(..., description="Index of cell to delete")
101 | 
102 | 
103 | class DeleteCellResponse(BaseModel):
104 |     """Response after deleting a cell"""
105 |     message: str = Field(..., description="Success message")
106 | 
107 | 
108 | class OverwriteCellRequest(BaseModel):
109 |     """Request to overwrite a cell"""
110 |     path: Optional[str] = Field(None, description="Notebook path")
111 |     cell_index: int = Field(..., description="Index of cell to overwrite")
112 |     new_source: Union[str, list[str]] = Field(..., description="New cell source")
113 | 
114 | 
115 | class OverwriteCellResponse(BaseModel):
116 |     """Response after overwriting a cell"""
117 |     message: str = Field(..., description="Success message with diff")
118 | 
119 | 
120 | # Cell execution models
121 | class ExecuteCellRequest(BaseModel):
122 |     """Request to execute a cell"""
123 |     path: Optional[str] = Field(None, description="Notebook path")
124 |     cell_index: int = Field(..., description="Index of cell to execute")
125 |     timeout_seconds: int = Field(300, description="Execution timeout in seconds")
126 | 
127 | 
128 | class ExecuteCellResponse(BaseModel):
129 |     """Response after executing a cell"""
130 |     cell_index: int = Field(..., description="Executed cell index")
131 |     outputs: list[Union[str, ImageContent]] = Field(..., description="Cell outputs")
132 |     execution_count: Optional[int] = Field(None, description="Execution count")
133 |     status: Literal["success", "error", "timeout"] = Field(..., description="Execution status")
134 | 
135 | 
136 | # Kernel operation models
137 | class ConnectNotebookRequest(BaseModel):
138 |     """Request to connect to a notebook"""
139 |     notebook_name: str = Field(..., description="Unique notebook identifier")
140 |     notebook_path: str = Field(..., description="Path to notebook file")
141 |     mode: Literal["connect", "create"] = Field("connect", description="Connection mode")
142 |     kernel_id: Optional[str] = Field(None, description="Specific kernel ID")
143 | 
144 | 
145 | class ConnectNotebookResponse(BaseModel):
146 |     """Response after connecting to notebook"""
147 |     message: str = Field(..., description="Success message")
148 |     notebook_name: str = Field(..., description="Notebook identifier")
149 |     notebook_path: str = Field(..., description="Notebook path")
150 | 
151 | 
152 | class UnuseNotebookRequest(BaseModel):
153 |     """Request to stop using a notebook"""
154 |     notebook_name: str = Field(..., description="Notebook identifier to disconnect")
155 | 
156 | 
157 | class UnuseNotebookResponse(BaseModel):
158 |     """Response after disconnecting"""
159 |     message: str = Field(..., description="Success message")
160 | 
161 | 
162 | class RestartNotebookRequest(BaseModel):
163 |     """Request to restart a notebook kernel"""
164 |     notebook_name: str = Field(..., description="Notebook identifier to restart")
165 | 
166 | 
167 | class RestartNotebookResponse(BaseModel):
168 |     """Response after restarting kernel"""
169 |     message: str = Field(..., description="Success message")
170 |     notebook_name: str = Field(..., description="Notebook identifier")
171 | 
```
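
Because every request and response above is a Pydantic model, payloads can be validated and serialized symmetrically on both sides of the protocol. A minimal round-trip sketch, assuming Pydantic v2 (`model_dump_json` / `model_validate_json`):

```python
# Round-trip one protocol message (assumes Pydantic v2 APIs).
from jupyter_mcp_server.jupyter_extension.protocol.messages import ToolRequest, ToolResponse

request = ToolRequest(tool_name="list_cells", arguments={})
payload = request.model_dump_json()                # JSON string sent over the wire

echoed = ToolRequest.model_validate_json(payload)  # parse and validate on the receiving side
assert echoed.tool_name == "list_cells"

response = ToolResponse(success=True, result="Index\tType\tCount\tFirst Line")
print(response.model_dump_json())
```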

--------------------------------------------------------------------------------
/docs/static/img/feature_1.svg:
--------------------------------------------------------------------------------

```
  1 | <?xml version="1.0" encoding="UTF-8" standalone="no"?>
  2 | <!--
  3 |   ~ Copyright (c) 2023-2024 Datalayer, Inc.
  4 |   ~
  5 |   ~ BSD 3-Clause License
  6 | -->
  7 | 
  8 | <svg
  9 |    xmlns:dc="http://purl.org/dc/elements/1.1/"
 10 |    xmlns:cc="http://creativecommons.org/ns#"
 11 |    xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
 12 |    xmlns:svg="http://www.w3.org/2000/svg"
 13 |    xmlns="http://www.w3.org/2000/svg"
 14 |    xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
 15 |    xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
 16 |    viewBox="0 0 143.86 320.16998"
 17 |    version="1.1"
 18 |    id="svg1038"
 19 |    sodipodi:docname="feature_1.svg"
 20 |    inkscape:version="1.0.1 (c497b03c, 2020-09-10)"
 21 |    width="143.86"
 22 |    height="320.16998">
 23 |   <metadata
 24 |      id="metadata1042">
 25 |     <rdf:RDF>
 26 |       <cc:Work
 27 |          rdf:about="">
 28 |         <dc:format>image/svg+xml</dc:format>
 29 |         <dc:type
 30 |            rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
 31 |         <dc:title>Startup_SVG</dc:title>
 32 |       </cc:Work>
 33 |     </rdf:RDF>
 34 |   </metadata>
 35 |   <sodipodi:namedview
 36 |      pagecolor="#ffffff"
 37 |      bordercolor="#666666"
 38 |      borderopacity="1"
 39 |      objecttolerance="10"
 40 |      gridtolerance="10"
 41 |      guidetolerance="10"
 42 |      inkscape:pageopacity="0"
 43 |      inkscape:pageshadow="2"
 44 |      inkscape:window-width="1440"
 45 |      inkscape:window-height="717"
 46 |      id="namedview1040"
 47 |      showgrid="false"
 48 |      inkscape:zoom="1.0226025"
 49 |      inkscape:cx="117.68707"
 50 |      inkscape:cy="153.04271"
 51 |      inkscape:window-x="0"
 52 |      inkscape:window-y="25"
 53 |      inkscape:window-maximized="0"
 54 |      inkscape:current-layer="svg1038"
 55 |      inkscape:document-rotation="0"
 56 |      fit-margin-top="0"
 57 |      fit-margin-left="0"
 58 |      fit-margin-right="0"
 59 |      fit-margin-bottom="0" />
 60 |   <defs
 61 |      id="defs835">
 62 |     <style
 63 |        id="style833">.cls-1,.cls-11,.cls-9{fill:#d6d8e5;}.cls-1{opacity:0.15;}.cls-2,.cls-3{fill:#edeff9;}.cls-2{opacity:0.5;}.cls-4{fill:#ffbc00;}.cls-5{fill:#8c50ff;}.cls-6{fill:#424956;}.cls-7{fill:#494949;}.cls-8{fill:#2b303f;}.cls-9{opacity:0.4;}.cls-10{fill:#b1b4c4;}.cls-12{fill:#9ea1af;}.cls-13{fill:#c4c7d6;}.cls-14{fill:#e4e7f2;}.cls-15{fill:#fff;}.cls-16{fill:#e9eaf2;}.cls-17{fill:#f5f6ff;}.cls-18,.cls-19{fill:none;stroke:#e9eaf2;stroke-miterlimit:10;}.cls-18{stroke-width:0.31px;}.cls-19{stroke-width:1.53px;}.cls-20{fill:#edf0f9;}.cls-21{fill:#e2e5f2;}.cls-22{fill:#6e48e5;}.cls-23{fill:#5e42d3;}.cls-24{fill:#ffcea9;}.cls-25{fill:#ededed;}.cls-26{fill:#38226d;}.cls-27{fill:#9c73ff;}.cls-28{fill:#f4f4f4;}.cls-29{fill:#3a2c6d;}.cls-30{isolation:isolate;}</style>
 64 |   </defs>
 65 |   <title
 66 |      id="title837">Startup_SVG</title>
 67 |   <ellipse
 68 |      class="cls-1"
 69 |      cx="71.93"
 70 |      cy="278.63998"
 71 |      rx="71.93"
 72 |      ry="41.529999"
 73 |      id="ellipse839" />
 74 |   <ellipse
 75 |      class="cls-2"
 76 |      cx="71.099998"
 77 |      cy="274.19998"
 78 |      rx="40.119999"
 79 |      ry="23.16"
 80 |      id="ellipse841" />
 81 |   <ellipse
 82 |      class="cls-3"
 83 |      cx="70.719994"
 84 |      cy="265.97998"
 85 |      rx="11.44"
 86 |      ry="6.6100001"
 87 |      id="ellipse843" />
 88 |   <rect
 89 |      class="cls-3"
 90 |      x="60.099998"
 91 |      y="178.34999"
 92 |      width="22"
 93 |      height="86.459999"
 94 |      id="rect845" />
 95 |   <path
 96 |      class="cls-4"
 97 |      d="m 59.05,177.37 c 0,0 0.6,17.78 13.25,49.67 0,0 11.78,-25.68 13,-50.21 1.22,-24.53 -26.25,0.54 -26.25,0.54 z"
 98 |      id="path847" />
 99 |   <polygon
100 |      class="cls-5"
101 |      points="10.85,219.48 46.25,201.73 52.69,156.37 43.6,153.81 14.33,182.29 "
102 |      id="polygon849"
103 |      transform="translate(-4.1748046e-7,-43.38)" />
104 |   <polygon
105 |      class="cls-5"
106 |      points="133.75,219.48 98.22,200.5 91.91,156.37 101,153.81 130.26,182.29 "
107 |      id="polygon851"
108 |      transform="translate(-4.1748046e-7,-43.38)" />
109 |   <ellipse
110 |      class="cls-6"
111 |      cx="71.93"
112 |      cy="173.46001"
113 |      rx="14.55"
114 |      ry="8.3999996"
115 |      id="ellipse853" />
116 |   <polygon
117 |      class="cls-7"
118 |      points="93.94,208.72 49.87,208.44 49.87,200.88 93.94,200.88 "
119 |      id="polygon855"
120 |      transform="translate(-4.1748046e-7,-43.38)" />
121 |   <ellipse
122 |      class="cls-8"
123 |      cx="71.93"
124 |      cy="165.05"
125 |      rx="22.059999"
126 |      ry="12.74"
127 |      id="ellipse857" />
128 |   <ellipse
129 |      class="cls-3"
130 |      cx="71.859993"
131 |      cy="153.58002"
132 |      rx="26.92"
133 |      ry="15.54"
134 |      id="ellipse859" />
135 |   <path
136 |      class="cls-3"
137 |      d="m 100.93,125.9 v 0 c 0,1.25 0,2.48 0,3.67 0,1 0,2 0,3 0,1.35 -0.07,2.64 -0.12,3.9 0,1.57 -0.11,3.06 -0.19,4.49 -0.08,1.43 -0.15,2.83 -0.25,4.12 -0.41,5.65 -0.4,9.81 -1.1,11.44 -3.81,8.91 -16.7,4 -23.06,0.8 -2.35,-1.18 -3.9,-2.15 -3.9,-2.15 h -0.58 c 0,0 -1.84,1 -4.56,2.08 -6.67,2.7 -18.56,7.49 -21.86,-0.76 -0.59,-1.48 -1.18,-6.37 -1.54,-11.49 -0.09,-1.28 -0.17,-2.65 -0.25,-4.11 -0.08,-1.46 -0.12,-2.91 -0.18,-4.48 0,-1.25 -0.07,-2.54 -0.1,-3.88 0,-1 0,-2 0,-3 v 0 -3.67 0 C 43.06,90.969997 46.76,35.179997 58.98,11.369997 c 3.53,-6.89 7.77,-11.1 12.84,-11.349997 v 0 h 0.58 v 0 c 5,0.259997 9.29,4.449997 12.83,11.309997 12.05,23.72 15.92,79.29 15.7,114.570003 z"
138 |      id="path861" />
139 |   <circle
140 |      class="cls-8"
141 |      cx="71.999992"
142 |      cy="82.829994"
143 |      r="10.17"
144 |      id="circle863" />
145 |   <path
146 |      class="cls-6"
147 |      d="m 72,94.489997 a 11.66,11.66 0 1 1 11.66,-11.66 11.67,11.67 0 0 1 -11.66,11.66 z m 0,-20.34 a 8.68,8.68 0 1 0 8.67,8.68 8.69,8.69 0 0 0 -8.67,-8.68 z"
148 |      id="path865" />
149 |   <circle
150 |      class="cls-8"
151 |      cx="71.999992"
152 |      cy="52.930004"
153 |      r="10.17"
154 |      id="circle867" />
155 |   <path
156 |      class="cls-6"
157 |      d="m 72,64.619997 a 11.67,11.67 0 1 1 11.66,-11.69 11.68,11.68 0 0 1 -11.66,11.69 z m 0,-20.34 a 8.68,8.68 0 1 0 8.67,8.67 8.68,8.68 0 0 0 -8.67,-8.69 z"
158 |      id="path869" />
159 |   <path
160 |      class="cls-5"
161 |      d="m 100.93,146.01 c 0,1.25 0,2.48 0,3.68 -1,3.3 -3.72,6.46 -8.14,9 -11.46,6.61 -30,6.61 -41.51,0 -4.42,-2.56 -7.13,-5.73 -8.14,-9 v 0 -3.67 c 1,3.36 3.7,6.56 8.18,9.15 11.47,6.61 30.05,6.61 41.51,0 4.39,-2.62 7.17,-5.81 8.1,-9.16 z"
162 |      id="path871" />
163 |   <path
164 |      class="cls-5"
165 |      d="m 100.84,151.83 c 0,1.34 -0.07,2.64 -0.12,3.89 -1.12,3.12 -3.78,6.09 -8,8.51 -11.46,6.62 -30,6.62 -41.51,0 -4.21,-2.44 -6.87,-5.43 -8,-8.57 0,-1.25 -0.07,-2.54 -0.1,-3.87 1,3.26 3.73,6.36 8.09,8.88 11.47,6.62 30.05,6.62 41.51,0 4.38,-2.51 7.08,-5.6 8.13,-8.84 z"
166 |      id="path873" />
167 |   <path
168 |      class="cls-5"
169 |      d="m 100.53,141.5 c -1.21,2.93 -3.81,5.72 -7.78,8 -11.46,6.61 -30,6.61 -41.51,0 -4,-2.32 -6.6,-5.12 -7.81,-8.08 0.08,1.46 0.16,2.83 0.25,4.11 1.29,2.76 3.81,5.36 7.56,7.53 a 39.77,39.77 0 0 0 15.84,4.72 50.54,50.54 0 0 0 9,0.07 40.14,40.14 0 0 0 16.63,-4.79 c 3.72,-2.15 6.23,-4.72 7.53,-7.45 z"
170 |      id="path875" />
171 |   <path
172 |      class="cls-5"
173 |      d="m 85.05,11.349997 a 44.73,44.73 0 0 1 -26.25,0 C 62.33,4.459997 66.57,0.249997 71.64,0 v 0 h 0.58 v 0 c 5.05,0.299997 9.29,4.489997 12.83,11.349997 z"
174 |      id="path877" />
175 |   <rect
176 |      class="cls-5"
177 |      x="69.799995"
178 |      y="110.51"
179 |      width="4.9899998"
180 |      height="65.510002"
181 |      id="rect879" />
182 | </svg>
183 | 
```

--------------------------------------------------------------------------------
/docs/docs/tools/index.mdx:
--------------------------------------------------------------------------------

```markdown
  1 | # Tools
  2 | 
  3 | The server currently offers 16 tools organized into 3 categories:
  4 | 
  5 | ## Server Management Tools (3 tools)
  6 | 
  7 | #### 1. `list_files`
  8 | 
  9 | - List all files and directories in the Jupyter server's file system.
 10 | - This tool recursively lists files and directories from the Jupyter server's content API, showing the complete file structure including notebooks, data files, scripts, and directories.
 11 | - Input:
 12 |   - `path`(string, optional): The starting path to list from (empty string means root directory)
 13 |   - `max_depth`(int, optional): Maximum depth to recurse into subdirectories (default: 3)
 14 | - Returns: Tab-separated table with columns: Path, Type, Size, Last_Modified
 15 |   - **Path**: Full path to the file or directory
 16 |   - **Type**: File type ("file", "directory", "notebook", or "error" if inaccessible)
 17 |   - **Size**: File size formatted as B, KB, or MB (empty for directories)
 18 |   - **Last_Modified**: Last modification timestamp in YYYY-MM-DD HH:MM:SS format
 19 | 
 20 | #### 2. `list_kernels`
 21 | 
 22 | - List all available kernels in the Jupyter server.
 23 | - This tool shows all running and available kernel sessions on the Jupyter server, including their IDs, names, states, connection information, and kernel specifications. Useful for monitoring kernel resources and identifying specific kernels for connection.
 24 | - Input: None
 25 | - Returns: Tab-separated table with columns: ID, Name, Display_Name, Language, State, Connections, Last_Activity, Environment
 26 |   - **ID**: Unique kernel identifier
 27 |   - **Name**: Kernel name/type (e.g., "python3", "ir", etc.)
 28 |   - **Display_Name**: Human-readable kernel name from kernel spec
 29 |   - **Language**: Programming language supported by the kernel
 30 |   - **State**: Current execution state ("idle", "busy", "unknown")
 31 |   - **Connections**: Number of active connections to this kernel
 32 |   - **Last_Activity**: Timestamp of last kernel activity in YYYY-MM-DD HH:MM:SS format
 33 |   - **Environment**: Environment variables defined in the kernel spec (truncated if long)
 34 | 
 35 | #### 3. `assign_kernel_to_notebook`
 36 | 
 37 | - Assign a kernel to a notebook by creating a Jupyter session.
 38 | - This creates a Jupyter server session that connects a notebook file to a kernel, enabling code execution in the notebook. Sessions are the mechanism Jupyter uses to maintain the relationship between notebooks and their kernels.
 39 | - Input:
 40 |   - `notebook_path`(string): Path to the notebook file, relative to the Jupyter server root (e.g. "notebook.ipynb")
 41 |   - `kernel_id`(string): ID of the kernel to assign to the notebook
 42 |   - `session_name`(string, optional): Optional name for the session (defaults to notebook path)
 43 | - Returns: Success message with session information including session ID
 44 | 
 45 | ## Multi-Notebook Management Tools (4 tools)
 46 | 
 47 | #### 4. `use_notebook`
 48 | 
 49 | - Connect to a notebook file or create a new one.
 50 | - Input:
 51 |   - `notebook_name`(string): Unique identifier for the notebook
 52 |   - `notebook_path`(string, optional): Path to the notebook file, relative to the Jupyter server root (e.g. "notebook.ipynb"). If not provided, switches to an already-connected notebook with the given name.
 53 |   - `mode`(string): "connect" to connect to existing, "create" to create new (default: "connect")
 54 |   - `kernel_id`(string, optional): Specific kernel ID to use (optional, will create new if not provided)
 55 | - Returns: Success message with notebook information
 56 | 
 57 | #### 5. `list_notebooks`
 58 | 
 59 | - List all notebooks in the Jupyter server (including subdirectories) and show which ones are managed by the notebook manager. To interact with a notebook, it has to be "managed". If a notebook is not managed, you can connect to it using the `use_notebook` tool.
 60 | - Input: None
 61 | - Returns: TSV formatted table with notebook information (Path, Managed, Name, Status, Current)
 62 |   - **Path**: Relative path to the notebook file in the Jupyter server
 63 |   - **Managed**: "Yes" if the notebook is currently managed by the MCP server, "No" otherwise
 64 |   - **Name**: Unique identifier for managed notebooks, "-" for unmanaged notebooks
 65 |   - **Status**: Kernel status for managed notebooks ("alive", "dead", etc.), "-" for unmanaged notebooks
 66 |   - **Current**: "✓" if this is the currently active managed notebook, empty otherwise
 67 | 
 68 | #### 6. `restart_notebook`
 69 | 
 70 | - Restart the kernel for a specific notebook.
 71 | - Input:
 72 |   - `notebook_name`(string): Notebook identifier to restart
 73 | - Returns: Success message
 74 | 
 75 | #### 7. `unuse_notebook`
 76 | 
 77 | - Stop using a specific notebook and release its resources.
 78 | - Input:
 79 |   - `notebook_name`(string): Notebook identifier to disconnect
 80 | - Returns: Success message
 81 | 
 82 | ## Cell Tools (9 tools)
 83 | 
 84 | #### 8. `insert_cell`
 85 | 
 86 | - Insert a cell at a specified position with a unified API.
 87 | - Input:
 88 |   - `cell_index`(int): Target index for insertion (0-based). Use -1 to append at end.
 89 |   - `cell_type`(string): Type of cell to insert ("code" or "markdown").
 90 |   - `cell_source`(string): Source content for the cell.
 91 | - Returns: Success message and the structure of its surrounding cells (up to 5 cells above and 5 cells below).
 92 | 
 93 | #### 9. `insert_execute_code_cell`
 94 | 
 95 | - Insert and execute a code cell in a Jupyter notebook.
 96 | - Input:
 97 |   - `cell_index`(int): Index of the cell to insert (0-based). Use -1 to append at end and execute.
 98 |   - `cell_source`(string): Code source.
 99 | - Returns: List of outputs from the executed cell (supports multimodal output including images).
100 | 
101 | #### 10. `delete_cell`
102 | 
103 | - Delete a specific cell from the notebook.
104 | - Input:
105 |   - `cell_index`(int): Index of the cell to delete (0-based).
106 | - Returns: Success message.
107 | 
108 | #### 11. `read_cell`
109 | 
110 | - Read a specific cell from the notebook.
111 | - Input:
112 |   - `cell_index`(int): Index of the cell to read (0-based).
113 | - Returns: Dictionary with cell index, type, source, and outputs (for code cells).
114 | 
115 | #### 12. `read_cells`
116 | 
117 | - Read all cells from the notebook.
118 | - Returns: List of cell information including index, type, source, and outputs (for code cells).
119 | 
120 | #### 13. `list_cells`
121 | 
122 | - List the basic information of all cells in the notebook.
123 | - Returns a formatted table showing the index, type, execution count (for code cells), and first line of each cell.
124 | - Provides a quick overview of the notebook structure and is useful for locating specific cells for operations.
125 | - Input: None
126 | - Returns: Formatted table string with cell information (Index, Type, Count, First Line).
127 | 
128 | #### 14. `overwrite_cell_source`
129 | 
130 | - Overwrite the source of an existing cell.
131 | - Input:
132 |   - `cell_index`(int): Index of the cell to overwrite (0-based).
133 |   - `cell_source`(string): New cell source - must match existing cell type.
134 | - Returns: Success message with a diff-style summary of the change.
135 | 
136 | #### 15. `execute_cell`
137 | 
138 | - Execute a cell with configurable timeout and optional streaming progress updates.
139 | - Input:
140 |   - `cell_index`: Index of the cell to execute (0-based)
141 |   - `timeout_seconds`: Maximum time to wait for execution (default: 300s)
142 |   - `stream`: Enable streaming progress updates for long-running cells (default: False)
143 |   - `progress_interval`: Seconds between progress updates when stream=True (default: 5s)
144 | - Returns:
145 |   - `list[Union[str, ImageContent]]`: List of outputs from the executed cell (supports multimodal output including images)
146 | - Use `stream=False` for short-running cells (more reliable)
147 | - Use `stream=True` for long-running cells (provides real-time feedback)
148 | 
149 | #### 16. `execute_ipython`
150 | 
151 | - Execute IPython code directly in the kernel on the current active notebook.
152 | - This powerful tool supports:
153 |   1. Magic commands (e.g., %timeit, %who, %load, %run, %matplotlib)
154 |   2. Shell commands (e.g., !pip install, !ls, !cat)
155 |   3. Python code (e.g., print(df.head()), df.info())
156 | - Use cases:
157 |   - Performance profiling and debugging
158 |   - Environment exploration and package management
159 |   - Variable inspection and data analysis
160 |   - File system operations on Jupyter server
161 |   - Temporary calculations and quick tests
162 | - Input:
163 |   - `code`(string): IPython code to execute (supports magic commands, shell commands with !, and Python code)
164 |   - `timeout`(int): Execution timeout in seconds (default: 60s)
165 | - Returns:
166 |   - `list[Union[str, ImageContent]]`: List of outputs from the executed code (supports multimodal output including images)
167 | 
```
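
To make the catalogue above concrete, here is a short, hypothetical client session that chains three of the tools over streamable HTTP. It assumes the MCP Python SDK's `streamablehttp_client`; the endpoint URL, port, and notebook path are placeholders.

```python
# Hypothetical MCP client flow: discover notebooks, manage one, execute a cell.
import asyncio

from mcp import ClientSession
from mcp.client.streamable_http import streamablehttp_client


async def main() -> None:
    # Placeholder endpoint; point this at your running Jupyter MCP Server.
    async with streamablehttp_client("http://localhost:4040/mcp") as (read, write, _):
        async with ClientSession(read, write) as session:
            await session.initialize()

            print(await session.call_tool("list_notebooks", {}))

            await session.call_tool(
                "use_notebook",
                {"notebook_name": "demo", "notebook_path": "notebook.ipynb", "mode": "connect"},
            )

            result = await session.call_tool(
                "insert_execute_code_cell",
                {"cell_index": -1, "cell_source": "print('hello from MCP')"},
            )
            print(result)


asyncio.run(main())
```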

--------------------------------------------------------------------------------
/jupyter_mcp_server/tools/execute_ipython_tool.py:
--------------------------------------------------------------------------------

```python
  1 | # Copyright (c) 2023-2024 Datalayer, Inc.
  2 | #
  3 | # BSD 3-Clause License
  4 | 
  5 | """Execute IPython code directly in kernel tool."""
  6 | 
  7 | import asyncio
  8 | import logging
  9 | from typing import Union
 10 | 
 11 | from mcp.types import ImageContent
 12 | 
 13 | from jupyter_mcp_server.tools._base import BaseTool, ServerMode
 14 | from jupyter_mcp_server.notebook_manager import NotebookManager
 15 | 
 16 | logger = logging.getLogger(__name__)
 17 | 
 18 | 
 19 | class ExecuteIpythonTool(BaseTool):
 20 |     """Execute IPython code directly in the kernel on the current active notebook.
 21 |     
 22 |     This powerful tool supports:
 23 |     1. Magic commands (e.g., %timeit, %who, %load, %run, %matplotlib)
 24 |     2. Shell commands (e.g., !pip install, !ls, !cat)
 25 |     3. Python code (e.g., print(df.head()), df.info())
 26 |     
 27 |     Use cases:
 28 |     - Performance profiling and debugging
 29 |     - Environment exploration and package management
 30 |     - Variable inspection and data analysis
 31 |     - File system operations on Jupyter server
 32 |     - Temporary calculations and quick tests
 33 |     """
 34 |     
 35 |     @property
 36 |     def name(self) -> str:
 37 |         return "execute_ipython"
 38 |     
 39 |     @property
 40 |     def description(self) -> str:
 41 |         return "Execute IPython code directly in the kernel (supports magic commands, shell commands, and Python code)"
 42 |     
 43 |     async def _execute_via_kernel_manager(
 44 |         self,
 45 |         kernel_manager,
 46 |         kernel_id: str,
 47 |         code: str,
 48 |         timeout: int,
 49 |         safe_extract_outputs_fn
 50 |     ) -> list[Union[str, ImageContent]]:
 51 |         """Execute code using kernel_manager (JUPYTER_SERVER mode).
 52 |         
 53 |         Uses execute_code_local which handles ZMQ message collection properly.
 54 |         """
 55 |         from jupyter_mcp_server.utils import execute_code_local
 56 |         
 57 |         # Get serverapp from kernel_manager
 58 |         serverapp = kernel_manager.parent
 59 |         
 60 |         # Use centralized execute_code_local function
 61 |         return await execute_code_local(
 62 |             serverapp=serverapp,
 63 |             notebook_path="",  # Not needed for execute_ipython
 64 |             code=code,
 65 |             kernel_id=kernel_id,
 66 |             timeout=timeout,
 67 |             logger=logger
 68 |         )
 69 |     
 70 |     async def _execute_via_notebook_manager(
 71 |         self,
 72 |         notebook_manager: NotebookManager,
 73 |         code: str,
 74 |         timeout: int,
 75 |         ensure_kernel_alive_fn,
 76 |         wait_for_kernel_idle_fn,
 77 |         safe_extract_outputs_fn
 78 |     ) -> list[Union[str, ImageContent]]:
 79 |         """Execute code using notebook_manager (MCP_SERVER mode - original logic)."""
 80 |         # Get current notebook name and kernel
 81 |         current_notebook = notebook_manager.get_current_notebook() or "default"
 82 |         kernel = notebook_manager.get_kernel(current_notebook)
 83 |         
 84 |         if not kernel:
 85 |             # Ensure kernel is alive
 86 |             kernel = ensure_kernel_alive_fn()
 87 |         
 88 |         # Wait for kernel to be idle before executing
 89 |         await wait_for_kernel_idle_fn(kernel, max_wait_seconds=30)
 90 |         
 91 |         logger.info(f"Executing IPython code (MCP_SERVER) with timeout {timeout}s: {code[:100]}...")
 92 |         
 93 |         try:
 94 |             # Execute code directly with kernel
 95 |             execution_task = asyncio.create_task(
 96 |                 asyncio.to_thread(kernel.execute, code)
 97 |             )
 98 |             
 99 |             # Wait for execution with timeout
100 |             try:
101 |                 outputs = await asyncio.wait_for(execution_task, timeout=timeout)
102 |             except asyncio.TimeoutError:
103 |                 execution_task.cancel()
104 |                 try:
105 |                     if kernel and hasattr(kernel, 'interrupt'):
106 |                         kernel.interrupt()
107 |                         logger.info("Sent interrupt signal to kernel due to timeout")
108 |                 except Exception as interrupt_err:
109 |                     logger.error(f"Failed to interrupt kernel: {interrupt_err}")
110 |                 
111 |                 return [f"[TIMEOUT ERROR: IPython execution exceeded {timeout} seconds and was interrupted]"]
112 |             
113 |             # Process and extract outputs
114 |             if outputs:
115 |                 result = safe_extract_outputs_fn(outputs['outputs'])
116 |                 logger.info(f"IPython execution completed successfully with {len(result)} outputs")
117 |                 return result
118 |             else:
119 |                 return ["[No output generated]"]
120 |                 
121 |         except Exception as e:
122 |             logger.error(f"Error executing IPython code: {e}")
123 |             return [f"[ERROR: {str(e)}]"]
124 |     
125 |     async def execute(
126 |         self,
127 |         mode: ServerMode,
128 |         server_client=None,
129 |         contents_manager=None,
130 |         kernel_manager=None,
131 |         kernel_spec_manager=None,
132 |         notebook_manager=None,
133 |         # Tool-specific parameters
134 |         code: str = None,
135 |         timeout: int = 60,
136 |         kernel_id: str = None,
137 |         ensure_kernel_alive_fn=None,
138 |         wait_for_kernel_idle_fn=None,
139 |         safe_extract_outputs_fn=None,
140 |         **kwargs
141 |     ) -> list[Union[str, ImageContent]]:
142 |         """Execute IPython code directly in the kernel.
143 |         
144 |         Args:
145 |             mode: Server mode (MCP_SERVER or JUPYTER_SERVER)
146 |             server_client: JupyterServerClient (not used)
147 |             contents_manager: Contents manager (not used)
148 |             kernel_manager: Kernel manager (for JUPYTER_SERVER mode)
149 |             kernel_spec_manager: Kernel spec manager (not used)
150 |             notebook_manager: Notebook manager (for MCP_SERVER mode)
151 |             code: IPython code to execute (supports magic commands, shell commands with !, and Python code)
152 |             timeout: Execution timeout in seconds (default: 60s)
153 |             kernel_id: Kernel ID (for JUPYTER_SERVER mode)
154 |             ensure_kernel_alive_fn: Function to ensure kernel is alive (for MCP_SERVER mode)
155 |             wait_for_kernel_idle_fn: Function to wait for kernel idle state (for MCP_SERVER mode)
156 |             safe_extract_outputs_fn: Function to safely extract outputs
157 |             
158 |         Returns:
159 |             List of outputs from the executed code
160 |         """
161 |         if safe_extract_outputs_fn is None:
162 |             raise ValueError("safe_extract_outputs_fn is required")
163 |         
164 |         # JUPYTER_SERVER mode: Use kernel_manager directly
165 |         if mode == ServerMode.JUPYTER_SERVER and kernel_manager is not None:
166 |             if kernel_id is None:
167 |                 # Try to get kernel_id from context
168 |                 from jupyter_mcp_server.utils import get_current_notebook_context
169 |                 _, kernel_id = get_current_notebook_context(notebook_manager)
170 |             
171 |             if kernel_id is None:
172 |                 # No kernel available - start a new one on demand
173 |                 logger.info("No kernel_id available, starting new kernel for execute_ipython")
174 |                 kernel_id = await kernel_manager.start_kernel()
175 |                 
176 |                 # Store the kernel in notebook_manager if available
177 |                 if notebook_manager is not None:
178 |                     default_notebook = "default"
179 |                     kernel_info = {"id": kernel_id}
180 |                     notebook_manager.add_notebook(
181 |                         default_notebook,
182 |                         kernel_info,
183 |                         server_url="local",
184 |                         token=None,
185 |                         path="notebook.ipynb"  # Placeholder path
186 |                     )
187 |                     notebook_manager.set_current_notebook(default_notebook)
188 |             
189 |             logger.info(f"Executing IPython in JUPYTER_SERVER mode with kernel_id={kernel_id}")
190 |             return await self._execute_via_kernel_manager(
191 |                 kernel_manager=kernel_manager,
192 |                 kernel_id=kernel_id,
193 |                 code=code,
194 |                 timeout=timeout,
195 |                 safe_extract_outputs_fn=safe_extract_outputs_fn
196 |             )
197 |         
198 |         # MCP_SERVER mode: Use notebook_manager (original behavior)
199 |         elif mode == ServerMode.MCP_SERVER and notebook_manager is not None:
200 |             if ensure_kernel_alive_fn is None:
201 |                 raise ValueError("ensure_kernel_alive_fn is required for MCP_SERVER mode")
202 |             if wait_for_kernel_idle_fn is None:
203 |                 raise ValueError("wait_for_kernel_idle_fn is required for MCP_SERVER mode")
204 |             
205 |             logger.info("Executing IPython in MCP_SERVER mode")
206 |             return await self._execute_via_notebook_manager(
207 |                 notebook_manager=notebook_manager,
208 |                 code=code,
209 |                 timeout=timeout,
210 |                 ensure_kernel_alive_fn=ensure_kernel_alive_fn,
211 |                 wait_for_kernel_idle_fn=wait_for_kernel_idle_fn,
212 |                 safe_extract_outputs_fn=safe_extract_outputs_fn
213 |             )
214 |         
215 |         else:
216 |             return ["[ERROR: Invalid mode or missing required managers]"]
217 | 
218 | 
```
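
As a concrete illustration of the three payload categories named in the docstring above, any of the following snippets could be passed as the tool's `code` argument (the shell example assumes a Unix-like server environment):

```python
# Magic command: quick micro-benchmark of a Python expression
%timeit sum(range(10_000))

# Shell command: inspect the installed packages on the Jupyter server
!pip list | head -n 5

# Plain Python: runtime inspection
import sys
print(sys.version)
```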

--------------------------------------------------------------------------------
/jupyter_mcp_server/tools/delete_cell_tool.py:
--------------------------------------------------------------------------------

```python
  1 | # Copyright (c) 2023-2024 Datalayer, Inc.
  2 | #
  3 | # BSD 3-Clause License
  4 | 
  5 | """Delete cell tool implementation."""
  6 | 
  7 | from typing import Any, Optional
  8 | from pathlib import Path
  9 | import nbformat
 10 | from jupyter_server_api import JupyterServerClient
 11 | from jupyter_mcp_server.tools._base import BaseTool, ServerMode
 12 | from jupyter_mcp_server.notebook_manager import NotebookManager
 13 | from jupyter_mcp_server.utils import get_current_notebook_context
 14 | 
 15 | 
 16 | class DeleteCellTool(BaseTool):
 17 |     """Tool to delete a specific cell from a notebook."""
 18 |     
 19 |     @property
 20 |     def name(self) -> str:
 21 |         return "delete_cell"
 22 |     
 23 |     @property
 24 |     def description(self) -> str:
 25 |         return """Delete a specific cell from the Jupyter notebook.
 26 |     
 27 | Args:
 28 |     cell_index: Index of the cell to delete (0-based)
 29 |     
 30 | Returns:
 31 |     str: Success message"""
 32 |     
 33 |     async def _get_jupyter_ydoc(self, serverapp: Any, file_id: str):
 34 |         """Get the YNotebook document if it's currently open in a collaborative session.
 35 |         
 36 |         This follows the jupyter_ai_tools pattern of accessing YDoc through the
 37 |         yroom_manager when the notebook is actively being edited.
 38 |         
 39 |         Args:
 40 |             serverapp: The Jupyter ServerApp instance
 41 |             file_id: The file ID for the document
 42 |             
 43 |         Returns:
 44 |             YNotebook instance or None if not in a collaborative session
 45 |         """
 46 |         try:
 47 |             yroom_manager = serverapp.web_app.settings.get("yroom_manager")
 48 |             if yroom_manager is None:
 49 |                 return None
 50 |                 
 51 |             room_id = f"json:notebook:{file_id}"
 52 |             
 53 |             if yroom_manager.has_room(room_id):
 54 |                 yroom = yroom_manager.get_room(room_id)
 55 |                 notebook = await yroom.get_jupyter_ydoc()
 56 |                 return notebook
 57 |         except Exception:
 58 |             # YDoc not available, will fall back to file operations
 59 |             pass
 60 |         
 61 |         return None
 62 |     
 63 |     def _get_cell_index_from_id(self, ydoc, cell_id: str) -> Optional[int]:
 64 |         """Find cell index by cell ID in YDoc."""
 65 |         for i, ycell in enumerate(ydoc.ycells):
 66 |             if ycell.get("id") == cell_id:
 67 |                 return i
 68 |         return None
 69 |     
 70 |     async def _delete_cell_ydoc(
 71 |         self,
 72 |         serverapp: Any,
 73 |         notebook_path: str,
 74 |         cell_index: int
 75 |     ) -> str:
 76 |         """Delete cell using YDoc (collaborative editing mode).
 77 |         
 78 |         Args:
 79 |             serverapp: Jupyter ServerApp instance
 80 |             notebook_path: Path to the notebook
 81 |             cell_index: Index of cell to delete
 82 |             
 83 |         Returns:
 84 |             Success message
 85 |         """
 86 |         # Get file_id from file_id_manager
 87 |         file_id_manager = serverapp.web_app.settings.get("file_id_manager")
 88 |         if file_id_manager is None:
 89 |             raise RuntimeError("file_id_manager not available in serverapp")
 90 |         
 91 |         file_id = file_id_manager.get_id(notebook_path)
 92 |         
 93 |         # Try to get YDoc
 94 |         ydoc = await self._get_jupyter_ydoc(serverapp, file_id)
 95 |         
 96 |         if ydoc:
 97 |             # Notebook is open in collaborative mode, use YDoc
 98 |             if cell_index < 0 or cell_index >= len(ydoc.ycells):
 99 |                 raise ValueError(
100 |                     f"Cell index {cell_index} is out of range. Notebook has {len(ydoc.ycells)} cells."
101 |                 )
102 |             
103 |             cell_type = ydoc.ycells[cell_index].get("cell_type", "unknown")
104 |             
105 |             # Delete the cell from YDoc
106 |             del ydoc.ycells[cell_index]
107 |             
108 |             return f"Cell {cell_index} ({cell_type}) deleted successfully."
109 |         else:
110 |             # YDoc not available, use file operations
111 |             return await self._delete_cell_file(notebook_path, cell_index)
112 |     
113 |     async def _delete_cell_file(
114 |         self,
115 |         notebook_path: str,
116 |         cell_index: int
117 |     ) -> str:
118 |         """Delete cell using file operations (non-collaborative mode).
119 |         
120 |         Args:
121 |             notebook_path: Absolute path to the notebook
122 |             cell_index: Index of cell to delete
123 |             
124 |         Returns:
125 |             Success message
126 |         """
127 |         # Read notebook file as version 4 for consistency
128 |         with open(notebook_path, "r", encoding="utf-8") as f:
129 |             notebook = nbformat.read(f, as_version=4)
130 |         
131 |         # Clean transient fields from outputs
132 |         from jupyter_mcp_server.utils import _clean_notebook_outputs
133 |         _clean_notebook_outputs(notebook)
134 |         
135 |         # Validate index
136 |         if cell_index < 0 or cell_index >= len(notebook.cells):
137 |             raise ValueError(
138 |                 f"Cell index {cell_index} is out of range. Notebook has {len(notebook.cells)} cells."
139 |             )
140 |         
141 |         cell_type = notebook.cells[cell_index].cell_type
142 |         
143 |         # Delete the cell
144 |         notebook.cells.pop(cell_index)
145 |         
146 |         # Write back to file
147 |         with open(notebook_path, "w", encoding="utf-8") as f:
148 |             nbformat.write(notebook, f)
149 |         
150 |         return f"Cell {cell_index} ({cell_type}) deleted successfully."
151 |     
152 |     async def _delete_cell_websocket(
153 |         self,
154 |         notebook_manager: NotebookManager,
155 |         cell_index: int
156 |     ) -> str:
157 |         """Delete cell using WebSocket connection (MCP_SERVER mode).
158 |         
159 |         Args:
160 |             notebook_manager: Notebook manager instance
161 |             cell_index: Index of cell to delete
162 |             
163 |         Returns:
164 |             Success message
165 |         """
166 |         async with notebook_manager.get_current_connection() as notebook:
167 |             if cell_index < 0 or cell_index >= len(notebook):
168 |                 raise ValueError(
169 |                     f"Cell index {cell_index} is out of range. Notebook has {len(notebook)} cells."
170 |                 )
171 | 
172 |             deleted_content = notebook.delete_cell(cell_index)
173 |             return f"Cell {cell_index} ({deleted_content['cell_type']}) deleted successfully."
174 |     
175 |     async def execute(
176 |         self,
177 |         mode: ServerMode,
178 |         server_client: Optional[JupyterServerClient] = None,
179 |         kernel_client: Optional[Any] = None,
180 |         contents_manager: Optional[Any] = None,
181 |         kernel_manager: Optional[Any] = None,
182 |         kernel_spec_manager: Optional[Any] = None,
183 |         notebook_manager: Optional[NotebookManager] = None,
184 |         # Tool-specific parameters
185 |         cell_index: Optional[int] = None,
186 |         **kwargs
187 |     ) -> str:
188 |         """Execute the delete_cell tool.
189 |         
190 |         This tool supports three modes of operation:
191 |         
192 |         1. JUPYTER_SERVER mode with YDoc (collaborative):
193 |            - Checks if notebook is open in a collaborative session
194 |            - Uses YDoc for real-time collaborative editing
195 |            - Changes are immediately visible to all connected users
196 |            
197 |         2. JUPYTER_SERVER mode without YDoc (file-based):
198 |            - Falls back to direct file operations using nbformat
199 |            - Suitable when notebook is not actively being edited
200 |            
201 |         3. MCP_SERVER mode (WebSocket):
202 |            - Uses WebSocket connection to remote Jupyter server
203 |            - Accesses YDoc through NbModelClient
204 |         
205 |         Args:
206 |             mode: Server mode (MCP_SERVER or JUPYTER_SERVER)
207 |             server_client: HTTP client for MCP_SERVER mode
208 |             contents_manager: Direct API access for JUPYTER_SERVER mode
209 |             notebook_manager: Notebook manager instance
210 |             cell_index: Index of the cell to delete (0-based)
211 |             **kwargs: Additional parameters
212 |             
213 |         Returns:
214 |             Success message
215 |         """
216 |         if mode == ServerMode.JUPYTER_SERVER and contents_manager is not None:
217 |             # JUPYTER_SERVER mode: Try YDoc first, fall back to file operations
218 |             from jupyter_mcp_server.jupyter_extension.context import get_server_context
219 |             
220 |             context = get_server_context()
221 |             serverapp = context.serverapp
222 |             notebook_path, _ = get_current_notebook_context(notebook_manager)
223 |             
224 |             # Resolve to absolute path
225 |             if serverapp and not Path(notebook_path).is_absolute():
226 |                 root_dir = serverapp.root_dir
227 |                 notebook_path = str(Path(root_dir) / notebook_path)
228 |             
229 |             if serverapp:
230 |                 # Try YDoc approach first
231 |                 return await self._delete_cell_ydoc(serverapp, notebook_path, cell_index)
232 |             else:
233 |                 # Fall back to file operations
234 |                 return await self._delete_cell_file(notebook_path, cell_index)
235 |                 
236 |         elif mode == ServerMode.MCP_SERVER and notebook_manager is not None:
237 |             # MCP_SERVER mode: Use WebSocket connection
238 |             return await self._delete_cell_websocket(notebook_manager, cell_index)
239 |         else:
240 |             raise ValueError(f"Invalid mode or missing required clients: mode={mode}")
241 | 
```
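
A minimal usage sketch for the tool above in MCP_SERVER mode, assuming a `NotebookManager` that already holds a current notebook connection; in practice the tool is dispatched through the MCP tool registry rather than called directly like this:

```python
# Illustrative sketch only -- names other than those imported below are assumptions.
from jupyter_mcp_server.notebook_manager import NotebookManager
from jupyter_mcp_server.tools._base import ServerMode
from jupyter_mcp_server.tools.delete_cell_tool import DeleteCellTool


async def delete_third_cell(notebook_manager: NotebookManager) -> str:
    tool = DeleteCellTool()
    # MCP_SERVER mode routes the deletion over the WebSocket/YDoc connection
    # held by the notebook manager.
    return await tool.execute(
        mode=ServerMode.MCP_SERVER,
        notebook_manager=notebook_manager,
        cell_index=2,
    )

# From an async context: await delete_third_cell(manager), assuming a configured manager.
```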

--------------------------------------------------------------------------------
/jupyter_mcp_server/tools/list_kernels_tool.py:
--------------------------------------------------------------------------------

```python
  1 | # Copyright (c) 2023-2024 Datalayer, Inc.
  2 | #
  3 | # BSD 3-Clause License
  4 | 
  5 | """List all available kernels tool."""
  6 | 
  7 | from typing import Any, Optional, List, Dict
  8 | from jupyter_server_api import JupyterServerClient
  9 | 
 10 | from jupyter_mcp_server.tools._base import BaseTool, ServerMode
 11 | from jupyter_mcp_server.utils import format_TSV
 12 | 
 13 | 
 14 | class ListKernelsTool(BaseTool):
 15 |     """List all available kernels in the Jupyter server.
 16 |     
 17 |     This tool shows all running and available kernel sessions on the Jupyter server,
 18 |     including their IDs, names, states, connection information, and kernel specifications.
 19 |     Useful for monitoring kernel resources and identifying specific kernels for connection.
 20 |     """
 21 |     
 22 |     @property
 23 |     def name(self) -> str:
 24 |         return "list_kernels"
 25 |     
 26 |     @property
 27 |     def description(self) -> str:
 28 |         return "List all available kernels in the Jupyter server"
 29 |     
 30 |     def _list_kernels_http(self, server_client: JupyterServerClient) -> List[Dict[str, str]]:
 31 |         """List kernels using HTTP API (MCP_SERVER mode)."""
 32 |         try:
 33 |             # Get all kernels from the Jupyter server
 34 |             kernels = server_client.kernels.list_kernels()
 35 |             
 36 |             if not kernels:
 37 |                 return []
 38 |             
 39 |             # Get kernel specifications for additional details
 40 |             kernels_specs = server_client.kernelspecs.list_kernelspecs()
 41 |             
 42 |             # Create enhanced kernel information list
 43 |             output = []
 44 |             for kernel in kernels:
 45 |                 kernel_info = {
 46 |                     "id": kernel.id or "unknown",
 47 |                     "name": kernel.name or "unknown",
 48 |                     "state": "unknown",
 49 |                     "connections": "unknown", 
 50 |                     "last_activity": "unknown",
 51 |                     "display_name": "unknown",
 52 |                     "language": "unknown",
 53 |                     "env": "unknown"
 54 |                 }
 55 |                 
 56 |                 # Get kernel state - this might vary depending on the API version
 57 |                 if hasattr(kernel, 'execution_state'):
 58 |                     kernel_info["state"] = kernel.execution_state
 59 |                 elif hasattr(kernel, 'state'):
 60 |                     kernel_info["state"] = kernel.state
 61 |                 
 62 |                 # Get connection count
 63 |                 if hasattr(kernel, 'connections'):
 64 |                     kernel_info["connections"] = str(kernel.connections)
 65 |                 
 66 |                 # Get last activity
 67 |                 if hasattr(kernel, 'last_activity') and kernel.last_activity:
 68 |                     if hasattr(kernel.last_activity, 'strftime'):
 69 |                         kernel_info["last_activity"] = kernel.last_activity.strftime("%Y-%m-%d %H:%M:%S")
 70 |                     else:
 71 |                         kernel_info["last_activity"] = str(kernel.last_activity)
 72 |                 
 73 |                 output.append(kernel_info)
 74 |             
 75 |             # Enhance kernel info with specifications
 76 |             for kernel in output:
 77 |                 kernel_name = kernel["name"]
 78 |                 if hasattr(kernels_specs, 'kernelspecs') and kernel_name in kernels_specs.kernelspecs:
 79 |                     kernel_spec = kernels_specs.kernelspecs[kernel_name]
 80 |                     if hasattr(kernel_spec, 'spec'):
 81 |                         if hasattr(kernel_spec.spec, 'display_name'):
 82 |                             kernel["display_name"] = kernel_spec.spec.display_name
 83 |                         if hasattr(kernel_spec.spec, 'language'):
 84 |                             kernel["language"] = kernel_spec.spec.language
 85 |                         if hasattr(kernel_spec.spec, 'env'):
 86 |                             # Convert env dict to a readable string format
 87 |                             env_dict = kernel_spec.spec.env
 88 |                             if env_dict:
 89 |                                 env_str = "; ".join([f"{k}={v}" for k, v in env_dict.items()])
 90 |                                 kernel["env"] = env_str[:100] + "..." if len(env_str) > 100 else env_str
 91 |             
 92 |             return output
 93 |             
 94 |         except Exception as e:
 95 |             raise RuntimeError(f"Error listing kernels via HTTP: {str(e)}")
 96 |     
 97 |     async def _list_kernels_local(
 98 |         self, 
 99 |         kernel_manager: Any, 
100 |         kernel_spec_manager: Any
101 |     ) -> List[Dict[str, str]]:
102 |         """List kernels using local kernel_manager API (JUPYTER_SERVER mode)."""
103 |         try:
104 |             # Get all running kernels - list_kernels() returns dicts with kernel info
105 |             kernel_infos = list(kernel_manager.list_kernels())
106 |             
107 |             if not kernel_infos:
108 |                 return []
109 |             
110 |             # Get kernel specifications
111 |             kernel_specs = kernel_spec_manager.get_all_specs() if kernel_spec_manager else {}
112 |             
113 |             # Create enhanced kernel information list
114 |             output = []
115 |             for kernel_info_dict in kernel_infos:
116 |                 # kernel_info_dict is already a dict with kernel information
117 |                 kernel_id = kernel_info_dict.get('id', 'unknown')
118 |                 kernel_name = kernel_info_dict.get('name', 'unknown')
119 |                 
120 |                 kernel_info = {
121 |                     "id": kernel_id,
122 |                     "name": kernel_name,
123 |                     "state": kernel_info_dict.get('execution_state', 'unknown'),
124 |                     "connections": str(kernel_info_dict.get('connections', 'unknown')),
125 |                     "last_activity": "unknown",
126 |                     "display_name": "unknown",
127 |                     "language": "unknown",
128 |                     "env": "unknown"
129 |                 }
130 |                 
131 |                 # Format last activity if present
132 |                 last_activity = kernel_info_dict.get('last_activity')
133 |                 if last_activity:
134 |                     if hasattr(last_activity, 'strftime'):
135 |                         kernel_info["last_activity"] = last_activity.strftime("%Y-%m-%d %H:%M:%S")
136 |                     else:
137 |                         kernel_info["last_activity"] = str(last_activity)
138 |                 
139 |                 output.append(kernel_info)
140 |             
141 |             # Enhance kernel info with specifications
142 |             for kernel in output:
143 |                 kernel_name = kernel["name"]
144 |                 if kernel_name in kernel_specs:
145 |                     spec = kernel_specs[kernel_name].get('spec', {})
146 |                     if 'display_name' in spec:
147 |                         kernel["display_name"] = spec['display_name']
148 |                     if 'language' in spec:
149 |                         kernel["language"] = spec['language']
150 |                     if 'env' in spec and spec['env']:
151 |                         env_dict = spec['env']
152 |                         env_str = "; ".join([f"{k}={v}" for k, v in env_dict.items()])
153 |                         kernel["env"] = env_str[:100] + "..." if len(env_str) > 100 else env_str
154 |             
155 |             return output
156 |             
157 |         except Exception as e:
158 |             raise RuntimeError(f"Error listing kernels locally: {str(e)}")
159 |     
160 |     async def execute(
161 |         self,
162 |         mode: ServerMode,
163 |         server_client: Optional[JupyterServerClient] = None,
164 |         kernel_client: Optional[Any] = None,
165 |         contents_manager: Optional[Any] = None,
166 |         kernel_manager: Optional[Any] = None,
167 |         kernel_spec_manager: Optional[Any] = None,
168 |         **kwargs
169 |     ) -> str:
170 |         """List all available kernels.
171 |         
172 |         Args:
173 |             mode: Server mode (MCP_SERVER or JUPYTER_SERVER)
174 |             server_client: HTTP client for MCP_SERVER mode
175 |             kernel_manager: Direct kernel manager access for JUPYTER_SERVER mode
176 |             kernel_spec_manager: Kernel spec manager for JUPYTER_SERVER mode
177 |             **kwargs: Additional parameters (unused)
178 |             
179 |         Returns:
180 |             Tab-separated table with columns: ID, Name, Display_Name, Language, State, Connections, Last_Activity, Environment
181 |         """
182 |         # Get kernel info based on mode
183 |         if mode == ServerMode.JUPYTER_SERVER and kernel_manager is not None:
184 |             kernel_list = await self._list_kernels_local(kernel_manager, kernel_spec_manager)
185 |         elif mode == ServerMode.MCP_SERVER and server_client is not None:
186 |             kernel_list = self._list_kernels_http(server_client)
187 |         else:
188 |             raise ValueError(f"Invalid mode or missing required managers/clients: mode={mode}")
189 |         
190 |         if not kernel_list:
191 |             return "No kernels found on the Jupyter server."
192 |         
193 |         try:
194 |             # Create TSV formatted output
195 |             headers = ["ID", "Name", "Display_Name", "Language", "State", "Connections", "Last_Activity", "Environment"]
196 |             rows = []
197 |             
198 |             for kernel in kernel_list:
199 |                 rows.append([kernel['id'], kernel['name'], kernel['display_name'], kernel['language'], kernel['state'], kernel['connections'], kernel['last_activity'], kernel['env']])
200 |             
201 |             return format_TSV(headers, rows)
202 |             
203 |         except Exception as e:
204 |             return f"Error formatting kernel list: {str(e)}"
205 | 
206 | 
```
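
A short usage sketch for MCP_SERVER mode, assuming an already-authenticated `JupyterServerClient` instance; how that client is constructed depends on the `jupyter_server_api` package and is not shown here:

```python
# Sketch only: `server_client` is an already-constructed JupyterServerClient.
from jupyter_mcp_server.tools._base import ServerMode
from jupyter_mcp_server.tools.list_kernels_tool import ListKernelsTool


async def show_kernels(server_client) -> None:
    tool = ListKernelsTool()
    # MCP_SERVER mode queries the Jupyter REST API for kernels and kernelspecs.
    tsv = await tool.execute(mode=ServerMode.MCP_SERVER, server_client=server_client)
    # First row is the header: ID, Name, Display_Name, Language, State,
    # Connections, Last_Activity, Environment; one row per running kernel.
    print(tsv)
```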

--------------------------------------------------------------------------------
/.github/copilot-instructions.md:
--------------------------------------------------------------------------------

```markdown
  1 | # Jupyter MCP Server
  2 | 
  3 | **Always reference these instructions first and fall back to search or bash commands only when you encounter unexpected information that does not match the information here.**
  4 | 
  5 | Jupyter MCP Server is a Python-based Model Context Protocol (MCP) server implementation that enables real-time interaction with Jupyter Notebooks. The project uses a modern Python build system with hatch, and includes comprehensive testing, linting, and documentation.
  6 | 
  7 | ## Working Effectively
  8 | 
  9 | ### Environment Setup
 10 | - **Python Requirements**: Python 3.10 or higher (tested with 3.9-3.13)
 11 | - **Network Considerations**: PyPI installs may fail due to SSL certificate issues or timeout limitations. This is a known environment constraint.
 12 | 
 13 | ### Build and Install (CRITICAL: Network Limitations)
 14 | ```bash
 15 | # Standard installation (may fail with network issues)
 16 | pip install ".[test,lint,typing]"
 17 | 
 18 | # Alternative if pip install fails:
 19 | # 1. Install dependencies individually with longer timeouts
 20 | pip install --timeout=300 pytest
 21 | pip install --timeout=300 ruff  
 22 | pip install --timeout=300 mypy
 23 | 
 24 | # 2. Or use Docker approach (preferred for consistency)
 25 | docker build -t jupyter-mcp-server .
 26 | ```
 27 | 
 28 | **NETWORK TIMEOUT WARNING**: pip install commands may fail with SSL certificate errors or read timeouts when connecting to PyPI. If installs fail:
 29 | - Try increasing timeout: `pip install --timeout=300`
 30 | - Use Docker build which handles dependencies internally
 31 | - Document the network limitation in any testing notes
 32 | 
 33 | ### Core Development Commands
 34 | ```bash
 35 | # Development installation (when network allows)
 36 | make dev
 37 | # Equivalent to: pip install ".[test,lint,typing]"
 38 | 
 39 | # Basic installation  
 40 | make install
 41 | # Equivalent to: pip install .
 42 | 
 43 | # Build the package
 44 | make build
 45 | # Equivalent to: pip install build && python -m build .
 46 | ```
 47 | 
 48 | ### Testing (CRITICAL: Use Long Timeouts)
 49 | ```bash
 50 | # Run tests using hatch (when available)
 51 | make test
 52 | # Equivalent to: hatch test
 53 | 
 54 | # Run tests directly with pytest (when network allows install)
 55 | pytest .
 56 | 
 57 | # NEVER CANCEL: Test suite timing expectations
 58 | # - Full test suite: Allow 15-20 minutes minimum
 59 | # - Network-dependent tests may take longer
 60 | # - Set timeout to 30+ minutes for safety
 61 | ```
 62 | 
 63 | **VALIDATION REQUIREMENT**: When testing is not possible due to network issues, verify at minimum:
 64 | ```bash
 65 | # Syntax validation (always works)
 66 | python -m py_compile jupyter_mcp_server/server.py
 67 | find . -name "*.py" -exec python -m py_compile {} \;
 68 | 
 69 | # Import validation
 70 | PYTHONPATH=. python -c "import jupyter_mcp_server; print('Import successful')"
 71 | ```
 72 | 
 73 | ### Linting and Code Quality (CRITICAL: Use Long Timeouts)
 74 | ```bash
 75 | # Full linting pipeline (when network allows)
 76 | bash ./.github/workflows/lint.sh
 77 | 
 78 | # Individual linting commands:
 79 | pip install -e ".[lint,typing]"
 80 | mypy --install-types --non-interactive .  # May take 10+ minutes, NEVER CANCEL
 81 | ruff check .                              # Quick, usually <1 minute  
 82 | mdformat --check *.md                     # Quick, usually <1 minute
 83 | pipx run 'validate-pyproject[all]' pyproject.toml  # 2-3 minutes
 84 | 
 85 | # TIMING WARNING: mypy type checking can take 10+ minutes on first run
 86 | # Set timeout to 20+ minutes for mypy operations
 87 | ```
 88 | 
 89 | ### Running the Application
 90 | 
 91 | #### Local Development Mode
 92 | ```bash
 93 | # Start with streamable HTTP transport
 94 | make start
 95 | # Equivalent to:
 96 | jupyter-mcp-server start \
 97 |   --transport streamable-http \
 98 |   --document-url http://localhost:8888 \
 99 |   --document-id notebook.ipynb \
100 |   --document-token MY_TOKEN \
101 |   --runtime-url http://localhost:8888 \
102 |   --start-new-runtime true \
103 |   --runtime-token MY_TOKEN \
104 |   --port 4040
105 | ```
106 | 
107 | #### JupyterLab Setup (Required for Testing)
108 | ```bash
109 | # Start JupyterLab server for MCP integration
110 | make jupyterlab
111 | # Equivalent to:
112 | pip uninstall -y pycrdt datalayer_pycrdt
113 | pip install datalayer_pycrdt
114 | jupyter lab \
115 |   --port 8888 \
116 |   --ip 0.0.0.0 \
117 |   --ServerApp.root_dir ./dev/content \
118 |   --IdentityProvider.token MY_TOKEN
119 | ```
120 | 
121 | #### Docker Deployment
122 | ```bash
123 | # Build Docker image (NEVER CANCEL: Build takes 10-15 minutes)
124 | make build-docker  # Takes 10-15 minutes, set timeout to 20+ minutes
125 | 
126 | # Run with Docker  
127 | make start-docker
128 | # Or manually:
129 | docker run -i --rm \
130 |   -e DOCUMENT_URL=http://localhost:8888 \
131 |   -e DOCUMENT_ID=notebook.ipynb \
132 |   -e DOCUMENT_TOKEN=MY_TOKEN \
133 |   -e RUNTIME_URL=http://localhost:8888 \
134 |   -e START_NEW_RUNTIME=true \
135 |   -e RUNTIME_TOKEN=MY_TOKEN \
136 |   --network=host \
137 |   datalayer/jupyter-mcp-server:latest
138 | ```
139 | 
140 | ### Manual Validation Scenarios
141 | 
142 | **When full testing is not possible due to network constraints, always verify:**
143 | 
144 | 1. **Syntax and Import Validation**:
145 |    ```bash
146 |    # Validate all Python files compile
147 |    find . -name "*.py" -exec python -m py_compile {} \;
148 |    
149 |    # Test local imports work
150 |    PYTHONPATH=. python -c "import jupyter_mcp_server; print('SUCCESS')"
151 |    ```
152 | 
153 | 2. **Configuration Validation**:
154 |    ```bash
155 |    # Verify pyproject.toml is valid
156 |    python -c "import tomllib; tomllib.load(open('pyproject.toml', 'rb'))"
157 |    
158 |    # Test module structure
159 |    python -c "import jupyter_mcp_server.server, jupyter_mcp_server.models"
160 |    ```
161 | 
162 | 3. **Documentation Build** (when Node.js available):
163 |    ```bash
164 |    cd docs/
165 |    npm install  # May have network issues
166 |    npm run build  # 3-5 minutes, set timeout to 10+ minutes
167 |    ```
168 | 
169 | ## Project Structure and Navigation
170 | 
171 | ### Key Directories
172 | - **`jupyter_mcp_server/`**: Main Python package
173 |   - `server.py`: Core MCP server implementation with FastMCP integration
174 |   - `models.py`: Pydantic data models for document and runtime handling
175 |   - `utils.py`: Utility functions for output extraction and processing
176 |   - `tests/`: Unit tests (internal package tests)
177 | - **`tests/`**: Integration tests using pytest-asyncio
178 | - **`docs/`**: Docusaurus-based documentation site (Node.js/React)
179 | - **`dev/content/`**: Development Jupyter notebook files for testing
180 | - **`.github/workflows/`**: CI/CD pipeline definitions
181 | 
182 | ### Important Files
183 | - **`pyproject.toml`**: Build configuration, dependencies, and tool settings
184 | - **`Makefile`**: Development workflow automation
185 | - **`Dockerfile`**: Container build definition
186 | - **`.github/workflows/lint.sh`**: Linting pipeline script
187 | - **`pytest.ini`**: Test configuration
188 | 
189 | ### Frequently Modified Areas
190 | - **Server Logic**: `jupyter_mcp_server/server.py` - Main MCP server implementation
191 | - **Data Models**: `jupyter_mcp_server/models.py` - When adding new MCP tools or changing data structures
192 | - **Tests**: `tests/test_mcp.py` - Integration tests for MCP functionality
193 | - **Documentation**: `docs/src/` - When updating API documentation or user guides
194 | 
195 | ## Common Tasks and Gotchas
196 | 
197 | ### Adding New MCP Tools
198 | 1. Add tool definition in `jupyter_mcp_server/server.py`
199 | 2. Update models in `jupyter_mcp_server/models.py` if needed
200 | 3. Add tests in `tests/test_mcp.py`
201 | 4. Update documentation in `docs/`
202 | 
203 | ### Dependency Management
204 | - **Core deps**: Defined in `pyproject.toml` dependencies section
205 | - **Dev deps**: Use `[test,lint,typing]` optional dependencies
206 | - **Special handling**: `datalayer_pycrdt` has specific version requirements (0.12.17)
207 | 
208 | ### CI/CD Pipeline Expectations
209 | - **Build Matrix**: Tests run on Ubuntu, macOS, Windows with Python 3.9, 3.13
210 | - **Critical Timing**: Full CI pipeline takes 20-30 minutes
211 | - **Required Checks**: pytest, ruff, mypy, mdformat, pyproject validation
212 | 
213 | ### Environment Variables for Testing
214 | ```bash
215 | # Required for MCP server operation
216 | export DOCUMENT_URL="http://localhost:8888"
217 | export DOCUMENT_TOKEN="MY_TOKEN"
218 | export DOCUMENT_ID="notebook.ipynb"
219 | export RUNTIME_URL="http://localhost:8888"
220 | export RUNTIME_TOKEN="MY_TOKEN"
221 | ```
222 | 
223 | ## Network Limitations and Workarounds
224 | 
225 | **CRITICAL CONSTRAINT**: This development environment has limited PyPI connectivity with SSL certificate issues and timeout problems.
226 | 
227 | ### Known Working Commands
228 | ```bash
229 | # These always work (no network required):
230 | python -m py_compile <file>          # Syntax validation
231 | PYTHONPATH=. python -c "import ..."  # Import testing  
232 | python -c "import tomllib; ..."      # Config validation
233 | git operations                       # Version control
234 | docker build (when base images cached)
235 | ```
236 | 
237 | ### Commands That May Fail
238 | ```bash
239 | pip install <anything>               # Network timeouts/SSL issues
240 | npm install                          # Network limitations  
241 | mypy --install-types                 # Downloads type stubs
242 | hatch test                           # May need PyPI for dependencies
243 | ```
244 | 
245 | ### Required Workarounds
246 | 1. **Document network failures** when they occur: "pip install fails due to network limitations"
247 | 2. **Use syntax validation** instead of full testing when pip installs fail
248 | 3. **Prefer Docker approach** for consistent builds when possible
249 | 4. **Set generous timeouts** (60+ minutes) for any network operations
250 | 5. **Never cancel long-running commands** - document expected timing instead
251 | 
252 | ## Timing Expectations
253 | 
254 | **NEVER CANCEL these operations - they are expected to take significant time:**
255 | 
256 | - **pip install ".[test,lint,typing]"**: 5-10 minutes (when network works)
257 | - **mypy --install-types --non-interactive**: 10-15 minutes first run
258 | - **Docker build**: 10-15 minutes
259 | - **Full test suite**: 15-20 minutes  
260 | - **Documentation build**: 3-5 minutes
261 | - **CI pipeline**: 20-30 minutes total
262 | 
263 | Always set timeouts to at least double these estimates to account for network variability.
```
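
The "Adding New MCP Tools" steps in the file above list where changes go but stop short of a skeleton. Below is a minimal sketch following the `BaseTool` pattern used by the tool modules in this repository; the `count_cells` tool is invented for illustration and does not exist in the codebase, and the registration in `server.py` (step 1 above) is still required and not shown:

```python
# Hypothetical example tool -- "count_cells" is not a real tool in this repo.
from typing import Any, Optional

from jupyter_mcp_server.notebook_manager import NotebookManager
from jupyter_mcp_server.tools._base import BaseTool, ServerMode


class CountCellsTool(BaseTool):
    """Report how many cells the current notebook contains."""

    @property
    def name(self) -> str:
        return "count_cells"

    @property
    def description(self) -> str:
        return "Count the cells in the current Jupyter notebook"

    async def execute(
        self,
        mode: ServerMode,
        notebook_manager: Optional[NotebookManager] = None,
        **kwargs: Any,
    ) -> str:
        if mode == ServerMode.MCP_SERVER and notebook_manager is not None:
            async with notebook_manager.get_current_connection() as notebook:
                return f"The notebook has {len(notebook)} cells."
        raise ValueError(f"Invalid mode or missing required clients: mode={mode}")
```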

--------------------------------------------------------------------------------
/jupyter_mcp_server/tools/overwrite_cell_source_tool.py:
--------------------------------------------------------------------------------

```python
  1 | # Copyright (c) 2023-2024 Datalayer, Inc.
  2 | #
  3 | # BSD 3-Clause License
  4 | 
  5 | """Overwrite cell source tool implementation."""
  6 | 
  7 | import difflib
  8 | import nbformat
  9 | from pathlib import Path
 10 | from typing import Any, Optional
 11 | from jupyter_server_api import JupyterServerClient
 12 | from jupyter_mcp_server.tools._base import BaseTool, ServerMode
 13 | from jupyter_mcp_server.notebook_manager import NotebookManager
 14 | from jupyter_mcp_server.utils import get_current_notebook_context
 15 | 
 16 | 
 17 | class OverwriteCellSourceTool(BaseTool):
 18 |     """Tool to overwrite the source of an existing cell."""
 19 |     
 20 |     @property
 21 |     def name(self) -> str:
 22 |         return "overwrite_cell_source"
 23 |     
 24 |     @property
 25 |     def description(self) -> str:
 26 |         return """Overwrite the source of an existing cell.
 27 | Note this does not execute the modified cell by itself.
 28 | 
 29 | Args:
 30 |     cell_index: Index of the cell to overwrite (0-based)
 31 |     cell_source: New cell source - must match existing cell type
 32 | 
 33 | Returns:
 34 |     str: Success message with diff showing changes made"""
 35 |     
 36 |     async def _get_jupyter_ydoc(self, serverapp: Any, file_id: str):
 37 |         """Get the YNotebook document if it's currently open in a collaborative session."""
 38 |         try:
 39 |             yroom_manager = serverapp.web_app.settings.get("yroom_manager")
 40 |             if yroom_manager is None:
 41 |                 return None
 42 |                 
 43 |             room_id = f"json:notebook:{file_id}"
 44 |             
 45 |             if yroom_manager.has_room(room_id):
 46 |                 yroom = yroom_manager.get_room(room_id)
 47 |                 notebook = await yroom.get_jupyter_ydoc()
 48 |                 return notebook
 49 |         except Exception:
 50 |             pass
 51 |         
 52 |         return None
 53 |     
 54 |     def _generate_diff(self, old_source: str, new_source: str) -> str:
 55 |         """Generate unified diff between old and new source."""
 56 |         old_lines = old_source.splitlines(keepends=False)
 57 |         new_lines = new_source.splitlines(keepends=False)
 58 |         
 59 |         diff_lines = list(difflib.unified_diff(
 60 |             old_lines, 
 61 |             new_lines, 
 62 |             lineterm='',
 63 |             n=3  # Number of context lines
 64 |         ))
 65 |         
 66 |         # Remove the first 3 lines (file headers) from unified_diff output
 67 |         if len(diff_lines) > 3:
 68 |             return '\n'.join(diff_lines[3:])
 69 |         return "no changes detected"
 70 |     
 71 |     async def _overwrite_cell_ydoc(
 72 |         self,
 73 |         serverapp: Any,
 74 |         notebook_path: str,
 75 |         cell_index: int,
 76 |         cell_source: str
 77 |     ) -> str:
 78 |         """Overwrite cell using YDoc (collaborative editing mode)."""
 79 |         # Get file_id from file_id_manager
 80 |         file_id_manager = serverapp.web_app.settings.get("file_id_manager")
 81 |         if file_id_manager is None:
 82 |             raise RuntimeError("file_id_manager not available in serverapp")
 83 |         
 84 |         file_id = file_id_manager.get_id(notebook_path)
 85 |         
 86 |         # Try to get YDoc
 87 |         ydoc = await self._get_jupyter_ydoc(serverapp, file_id)
 88 |         
 89 |         if ydoc:
 90 |             # Notebook is open in collaborative mode, use YDoc
 91 |             if cell_index < 0 or cell_index >= len(ydoc.ycells):
 92 |                 raise ValueError(
 93 |                     f"Cell index {cell_index} is out of range. Notebook has {len(ydoc.ycells)} cells."
 94 |                 )
 95 |             
 96 |             # Get original cell content
 97 |             old_source_raw = ydoc.ycells[cell_index].get("source", "")
 98 |             if isinstance(old_source_raw, list):
 99 |                 old_source = "".join(old_source_raw)
100 |             else:
101 |                 old_source = str(old_source_raw)
102 |             
103 |             # Set new cell source
104 |             ydoc.ycells[cell_index]["source"] = cell_source
105 |             
106 |             # Generate diff
107 |             diff_content = self._generate_diff(old_source, cell_source)
108 |             
109 |             if not diff_content.strip() or diff_content == "no changes detected":
110 |                 return f"Cell {cell_index} overwritten successfully - no changes detected"
111 |             
112 |             return f"Cell {cell_index} overwritten successfully!\n\n```diff\n{diff_content}\n```"
113 |         else:
114 |             # YDoc not available, use file operations
115 |             return await self._overwrite_cell_file(notebook_path, cell_index, cell_source)
116 |     
117 |     async def _overwrite_cell_file(
118 |         self,
119 |         notebook_path: str,
120 |         cell_index: int,
121 |         cell_source: str
122 |     ) -> str:
123 |         """Overwrite cell using file operations (non-collaborative mode)."""
124 |         # Read notebook file as version 4 for consistency
125 |         with open(notebook_path, "r", encoding="utf-8") as f:
126 |             notebook = nbformat.read(f, as_version=4)
127 |         
128 |         # Clean transient fields from outputs
129 |         from jupyter_mcp_server.utils import _clean_notebook_outputs
130 |         _clean_notebook_outputs(notebook)
131 |         
132 |         if cell_index < 0 or cell_index >= len(notebook.cells):
133 |             raise ValueError(
134 |                 f"Cell index {cell_index} is out of range. Notebook has {len(notebook.cells)} cells."
135 |             )
136 |         
137 |         # Get original cell content
138 |         old_source = notebook.cells[cell_index].source
139 |         
140 |         # Set new cell source
141 |         notebook.cells[cell_index].source = cell_source
142 |         
143 |         # Write back to file
144 |         with open(notebook_path, "w", encoding="utf-8") as f:
145 |             nbformat.write(notebook, f)
146 |         
147 |         # Generate diff
148 |         diff_content = self._generate_diff(old_source, cell_source)
149 |         
150 |         if not diff_content.strip() or diff_content == "no changes detected":
151 |             return f"Cell {cell_index} overwritten successfully - no changes detected"
152 |         
153 |         return f"Cell {cell_index} overwritten successfully!\n\n```diff\n{diff_content}\n```"
154 |     
155 |     async def _overwrite_cell_websocket(
156 |         self,
157 |         notebook_manager: NotebookManager,
158 |         cell_index: int,
159 |         cell_source: str
160 |     ) -> str:
161 |         """Overwrite cell using WebSocket connection (MCP_SERVER mode)."""
162 |         async with notebook_manager.get_current_connection() as notebook:
163 |             if cell_index < 0 or cell_index >= len(notebook):
164 |                 raise ValueError(f"Cell index {cell_index} out of range")
165 |             
166 |             # Get original cell content
167 |             old_source_raw = notebook[cell_index].get("source", "")
168 |             if isinstance(old_source_raw, list):
169 |                 old_source = "".join(old_source_raw)
170 |             else:
171 |                 old_source = str(old_source_raw)
172 |             
173 |             # Set new cell content
174 |             notebook.set_cell_source(cell_index, cell_source)
175 |             
176 |             # Generate diff
177 |             diff_content = self._generate_diff(old_source, cell_source)
178 |             
179 |             if not diff_content.strip() or diff_content == "no changes detected":
180 |                 return f"Cell {cell_index} overwritten successfully - no changes detected"
181 |             
182 |             return f"Cell {cell_index} overwritten successfully!\n\n```diff\n{diff_content}\n```"
183 |     
184 |     async def execute(
185 |         self,
186 |         mode: ServerMode,
187 |         server_client: Optional[JupyterServerClient] = None,
188 |         kernel_client: Optional[Any] = None,
189 |         contents_manager: Optional[Any] = None,
190 |         kernel_manager: Optional[Any] = None,
191 |         kernel_spec_manager: Optional[Any] = None,
192 |         notebook_manager: Optional[NotebookManager] = None,
193 |         # Tool-specific parameters
194 |         cell_index: Optional[int] = None,
195 |         cell_source: Optional[str] = None,
196 |         **kwargs
197 |     ) -> str:
198 |         """Execute the overwrite_cell_source tool.
199 |         
200 |         Args:
201 |             mode: Server mode (MCP_SERVER or JUPYTER_SERVER)
202 |             contents_manager: Direct API access for JUPYTER_SERVER mode
203 |             notebook_manager: Notebook manager instance
204 |             cell_index: Index of the cell to overwrite (0-based)
205 |             cell_source: New cell source
206 |             **kwargs: Additional parameters
207 |             
208 |         Returns:
209 |             Success message with diff
210 |         """
211 |         if mode == ServerMode.JUPYTER_SERVER and contents_manager is not None:
212 |             # JUPYTER_SERVER mode: Try YDoc first, fall back to file operations
213 |             from jupyter_mcp_server.jupyter_extension.context import get_server_context
214 |             
215 |             context = get_server_context()
216 |             serverapp = context.serverapp
217 |             notebook_path, _ = get_current_notebook_context(notebook_manager)
218 |             
219 |             # Resolve to absolute path
220 |             if serverapp and not Path(notebook_path).is_absolute():
221 |                 root_dir = serverapp.root_dir
222 |                 notebook_path = str(Path(root_dir) / notebook_path)
223 |             
224 |             if serverapp:
225 |                 return await self._overwrite_cell_ydoc(serverapp, notebook_path, cell_index, cell_source)
226 |             else:
227 |                 return await self._overwrite_cell_file(notebook_path, cell_index, cell_source)
228 |                 
229 |         elif mode == ServerMode.MCP_SERVER and notebook_manager is not None:
230 |             # MCP_SERVER mode: Use WebSocket connection
231 |             return await self._overwrite_cell_websocket(notebook_manager, cell_index, cell_source)
232 |         else:
233 |             raise ValueError(f"Invalid mode or missing required clients: mode={mode}")
234 | 
```
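
A usage sketch for MCP_SERVER mode, assuming a configured `NotebookManager`; the return value embeds a unified diff (three lines of context, file headers stripped) inside a fenced diff block:

```python
# Illustrative sketch only -- normally dispatched through the MCP tool registry.
from jupyter_mcp_server.tools._base import ServerMode
from jupyter_mcp_server.tools.overwrite_cell_source_tool import OverwriteCellSourceTool


async def rewrite_first_cell(notebook_manager) -> str:
    tool = OverwriteCellSourceTool()
    # Note: overwriting does not execute the cell; run it separately if needed.
    return await tool.execute(
        mode=ServerMode.MCP_SERVER,
        notebook_manager=notebook_manager,
        cell_index=0,
        cell_source="import numpy as np\nprint(np.pi)",
    )
```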

--------------------------------------------------------------------------------
/jupyter_mcp_server/jupyter_extension/extension.py:
--------------------------------------------------------------------------------

```python
  1 | # Copyright (c) 2023-2024 Datalayer, Inc.
  2 | #
  3 | # BSD 3-Clause License
  4 | 
  5 | 
  6 | 
  7 | 
  8 | 
  9 | """
 10 | Jupyter Server Extension for MCP Protocol
 11 | 
 12 | This extension exposes MCP tools directly from a running Jupyter Server,
 13 | allowing MCP clients to connect to the Jupyter Server's MCP endpoints.
 14 | """
 15 | 
 16 | import logging
 17 | from traitlets import Unicode, Bool
 18 | from jupyter_server.extension.application import ExtensionApp, ExtensionAppJinjaMixin
 19 | from jupyter_server.utils import url_path_join
 20 | 
 21 | from jupyter_mcp_server.jupyter_extension.context import get_server_context
 22 | from jupyter_mcp_server.jupyter_extension.handlers import (
 23 |     MCPHealthHandler,
 24 |     MCPToolsListHandler,
 25 |     MCPToolsCallHandler,
 26 | )
 27 | 
 28 | 
 29 | logger = logging.getLogger(__name__)
 30 | 
 31 | 
 32 | class JupyterMCPServerExtensionApp(ExtensionAppJinjaMixin, ExtensionApp):
 33 |     """
 34 |     Jupyter Server Extension for MCP Server.
 35 |     
 36 |     This extension allows MCP clients to connect to Jupyter Server and use
 37 |     MCP tools to interact with notebooks and kernels.
 38 |     
 39 |     Configuration:
 40 |         c.JupyterMCPServerExtensionApp.document_url = "local"  # or http://...
 41 |         c.JupyterMCPServerExtensionApp.runtime_url = "local"   # or http://...
 42 |         c.JupyterMCPServerExtensionApp.document_id = "notebook.ipynb"
 43 |         c.JupyterMCPServerExtensionApp.start_new_runtime = True  # Start new kernel
 44 |         c.JupyterMCPServerExtensionApp.runtime_id = "kernel-id"  # Or connect to existing
 45 |     """
 46 |     
 47 |     # Extension metadata
 48 |     name = "jupyter_mcp_server"
 49 |     default_url = "/mcp"
 50 |     load_other_extensions = True
 51 |     
 52 |     # Configuration traits
 53 |     document_url = Unicode(
 54 |         "local",
 55 |         config=True,
 56 |         help='Document URL - use "local" for local serverapp access or http://... for remote'
 57 |     )
 58 |     
 59 |     runtime_url = Unicode(
 60 |         "local",
 61 |         config=True,
 62 |         help='Runtime URL - use "local" for local serverapp access or http://... for remote'
 63 |     )
 64 |     
 65 |     document_id = Unicode(
 66 |         "notebook.ipynb",
 67 |         config=True,
 68 |         help='Default document ID (notebook path)'
 69 |     )
 70 |     
 71 |     start_new_runtime = Bool(
 72 |         False,
 73 |         config=True,
 74 |         help='Whether to start a new kernel runtime on initialization'
 75 |     )
 76 |     
 77 |     runtime_id = Unicode(
 78 |         "",
 79 |         config=True,
 80 |         help='Existing kernel ID to connect to (if not starting new runtime)'
 81 |     )
 82 |     
 83 |     document_token = Unicode(
 84 |         "",
 85 |         config=True,
 86 |         help='Authentication token for document server (if remote)'
 87 |     )
 88 |     
 89 |     runtime_token = Unicode(
 90 |         "",
 91 |         config=True,
 92 |         help='Authentication token for runtime server (if remote)'
 93 |     )
 94 |     
 95 |     provider = Unicode(
 96 |         "jupyter",
 97 |         config=True,
 98 |         help='Provider type for document/runtime'
 99 |     )
100 |     
101 |     def initialize_settings(self):
102 |         """
103 |         Initialize extension settings.
104 |         
105 |         This is called during extension loading to set up configuration
106 |         and update the server context.
107 |         """
108 |         # Reduce noise from httpx logging (used by JupyterLab for PyPI extension discovery)
109 |         logging.getLogger("httpx").setLevel(logging.WARNING)
110 |         
111 |         logger.info("Initializing Jupyter MCP Server Extension")
112 |         logger.info(f"  Document URL: {self.document_url}")
113 |         logger.info(f"  Runtime URL: {self.runtime_url}")
114 |         logger.info(f"  Document ID: {self.document_id}")
115 |         logger.info(f"  Start New Runtime: {self.start_new_runtime}")
116 |         if self.runtime_id:
117 |             logger.info(f"  Runtime ID: {self.runtime_id}")
118 |         
119 |         # Update the global server context
120 |         context = get_server_context()
121 |         context.update(
122 |             context_type="JUPYTER_SERVER",
123 |             serverapp=self.serverapp,
124 |             document_url=self.document_url,
125 |             runtime_url=self.runtime_url
126 |         )
127 |         
128 |         # Update global MCP configuration
129 |         from jupyter_mcp_server.config import get_config
130 |         config = get_config()
131 |         config.document_url = self.document_url
132 |         config.runtime_url = self.runtime_url
133 |         config.document_id = self.document_id
134 |         config.document_token = self.document_token if self.document_token else None
135 |         config.runtime_token = self.runtime_token if self.runtime_token else None
136 |         config.start_new_runtime = self.start_new_runtime
137 |         config.runtime_id = self.runtime_id if self.runtime_id else None
138 |         config.provider = self.provider
139 |         
140 |         # Store configuration in settings for handlers
141 |         self.settings.update({
142 |             "mcp_document_url": self.document_url,
143 |             "mcp_runtime_url": self.runtime_url,
144 |             "mcp_document_id": self.document_id,
145 |             "mcp_document_token": self.document_token,
146 |             "mcp_runtime_token": self.runtime_token,
147 |             "mcp_start_new_runtime": self.start_new_runtime,
148 |             "mcp_runtime_id": self.runtime_id,
149 |             "mcp_provider": self.provider,
150 |             "mcp_serverapp": self.serverapp,
151 |         })
152 |         
153 |         # Trigger auto-enrollment if document_id is configured
154 |         # Note: Auto-enrollment supports 3 modes:
155 |         # 1. With existing kernel (runtime_id set)
156 |         # 2. With new kernel (start_new_runtime=True)
157 |         # 3. Without kernel - notebook-only mode (both False/None)
158 |         if self.document_id:
159 |             from tornado.ioloop import IOLoop
160 |             from jupyter_mcp_server.enroll import auto_enroll_document
161 |             from jupyter_mcp_server.server import notebook_manager, use_notebook_tool, server_context
162 |             
163 |             # Schedule auto-enrollment to run after Jupyter Server is fully started
164 |             async def _run_auto_enrollment():
165 |                 try:
166 |                     logger.info(f"Running auto-enrollment for document '{self.document_id}'")
167 |                     await auto_enroll_document(
168 |                         config=config,
169 |                         notebook_manager=notebook_manager,
170 |                         use_notebook_tool=use_notebook_tool,
171 |                         server_context=server_context,
172 |                     )
173 |                     logger.info(f"Auto-enrollment completed for document '{self.document_id}'")
174 |                 except Exception as e:
175 |                     logger.error(f"Failed to auto-enroll document: {e}", exc_info=True)
176 |             
177 |             # Schedule the enrollment to run on the IOLoop after server starts
178 |             # Use callback with delay to ensure server is fully initialized
179 |             IOLoop.current().call_later(1.0, lambda: IOLoop.current().add_callback(_run_auto_enrollment))
180 |         
181 |         logger.info("Jupyter MCP Server Extension settings initialized")
182 |     
183 |     def initialize_handlers(self):
184 |         """
185 |         Register MCP protocol handlers.
186 |         
187 |         Strategy: Implement MCP protocol directly in Tornado handlers that
188 |         call the MCP tools from server.py. This avoids the complexity of
189 |         wrapping the Starlette ASGI app.
190 |         
191 |         Endpoints:
192 |         - GET/POST /mcp - MCP protocol endpoint (SSE-based)
193 |         - GET /mcp/healthz - Health check (Tornado handler)
194 |         - GET /mcp/tools/list - List available tools (Tornado handler)
195 |         - POST /mcp/tools/call - Execute a tool (Tornado handler)
196 |         """
197 |         base_url = self.serverapp.base_url
198 |         
199 |         # Import here to avoid circular imports
200 |         from jupyter_mcp_server.jupyter_extension.handlers import MCPSSEHandler
201 |         
202 |         # Define handlers
203 |         handlers = [
204 |             # MCP protocol endpoint - SSE-based handler
205 |             # Match /mcp with or without trailing slash
206 |             (url_path_join(base_url, "mcp/?"), MCPSSEHandler),
207 |             # Utility endpoints (optional, for debugging)
208 |             (url_path_join(base_url, "mcp/healthz"), MCPHealthHandler),
209 |             (url_path_join(base_url, "mcp/tools/list"), MCPToolsListHandler),
210 |             (url_path_join(base_url, "mcp/tools/call"), MCPToolsCallHandler),
211 |         ]
212 |         
213 |         # Register handlers
214 |         self.handlers.extend(handlers)
215 |         
216 |         # Log registered endpoints using url_path_join for consistent formatting
217 |         logger.info(f"Registered MCP handlers at {url_path_join(base_url, 'mcp/')}")
218 |         logger.info(f"  - MCP protocol: {url_path_join(base_url, 'mcp')} (SSE-based)")
219 |         logger.info(f"  - Health check: {url_path_join(base_url, 'mcp/healthz')}")
220 |         logger.info(f"  - List tools: {url_path_join(base_url, 'mcp/tools/list')}")
221 |         logger.info(f"  - Call tool: {url_path_join(base_url, 'mcp/tools/call')}")
222 |     
223 |     def initialize_templates(self):
224 |         """
225 |         Initialize Jinja templates.
226 |         
227 |         Not needed for API-only extension, but included for completeness.
228 |         """
229 |         pass
230 |     
231 |     async def stop_extension(self):
232 |         """
233 |         Clean up when extension stops.
234 |         
235 |         Shutdown any managed kernels and cleanup resources.
236 |         """
237 |         logger.info("Stopping Jupyter MCP Server Extension")
238 |         
239 |         # Reset server context
240 |         context = get_server_context()
241 |         context.reset()
242 |         
243 |         logger.info("Jupyter MCP Server Extension stopped")
244 | 
245 | 
246 | # Extension loading functions
247 | 
248 | def _jupyter_server_extension_points():
249 |     """
250 |     Declare the Jupyter Server extension.
251 |     
252 |     Returns:
253 |         List of extension metadata dictionaries
254 |     """
255 |     return [
256 |         {
257 |             "module": "jupyter_mcp_server.jupyter_extension.extension",
258 |             "app": JupyterMCPServerExtensionApp
259 |         }
260 |     ]
261 | 
262 | 
263 | def _load_jupyter_server_extension(serverapp):
264 |     """
265 |     Load the extension (for backward compatibility).
266 |     
267 |     Args:
268 |         serverapp: Jupyter ServerApp instance
269 |     """
270 |     extension = JupyterMCPServerExtensionApp()
271 |     extension.serverapp = serverapp
272 |     extension.initialize_settings()
273 |     extension.initialize_handlers()
274 |     extension.initialize_templates()
275 | 
276 | 
277 | # For classic Notebook server compatibility
278 | load_jupyter_server_extension = _load_jupyter_server_extension
279 | 
```
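
A sketch of the corresponding `jupyter_server_config.py` entries for running the extension in fully local mode; the notebook path is a placeholder and the trait names mirror the configuration block in the class docstring above:

```python
# jupyter_server_config.py sketch -- `c` is provided by Jupyter's config system.
c.ServerApp.jpserver_extensions = {"jupyter_mcp_server": True}

c.JupyterMCPServerExtensionApp.document_url = "local"
c.JupyterMCPServerExtensionApp.runtime_url = "local"
c.JupyterMCPServerExtensionApp.document_id = "notebook.ipynb"  # placeholder path
c.JupyterMCPServerExtensionApp.start_new_runtime = True
```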

--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------

```python
  1 | # Copyright (c) 2023-2024 Datalayer, Inc.
  2 | #
  3 | # BSD 3-Clause License
  4 | 
  5 | """
  6 | Pytest configuration and shared fixtures for Jupyter MCP Server tests.
  7 | 
  8 | This module provides:
  9 | - jupyter_server fixture: Session-scoped Jupyter Lab server
 10 | - jupyter_server_with_extension fixture: Jupyter Lab with MCP extension
 11 | - jupyter_mcp_server fixture: Standalone MCP server instance
 12 | - mcp_client fixture: MCP protocol client for testing
 13 | - _start_server helper: Generic server startup with health checks
 14 | - JUPYTER_TOKEN: Authentication token for Jupyter API
 15 | """
 16 | 
 17 | import logging
 18 | import os
 19 | import socket
 20 | import subprocess
 21 | import time
 22 | from http import HTTPStatus
 23 | 
 24 | import pytest
 25 | import pytest_asyncio
 26 | import requests
 27 | from requests.exceptions import ConnectionError
 28 | 
 29 | 
 30 | JUPYTER_TOKEN = "MY_TOKEN"
 31 | 
 32 | # Test mode configuration - set to False to skip testing specific modes
 33 | TEST_MCP_SERVER = os.environ.get("TEST_MCP_SERVER", "true").lower() == "true"
 34 | TEST_JUPYTER_SERVER = os.environ.get("TEST_JUPYTER_SERVER", "true").lower() == "true"
 35 | 
 36 | 
 37 | def _start_server(
 38 |     name: str, host: str, port: int, command: list, readiness_endpoint: str, max_retries: int = 5
 39 | ):
 40 |     """A helper that starts a web server as a Python subprocess and waits until it's ready to accept connections
 41 | 
 42 |     This method can be used to start both Jupyter and Jupyter MCP servers
 43 |     
 44 |     Uses subprocess.DEVNULL to prevent pipe blocking issues with verbose output.
 45 |     """
 46 |     _log_prefix = name
 47 |     url = f"http://{host}:{port}"
 48 |     url_readiness = f"{url}{readiness_endpoint}"
 49 |     logging.info(f"{_log_prefix}: starting ...")
 50 |     logging.debug(f"{_log_prefix}: command: {' '.join(command)}")
 51 |     
 52 |     # Use DEVNULL to prevent any pipe blocking issues
 53 |     p_serv = subprocess.Popen(
 54 |         command, 
 55 |         stdout=subprocess.DEVNULL, 
 56 |         stderr=subprocess.DEVNULL
 57 |     )
 58 |     _log_prefix = f"{_log_prefix} [{p_serv.pid}]"
 59 |     
 60 |     while max_retries > 0:
 61 |         # Check if process died
 62 |         poll_result = p_serv.poll()
 63 |         if poll_result is not None:
 64 |             logging.error(f"{_log_prefix}: process died with exit code {poll_result}")
 65 |             pytest.fail(f"{name} failed to start (exit code {poll_result}). Check if port {port} is available.")
 66 |         
 67 |         try:
 68 |             response = requests.get(url_readiness, timeout=10)
 69 |             if response is not None and response.status_code == HTTPStatus.OK:
 70 |                 logging.info(f"{_log_prefix}: started ({url})!")
 71 |                 yield url
 72 |                 break
 73 |         except (ConnectionError, requests.exceptions.Timeout):
 74 |             logging.debug(
 75 |                 f"{_log_prefix}: waiting to accept connections [{max_retries}]"
 76 |             )
 77 |             time.sleep(2)
 78 |             max_retries -= 1
 79 |             
 80 |     if not max_retries:
 81 |         logging.error(f"{_log_prefix}: failed to start after retries. Check if port {port} is available.")
 82 |         pytest.fail(f"{name} failed to start after max retries. Port {port} may be in use or server crashed.")
 83 |     logging.debug(f"{_log_prefix}: stopping ...")
 84 |     try:
 85 |         p_serv.terminate()
 86 |         p_serv.wait(timeout=5)  # Reduced timeout for faster cleanup
 87 |         logging.info(f"{_log_prefix}: stopped")
 88 |     except subprocess.TimeoutExpired:
 89 |         logging.warning(f"{_log_prefix}: terminate timeout, forcing kill")
 90 |         p_serv.kill()
 91 |         try:
 92 |             p_serv.wait(timeout=2)
 93 |         except subprocess.TimeoutExpired:
 94 |             logging.error(f"{_log_prefix}: kill timeout, process may be stuck")
 95 |     except Exception as e:
 96 |         logging.error(f"{_log_prefix}: error during shutdown: {e}")
 97 | 
 98 | 
 99 | @pytest.fixture(scope="session")
100 | def jupyter_server():
101 |     """Start the Jupyter server and returns its URL
102 |     
103 |     This is a session-scoped fixture that starts a single Jupyter Lab instance
104 |     for all tests. Both MCP_SERVER and JUPYTER_SERVER mode tests can share this.
105 |     
106 |     Only starts if at least one test mode is enabled.
107 |     """
108 |     if not TEST_MCP_SERVER and not TEST_JUPYTER_SERVER:
109 |         pytest.skip("Both TEST_MCP_SERVER and TEST_JUPYTER_SERVER are disabled")
110 |     
111 |     host = "localhost"
112 |     port = 8888
113 |     yield from _start_server(
114 |         name="JupyterLab",
115 |         host=host,
116 |         port=port,
117 |         command=[
118 |             "jupyter",
119 |             "lab",
120 |             "--port",
121 |             str(port),
122 |             "--IdentityProvider.token",
123 |             JUPYTER_TOKEN,
124 |             "--ip",
125 |             host,
126 |             "--ServerApp.root_dir",
127 |             "./dev/content",
128 |             "--no-browser",
129 |         ],
130 |         readiness_endpoint="/api",
131 |         max_retries=10,
132 |     )
133 | 
134 | 
135 | @pytest.fixture(scope="session")
136 | def jupyter_server_with_extension():
137 |     """Start Jupyter server with MCP extension loaded (JUPYTER_SERVER mode)
138 |     
139 |     This fixture starts Jupyter Lab with the jupyter_mcp_server extension enabled,
140 |     allowing tests to verify JUPYTER_SERVER mode functionality (YDoc, direct kernel access, etc).
141 |     
142 |     Only starts if TEST_JUPYTER_SERVER=True, otherwise skips.
143 |     """
144 |     if not TEST_JUPYTER_SERVER:
145 |         pytest.skip("TEST_JUPYTER_SERVER is disabled")
146 |     
147 |     host = "localhost"
148 |     port = 8889  # Different port to avoid conflicts
149 |     yield from _start_server(
150 |         name="JupyterLab+MCP",
151 |         host=host,
152 |         port=port,
153 |         command=[
154 |             "jupyter",
155 |             "lab",
156 |             "--port",
157 |             str(port),
158 |             "--IdentityProvider.token",
159 |             JUPYTER_TOKEN,
160 |             "--ip",
161 |             host,
162 |             "--ServerApp.root_dir",
163 |             "./dev/content",
164 |             "--no-browser",
165 |             # Load the MCP extension
166 |             "--ServerApp.jpserver_extensions",
167 |             '{"jupyter_mcp_server": True}',
168 |         ],
169 |         readiness_endpoint="/api",
170 |         max_retries=10,
171 |     )
172 | 
173 | 
174 | ###############################################################################
175 | # MCP Server Fixtures
176 | ###############################################################################
177 | 
178 | @pytest.fixture(scope="function")
179 | def jupyter_mcp_server(request, jupyter_server):
180 |     """Start the Jupyter MCP server and returns its URL
181 |     
182 |     This fixture starts a standalone MCP server that communicates with Jupyter
183 |     via HTTP (MCP_SERVER mode). It can be parametrized to control runtime startup.
184 |     
185 |     Parameters:
186 |         request.param (bool): Whether to start a new kernel runtime (default: True)
187 |     """
188 |     # Find an available port
189 |     def find_free_port():
190 |         with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
191 |             s.bind(('', 0))
192 |             s.listen(1)
193 |             port = s.getsockname()[1]
194 |         return port
195 |     
196 |     host = "localhost"
197 |     port = find_free_port()
198 |     start_new_runtime = True
199 |     try:
200 |         start_new_runtime = request.param
201 |     except AttributeError:
202 |         # fixture not parametrized
203 |         pass
204 |     
205 |     yield from _start_server(
206 |         name="Jupyter MCP",
207 |         host=host,
208 |         port=port,
209 |         command=[
210 |             "python",
211 |             "-m",
212 |             "jupyter_mcp_server",
213 |             "--transport",
214 |             "streamable-http",
215 |             "--document-url",
216 |             jupyter_server,
217 |             "--document-id",
218 |             "notebook.ipynb",
219 |             "--document-token",
220 |             JUPYTER_TOKEN,
221 |             "--runtime-url",
222 |             jupyter_server,
223 |             "--start-new-runtime",
224 |             str(start_new_runtime),
225 |             "--runtime-token",
226 |             JUPYTER_TOKEN,
227 |             "--port",
228 |             str(port),
229 |         ],
230 |         readiness_endpoint="/api/healthz",
231 |     )
232 | 
233 | 
234 | def _get_test_params():
235 |     """Generate test parameters based on TEST_MCP_SERVER and TEST_JUPYTER_SERVER flags"""
236 |     params = []
237 |     if TEST_MCP_SERVER:
238 |         params.append("mcp_server")
239 |     if TEST_JUPYTER_SERVER:
240 |         params.append("jupyter_extension")
241 |     
242 |     if not params:
243 |         pytest.skip("Both TEST_MCP_SERVER and TEST_JUPYTER_SERVER are disabled")
244 |     
245 |     return params
246 | 
247 | 
248 | @pytest.fixture(scope="function", params=_get_test_params())
249 | def mcp_server_url(request):
250 |     """Parametrized fixture that provides both MCP_SERVER and JUPYTER_SERVER mode URLs
251 |     
252 |     This fixture enables testing the same functionality against both deployment modes:
253 |     - mcp_server: Standalone MCP server (HTTP transport) - when TEST_MCP_SERVER=True
254 |     - jupyter_extension: Jupyter extension mode (direct API access) - when TEST_JUPYTER_SERVER=True
255 |     
256 |     Both expose MCP protocol endpoints that can be tested with MCPClient.
257 |     
258 |     You can control which modes to test via environment variables:
259 |         TEST_MCP_SERVER=true/false (default: true)
260 |         TEST_JUPYTER_SERVER=true/false (default: true)
261 |     
262 |     Parameters:
263 |         request.param (str): Either "mcp_server" or "jupyter_extension"
264 |     
265 |     Returns:
266 |         str: URL of the MCP endpoint for the selected mode
267 |     """
268 |     if request.param == "mcp_server":
269 |         # Get jupyter_server fixture dynamically
270 |         jupyter_server = request.getfixturevalue("jupyter_server")
271 |         
272 |         # Start standalone MCP server
273 |         import socket
274 |         def find_free_port():
275 |             with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
276 |                 s.bind(('', 0))
277 |                 s.listen(1)
278 |                 port = s.getsockname()[1]
279 |             return port
280 |         
281 |         host = "localhost"
282 |         port = find_free_port()
283 |         
284 |         yield from _start_server(
285 |             name="Jupyter MCP",
286 |             host=host,
287 |             port=port,
288 |             command=[
289 |                 "python",
290 |                 "-m",
291 |                 "jupyter_mcp_server",
292 |                 "--transport",
293 |                 "streamable-http",
294 |                 "--document-url",
295 |                 jupyter_server,
296 |                 "--document-id",
297 |                 "notebook.ipynb",
298 |                 "--document-token",
299 |                 JUPYTER_TOKEN,
300 |                 "--runtime-url",
301 |                 jupyter_server,
302 |                 "--start-new-runtime",
303 |                 "True",
304 |                 "--runtime-token",
305 |                 JUPYTER_TOKEN,
306 |                 "--port",
307 |                 str(port),
308 |             ],
309 |             readiness_endpoint="/api/healthz",
310 |         )
311 |     else:  # jupyter_extension
312 |         # Get jupyter_server_with_extension fixture dynamically
313 |         jupyter_server_with_extension = request.getfixturevalue("jupyter_server_with_extension")
314 |         # Use the extension's MCP endpoints (note: no /mcp suffix, the extension handles routing)
315 |         yield jupyter_server_with_extension
316 | 
317 | 
318 | ###############################################################################
319 | 
320 | 
321 | @pytest_asyncio.fixture(scope="function")
322 | async def mcp_client(jupyter_mcp_server):
323 |     """An MCP client that can connect to the Jupyter MCP server
324 |     
325 |     This fixture provides an MCPClient instance configured to connect to
326 |     the standalone MCP server. It requires the test_common module.
327 |     
328 |     Returns:
329 |         MCPClient: Configured client for MCP protocol communication
330 |     """
331 |     from .test_common import MCPClient
332 |     return MCPClient(jupyter_mcp_server)
333 | 
334 | 
335 | @pytest.fixture(scope="function")
336 | def mcp_client_parametrized(mcp_server_url):
337 |     """MCP client that works with both server modes via parametrization
338 |     
339 |     This fixture creates an MCPClient that can connect to either:
340 |     - Standalone MCP server (MCP_SERVER mode)
341 |     - Jupyter extension MCP endpoints (JUPYTER_SERVER mode)
342 |     
343 |     Returns:
344 |         MCPClient: Configured client for the parametrized server mode
345 |     """
346 |     from .test_common import MCPClient
347 |     return MCPClient(mcp_server_url)
348 | 
```
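
The `jupyter_mcp_server` fixture above reads `request.param` to decide whether the standalone MCP server should start a new kernel runtime. A test can set that flag through pytest's indirect parametrization; the sketch below is illustrative (the test itself is not part of the suite shown above) and only relies on the `/api/healthz` readiness endpoint already used by the fixture.

```python
import pytest
import requests


# Hypothetical test: run the same check with and without a pre-started
# kernel runtime by parametrizing the jupyter_mcp_server fixture indirectly.
@pytest.mark.parametrize("jupyter_mcp_server", [True, False], indirect=True)
def test_mcp_server_health(jupyter_mcp_server):
    # The fixture yields the base URL of the standalone MCP server.
    response = requests.get(f"{jupyter_mcp_server}/api/healthz", timeout=10)
    assert response.status_code == 200
```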
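
Similarly, the `mcp_client_parametrized` fixture runs a test once per enabled mode, against the standalone MCP server and against the Jupyter extension endpoint, depending on the `TEST_MCP_SERVER` and `TEST_JUPYTER_SERVER` environment variables (set either one to `false` to skip that mode). A minimal sketch, assuming an async client API: the `list_tools()` call is a placeholder, since the real `MCPClient` is defined in `test_common`, which is not shown here.

```python
import pytest


# Hypothetical dual-mode test: because mcp_client_parametrized depends on the
# parametrized mcp_server_url fixture, this test is collected once for
# "mcp_server" and once for "jupyter_extension" (when both modes are enabled).
@pytest.mark.asyncio
async def test_tools_are_exposed(mcp_client_parametrized):
    # list_tools() is a placeholder for whatever coroutine MCPClient
    # (defined in test_common) actually exposes for the MCP protocol.
    tools = await mcp_client_parametrized.list_tools()
    assert tools
```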