This is page 8 of 12. Use http://codebase.md/oraios/serena?lines=false&page={x} to view the full context.

# Directory Structure

```
├── .devcontainer
│   └── devcontainer.json
├── .dockerignore
├── .env.example
├── .github
│   ├── FUNDING.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── config.yml
│   │   ├── feature_request.md
│   │   └── issue--bug--performance-problem--question-.md
│   └── workflows
│       ├── codespell.yml
│       ├── docker.yml
│       ├── junie.yml
│       ├── lint_and_docs.yaml
│       ├── publish.yml
│       └── pytest.yml
├── .gitignore
├── .serena
│   ├── memories
│   │   ├── adding_new_language_support_guide.md
│   │   ├── serena_core_concepts_and_architecture.md
│   │   ├── serena_repository_structure.md
│   │   └── suggested_commands.md
│   └── project.yml
├── .vscode
│   └── settings.json
├── CHANGELOG.md
├── CLAUDE.md
├── compose.yaml
├── CONTRIBUTING.md
├── docker_build_and_run.sh
├── DOCKER.md
├── Dockerfile
├── docs
│   ├── custom_agent.md
│   └── serena_on_chatgpt.md
├── flake.lock
├── flake.nix
├── lessons_learned.md
├── LICENSE
├── llms-install.md
├── public
│   └── .gitignore
├── pyproject.toml
├── README.md
├── resources
│   ├── serena-icons.cdr
│   ├── serena-logo-dark-mode.svg
│   ├── serena-logo.cdr
│   ├── serena-logo.svg
│   └── vscode_sponsor_logo.png
├── roadmap.md
├── scripts
│   ├── agno_agent.py
│   ├── demo_run_tools.py
│   ├── gen_prompt_factory.py
│   ├── mcp_server.py
│   ├── print_mode_context_options.py
│   └── print_tool_overview.py
├── src
│   ├── interprompt
│   │   ├── __init__.py
│   │   ├── .syncCommitId.remote
│   │   ├── .syncCommitId.this
│   │   ├── jinja_template.py
│   │   ├── multilang_prompt.py
│   │   ├── prompt_factory.py
│   │   └── util
│   │       ├── __init__.py
│   │       └── class_decorators.py
│   ├── README.md
│   ├── serena
│   │   ├── __init__.py
│   │   ├── agent.py
│   │   ├── agno.py
│   │   ├── analytics.py
│   │   ├── cli.py
│   │   ├── code_editor.py
│   │   ├── config
│   │   │   ├── __init__.py
│   │   │   ├── context_mode.py
│   │   │   └── serena_config.py
│   │   ├── constants.py
│   │   ├── dashboard.py
│   │   ├── generated
│   │   │   └── generated_prompt_factory.py
│   │   ├── gui_log_viewer.py
│   │   ├── mcp.py
│   │   ├── project.py
│   │   ├── prompt_factory.py
│   │   ├── resources
│   │   │   ├── config
│   │   │   │   ├── contexts
│   │   │   │   │   ├── agent.yml
│   │   │   │   │   ├── chatgpt.yml
│   │   │   │   │   ├── codex.yml
│   │   │   │   │   ├── context.template.yml
│   │   │   │   │   ├── desktop-app.yml
│   │   │   │   │   ├── ide-assistant.yml
│   │   │   │   │   └── oaicompat-agent.yml
│   │   │   │   ├── internal_modes
│   │   │   │   │   └── jetbrains.yml
│   │   │   │   ├── modes
│   │   │   │   │   ├── editing.yml
│   │   │   │   │   ├── interactive.yml
│   │   │   │   │   ├── mode.template.yml
│   │   │   │   │   ├── no-onboarding.yml
│   │   │   │   │   ├── onboarding.yml
│   │   │   │   │   ├── one-shot.yml
│   │   │   │   │   └── planning.yml
│   │   │   │   └── prompt_templates
│   │   │   │       ├── simple_tool_outputs.yml
│   │   │   │       └── system_prompt.yml
│   │   │   ├── dashboard
│   │   │   │   ├── dashboard.js
│   │   │   │   ├── index.html
│   │   │   │   ├── jquery.min.js
│   │   │   │   ├── serena-icon-16.png
│   │   │   │   ├── serena-icon-32.png
│   │   │   │   ├── serena-icon-48.png
│   │   │   │   ├── serena-logs-dark-mode.png
│   │   │   │   └── serena-logs.png
│   │   │   ├── project.template.yml
│   │   │   └── serena_config.template.yml
│   │   ├── symbol.py
│   │   ├── text_utils.py
│   │   ├── tools
│   │   │   ├── __init__.py
│   │   │   ├── cmd_tools.py
│   │   │   ├── config_tools.py
│   │   │   ├── file_tools.py
│   │   │   ├── jetbrains_plugin_client.py
│   │   │   ├── jetbrains_tools.py
│   │   │   ├── memory_tools.py
│   │   │   ├── symbol_tools.py
│   │   │   ├── tools_base.py
│   │   │   └── workflow_tools.py
│   │   └── util
│   │       ├── class_decorators.py
│   │       ├── exception.py
│   │       ├── file_system.py
│   │       ├── general.py
│   │       ├── git.py
│   │       ├── inspection.py
│   │       ├── logging.py
│   │       ├── shell.py
│   │       └── thread.py
│   └── solidlsp
│       ├── __init__.py
│       ├── .gitignore
│       ├── language_servers
│       │   ├── al_language_server.py
│       │   ├── bash_language_server.py
│       │   ├── clangd_language_server.py
│       │   ├── clojure_lsp.py
│       │   ├── common.py
│       │   ├── csharp_language_server.py
│       │   ├── dart_language_server.py
│       │   ├── eclipse_jdtls.py
│       │   ├── elixir_tools
│       │   │   ├── __init__.py
│       │   │   ├── elixir_tools.py
│       │   │   └── README.md
│       │   ├── elm_language_server.py
│       │   ├── erlang_language_server.py
│       │   ├── gopls.py
│       │   ├── intelephense.py
│       │   ├── jedi_server.py
│       │   ├── kotlin_language_server.py
│       │   ├── lua_ls.py
│       │   ├── marksman.py
│       │   ├── nixd_ls.py
│       │   ├── omnisharp
│       │   │   ├── initialize_params.json
│       │   │   ├── runtime_dependencies.json
│       │   │   └── workspace_did_change_configuration.json
│       │   ├── omnisharp.py
│       │   ├── perl_language_server.py
│       │   ├── pyright_server.py
│       │   ├── r_language_server.py
│       │   ├── regal_server.py
│       │   ├── ruby_lsp.py
│       │   ├── rust_analyzer.py
│       │   ├── solargraph.py
│       │   ├── sourcekit_lsp.py
│       │   ├── terraform_ls.py
│       │   ├── typescript_language_server.py
│       │   ├── vts_language_server.py
│       │   └── zls.py
│       ├── ls_config.py
│       ├── ls_exceptions.py
│       ├── ls_handler.py
│       ├── ls_logger.py
│       ├── ls_request.py
│       ├── ls_types.py
│       ├── ls_utils.py
│       ├── ls.py
│       ├── lsp_protocol_handler
│       │   ├── lsp_constants.py
│       │   ├── lsp_requests.py
│       │   ├── lsp_types.py
│       │   └── server.py
│       ├── settings.py
│       └── util
│           ├── subprocess_util.py
│           └── zip.py
├── test
│   ├── __init__.py
│   ├── conftest.py
│   ├── resources
│   │   └── repos
│   │       ├── al
│   │       │   └── test_repo
│   │       │       ├── app.json
│   │       │       └── src
│   │       │           ├── Codeunits
│   │       │           │   ├── CustomerMgt.Codeunit.al
│   │       │           │   └── PaymentProcessorImpl.Codeunit.al
│   │       │           ├── Enums
│   │       │           │   └── CustomerType.Enum.al
│   │       │           ├── Interfaces
│   │       │           │   └── IPaymentProcessor.Interface.al
│   │       │           ├── Pages
│   │       │           │   ├── CustomerCard.Page.al
│   │       │           │   └── CustomerList.Page.al
│   │       │           ├── TableExtensions
│   │       │           │   └── Item.TableExt.al
│   │       │           └── Tables
│   │       │               └── Customer.Table.al
│   │       ├── bash
│   │       │   └── test_repo
│   │       │       ├── config.sh
│   │       │       ├── main.sh
│   │       │       └── utils.sh
│   │       ├── clojure
│   │       │   └── test_repo
│   │       │       ├── deps.edn
│   │       │       └── src
│   │       │           └── test_app
│   │       │               ├── core.clj
│   │       │               └── utils.clj
│   │       ├── csharp
│   │       │   └── test_repo
│   │       │       ├── .gitignore
│   │       │       ├── Models
│   │       │       │   └── Person.cs
│   │       │       ├── Program.cs
│   │       │       ├── serena.sln
│   │       │       └── TestProject.csproj
│   │       ├── dart
│   │       │   └── test_repo
│   │       │       ├── .gitignore
│   │       │       ├── lib
│   │       │       │   ├── helper.dart
│   │       │       │   ├── main.dart
│   │       │       │   └── models.dart
│   │       │       └── pubspec.yaml
│   │       ├── elixir
│   │       │   └── test_repo
│   │       │       ├── .gitignore
│   │       │       ├── lib
│   │       │       │   ├── examples.ex
│   │       │       │   ├── ignored_dir
│   │       │       │   │   └── ignored_module.ex
│   │       │       │   ├── models.ex
│   │       │       │   ├── services.ex
│   │       │       │   ├── test_repo.ex
│   │       │       │   └── utils.ex
│   │       │       ├── mix.exs
│   │       │       ├── mix.lock
│   │       │       ├── scripts
│   │       │       │   └── build_script.ex
│   │       │       └── test
│   │       │           ├── models_test.exs
│   │       │           └── test_repo_test.exs
│   │       ├── elm
│   │       │   └── test_repo
│   │       │       ├── elm.json
│   │       │       ├── Main.elm
│   │       │       └── Utils.elm
│   │       ├── erlang
│   │       │   └── test_repo
│   │       │       ├── hello.erl
│   │       │       ├── ignored_dir
│   │       │       │   └── ignored_module.erl
│   │       │       ├── include
│   │       │       │   ├── records.hrl
│   │       │       │   └── types.hrl
│   │       │       ├── math_utils.erl
│   │       │       ├── rebar.config
│   │       │       ├── src
│   │       │       │   ├── app.erl
│   │       │       │   ├── models.erl
│   │       │       │   ├── services.erl
│   │       │       │   └── utils.erl
│   │       │       └── test
│   │       │           ├── models_tests.erl
│   │       │           └── utils_tests.erl
│   │       ├── go
│   │       │   └── test_repo
│   │       │       └── main.go
│   │       ├── java
│   │       │   └── test_repo
│   │       │       ├── pom.xml
│   │       │       └── src
│   │       │           └── main
│   │       │               └── java
│   │       │                   └── test_repo
│   │       │                       ├── Main.java
│   │       │                       ├── Model.java
│   │       │                       ├── ModelUser.java
│   │       │                       └── Utils.java
│   │       ├── kotlin
│   │       │   └── test_repo
│   │       │       ├── .gitignore
│   │       │       ├── build.gradle.kts
│   │       │       └── src
│   │       │           └── main
│   │       │               └── kotlin
│   │       │                   └── test_repo
│   │       │                       ├── Main.kt
│   │       │                       ├── Model.kt
│   │       │                       ├── ModelUser.kt
│   │       │                       └── Utils.kt
│   │       ├── lua
│   │       │   └── test_repo
│   │       │       ├── .gitignore
│   │       │       ├── main.lua
│   │       │       ├── src
│   │       │       │   ├── calculator.lua
│   │       │       │   └── utils.lua
│   │       │       └── tests
│   │       │           └── test_calculator.lua
│   │       ├── markdown
│   │       │   └── test_repo
│   │       │       ├── api.md
│   │       │       ├── CONTRIBUTING.md
│   │       │       ├── guide.md
│   │       │       └── README.md
│   │       ├── nix
│   │       │   └── test_repo
│   │       │       ├── .gitignore
│   │       │       ├── default.nix
│   │       │       ├── flake.nix
│   │       │       ├── lib
│   │       │       │   └── utils.nix
│   │       │       ├── modules
│   │       │       │   └── example.nix
│   │       │       └── scripts
│   │       │           └── hello.sh
│   │       ├── perl
│   │       │   └── test_repo
│   │       │       ├── helper.pl
│   │       │       └── main.pl
│   │       ├── php
│   │       │   └── test_repo
│   │       │       ├── helper.php
│   │       │       ├── index.php
│   │       │       └── simple_var.php
│   │       ├── python
│   │       │   └── test_repo
│   │       │       ├── .gitignore
│   │       │       ├── custom_test
│   │       │       │   ├── __init__.py
│   │       │       │   └── advanced_features.py
│   │       │       ├── examples
│   │       │       │   ├── __init__.py
│   │       │       │   └── user_management.py
│   │       │       ├── ignore_this_dir_with_postfix
│   │       │       │   └── ignored_module.py
│   │       │       ├── scripts
│   │       │       │   ├── __init__.py
│   │       │       │   └── run_app.py
│   │       │       └── test_repo
│   │       │           ├── __init__.py
│   │       │           ├── complex_types.py
│   │       │           ├── models.py
│   │       │           ├── name_collisions.py
│   │       │           ├── nested_base.py
│   │       │           ├── nested.py
│   │       │           ├── overloaded.py
│   │       │           ├── services.py
│   │       │           ├── utils.py
│   │       │           └── variables.py
│   │       ├── r
│   │       │   └── test_repo
│   │       │       ├── .Rbuildignore
│   │       │       ├── DESCRIPTION
│   │       │       ├── examples
│   │       │       │   └── analysis.R
│   │       │       ├── NAMESPACE
│   │       │       └── R
│   │       │           ├── models.R
│   │       │           └── utils.R
│   │       ├── rego
│   │       │   └── test_repo
│   │       │       ├── policies
│   │       │       │   ├── authz.rego
│   │       │       │   └── validation.rego
│   │       │       └── utils
│   │       │           └── helpers.rego
│   │       ├── ruby
│   │       │   └── test_repo
│   │       │       ├── .solargraph.yml
│   │       │       ├── examples
│   │       │       │   └── user_management.rb
│   │       │       ├── lib.rb
│   │       │       ├── main.rb
│   │       │       ├── models.rb
│   │       │       ├── nested.rb
│   │       │       ├── services.rb
│   │       │       └── variables.rb
│   │       ├── rust
│   │       │   ├── test_repo
│   │       │   │   ├── Cargo.lock
│   │       │   │   ├── Cargo.toml
│   │       │   │   └── src
│   │       │   │       ├── lib.rs
│   │       │   │       └── main.rs
│   │       │   └── test_repo_2024
│   │       │       ├── Cargo.lock
│   │       │       ├── Cargo.toml
│   │       │       └── src
│   │       │           ├── lib.rs
│   │       │           └── main.rs
│   │       ├── swift
│   │       │   └── test_repo
│   │       │       ├── Package.swift
│   │       │       └── src
│   │       │           ├── main.swift
│   │       │           └── utils.swift
│   │       ├── terraform
│   │       │   └── test_repo
│   │       │       ├── data.tf
│   │       │       ├── main.tf
│   │       │       ├── outputs.tf
│   │       │       └── variables.tf
│   │       ├── typescript
│   │       │   └── test_repo
│   │       │       ├── .serena
│   │       │       │   └── project.yml
│   │       │       ├── index.ts
│   │       │       ├── tsconfig.json
│   │       │       └── use_helper.ts
│   │       └── zig
│   │           └── test_repo
│   │               ├── .gitignore
│   │               ├── build.zig
│   │               ├── src
│   │               │   ├── calculator.zig
│   │               │   ├── main.zig
│   │               │   └── math_utils.zig
│   │               └── zls.json
│   ├── serena
│   │   ├── __init__.py
│   │   ├── __snapshots__
│   │   │   └── test_symbol_editing.ambr
│   │   ├── config
│   │   │   ├── __init__.py
│   │   │   └── test_serena_config.py
│   │   ├── test_edit_marker.py
│   │   ├── test_mcp.py
│   │   ├── test_serena_agent.py
│   │   ├── test_symbol_editing.py
│   │   ├── test_symbol.py
│   │   ├── test_text_utils.py
│   │   ├── test_tool_parameter_types.py
│   │   └── util
│   │       ├── test_exception.py
│   │       └── test_file_system.py
│   └── solidlsp
│       ├── al
│       │   └── test_al_basic.py
│       ├── bash
│       │   ├── __init__.py
│       │   └── test_bash_basic.py
│       ├── clojure
│       │   ├── __init__.py
│       │   └── test_clojure_basic.py
│       ├── csharp
│       │   └── test_csharp_basic.py
│       ├── dart
│       │   ├── __init__.py
│       │   └── test_dart_basic.py
│       ├── elixir
│       │   ├── __init__.py
│       │   ├── conftest.py
│       │   ├── test_elixir_basic.py
│       │   ├── test_elixir_ignored_dirs.py
│       │   ├── test_elixir_integration.py
│       │   └── test_elixir_symbol_retrieval.py
│       ├── elm
│       │   └── test_elm_basic.py
│       ├── erlang
│       │   ├── __init__.py
│       │   ├── conftest.py
│       │   ├── test_erlang_basic.py
│       │   ├── test_erlang_ignored_dirs.py
│       │   └── test_erlang_symbol_retrieval.py
│       ├── go
│       │   └── test_go_basic.py
│       ├── java
│       │   └── test_java_basic.py
│       ├── kotlin
│       │   └── test_kotlin_basic.py
│       ├── lua
│       │   └── test_lua_basic.py
│       ├── markdown
│       │   ├── __init__.py
│       │   └── test_markdown_basic.py
│       ├── nix
│       │   └── test_nix_basic.py
│       ├── perl
│       │   └── test_perl_basic.py
│       ├── php
│       │   └── test_php_basic.py
│       ├── python
│       │   ├── test_python_basic.py
│       │   ├── test_retrieval_with_ignored_dirs.py
│       │   └── test_symbol_retrieval.py
│       ├── r
│       │   ├── __init__.py
│       │   └── test_r_basic.py
│       ├── rego
│       │   └── test_rego_basic.py
│       ├── ruby
│       │   ├── test_ruby_basic.py
│       │   └── test_ruby_symbol_retrieval.py
│       ├── rust
│       │   ├── test_rust_2024_edition.py
│       │   └── test_rust_basic.py
│       ├── swift
│       │   └── test_swift_basic.py
│       ├── terraform
│       │   └── test_terraform_basic.py
│       ├── typescript
│       │   └── test_typescript_basic.py
│       ├── util
│       │   └── test_zip.py
│       └── zig
│           └── test_zig_basic.py
└── uv.lock
```

# Files

--------------------------------------------------------------------------------
/test/serena/test_text_utils.py:
--------------------------------------------------------------------------------

```python
import re

import pytest

from serena.text_utils import LineType, search_files, search_text


class TestSearchText:
    def test_search_text_with_string_pattern(self):
        """Test searching with a simple string pattern."""
        content = """
        def hello_world():
            print("Hello, World!")
            return 42
        """

        # Search for a simple string pattern
        matches = search_text("print", content=content)

        assert len(matches) == 1
        assert matches[0].num_matched_lines == 1
        assert matches[0].start_line == 3
        assert matches[0].end_line == 3
        assert matches[0].lines[0].line_content.strip() == 'print("Hello, World!")'

    def test_search_text_with_regex_pattern(self):
        """Test searching with a regex pattern."""
        content = """
        class DataProcessor:
            def __init__(self, data):
                self.data = data

            def process(self):
                return [x * 2 for x in self.data if x > 0]

            def filter(self, predicate):
                return [x for x in self.data if predicate(x)]
        """

        # Search for a regex pattern matching method definitions
        pattern = r"def\s+\w+\s*\([^)]*\):"
        matches = search_text(pattern, content=content)

        assert len(matches) == 3
        assert matches[0].lines[0].match_type == LineType.MATCH
        assert "def __init__" in matches[0].lines[0].line_content
        assert "def process" in matches[1].lines[0].line_content
        assert "def filter" in matches[2].lines[0].line_content

    def test_search_text_with_compiled_regex(self):
        """Test searching with a pre-compiled regex pattern."""
        content = """
        import os
        import sys
        from pathlib import Path

        # Configuration variables
        DEBUG = True
        MAX_RETRIES = 3

        def configure_logging():
            log_level = "DEBUG" if DEBUG else "INFO"
            print(f"Setting log level to {log_level}")
        """

        # Search for variable assignments with a compiled regex
        pattern = re.compile(r"^\s*[A-Z_]+ = .+$")
        matches = search_text(pattern, content=content)

        assert len(matches) == 2
        assert "DEBUG = True" in matches[0].lines[0].line_content
        assert "MAX_RETRIES = 3" in matches[1].lines[0].line_content

    def test_search_text_with_context_lines(self):
        """Test searching with context lines before and after the match."""
        content = """
        def complex_function(a, b, c):
            # This is a complex function that does something.
            if a > b:
                return a * c
            elif b > a:
                return b * c
            else:
                return (a + b) * c
        """

        # Search with context lines
        matches = search_text("return", content=content, context_lines_before=1, context_lines_after=1)

        assert len(matches) == 3

        # Check the first match with context
        first_match = matches[0]
        assert len(first_match.lines) == 3
        assert first_match.lines[0].match_type == LineType.BEFORE_MATCH
        assert first_match.lines[1].match_type == LineType.MATCH
        assert first_match.lines[2].match_type == LineType.AFTER_MATCH

        # Verify the content of lines
        assert "if a > b:" in first_match.lines[0].line_content
        assert "return a * c" in first_match.lines[1].line_content
        assert "elif b > a:" in first_match.lines[2].line_content

    def test_search_text_with_multiline_match(self):
        """Test searching with multiline pattern matching."""
        content = """
        def factorial(n):
            if n <= 1:
                return 1
            else:
                return n * factorial(n-1)

        result = factorial(5)  # Should be 120
        """

        # Search for a pattern that spans multiple lines (if-else block)
        pattern = r"if.*?else.*?return"
        matches = search_text(pattern, content=content, allow_multiline_match=True)

        assert len(matches) == 1
        multiline_match = matches[0]
        assert multiline_match.num_matched_lines >= 3
        assert "if n <= 1:" in multiline_match.lines[0].line_content

        # All matched lines should have match_type == LineType.MATCH
        match_lines = [line for line in multiline_match.lines if line.match_type == LineType.MATCH]
        assert len(match_lines) >= 3

    def test_search_text_with_glob_pattern(self):
        """Test searching with glob-like patterns."""
        content = """
        class UserService:
            def get_user(self, user_id):
                return {"id": user_id, "name": "Test User"}

            def create_user(self, user_data):
                print(f"Creating user: {user_data}")
                return {"id": 123, **user_data}

            def update_user(self, user_id, user_data):
                print(f"Updating user {user_id} with {user_data}")
                return True
        """

        # Search with a glob pattern for all user methods
        matches = search_text("*_user*", content=content, is_glob=True)

        assert len(matches) == 3
        assert "get_user" in matches[0].lines[0].line_content
        assert "create_user" in matches[1].lines[0].line_content
        assert "update_user" in matches[2].lines[0].line_content

    def test_search_text_with_complex_glob_pattern(self):
        """Test searching with more complex glob patterns."""
        content = """
        def process_data(data):
            return [transform(item) for item in data]

        def transform(item):
            if isinstance(item, dict):
                return {k: v.upper() if isinstance(v, str) else v for k, v in item.items()}
            elif isinstance(item, list):
                return [x * 2 for x in item if isinstance(x, (int, float))]
            elif isinstance(item, str):
                return item.upper()
            else:
                return item
        """

        # Search with a simplified glob pattern to find all isinstance occurrences
        matches = search_text("*isinstance*", content=content, is_glob=True)

        # Should match lines with isinstance(item, dict) and isinstance(item, list)
        assert len(matches) >= 2
        instance_matches = [
            line.line_content
            for match in matches
            for line in match.lines
            if line.match_type == LineType.MATCH and "isinstance(item," in line.line_content
        ]
        assert len(instance_matches) >= 2
        assert any("isinstance(item, dict)" in line for line in instance_matches)
        assert any("isinstance(item, list)" in line for line in instance_matches)

    def test_search_text_glob_with_special_chars(self):
        """Glob patterns containing regex special characters should match literally."""
        content = """
        def func_square():
            print("value[42]")

        def func_curly():
            print("value{bar}")
        """

        matches_square = search_text(r"*\[42\]*", content=content, is_glob=True)
        assert len(matches_square) == 1
        assert "[42]" in matches_square[0].lines[0].line_content

        matches_curly = search_text("*{bar}*", content=content, is_glob=True)
        assert len(matches_curly) == 1
        assert "{bar}" in matches_curly[0].lines[0].line_content

    def test_search_text_no_matches(self):
        """Test searching with a pattern that doesn't match anything."""
        content = """
        def calculate_average(numbers):
            if not numbers:
                return 0
            return sum(numbers) / len(numbers)
        """

        # Search for a pattern that doesn't exist in the content
        matches = search_text("missing_function", content=content)

        assert len(matches) == 0


# Mock file reader that always returns matching content
def mock_reader_always_match(file_path: str) -> str:
    """Mock file reader that returns content guaranteed to match the simple pattern."""
    return "This line contains a match."


class TestSearchFiles:
    @pytest.mark.parametrize(
        "file_paths, pattern, paths_include_glob, paths_exclude_glob, expected_matched_files, description",
        [
            # Basic cases
            (["a.py", "b.txt"], "match", None, None, ["a.py", "b.txt"], "No filters"),
            (["a.py", "b.txt"], "match", "*.py", None, ["a.py"], "Include only .py files"),
            (["a.py", "b.txt"], "match", None, "*.txt", ["a.py"], "Exclude .txt files"),
            (["a.py", "b.txt", "c.py"], "match", "*.py", "c.*", ["a.py"], "Include .py, exclude c.*"),
            # Directory matching - Using pathspec patterns
            (["main.c", "test/main.c"], "match", "test/*", None, ["test/main.c"], "Include files in test/ subdir"),
            (["data/a.csv", "data/b.log"], "match", "data/*", "*.log", ["data/a.csv"], "Include data/*, exclude *.log"),
            (["src/a.py", "tests/b.py"], "match", "src/**", "tests/**", ["src/a.py"], "Include src/**, exclude tests/**"),
            (["src/mod/a.py", "tests/b.py"], "match", "**/*.py", "tests/**", ["src/mod/a.py"], "Include **/*.py, exclude tests/**"),
            (["file.py", "dir/file.py"], "match", "dir/*.py", None, ["dir/file.py"], "Include files directly in dir"),
            (["file.py", "dir/sub/file.py"], "match", "dir/**/*.py", None, ["dir/sub/file.py"], "Include files recursively in dir"),
            # Overlap and edge cases
            (["file.py", "dir/file.py"], "match", "*.py", "dir/*", ["file.py"], "Include *.py, exclude files directly in dir"),
            (["root.py", "adir/a.py", "bdir/b.py"], "match", "a*/*.py", None, ["adir/a.py"], "Include files in dirs starting with 'a'"),
            (["a.txt", "b.log"], "match", "*.py", None, [], "No files match include pattern"),
            (["a.py", "b.py"], "match", None, "*.py", [], "All files match exclude pattern"),
            (["a.py", "b.py"], "match", "a.*", "*.py", [], "Include a.* but exclude *.py -> empty"),
            (["a.py", "b.py"], "match", "*.py", "b.*", ["a.py"], "Include *.py but exclude b.* -> a.py"),
        ],
        ids=lambda x: x if isinstance(x, str) else "",  # Use description as test ID
    )
    def test_search_files_include_exclude(
        self, file_paths, pattern, paths_include_glob, paths_exclude_glob, expected_matched_files, description
    ):
        """
        Test the include/exclude glob filtering logic in search_files using PathSpec patterns.
        """
        results = search_files(
            relative_file_paths=file_paths,
            pattern=pattern,
            file_reader=mock_reader_always_match,
            paths_include_glob=paths_include_glob,
            paths_exclude_glob=paths_exclude_glob,
            context_lines_before=0,  # No context needed for this test focus
            context_lines_after=0,
        )

        # Extract the source file paths from the results
        actual_matched_files = sorted([result.source_file_path for result in results if result.source_file_path])

        # Assert that the matched files are exactly the ones expected
        assert actual_matched_files == sorted(expected_matched_files)

        # Basic check on results structure if files were expected
        if expected_matched_files:
            assert len(results) == len(expected_matched_files)
            for result in results:
                assert len(result.matched_lines) == 1  # Mock reader returns one matching line
                assert result.matched_lines[0].line_content == "This line contains a match."
                assert result.matched_lines[0].match_type == LineType.MATCH

    @pytest.mark.parametrize(
        "file_paths, pattern, paths_include_glob, paths_exclude_glob, expected_matched_files, description",
        [
            # Glob patterns that were problematic with gitignore syntax
            (
                ["src/serena/agent.py", "src/serena/process_isolated_agent.py", "test/agent.py"],
                "match",
                "src/**agent.py",
                None,
                ["src/serena/agent.py", "src/serena/process_isolated_agent.py"],
                "Glob: src/**agent.py should match files ending with agent.py under src/",
            ),
            (
                ["src/serena/agent.py", "src/serena/process_isolated_agent.py", "other/agent.py"],
                "match",
                "**agent.py",
                None,
                ["src/serena/agent.py", "src/serena/process_isolated_agent.py", "other/agent.py"],
                "Glob: **agent.py should match files ending with agent.py anywhere",
            ),
            (
                ["dir/subdir/file.py", "dir/other/file.py", "elsewhere/file.py"],
                "match",
                "dir/**file.py",
                None,
                ["dir/subdir/file.py", "dir/other/file.py"],
                "Glob: dir/**file.py should match files ending with file.py under dir/",
            ),
            (
                ["src/a/b/c/test.py", "src/x/test.py", "other/test.py"],
                "match",
                "src/**/test.py",
                None,
                ["src/a/b/c/test.py", "src/x/test.py"],
                "Glob: src/**/test.py should match test.py files under src/ at any depth",
            ),
            # Edge cases for ** patterns
            (
                ["agent.py", "src/agent.py", "src/serena/agent.py"],
                "match",
                "**agent.py",
                None,
                ["agent.py", "src/agent.py", "src/serena/agent.py"],
                "Glob: **agent.py should match at root and any depth",
            ),
            (["file.txt", "src/file.txt"], "match", "src/**", None, ["src/file.txt"], "Glob: src/** should match everything under src/"),
        ],
        ids=lambda x: x if isinstance(x, str) else "",  # Use description as test ID
    )
    def test_search_files_glob_patterns(
        self, file_paths, pattern, paths_include_glob, paths_exclude_glob, expected_matched_files, description
    ):
        """
        Test glob patterns that were problematic with the previous gitignore-based implementation.
        """
        results = search_files(
            relative_file_paths=file_paths,
            pattern=pattern,
            file_reader=mock_reader_always_match,
            paths_include_glob=paths_include_glob,
            paths_exclude_glob=paths_exclude_glob,
            context_lines_before=0,
            context_lines_after=0,
        )

        # Extract the source file paths from the results
        actual_matched_files = sorted([result.source_file_path for result in results if result.source_file_path])

        # Assert that the matched files are exactly the ones expected
        assert actual_matched_files == sorted(
            expected_matched_files
        ), f"Pattern '{paths_include_glob}' failed: expected {sorted(expected_matched_files)}, got {actual_matched_files}"

        # Basic check on results structure if files were expected
        if expected_matched_files:
            assert len(results) == len(expected_matched_files)
            for result in results:
                assert len(result.matched_lines) == 1  # Mock reader returns one matching line
                assert result.matched_lines[0].line_content == "This line contains a match."
                assert result.matched_lines[0].match_type == LineType.MATCH

    @pytest.mark.parametrize(
        "file_paths, pattern, paths_include_glob, paths_exclude_glob, expected_matched_files, description",
        [
            # Brace expansion in include glob
            (
                ["a.py", "b.js", "c.txt"],
                "match",
                "*.{py,js}",
                None,
                ["a.py", "b.js"],
                "Brace expansion in include glob",
            ),
            # Brace expansion in exclude glob
            (
                ["a.py", "b.log", "c.txt"],
                "match",
                "*.{py,log,txt}",
                "*.{log,txt}",
                ["a.py"],
                "Brace expansion in exclude glob",
            ),
            # Brace expansion in both include and exclude
            (
                ["src/a.ts", "src/b.js", "test/a.ts", "test/b.js"],
                "match",
                "**/*.{ts,js}",
                "test/**/*.{ts,js}",
                ["src/a.ts", "src/b.js"],
                "Brace expansion in both include and exclude",
            ),
            # No matching files with brace expansion
            (
                ["a.py", "b.js"],
                "match",
                "*.{c,h}",
                None,
                [],
                "Brace expansion with no matching files",
            ),
            # Multiple brace expansions
            (
                ["src/a/a.py", "src/b/b.py", "lib/a/a.py", "lib/b/b.py"],
                "match",
                "{src,lib}/{a,b}/*.py",
                "lib/b/*.py",
                ["src/a/a.py", "src/b/b.py", "lib/a/a.py"],
                "Multiple brace expansions in include/exclude",
            ),
        ],
        ids=lambda x: x if isinstance(x, str) else "",
    )
    def test_search_files_with_brace_expansion(
        self, file_paths, pattern, paths_include_glob, paths_exclude_glob, expected_matched_files, description
    ):
        """Test search_files with glob patterns containing brace expansions."""
        results = search_files(
            relative_file_paths=file_paths,
            pattern=pattern,
            file_reader=mock_reader_always_match,
            paths_include_glob=paths_include_glob,
            paths_exclude_glob=paths_exclude_glob,
        )

        actual_matched_files = sorted([result.source_file_path for result in results if result.source_file_path])
        assert actual_matched_files == sorted(expected_matched_files), f"Test failed: {description}"

    def test_search_files_no_pattern_match_in_content(self):
        """Test that no results are returned if the pattern doesn't match the file content, even if files pass filters."""
        file_paths = ["a.py", "b.txt"]
        pattern = "non_existent_pattern_in_mock_content"  # This won't match mock_reader_always_match content
        results = search_files(
            relative_file_paths=file_paths,
            pattern=pattern,
            file_reader=mock_reader_always_match,  # Content is "This line contains a match."
            paths_include_glob=None,  # Both files would pass filters
            paths_exclude_glob=None,
        )
        assert len(results) == 0, "Should not find matches if pattern doesn't match content"

    def test_search_files_regex_pattern_with_filters(self):
        """Test using a regex pattern works correctly along with include/exclude filters."""

        def specific_mock_reader(file_path: str) -> str:
            # Provide different content for different files to test regex matching
            if file_path == "a.py":  # noqa: SIM116
                return "File A: value=123\nFile A: value=456"
            elif file_path == "b.py":
                return "File B: value=789"
            elif file_path == "c.txt":
                return "File C: value=000"
            return "No values here."

        file_paths = ["a.py", "b.py", "c.txt"]
        pattern = r"value=(\d+)"

        results = search_files(
            relative_file_paths=file_paths,
            pattern=pattern,
            file_reader=specific_mock_reader,
            paths_include_glob="*.py",  # Only include .py files
            paths_exclude_glob="b.*",  # Exclude files starting with b
        )

        # Expected: a.py included, b.py excluded by glob, c.txt excluded by glob
        # a.py has two matches for the regex pattern
        assert len(results) == 2, "Expected 2 matches only from a.py"
        actual_matched_files = sorted([result.source_file_path for result in results if result.source_file_path])
        assert actual_matched_files == ["a.py", "a.py"], "Both matches should be from a.py"
        # Check the content of the matched lines
        assert results[0].matched_lines[0].line_content == "File A: value=123"
        assert results[1].matched_lines[0].line_content == "File A: value=456"

    def test_search_files_context_lines_with_filters(self):
        """Test context lines are included correctly when filters are active."""

        def context_mock_reader(file_path: str) -> str:
            if file_path == "include_me.txt":
                return "Line before 1\nLine before 2\nMATCH HERE\nLine after 1\nLine after 2"
            elif file_path == "exclude_me.log":
                return "Noise\nMATCH HERE\nNoise"
            return "No match"

        file_paths = ["include_me.txt", "exclude_me.log"]
        pattern = "MATCH HERE"

        results = search_files(
            relative_file_paths=file_paths,
            pattern=pattern,
            file_reader=context_mock_reader,
            paths_include_glob="*.txt",  # Only include .txt files
            paths_exclude_glob=None,
            context_lines_before=1,
            context_lines_after=1,
        )

        # Expected: Only include_me.txt should be processed and matched
        assert len(results) == 1, "Expected only one result from the included file"
        result = results[0]
        assert result.source_file_path == "include_me.txt"
        assert len(result.lines) == 3, "Expected 3 lines (1 before, 1 match, 1 after)"
        assert result.lines[0].line_content == "Line before 2", "Incorrect 'before' context line"
        assert result.lines[0].match_type == LineType.BEFORE_MATCH
        assert result.lines[1].line_content == "MATCH HERE", "Incorrect 'match' line"
        assert result.lines[1].match_type == LineType.MATCH
        assert result.lines[2].line_content == "Line after 1", "Incorrect 'after' context line"
        assert result.lines[2].match_type == LineType.AFTER_MATCH


class TestGlobMatch:
    """Test the glob_match function directly."""

    @pytest.mark.parametrize(
        "pattern, path, expected",
        [
            # Basic wildcard patterns
            ("*.py", "file.py", True),
            ("*.py", "file.txt", False),
            ("*agent.py", "agent.py", True),
            ("*agent.py", "process_isolated_agent.py", True),
            ("*agent.py", "agent_test.py", False),
            # Double asterisk patterns
            ("**agent.py", "agent.py", True),
            ("**agent.py", "src/agent.py", True),
            ("**agent.py", "src/serena/agent.py", True),
            ("**agent.py", "src/serena/process_isolated_agent.py", True),
            ("**agent.py", "agent_test.py", False),
            # Prefix with double asterisk
            ("src/**agent.py", "src/agent.py", True),
            ("src/**agent.py", "src/serena/agent.py", True),
            ("src/**agent.py", "src/serena/process_isolated_agent.py", True),
            ("src/**agent.py", "other/agent.py", False),
            ("src/**agent.py", "src/agent_test.py", False),
            # Directory patterns
            ("src/**", "src/file.py", True),
            ("src/**", "src/dir/file.py", True),
            ("src/**", "other/file.py", False),
            # Exact matches with double asterisk
            ("src/**/test.py", "src/test.py", True),
            ("src/**/test.py", "src/a/b/test.py", True),
            ("src/**/test.py", "src/test_file.py", False),
            # Simple patterns without asterisks
            ("src/file.py", "src/file.py", True),
            ("src/file.py", "src/other.py", False),
        ],
    )
    def test_glob_match(self, pattern, path, expected):
        """Test glob_match function with various patterns."""
        from serena.text_utils import glob_match

        assert glob_match(pattern, path) == expected


class TestExpandBraces:
    """Test the expand_braces function."""

    @pytest.mark.parametrize(
        "pattern, expected",
        [
            # Basic case
            ("src/*.{js,ts}", ["src/*.js", "src/*.ts"]),
            # No braces
            ("src/*.py", ["src/*.py"]),
            # Multiple brace sets
            ("src/{a,b}/{c,d}.py", ["src/a/c.py", "src/a/d.py", "src/b/c.py", "src/b/d.py"]),
            # Empty string
            ("", [""]),
            # Braces with empty elements
            ("src/{a,,b}.py", ["src/a.py", "src/.py", "src/b.py"]),
            # No commas
            ("src/{a}.py", ["src/a.py"]),
        ],
    )
    def test_expand_braces(self, pattern, expected):
        """Test brace expansion for glob patterns."""
        from serena.text_utils import expand_braces

        assert sorted(expand_braces(pattern)) == sorted(expected)

```
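
The brace-expansion cases in `TestExpandBraces` pin down the expected contract of `expand_braces` fairly completely. As a rough illustration of that contract (not the library's actual implementation; `expand_braces_sketch` is a hypothetical name), a minimal recursive sketch that satisfies the same parametrized cases could look like this:

```python
import re


def expand_braces_sketch(pattern: str) -> list[str]:
    """Expand the first {a,b,...} group and recurse; non-nested braces only."""
    match = re.search(r"\{([^{}]*)\}", pattern)
    if match is None:
        return [pattern]  # no braces left: the pattern itself is the only expansion
    prefix, suffix = pattern[: match.start()], pattern[match.end() :]
    expansions: list[str] = []
    for option in match.group(1).split(","):
        expansions.extend(expand_braces_sketch(prefix + option + suffix))
    return expansions


# Mirrors the parametrized expectations above
assert expand_braces_sketch("src/*.{js,ts}") == ["src/*.js", "src/*.ts"]
assert expand_braces_sketch("src/{a,,b}.py") == ["src/a.py", "src/.py", "src/b.py"]
assert expand_braces_sketch("src/*.py") == ["src/*.py"]
```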

--------------------------------------------------------------------------------
/src/serena/config/serena_config.py:
--------------------------------------------------------------------------------

```python
"""
The Serena Model Context Protocol (MCP) Server
"""

import os
import shutil
from collections.abc import Iterable
from copy import deepcopy
from dataclasses import dataclass, field
from datetime import datetime
from functools import cached_property
from pathlib import Path
from typing import TYPE_CHECKING, Any, Optional, Self, TypeVar

import yaml
from ruamel.yaml.comments import CommentedMap
from sensai.util import logging
from sensai.util.logging import LogTime, datetime_tag
from sensai.util.string import ToStringMixin

from serena.constants import (
    DEFAULT_ENCODING,
    PROJECT_TEMPLATE_FILE,
    REPO_ROOT,
    SERENA_CONFIG_TEMPLATE_FILE,
    SERENA_MANAGED_DIR_IN_HOME,
    SERENA_MANAGED_DIR_NAME,
)
from serena.util.general import load_yaml, save_yaml
from serena.util.inspection import determine_programming_language_composition
from solidlsp.ls_config import Language

from ..analytics import RegisteredTokenCountEstimator
from ..util.class_decorators import singleton

if TYPE_CHECKING:
    from ..project import Project

log = logging.getLogger(__name__)
T = TypeVar("T")
DEFAULT_TOOL_TIMEOUT: float = 240


@singleton
class SerenaPaths:
    """
    Provides paths to various Serena-related directories and files.
    """

    def __init__(self) -> None:
        self.user_config_dir: str = SERENA_MANAGED_DIR_IN_HOME
        """
        the path to the user's Serena configuration directory, which is typically ~/.serena
        """

    def get_next_log_file_path(self, prefix: str) -> str:
        """
        :param prefix: the filename prefix indicating the type of the log file
        :return: the full path to the log file to use
        """
        log_dir = os.path.join(self.user_config_dir, "logs", datetime.now().strftime("%Y-%m-%d"))
        os.makedirs(log_dir, exist_ok=True)
        return os.path.join(log_dir, prefix + "_" + datetime_tag() + ".txt")

    # TODO: Paths from constants.py should be moved here


class ToolSet:
    def __init__(self, tool_names: set[str]) -> None:
        self._tool_names = tool_names

    @classmethod
    def default(cls) -> "ToolSet":
        """
        :return: the default tool set, which contains all tools that are enabled by default
        """
        from serena.tools import ToolRegistry

        return cls(set(ToolRegistry().get_tool_names_default_enabled()))

    def apply(self, *tool_inclusion_definitions: "ToolInclusionDefinition") -> "ToolSet":
        """
        :param tool_inclusion_definitions: the definitions to apply
        :return: a new tool set with the definitions applied
        """
        from serena.tools import ToolRegistry

        registry = ToolRegistry()
        tool_names = set(self._tool_names)
        for definition in tool_inclusion_definitions:
            included_tools = []
            excluded_tools = []
            for included_tool in definition.included_optional_tools:
                if not registry.is_valid_tool_name(included_tool):
                    raise ValueError(f"Invalid tool name '{included_tool}' provided for inclusion")
                if included_tool not in tool_names:
                    tool_names.add(included_tool)
                    included_tools.append(included_tool)
            for excluded_tool in definition.excluded_tools:
                if not registry.is_valid_tool_name(excluded_tool):
                    raise ValueError(f"Invalid tool name '{excluded_tool}' provided for exclusion")
                if excluded_tool in tool_names:
                    tool_names.remove(excluded_tool)
                    excluded_tools.append(excluded_tool)
            if included_tools:
                log.info(f"{definition} included {len(included_tools)} tools: {', '.join(included_tools)}")
            if excluded_tools:
                log.info(f"{definition} excluded {len(excluded_tools)} tools: {', '.join(excluded_tools)}")
        return ToolSet(tool_names)

    def without_editing_tools(self) -> "ToolSet":
        """
        :return: a new tool set that excludes all tools that can edit
        """
        from serena.tools import ToolRegistry

        registry = ToolRegistry()
        tool_names = set(self._tool_names)
        for tool_name in self._tool_names:
            if registry.get_tool_class_by_name(tool_name).can_edit():
                tool_names.remove(tool_name)
        return ToolSet(tool_names)

    def get_tool_names(self) -> set[str]:
        """
        Returns the names of the tools that are currently included in the tool set.
        """
        return self._tool_names

    def includes_name(self, tool_name: str) -> bool:
        return tool_name in self._tool_names


@dataclass
class ToolInclusionDefinition:
    excluded_tools: Iterable[str] = ()
    included_optional_tools: Iterable[str] = ()


class SerenaConfigError(Exception):
    pass


def get_serena_managed_in_project_dir(project_root: str | Path) -> str:
    return os.path.join(project_root, SERENA_MANAGED_DIR_NAME)


def is_running_in_docker() -> bool:
    """Check if we're running inside a Docker container."""
    # Check for Docker-specific files
    if os.path.exists("/.dockerenv"):
        return True
    # Check cgroup for docker references
    try:
        with open("/proc/self/cgroup") as f:
            return "docker" in f.read()
    except FileNotFoundError:
        return False


@dataclass(kw_only=True)
class ProjectConfig(ToolInclusionDefinition, ToStringMixin):
    project_name: str
    language: Language
    ignored_paths: list[str] = field(default_factory=list)
    read_only: bool = False
    ignore_all_files_in_gitignore: bool = True
    initial_prompt: str = ""
    encoding: str = DEFAULT_ENCODING

    SERENA_DEFAULT_PROJECT_FILE = "project.yml"

    def _tostring_includes(self) -> list[str]:
        return ["project_name"]

    @classmethod
    def autogenerate(
        cls, project_root: str | Path, project_name: str | None = None, project_language: Language | None = None, save_to_disk: bool = True
    ) -> Self:
        """
        Autogenerate a project configuration for a given project root.

        :param project_root: the path to the project root
        :param project_name: the name of the project; if None, the name of the project will be the name of the directory
            containing the project
        :param project_language: the programming language of the project; if None, it will be determined automatically
        :param save_to_disk: whether to save the project configuration to disk
        :return: the project configuration
        """
        project_root = Path(project_root).resolve()
        if not project_root.exists():
            raise FileNotFoundError(f"Project root not found: {project_root}")
        with LogTime("Project configuration auto-generation", logger=log):
            project_name = project_name or project_root.name
            if project_language is None:
                language_composition = determine_programming_language_composition(str(project_root))
                if len(language_composition) == 0:
                    raise ValueError(
                        f"No source files found in {project_root}\n\n"
                        f"To use Serena with this project, you need to either:\n"
                        f"1. Add source files in one of the supported languages (Python, JavaScript/TypeScript, Java, C#, Rust, Go, Ruby, C++, PHP, Swift, Elixir, Terraform, Bash)\n"
                        f"2. Create a project configuration file manually at:\n"
                        f"   {os.path.join(project_root, cls.rel_path_to_project_yml())}\n\n"
                        f"Example project.yml:\n"
                        f"  project_name: {project_name}\n"
                        f"  language: python  # or typescript, java, csharp, rust, go, ruby, cpp, php, swift, elixir, terraform, bash\n"
                    )
                # find the language with the highest percentage
                dominant_language = max(language_composition.keys(), key=lambda lang: language_composition[lang])
            else:
                dominant_language = project_language.value
            config_with_comments = load_yaml(PROJECT_TEMPLATE_FILE, preserve_comments=True)
            config_with_comments["project_name"] = project_name
            config_with_comments["language"] = dominant_language
            if save_to_disk:
                save_yaml(str(project_root / cls.rel_path_to_project_yml()), config_with_comments, preserve_comments=True)
            return cls._from_dict(config_with_comments)

    @classmethod
    def rel_path_to_project_yml(cls) -> str:
        return os.path.join(SERENA_MANAGED_DIR_NAME, cls.SERENA_DEFAULT_PROJECT_FILE)

    @classmethod
    def _from_dict(cls, data: dict[str, Any]) -> Self:
        """
        Create a ProjectConfig instance from a configuration dictionary
        """
        language_str = data["language"].lower()
        project_name = data["project_name"]
        # backwards compatibility
        if language_str == "javascript":
            log.warning(f"Found deprecated project language `javascript` in project {project_name}, please change to `typescript`")
            language_str = "typescript"
        try:
            language = Language(language_str)
        except ValueError as e:
            raise ValueError(f"Invalid language: {data['language']}.\nValid languages are: {[l.value for l in Language]}") from e
        return cls(
            project_name=project_name,
            language=language,
            ignored_paths=data.get("ignored_paths", []),
            excluded_tools=data.get("excluded_tools", []),
            included_optional_tools=data.get("included_optional_tools", []),
            read_only=data.get("read_only", False),
            ignore_all_files_in_gitignore=data.get("ignore_all_files_in_gitignore", True),
            initial_prompt=data.get("initial_prompt", ""),
            encoding=data.get("encoding", DEFAULT_ENCODING),
        )

    @classmethod
    def load(cls, project_root: Path | str, autogenerate: bool = False) -> Self:
        """
        Load a ProjectConfig instance from the path to the project root.
        """
        project_root = Path(project_root)
        yaml_path = project_root / cls.rel_path_to_project_yml()
        if not yaml_path.exists():
            if autogenerate:
                return cls.autogenerate(project_root)
            else:
                raise FileNotFoundError(f"Project configuration file not found: {yaml_path}")
        with open(yaml_path, encoding="utf-8") as f:
            yaml_data = yaml.safe_load(f)
        if "project_name" not in yaml_data:
            yaml_data["project_name"] = project_root.name
        return cls._from_dict(yaml_data)


class RegisteredProject(ToStringMixin):
    def __init__(self, project_root: str, project_config: "ProjectConfig", project_instance: Optional["Project"] = None) -> None:
        """
        Represents a registered project in the Serena configuration.

        :param project_root: the root directory of the project
        :param project_config: the configuration of the project
        """
        self.project_root = Path(project_root).resolve()
        self.project_config = project_config
        self._project_instance = project_instance

    def _tostring_exclude_private(self) -> bool:
        return True

    @property
    def project_name(self) -> str:
        return self.project_config.project_name

    @classmethod
    def from_project_instance(cls, project_instance: "Project") -> "RegisteredProject":
        return RegisteredProject(
            project_root=project_instance.project_root,
            project_config=project_instance.project_config,
            project_instance=project_instance,
        )

    def matches_root_path(self, path: str | Path) -> bool:
        """
        Check if the given path matches the project root path.

        :param path: the path to check
        :return: True if the path matches the project root, False otherwise
        """
        return self.project_root == Path(path).resolve()

    def get_project_instance(self) -> "Project":
        """
        Returns the project instance for this registered project, loading it if necessary.
        """
        if self._project_instance is None:
            from ..project import Project

            with LogTime(f"Loading project instance for {self}", logger=log):
                self._project_instance = Project(project_root=str(self.project_root), project_config=self.project_config)
        return self._project_instance


@dataclass(kw_only=True)
class SerenaConfig(ToolInclusionDefinition, ToStringMixin):
    """
    Holds the Serena agent configuration, which is typically loaded from a YAML configuration file
    (when instantiated via :meth:`from_config_file`) and updated when projects are added or removed.
    For testing purposes, it can also be instantiated directly with the desired parameters.
    """

    projects: list[RegisteredProject] = field(default_factory=list)
    gui_log_window_enabled: bool = False
    log_level: int = logging.INFO
    trace_lsp_communication: bool = False
    web_dashboard: bool = True
    web_dashboard_open_on_launch: bool = True
    tool_timeout: float = DEFAULT_TOOL_TIMEOUT
    loaded_commented_yaml: CommentedMap | None = None
    config_file_path: str | None = None
    """
    the path to the configuration file to which updates of the configuration shall be saved;
    if None, the configuration is not saved to disk
    """
    jetbrains: bool = False
    """
    whether to apply JetBrains mode
    """
    record_tool_usage_stats: bool = False
    """Whether to record tool usage statistics, they will be shown in the web dashboard if recording is active. 
    """
    token_count_estimator: str = RegisteredTokenCountEstimator.TIKTOKEN_GPT4O.name
    """Only relevant if `record_tool_usage` is True; the name of the token count estimator to use for tool usage statistics.
    See the `RegisteredTokenCountEstimator` enum for available options.
    
    Note: some token estimators (like tiktoken) may require downloading data files
    on the first run, which can take some time and require internet access. Others, like the Anthropic ones, may require an API key
    and rate limits may apply.
    """
    default_max_tool_answer_chars: int = 150_000
    """Used as default for tools where the apply method has a default maximal answer length.
    Even though the value of the max_answer_chars can be changed when calling the tool, it may make sense to adjust this default 
    through the global configuration.
    """
    ls_specific_settings: dict = field(default_factory=dict)
    """Advanced configuration option allowing to configure language server implementation specific options, see SolidLSPSettings for more info."""

    CONFIG_FILE = "serena_config.yml"
    CONFIG_FILE_DOCKER = "serena_config.docker.yml"  # Docker-specific config file; auto-generated if missing, mounted via docker-compose for user customization

    def _tostring_includes(self) -> list[str]:
        return ["config_file_path"]

    @classmethod
    def generate_config_file(cls, config_file_path: str) -> None:
        """
        Generates a Serena configuration file at the specified path from the template file.

        :param config_file_path: the path where the configuration file should be generated
        """
        log.info(f"Auto-generating Serena configuration file in {config_file_path}")
        loaded_commented_yaml = load_yaml(SERENA_CONFIG_TEMPLATE_FILE, preserve_comments=True)
        save_yaml(config_file_path, loaded_commented_yaml, preserve_comments=True)

    @classmethod
    def _determine_config_file_path(cls) -> str:
        """
        :return: the location where the Serena configuration file is stored/should be stored
        """
        if is_running_in_docker():
            return os.path.join(REPO_ROOT, cls.CONFIG_FILE_DOCKER)
        else:
            config_path = os.path.join(SERENA_MANAGED_DIR_IN_HOME, cls.CONFIG_FILE)

            # if the config file does not exist, check if we can migrate it from the old location
            if not os.path.exists(config_path):
                old_config_path = os.path.join(REPO_ROOT, cls.CONFIG_FILE)
                if os.path.exists(old_config_path):
                    log.info(f"Moving Serena configuration file from {old_config_path} to {config_path}")
                    os.makedirs(os.path.dirname(config_path), exist_ok=True)
                    shutil.move(old_config_path, config_path)

            return config_path

    @classmethod
    def from_config_file(cls, generate_if_missing: bool = True) -> "SerenaConfig":
        """
        Static constructor to create SerenaConfig from the configuration file
        """
        config_file_path = cls._determine_config_file_path()

        # create the configuration file from the template if necessary
        if not os.path.exists(config_file_path):
            if not generate_if_missing:
                raise FileNotFoundError(f"Serena configuration file not found: {config_file_path}")
            log.info(f"Serena configuration file not found at {config_file_path}, autogenerating...")
            cls.generate_config_file(config_file_path)

        # load the configuration
        log.info(f"Loading Serena configuration from {config_file_path}")
        try:
            loaded_commented_yaml = load_yaml(config_file_path, preserve_comments=True)
        except Exception as e:
            raise ValueError(f"Error loading Serena configuration from {config_file_path}: {e}") from e

        # create the configuration instance
        instance = cls(loaded_commented_yaml=loaded_commented_yaml, config_file_path=config_file_path)

        # read projects
        if "projects" not in loaded_commented_yaml:
            raise SerenaConfigError("`projects` key not found in Serena configuration. Please update your `serena_config.yml` file.")

        # load list of known projects
        instance.projects = []
        num_project_migrations = 0
        for path in loaded_commented_yaml["projects"]:
            path = Path(path).resolve()
            if not path.exists() or (path.is_dir() and not (path / ProjectConfig.rel_path_to_project_yml()).exists()):
                log.warning(f"Project path {path} does not exist or does not contain a project configuration file, skipping.")
                continue
            if path.is_file():
                path = cls._migrate_out_of_project_config_file(path)
                if path is None:
                    continue
                num_project_migrations += 1
            project_config = ProjectConfig.load(path)
            project = RegisteredProject(
                project_root=str(path),
                project_config=project_config,
            )
            instance.projects.append(project)

        # set other configuration parameters
        if is_running_in_docker():
            instance.gui_log_window_enabled = False  # not supported in Docker
        else:
            instance.gui_log_window_enabled = loaded_commented_yaml.get("gui_log_window", False)
        instance.log_level = loaded_commented_yaml.get("log_level", loaded_commented_yaml.get("gui_log_level", logging.INFO))
        instance.web_dashboard = loaded_commented_yaml.get("web_dashboard", True)
        instance.web_dashboard_open_on_launch = loaded_commented_yaml.get("web_dashboard_open_on_launch", True)
        instance.tool_timeout = loaded_commented_yaml.get("tool_timeout", DEFAULT_TOOL_TIMEOUT)
        instance.trace_lsp_communication = loaded_commented_yaml.get("trace_lsp_communication", False)
        instance.excluded_tools = loaded_commented_yaml.get("excluded_tools", [])
        instance.included_optional_tools = loaded_commented_yaml.get("included_optional_tools", [])
        instance.jetbrains = loaded_commented_yaml.get("jetbrains", False)
        instance.record_tool_usage_stats = loaded_commented_yaml.get("record_tool_usage_stats", False)
        instance.token_count_estimator = loaded_commented_yaml.get(
            "token_count_estimator", RegisteredTokenCountEstimator.TIKTOKEN_GPT4O.name
        )
        instance.default_max_tool_answer_chars = loaded_commented_yaml.get("default_max_tool_answer_chars", 150_000)
        instance.ls_specific_settings = loaded_commented_yaml.get("ls_specific_settings", {})

        # re-save the configuration file if any migrations were performed
        if num_project_migrations > 0:
            log.info(
                f"Migrated {num_project_migrations} project configurations from legacy format to in-project configuration; re-saving configuration"
            )
            instance.save()

        return instance

    @classmethod
    def _migrate_out_of_project_config_file(cls, path: Path) -> Path | None:
        """
        Migrates a legacy project configuration file (which is a YAML file containing the project root) to the
        in-project configuration file (project.yml) inside the project root directory.

        :param path: the path to the legacy project configuration file
        :return: the project root path if the migration was successful, None otherwise.
        """
        log.info(f"Found legacy project configuration file {path}, migrating to in-project configuration.")
        try:
            with open(path, encoding="utf-8") as f:
                project_config_data = yaml.safe_load(f)
            if "project_name" not in project_config_data:
                project_name = path.stem
                with open(path, "a", encoding="utf-8") as f:
                    f.write(f"\nproject_name: {project_name}")
            project_root = project_config_data["project_root"]
            shutil.move(str(path), str(Path(project_root) / ProjectConfig.rel_path_to_project_yml()))
            return Path(project_root).resolve()
        except Exception as e:
            log.error(f"Error migrating configuration file: {e}")
            return None

    @cached_property
    def project_paths(self) -> list[str]:
        return sorted(str(project.project_root) for project in self.projects)

    @cached_property
    def project_names(self) -> list[str]:
        return sorted(project.project_config.project_name for project in self.projects)

    def get_project(self, project_root_or_name: str) -> Optional["Project"]:
        # look for project by name
        project_candidates = []
        for project in self.projects:
            if project.project_config.project_name == project_root_or_name:
                project_candidates.append(project)
        if len(project_candidates) == 1:
            return project_candidates[0].get_project_instance()
        elif len(project_candidates) > 1:
            raise ValueError(
                f"Multiple projects found with name '{project_root_or_name}'. Please activate it by location instead. "
                f"Locations: {[p.project_root for p in project_candidates]}"
            )
        # no project found by name; check if it's a path
        if os.path.isdir(project_root_or_name):
            for project in self.projects:
                if project.matches_root_path(project_root_or_name):
                    return project.get_project_instance()
        return None

    def add_project_from_path(self, project_root: Path | str) -> "Project":
        """
        Add a project to the Serena configuration from a given path. Will raise a FileExistsError if a
        project already exists at the path.

        :param project_root: the path to the project to add
        :return: the project that was added
        """
        from ..project import Project

        project_root = Path(project_root).resolve()
        if not project_root.exists():
            raise FileNotFoundError(f"Error: Path does not exist: {project_root}")
        if not project_root.is_dir():
            raise FileNotFoundError(f"Error: Path is not a directory: {project_root}")

        for already_registered_project in self.projects:
            if str(already_registered_project.project_root) == str(project_root):
                raise FileExistsError(
                    f"Project with path {project_root} was already added with name '{already_registered_project.project_name}'."
                )

        project_config = ProjectConfig.load(project_root, autogenerate=True)

        new_project = Project(project_root=str(project_root), project_config=project_config, is_newly_created=True)
        self.projects.append(RegisteredProject.from_project_instance(new_project))
        self.save()

        return new_project

    def remove_project(self, project_name: str) -> None:
        # find the index of the project with the desired name and remove it
        for i, project in enumerate(list(self.projects)):
            if project.project_name == project_name:
                del self.projects[i]
                break
        else:
            raise ValueError(f"Project '{project_name}' not found in Serena configuration; valid project names: {self.project_names}")
        self.save()

    def save(self) -> None:
        """
        Saves the configuration to the file from which it was loaded (if any)
        """
        if self.config_file_path is None:
            return
        assert self.loaded_commented_yaml is not None, "Cannot save configuration without loaded YAML"
        loaded_original_yaml = deepcopy(self.loaded_commented_yaml)
        # projects are unique absolute paths
        # we also canonicalize them before saving
        loaded_original_yaml["projects"] = sorted({str(project.project_root) for project in self.projects})
        save_yaml(self.config_file_path, loaded_original_yaml, preserve_comments=True)

```
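
A minimal usage sketch of the configuration workflow above (loading or auto-generating the global config, registering a project, and looking it up again). The project path `/path/to/my-project` is a placeholder; the error handling mirrors the exceptions raised by `add_project_from_path`:

```python
from serena.config.serena_config import SerenaConfig

# Load the global configuration, auto-generating it from the template if it does not exist yet.
config = SerenaConfig.from_config_file(generate_if_missing=True)

# Register a project directory; FileExistsError is raised if it is already registered.
try:
    project = config.add_project_from_path("/path/to/my-project")  # placeholder path
except FileExistsError:
    project = config.get_project("/path/to/my-project")

# Registered projects can be looked up by name or by root path.
assert project is not None
same_project = config.get_project(project.project_config.project_name)
```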

--------------------------------------------------------------------------------
/test/solidlsp/python/test_symbol_retrieval.py:
--------------------------------------------------------------------------------

```python
"""
Tests for the language server symbol-related functionality.

These tests focus on the following methods:
- request_containing_symbol
- request_referencing_symbols
- request_defining_symbol
- request_full_symbol_tree
- request_dir_overview / request_document_overview
"""

import os

import pytest

from serena.symbol import LanguageServerSymbol
from solidlsp import SolidLanguageServer
from solidlsp.ls_config import Language
from solidlsp.ls_types import SymbolKind

pytestmark = pytest.mark.python


class TestLanguageServerSymbols:
    """Test the language server's symbol-related functionality."""

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_request_containing_symbol_function(self, language_server: SolidLanguageServer) -> None:
        """Test request_containing_symbol for a function."""
        # Test for a position inside the create_user method
        file_path = os.path.join("test_repo", "services.py")
        # Line 17 is inside the create_user method body
        containing_symbol = language_server.request_containing_symbol(file_path, 17, 20, include_body=True)

        # Verify that we found the containing symbol
        assert containing_symbol is not None
        assert containing_symbol["name"] == "create_user"
        assert containing_symbol["kind"] == SymbolKind.Method
        if "body" in containing_symbol:
            assert containing_symbol["body"].strip().startswith("def create_user(self")

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_references_to_variables(self, language_server: SolidLanguageServer) -> None:
        """Test request_referencing_symbols for a variable."""
        file_path = os.path.join("test_repo", "variables.py")
        # Line 75 contains the field status that is later modified
        ref_symbols = [ref.symbol for ref in language_server.request_referencing_symbols(file_path, 74, 4)]

        assert len(ref_symbols) > 0
        ref_lines = [ref["location"]["range"]["start"]["line"] for ref in ref_symbols if "location" in ref and "range" in ref["location"]]
        ref_names = [ref["name"] for ref in ref_symbols]
        assert 87 in ref_lines
        assert 95 in ref_lines
        assert "dataclass_instance" in ref_names
        assert "second_dataclass" in ref_names

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_request_containing_symbol_class(self, language_server: SolidLanguageServer) -> None:
        """Test request_containing_symbol for a class."""
        # Test for a position inside the UserService class but outside any method
        file_path = os.path.join("test_repo", "services.py")
        # Line 9 is the class definition line for UserService
        containing_symbol = language_server.request_containing_symbol(file_path, 9, 7)

        # Verify that we found the containing symbol
        assert containing_symbol is not None
        assert containing_symbol["name"] == "UserService"
        assert containing_symbol["kind"] == SymbolKind.Class

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_request_containing_symbol_nested(self, language_server: SolidLanguageServer) -> None:
        """Test request_containing_symbol with nested scopes."""
        # Test for a position inside a method which is inside a class
        file_path = os.path.join("test_repo", "services.py")
        # Line 18 is inside the create_user method inside UserService class
        containing_symbol = language_server.request_containing_symbol(file_path, 18, 25)

        # Verify that we found the innermost containing symbol (the method)
        assert containing_symbol is not None
        assert containing_symbol["name"] == "create_user"
        assert containing_symbol["kind"] == SymbolKind.Method

        # Get the parent containing symbol
        if "location" in containing_symbol and "range" in containing_symbol["location"]:
            parent_symbol = language_server.request_containing_symbol(
                file_path,
                containing_symbol["location"]["range"]["start"]["line"],
                containing_symbol["location"]["range"]["start"]["character"] - 1,
            )

            # Verify that the parent is the class
            assert parent_symbol is not None
            assert parent_symbol["name"] == "UserService"
            assert parent_symbol["kind"] == SymbolKind.Class

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_request_containing_symbol_none(self, language_server: SolidLanguageServer) -> None:
        """Test request_containing_symbol for a position with no containing symbol."""
        # Test for a position outside any function/class (e.g., in imports)
        file_path = os.path.join("test_repo", "services.py")
        # Line 1 is in imports, not inside any function or class
        containing_symbol = language_server.request_containing_symbol(file_path, 1, 10)

        # Should return None or an empty dictionary
        assert containing_symbol is None or containing_symbol == {}

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_request_referencing_symbols_function(self, language_server: SolidLanguageServer) -> None:
        """Test request_referencing_symbols for a function."""
        # Test referencing symbols for create_user function
        file_path = os.path.join("test_repo", "services.py")
        # Line 15 contains the create_user function definition
        symbols = language_server.request_document_symbols(file_path)
        create_user_symbol = next((s for s in symbols[0] if s.get("name") == "create_user"), None)
        if not create_user_symbol or "selectionRange" not in create_user_symbol:
            raise AssertionError("create_user symbol or its selectionRange not found")
        sel_start = create_user_symbol["selectionRange"]["start"]
        ref_symbols = [
            ref.symbol for ref in language_server.request_referencing_symbols(file_path, sel_start["line"], sel_start["character"])
        ]
        assert len(ref_symbols) > 0, "No referencing symbols found for create_user (selectionRange)"

        # Verify the structure of referencing symbols
        for symbol in ref_symbols:
            assert "name" in symbol
            assert "kind" in symbol
            if "location" in symbol and "range" in symbol["location"]:
                assert "start" in symbol["location"]["range"]
                assert "end" in symbol["location"]["range"]

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_request_referencing_symbols_class(self, language_server: SolidLanguageServer) -> None:
        """Test request_referencing_symbols for a class."""
        # Test referencing symbols for User class
        file_path = os.path.join("test_repo", "models.py")
        # Line 31 contains the User class definition
        symbols = language_server.request_document_symbols(file_path)
        user_symbol = next((s for s in symbols[0] if s.get("name") == "User"), None)
        if not user_symbol or "selectionRange" not in user_symbol:
            raise AssertionError("User symbol or its selectionRange not found")
        sel_start = user_symbol["selectionRange"]["start"]
        ref_symbols = [
            ref.symbol for ref in language_server.request_referencing_symbols(file_path, sel_start["line"], sel_start["character"])
        ]
        services_references = [
            symbol
            for symbol in ref_symbols
            if "location" in symbol and "uri" in symbol["location"] and "services.py" in symbol["location"]["uri"]
        ]
        assert len(services_references) > 0, "No referencing symbols from services.py for User (selectionRange)"

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_request_referencing_symbols_parameter(self, language_server: SolidLanguageServer) -> None:
        """Test request_referencing_symbols for a function parameter."""
        # Test referencing symbols for id parameter in get_user
        file_path = os.path.join("test_repo", "services.py")
        # Line 24 contains the get_user method with id parameter
        symbols = language_server.request_document_symbols(file_path)
        get_user_symbol = next((s for s in symbols[0] if s.get("name") == "get_user"), None)
        if not get_user_symbol or "selectionRange" not in get_user_symbol:
            raise AssertionError("get_user symbol or its selectionRange not found")
        sel_start = get_user_symbol["selectionRange"]["start"]
        ref_symbols = [
            ref.symbol for ref in language_server.request_referencing_symbols(file_path, sel_start["line"], sel_start["character"])
        ]
        method_refs = [
            symbol
            for symbol in ref_symbols
            if "location" in symbol and "range" in symbol["location"] and symbol["location"]["range"]["start"]["line"] > sel_start["line"]
        ]
        assert len(method_refs) > 0, "No referencing symbols within method body for get_user (selectionRange)"

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_request_referencing_symbols_none(self, language_server: SolidLanguageServer) -> None:
        """Test request_referencing_symbols for a position with no symbol."""
        # For positions with no symbol, the method might throw an error or return None/an empty list,
        # so we handle both outcomes with a try-except block

        file_path = os.path.join("test_repo", "services.py")
        # Line 3 is a blank line or comment
        try:
            ref_symbols = [ref.symbol for ref in language_server.request_referencing_symbols(file_path, 3, 0)]
            # If we get here, make sure we got an empty result
            assert ref_symbols == [] or ref_symbols is None
        except Exception:
            # The method might raise an exception for invalid positions
            # which is acceptable behavior
            pass

    # Tests for request_defining_symbol
    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_request_defining_symbol_variable(self, language_server: SolidLanguageServer) -> None:
        """Test request_defining_symbol for a variable usage."""
        # Test finding the definition of a symbol in the create_user method
        file_path = os.path.join("test_repo", "services.py")
        # Line 21 contains self.users[id] = user
        defining_symbol = language_server.request_defining_symbol(file_path, 21, 10)

        # Verify that we found the defining symbol
        # The defining symbol method returns a dictionary with information about the defining symbol
        assert defining_symbol is not None
        assert defining_symbol.get("name") == "create_user"

        # Verify the location and kind of the symbol
        # SymbolKind.Method = 6 for a method
        assert defining_symbol.get("kind") == SymbolKind.Method.value
        if "location" in defining_symbol and "uri" in defining_symbol["location"]:
            assert "services.py" in defining_symbol["location"]["uri"]

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_request_defining_symbol_imported_class(self, language_server: SolidLanguageServer) -> None:
        """Test request_defining_symbol for an imported class."""
        # Test finding the definition of the 'User' class used in the UserService.create_user method
        file_path = os.path.join("test_repo", "services.py")
        # Line 20 references 'User' which was imported from models
        defining_symbol = language_server.request_defining_symbol(file_path, 20, 15)

        # Verify that we found the defining symbol - this should be the User class from models
        assert defining_symbol is not None
        assert defining_symbol.get("name") == "User"

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_request_defining_symbol_method_call(self, language_server: SolidLanguageServer) -> None:
        """Test request_defining_symbol for a method call."""
        # Create an example file path for a file that calls UserService.create_user
        examples_file_path = os.path.join("examples", "user_management.py")

        # Find the line number where create_user is called
        # This could vary, so we'll use a relative position that makes sense
        defining_symbol = language_server.request_defining_symbol(examples_file_path, 10, 30)

        # Verify that we found the defining symbol - should be the create_user method
        # Because this might fail if the structure isn't exactly as expected, we'll use try-except
        try:
            assert defining_symbol is not None
            assert defining_symbol.get("name") == "create_user"
            # The defining symbol should be in the services.py file
            if "location" in defining_symbol and "uri" in defining_symbol["location"]:
                assert "services.py" in defining_symbol["location"]["uri"]
        except AssertionError:
            # If the file structure doesn't match what we expect, we can't guarantee this test
            # will pass, so we'll consider it a warning rather than a failure
            import warnings

            warnings.warn("Could not verify method call definition - file structure may differ from expected")

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_request_defining_symbol_none(self, language_server: SolidLanguageServer) -> None:
        """Test request_defining_symbol for a position with no symbol."""
        # Test for a position with no symbol (e.g., whitespace or comment)
        file_path = os.path.join("test_repo", "services.py")
        # Line 3 is a blank line
        defining_symbol = language_server.request_defining_symbol(file_path, 3, 0)

        # Should return None for positions with no symbol
        assert defining_symbol is None

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_request_containing_symbol_variable(self, language_server: SolidLanguageServer) -> None:
        """Test request_containing_symbol where the symbol is a variable."""
        # Test for a position inside a variable definition
        file_path = os.path.join("test_repo", "services.py")
        # Line 74 defines the 'user_var_str' variable
        containing_symbol = language_server.request_containing_symbol(file_path, 73, 1)

        # Verify that we found the containing symbol
        assert containing_symbol is not None
        assert containing_symbol["name"] == "user_var_str"
        assert containing_symbol["kind"] == SymbolKind.Variable

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_request_defining_symbol_nested_function(self, language_server: SolidLanguageServer) -> None:
        """Test request_defining_symbol for a nested function or closure."""
        # Use the existing nested.py file which contains nested classes and methods
        file_path = os.path.join("test_repo", "nested.py")

        # Test 1: Find definition of nested method - line with 'b = OuterClass().NestedClass().find_me()'
        defining_symbol = language_server.request_defining_symbol(file_path, 15, 35)  # Position of find_me() call

        # This should resolve to the find_me method in the NestedClass
        assert defining_symbol is not None
        assert defining_symbol.get("name") == "find_me"
        assert defining_symbol.get("kind") == SymbolKind.Method.value

        # Test 2: Find definition of the nested class
        defining_symbol = language_server.request_defining_symbol(file_path, 15, 18)  # Position of NestedClass

        # This should resolve to the NestedClass
        assert defining_symbol is not None
        assert defining_symbol.get("name") == "NestedClass"
        assert defining_symbol.get("kind") == SymbolKind.Class.value

        # Test 3: Find definition of a method-local function
        defining_symbol = language_server.request_defining_symbol(file_path, 9, 15)  # Position inside func_within_func

        # This is challenging for many language servers and may fail
        try:
            assert defining_symbol is not None
            assert defining_symbol.get("name") == "func_within_func"
        except (AssertionError, TypeError, KeyError):
            # This is expected to potentially fail in many implementations
            import warnings

            warnings.warn("Could not resolve nested class method definition - implementation limitation")

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_symbol_methods_integration(self, language_server: SolidLanguageServer) -> None:
        """Test the integration between different symbol-related methods."""
        # This test demonstrates using the various symbol methods together
        # by finding a symbol and then checking its definition

        file_path = os.path.join("test_repo", "services.py")

        # First approach: Use a method from the UserService class
        # Step 1: Find a method we know exists
        containing_symbol = language_server.request_containing_symbol(file_path, 15, 8)  # create_user method
        assert containing_symbol is not None
        assert containing_symbol["name"] == "create_user"

        # Step 2: Get the defining symbol for the same position
        # This should be the same method
        defining_symbol = language_server.request_defining_symbol(file_path, 15, 8)
        assert defining_symbol is not None
        assert defining_symbol["name"] == "create_user"

        # Step 3: Verify that they refer to the same symbol
        assert defining_symbol["kind"] == containing_symbol["kind"]
        if "location" in defining_symbol and "location" in containing_symbol:
            assert defining_symbol["location"]["uri"] == containing_symbol["location"]["uri"]

        # The integration test is successful if we've gotten this far,
        # as it demonstrates the integration between request_containing_symbol and request_defining_symbol

        # Try to get the container information for our method, but be flexible
        # since implementations may vary
        container_name = defining_symbol.get("containerName", None)
        if container_name and "UserService" in container_name:
            # If containerName contains UserService, that's a valid implementation
            pass
        else:
            # Try an alternative approach - looking for the containing class
            try:
                # Look for the class symbol in the file
                for line in range(5, 12):  # Approximate range where UserService class should be defined
                    symbol = language_server.request_containing_symbol(file_path, line, 5)  # column 5 should be within class definition
                    if symbol and symbol.get("name") == "UserService" and symbol.get("kind") == SymbolKind.Class.value:
                        # Found the class - this is also a valid implementation
                        break
            except Exception:
                # Just log a warning - this is an alternative verification and not essential
                import warnings

                warnings.warn("Could not verify container hierarchy - implementation detail")

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_symbol_tree_structure(self, language_server: SolidLanguageServer) -> None:
        """Test that the symbol tree structure is correctly built."""
        # Get all symbols in the test file
        repo_structure = language_server.request_full_symbol_tree()
        assert len(repo_structure) == 1
        # Assert that the root symbol is the test_repo directory
        assert repo_structure[0]["name"] == "test_repo"
        assert repo_structure[0]["kind"] == SymbolKind.Package
        assert "children" in repo_structure[0]
        # Assert that the children are the top-level packages
        child_names = {child["name"] for child in repo_structure[0]["children"]}
        child_kinds = {child["kind"] for child in repo_structure[0]["children"]}
        assert child_names == {"test_repo", "custom_test", "examples", "scripts"}
        assert child_kinds == {SymbolKind.Package}
        examples_package = next(child for child in repo_structure[0]["children"] if child["name"] == "examples")
        # assert that children are __init__ and user_management
        assert {child["name"] for child in examples_package["children"]} == {"__init__", "user_management"}
        assert {child["kind"] for child in examples_package["children"]} == {SymbolKind.File}

        # assert that tree of user_management node is same as retrieved directly
        user_management_node = next(child for child in examples_package["children"] if child["name"] == "user_management")
        if "location" in user_management_node and "relativePath" in user_management_node["location"]:
            user_management_rel_path = user_management_node["location"]["relativePath"]
            assert user_management_rel_path == os.path.join("examples", "user_management.py")
            _, user_management_roots = language_server.request_document_symbols(os.path.join("examples", "user_management.py"))
            assert user_management_roots == user_management_node["children"]

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_symbol_tree_structure_subdir(self, language_server: SolidLanguageServer) -> None:
        """Test that the symbol tree structure is correctly built."""
        # Get all symbols in the test file
        examples_package_roots = language_server.request_full_symbol_tree(within_relative_path="examples")
        assert len(examples_package_roots) == 1
        examples_package = examples_package_roots[0]
        assert examples_package["name"] == "examples"
        assert examples_package["kind"] == SymbolKind.Package
        # assert that children are __init__ and user_management
        assert {child["name"] for child in examples_package["children"]} == {"__init__", "user_management"}
        assert {child["kind"] for child in examples_package["children"]} == {SymbolKind.File}

        # assert that tree of user_management node is same as retrieved directly
        user_management_node = next(child for child in examples_package["children"] if child["name"] == "user_management")
        if "location" in user_management_node and "relativePath" in user_management_node["location"]:
            user_management_rel_path = user_management_node["location"]["relativePath"]
            assert user_management_rel_path == os.path.join("examples", "user_management.py")
            _, user_management_roots = language_server.request_document_symbols(os.path.join("examples", "user_management.py"))
            assert user_management_roots == user_management_node["children"]

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_request_dir_overview(self, language_server: SolidLanguageServer) -> None:
        """Test that request_dir_overview returns correct symbol information for files in a directory."""
        # Get overview of the test_repo directory
        overview = language_server.request_dir_overview("test_repo")

        # Verify that we have an entry for a file in the directory
        assert os.path.join("test_repo", "nested.py") in overview

        # Get the symbols for services.py
        services_symbols = overview[os.path.join("test_repo", "services.py")]
        assert len(services_symbols) > 0

        # Check for specific symbols from services.py
        expected_symbols = {
            "UserService",
            "ItemService",
            "create_service_container",
            "user_var_str",
            "user_service",
        }
        retrieved_symbols = {symbol["name"] for symbol in services_symbols if "name" in symbol}
        assert expected_symbols.issubset(retrieved_symbols)

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_request_document_overview(self, language_server: SolidLanguageServer) -> None:
        """Test that request_document_overview returns correct symbol information for a file."""
        # Get overview of the user_management.py file
        overview = language_server.request_document_overview(os.path.join("examples", "user_management.py"))

        # Verify that the expected top-level symbols are present
        symbol_names = {LanguageServerSymbol(s_info).name for s_info in overview}
        assert {"UserStats", "UserManager", "process_user_data", "main"}.issubset(symbol_names)

    @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
    def test_containing_symbol_of_var_is_file(self, language_server: SolidLanguageServer) -> None:
        """Test that the containing symbol of a variable is the file itself."""
        # Get the containing symbol of a variable in a file
        file_path = os.path.join("test_repo", "services.py")
        # import of typing
        references_to_typing = [
            ref.symbol
            for ref in language_server.request_referencing_symbols(file_path, 4, 6, include_imports=False, include_file_symbols=True)
        ]
        assert {ref["kind"] for ref in references_to_typing} == {SymbolKind.File}
        assert {ref["body"] for ref in references_to_typing} == {""}

        # now include bodies
        references_to_typing = [
            ref.symbol
            for ref in language_server.request_referencing_symbols(
                file_path, 4, 6, include_imports=False, include_file_symbols=True, include_body=True
            )
        ]
        assert {ref["kind"] for ref in references_to_typing} == {SymbolKind.File}
        assert references_to_typing[0]["body"]

```
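
The tests above obtain `language_server` through an indirectly parameterized pytest fixture defined elsewhere in the test suite. The symbol-retrieval calls they exercise can also be used directly; a minimal sketch, assuming `ls` is an already-initialized `SolidLanguageServer` for a Python project (acquiring such an instance is not shown here):

```python
from solidlsp import SolidLanguageServer


def referencing_symbol_names(ls: SolidLanguageServer, relative_path: str, line: int, column: int) -> list[str]:
    """Collect the names of symbols that reference the symbol at the given 0-based position."""
    references = ls.request_referencing_symbols(relative_path, line, column)
    return [ref.symbol["name"] for ref in references]


# Illustrative call against the test repository layout used in the tests above:
# names = referencing_symbol_names(ls, "test_repo/services.py", 15, 8)
```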

--------------------------------------------------------------------------------
/src/serena/symbol.py:
--------------------------------------------------------------------------------

```python
import json
import logging
import os
from abc import ABC, abstractmethod
from collections.abc import Iterator, Sequence
from dataclasses import asdict, dataclass
from typing import TYPE_CHECKING, Any, Self, Union

from sensai.util.string import ToStringMixin

from solidlsp import SolidLanguageServer
from solidlsp.ls import ReferenceInSymbol as LSPReferenceInSymbol
from solidlsp.ls_types import Position, SymbolKind, UnifiedSymbolInformation

from .project import Project

if TYPE_CHECKING:
    from .agent import SerenaAgent

log = logging.getLogger(__name__)


@dataclass
class LanguageServerSymbolLocation:
    """
    Represents the (start) location of a symbol identifier, which, within Serena, uniquely identifies the symbol.
    """

    relative_path: str | None
    """
    the relative path of the file containing the symbol; if None, the symbol is defined outside of the project's scope
    """
    line: int | None
    """
    the line number in which the symbol identifier is defined (if the symbol is a function, class, etc.);
    may be None for some types of symbols (e.g. SymbolKind.File)
    """
    column: int | None
    """
    the column number in which the symbol identifier is defined (if the symbol is a function, class, etc.);
    may be None for some types of symbols (e.g. SymbolKind.File)
    """

    def __post_init__(self) -> None:
        if self.relative_path is not None:
            self.relative_path = self.relative_path.replace("/", os.path.sep)

    def to_dict(self, include_relative_path: bool = True) -> dict[str, Any]:
        result = asdict(self)
        if not include_relative_path:
            result.pop("relative_path", None)
        return result

    def has_position_in_file(self) -> bool:
        return self.relative_path is not None and self.line is not None and self.column is not None


@dataclass
class PositionInFile:
    """
    Represents a character position within a file
    """

    line: int
    """
    the 0-based line number in the file
    """
    col: int
    """
    the 0-based column
    """

    def to_lsp_position(self) -> Position:
        """
        Convert to LSP Position.
        """
        return Position(line=self.line, character=self.col)


class Symbol(ABC):
    @abstractmethod
    def get_body_start_position(self) -> PositionInFile | None:
        pass

    @abstractmethod
    def get_body_end_position(self) -> PositionInFile | None:
        pass

    def get_body_start_position_or_raise(self) -> PositionInFile:
        """
        Get the start position of the symbol body, raising an error if it is not defined.
        """
        pos = self.get_body_start_position()
        if pos is None:
            raise ValueError(f"Body start position is not defined for {self}")
        return pos

    def get_body_end_position_or_raise(self) -> PositionInFile:
        """
        Get the end position of the symbol body, raising an error if it is not defined.
        """
        pos = self.get_body_end_position()
        if pos is None:
            raise ValueError(f"Body end position is not defined for {self}")
        return pos

    @abstractmethod
    def is_neighbouring_definition_separated_by_empty_line(self) -> bool:
        """
        :return: whether a symbol definition of this symbol's kind is usually separated from the
            previous/next definition by at least one empty line.
        """


class LanguageServerSymbol(Symbol, ToStringMixin):
    _NAME_PATH_SEP = "/"

    @staticmethod
    def match_name_path(
        name_path: str,
        symbol_name_path_parts: list[str],
        substring_matching: bool,
    ) -> bool:
        """
        Checks if a given `name_path` matches a symbol's qualified name parts.
        See docstring of `Symbol.find` for more details.
        """
        assert name_path, "name_path must not be empty"
        assert symbol_name_path_parts, "symbol_name_path_parts must not be empty"
        name_path_sep = LanguageServerSymbol._NAME_PATH_SEP

        is_absolute_pattern = name_path.startswith(name_path_sep)
        pattern_parts = name_path.lstrip(name_path_sep).rstrip(name_path_sep).split(name_path_sep)

        # filtering based on ancestors
        if len(pattern_parts) > len(symbol_name_path_parts):
            # can't possibly match if pattern has more parts than symbol
            return False
        if is_absolute_pattern and len(pattern_parts) != len(symbol_name_path_parts):
            # for absolute patterns, the number of parts must match exactly
            return False
        if symbol_name_path_parts[-len(pattern_parts) : -1] != pattern_parts[:-1]:
            # ancestors must match
            return False

        # matching the last part of the symbol name
        name_to_match = pattern_parts[-1]
        symbol_name = symbol_name_path_parts[-1]
        if substring_matching:
            return name_to_match in symbol_name
        else:
            return name_to_match == symbol_name
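
    # Illustrative examples of the matching rules above (see also the docstring of `find`):
    #   match_name_path("method", ["class", "method"], False)        -> True   (relative pattern; ancestors unrestricted)
    #   match_name_path("class/method", ["class", "method"], False)  -> True   (immediate ancestors must match)
    #   match_name_path("/method", ["class", "method"], False)       -> False  (absolute pattern; lengths must match exactly)
    #   match_name_path("meth", ["class", "method"], True)           -> True   (substring matching of the last segment)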

    def __init__(self, symbol_root_from_ls: UnifiedSymbolInformation) -> None:
        self.symbol_root = symbol_root_from_ls

    def _tostring_includes(self) -> list[str]:
        return []

    def _tostring_additional_entries(self) -> dict[str, Any]:
        return dict(name=self.name, kind=self.kind, num_children=len(self.symbol_root["children"]))

    @property
    def name(self) -> str:
        return self.symbol_root["name"]

    @property
    def kind(self) -> str:
        return SymbolKind(self.symbol_kind).name

    @property
    def symbol_kind(self) -> SymbolKind:
        return self.symbol_root["kind"]

    def is_neighbouring_definition_separated_by_empty_line(self) -> bool:
        return self.symbol_kind in (SymbolKind.Function, SymbolKind.Method, SymbolKind.Class, SymbolKind.Interface, SymbolKind.Struct)

    @property
    def relative_path(self) -> str | None:
        location = self.symbol_root.get("location")
        if location:
            return location.get("relativePath")
        return None

    @property
    def location(self) -> LanguageServerSymbolLocation:
        """
        :return: the start location of the actual symbol identifier
        """
        return LanguageServerSymbolLocation(relative_path=self.relative_path, line=self.line, column=self.column)

    @property
    def body_start_position(self) -> Position | None:
        location = self.symbol_root.get("location")
        if location:
            range_info = location.get("range")
            if range_info:
                start_pos = range_info.get("start")
                if start_pos:
                    return start_pos
        return None

    @property
    def body_end_position(self) -> Position | None:
        location = self.symbol_root.get("location")
        if location:
            range_info = location.get("range")
            if range_info:
                end_pos = range_info.get("end")
                if end_pos:
                    return end_pos
        return None

    def get_body_start_position(self) -> PositionInFile | None:
        start_pos = self.body_start_position
        if start_pos is None:
            return None
        return PositionInFile(line=start_pos["line"], col=start_pos["character"])

    def get_body_end_position(self) -> PositionInFile | None:
        end_pos = self.body_end_position
        if end_pos is None:
            return None
        return PositionInFile(line=end_pos["line"], col=end_pos["character"])

    def get_body_line_numbers(self) -> tuple[int | None, int | None]:
        start_pos = self.body_start_position
        end_pos = self.body_end_position
        start_line = start_pos["line"] if start_pos else None
        end_line = end_pos["line"] if end_pos else None
        return start_line, end_line

    @property
    def line(self) -> int | None:
        """
        :return: the line in which the symbol identifier is defined.
        """
        if "selectionRange" in self.symbol_root:
            return self.symbol_root["selectionRange"]["start"]["line"]
        else:
            # line is expected to be undefined for some types of symbols (e.g. SymbolKind.File)
            return None

    @property
    def column(self) -> int | None:
        if "selectionRange" in self.symbol_root:
            return self.symbol_root["selectionRange"]["start"]["character"]
        else:
            # precise location is expected to be undefined for some types of symbols (e.g. SymbolKind.File)
            return None

    @property
    def body(self) -> str | None:
        return self.symbol_root.get("body")

    def get_name_path(self) -> str:
        """
        Get the name path of the symbol (e.g. "class/method/inner_function").
        """
        return self._NAME_PATH_SEP.join(self.get_name_path_parts())

    def get_name_path_parts(self) -> list[str]:
        """
        Get the parts of the name path of the symbol (e.g. ["class", "method", "inner_function"]).
        """
        ancestors_within_file = list(self.iter_ancestors(up_to_symbol_kind=SymbolKind.File))
        ancestors_within_file.reverse()
        return [a.name for a in ancestors_within_file] + [self.name]

    def iter_children(self) -> Iterator[Self]:
        for c in self.symbol_root["children"]:
            yield self.__class__(c)

    def iter_ancestors(self, up_to_symbol_kind: SymbolKind | None = None) -> Iterator[Self]:
        """
        Iterate over all ancestors of the symbol, starting with the parent and going up to the root or
        the given symbol kind.

        :param up_to_symbol_kind: if provided, iteration will stop *before* the first ancestor of the given kind.
            A typical use case is to pass `SymbolKind.File` or `SymbolKind.Package`.
        """
        parent = self.get_parent()
        if parent is not None:
            if up_to_symbol_kind is None or parent.symbol_kind != up_to_symbol_kind:
                yield parent
                yield from parent.iter_ancestors(up_to_symbol_kind=up_to_symbol_kind)

    def get_parent(self) -> Self | None:
        parent_root = self.symbol_root.get("parent")
        if parent_root is None:
            return None
        return self.__class__(parent_root)

    def find(
        self,
        name_path: str,
        substring_matching: bool = False,
        include_kinds: Sequence[SymbolKind] | None = None,
        exclude_kinds: Sequence[SymbolKind] | None = None,
    ) -> list[Self]:
        """
        Find all symbols within the symbol's subtree that match the given `name_path`.
        The matching behavior is determined by the structure of `name_path`, which can
        either be a simple name (e.g. "method") or a name path like "class/method" (relative name path)
        or "/class/method" (absolute name path).

        Key aspects of the name path matching behavior:
        - Trailing slashes in `name_path` play no role and are ignored.
        - The name of the retrieved symbols will match (either exactly or as a substring)
          the last segment of `name_path`, while other segments will restrict the search to symbols that
          have a desired sequence of ancestors.
        - If there is no starting or intermediate slash in `name_path`, there is no
          restriction on the ancestor symbols. For example, passing `method` will match
          against symbols with name paths like `method`, `class/method`, `class/nested_class/method`, etc.
        - If `name_path` contains a `/` but doesn't start with a `/`, the matching is restricted to symbols
          with the same ancestors as the last segment of `name_path`. For example, passing `class/method` will match against
          `class/method` as well as `nested_class/class/method` but not `method`.
        - If `name_path` starts with a `/`, it will be treated as an absolute name path pattern, meaning
          that the first segment of it must match the first segment of the symbol's name path.
          For example, passing `/class` will match only against top-level symbols like `class` but not against `nested_class/class`.
          Passing `/class/method` will match against `class/method` but not `nested_class/class/method` or `method`.

        :param name_path: the name path to match against
        :param substring_matching: whether to use substring matching (as opposed to exact matching)
            of the last segment of `name_path` against the symbol name.
        :param include_kinds: an optional sequence of LSP symbol kinds.
            If provided, only symbols of the given kinds will be included in the result.
        :param exclude_kinds: If provided, symbols of the given kinds will be excluded from the result.
        """
        result = []

        def should_include(s: "LanguageServerSymbol") -> bool:
            if include_kinds is not None and s.symbol_kind not in include_kinds:
                return False
            if exclude_kinds is not None and s.symbol_kind in exclude_kinds:
                return False
            return LanguageServerSymbol.match_name_path(
                name_path=name_path,
                symbol_name_path_parts=s.get_name_path_parts(),
                substring_matching=substring_matching,
            )

        def traverse(s: "LanguageServerSymbol") -> None:
            if should_include(s):
                result.append(s)
            for c in s.iter_children():
                traverse(c)

        traverse(self)
        return result

    def to_dict(
        self,
        kind: bool = False,
        location: bool = False,
        depth: int = 0,
        include_body: bool = False,
        include_children_body: bool = False,
        include_relative_path: bool = True,
    ) -> dict[str, Any]:
        """
        Converts the symbol to a dictionary.

        :param kind: whether to include the kind of the symbol
        :param location: whether to include the location of the symbol
        :param depth: the depth of the symbol
        :param include_body: whether to include the body of the top-level symbol.
        :param include_children_body: whether to also include the body of the children.
            Note that the body of the children is part of the body of the parent symbol,
            so there is usually no need to set this to True unless you want to process the output
            and pass the children without passing the parent body to the LM.
        :param include_relative_path: whether to include the relative path of the symbol in the location
            entry. Relative paths of the symbol's children are always excluded.
        :return: a dictionary representation of the symbol
        """
        result: dict[str, Any] = {"name": self.name, "name_path": self.get_name_path()}

        if kind:
            result["kind"] = self.kind

        if location:
            result["location"] = self.location.to_dict(include_relative_path=include_relative_path)
            body_start_line, body_end_line = self.get_body_line_numbers()
            result["body_location"] = {"start_line": body_start_line, "end_line": body_end_line}

        if include_body:
            if self.body is None:
                log.warning("Requested body for symbol, but it is not present. The symbol might have been loaded with include_body=False.")
            result["body"] = self.body

        def add_children(s: Self) -> list[dict[str, Any]]:
            children = []
            for c in s.iter_children():
                children.append(
                    c.to_dict(
                        kind=kind,
                        location=location,
                        depth=depth - 1,
                        include_body=include_children_body,
                        include_children_body=include_children_body,
                        # all children have the same relative path as the parent
                        include_relative_path=False,
                    )
                )
            return children

        if depth > 0:
            result["children"] = add_children(self)

        return result


@dataclass
class ReferenceInLanguageServerSymbol(ToStringMixin):
    """
    Represents the location of a reference to another symbol within a symbol/file.

    The contained symbol is the symbol within which the reference is located,
    not the symbol that is referenced.
    """

    symbol: LanguageServerSymbol
    """
    the symbol within which the reference is located
    """
    line: int
    """
    the line number in which the reference is located (0-based)
    """
    character: int
    """
    the column number in which the reference is located (0-based)
    """

    @classmethod
    def from_lsp_reference(cls, reference: LSPReferenceInSymbol) -> Self:
        return cls(symbol=LanguageServerSymbol(reference.symbol), line=reference.line, character=reference.character)

    def get_relative_path(self) -> str | None:
        return self.symbol.location.relative_path


class LanguageServerSymbolRetriever:
    def __init__(self, lang_server: SolidLanguageServer, agent: Union["SerenaAgent", None] = None) -> None:
        """
        :param lang_server: the language server to use for symbol retrieval as well as editing operations.
        :param agent: the agent to use (only needed for marking files as modified). You can pass None if you don't
            need an agent to be aware of file modifications performed by the symbol manager.
        """
        self._lang_server = lang_server
        self.agent = agent

    def set_language_server(self, lang_server: SolidLanguageServer) -> None:
        """
        Set the language server to use for symbol retrieval and editing operations.
        This is useful if you want to change the language server after initializing the symbol retriever.
        """
        self._lang_server = lang_server

    def get_language_server(self) -> SolidLanguageServer:
        return self._lang_server

    def find_by_name(
        self,
        name_path: str,
        include_body: bool = False,
        include_kinds: Sequence[SymbolKind] | None = None,
        exclude_kinds: Sequence[SymbolKind] | None = None,
        substring_matching: bool = False,
        within_relative_path: str | None = None,
    ) -> list[LanguageServerSymbol]:
        """
        Find all symbols that match the given name. See docstring of `Symbol.find` for more details.
        The only parameter not mentioned there is `within_relative_path`, which can be used to restrict the search
        to symbols within a specific file or directory.
        """
        symbols: list[LanguageServerSymbol] = []
        symbol_roots = self._lang_server.request_full_symbol_tree(within_relative_path=within_relative_path, include_body=include_body)
        for root in symbol_roots:
            symbols.extend(
                LanguageServerSymbol(root).find(
                    name_path, include_kinds=include_kinds, exclude_kinds=exclude_kinds, substring_matching=substring_matching
                )
            )
        return symbols

    def get_document_symbols(self, relative_path: str) -> list[LanguageServerSymbol]:
        symbol_dicts, _roots = self._lang_server.request_document_symbols(relative_path, include_body=False)
        symbols = [LanguageServerSymbol(s) for s in symbol_dicts]
        return symbols

    def find_by_location(self, location: LanguageServerSymbolLocation) -> LanguageServerSymbol | None:
        if location.relative_path is None:
            return None
        symbol_dicts, _roots = self._lang_server.request_document_symbols(location.relative_path, include_body=False)
        for symbol_dict in symbol_dicts:
            symbol = LanguageServerSymbol(symbol_dict)
            if symbol.location == location:
                return symbol
        return None

    def find_referencing_symbols(
        self,
        name_path: str,
        relative_file_path: str,
        include_body: bool = False,
        include_kinds: Sequence[SymbolKind] | None = None,
        exclude_kinds: Sequence[SymbolKind] | None = None,
    ) -> list[ReferenceInLanguageServerSymbol]:
        """
        Find all symbols that reference the symbol with the given name.
        If multiple symbols match the name (e.g. for variables that are overwritten), the first match will be used.

        :param name_path: the name path of the symbol to find
        :param relative_file_path: the relative path of the file in which the referenced symbol is defined.
        :param include_body: whether to include the body of all symbols in the result.
            Not recommended, as the referencing symbols will often be files, and thus the bodies will be very long.
        :param include_kinds: which kinds of symbols to include in the result.
        :param exclude_kinds: which kinds of symbols to exclude from the result.
        """
        symbol_candidates = self.find_by_name(name_path, substring_matching=False, within_relative_path=relative_file_path)
        if len(symbol_candidates) == 0:
            log.warning(f"No symbol with name {name_path} found in file {relative_file_path}")
            return []
        if len(symbol_candidates) > 1:
            log.error(
                f"Found {len(symbol_candidates)} symbols with name {name_path} in file {relative_file_path}."
                f"May be an overwritten variable, in which case you can ignore this error. Proceeding with the first one. "
                f"Found symbols for {name_path=} in {relative_file_path=}: \n"
                f"{json.dumps([s.location.to_dict() for s in symbol_candidates], indent=2)}"
            )
        symbol = symbol_candidates[0]
        return self.find_referencing_symbols_by_location(
            symbol.location, include_body=include_body, include_kinds=include_kinds, exclude_kinds=exclude_kinds
        )

    def find_referencing_symbols_by_location(
        self,
        symbol_location: LanguageServerSymbolLocation,
        include_body: bool = False,
        include_kinds: Sequence[SymbolKind] | None = None,
        exclude_kinds: Sequence[SymbolKind] | None = None,
    ) -> list[ReferenceInLanguageServerSymbol]:
        """
        Find all symbols that reference the symbol at the given location.

        :param symbol_location: the location of the symbol for which to find references.
            Does not need to include an end_line, as it is unused in the search.
        :param include_body: whether to include the body of all symbols in the result.
            Not recommended, as the referencing symbols will often be files, and thus the bodies will be very long.
            Note: you can filter out the bodies of the children if you set include_children_body=False
            in the to_dict method.
        :param include_kinds: an optional sequence of ints representing the LSP symbol kind.
            If provided, only symbols of the given kinds will be included in the result.
        :param exclude_kinds: If provided, symbols of the given kinds will be excluded from the result.
            Takes precedence over include_kinds.
        :return: a list of symbols that reference the given symbol
        """
        if not symbol_location.has_position_in_file():
            raise ValueError("Symbol location does not contain a valid position in a file")
        assert symbol_location.relative_path is not None
        assert symbol_location.line is not None
        assert symbol_location.column is not None
        references = self._lang_server.request_referencing_symbols(
            relative_file_path=symbol_location.relative_path,
            line=symbol_location.line,
            column=symbol_location.column,
            include_imports=False,
            include_self=False,
            include_body=include_body,
            include_file_symbols=True,
        )

        if include_kinds is not None:
            references = [s for s in references if s.symbol["kind"] in include_kinds]

        if exclude_kinds is not None:
            references = [s for s in references if s.symbol["kind"] not in exclude_kinds]

        return [ReferenceInLanguageServerSymbol.from_lsp_reference(r) for r in references]

    @dataclass
    class SymbolOverviewElement:
        name_path: str
        kind: int

        @classmethod
        def from_symbol(cls, symbol: LanguageServerSymbol) -> Self:
            return cls(name_path=symbol.get_name_path(), kind=int(symbol.symbol_kind))

    def get_symbol_overview(self, relative_path: str) -> dict[str, list[SymbolOverviewElement]]:
        path_to_unified_symbols = self._lang_server.request_overview(relative_path)
        result = {}
        for file_path, unified_symbols in path_to_unified_symbols.items():
            # TODO: maybe include not just top-level symbols? We could filter by kind to exclude variables
            #  The language server methods would need to be adjusted for this.
            result[file_path] = [self.SymbolOverviewElement.from_symbol(LanguageServerSymbol(s)) for s in unified_symbols]
        return result


class JetBrainsSymbol(Symbol):
    def __init__(self, symbol_dict: dict, project: Project) -> None:
        """
        :param symbol_dict: dictionary as returned by the JetBrains plugin client.
        """
        self._project = project
        self._dict = symbol_dict
        self._cached_file_content: str | None = None
        self._cached_body_start_position: PositionInFile | None = None
        self._cached_body_end_position: PositionInFile | None = None

    def get_relative_path(self) -> str:
        return self._dict["relative_path"]

    def get_file_content(self) -> str:
        if self._cached_file_content is None:
            path = os.path.join(self._project.project_root, self.get_relative_path())
            with open(path, encoding=self._project.project_config.encoding) as f:
                self._cached_file_content = f.read()
        return self._cached_file_content

    def is_position_in_file_available(self) -> bool:
        return "text_range" in self._dict

    def get_body_start_position(self) -> PositionInFile | None:
        if not self.is_position_in_file_available():
            return None
        if self._cached_body_start_position is None:
            pos = self._dict["text_range"]["start_pos"]
            line, col = pos["line"], pos["col"]
            self._cached_body_start_position = PositionInFile(line=line, col=col)
        return self._cached_body_start_position

    def get_body_end_position(self) -> PositionInFile | None:
        if not self.is_position_in_file_available():
            return None
        if self._cached_body_end_position is None:
            pos = self._dict["text_range"]["end_pos"]
            line, col = pos["line"], pos["col"]
            self._cached_body_end_position = PositionInFile(line=line, col=col)
        return self._cached_body_end_position

    def is_neighbouring_definition_separated_by_empty_line(self) -> bool:
        # NOTE: Symbol types cannot really be differentiated, because types are not handled in a language-agnostic way.
        return False

```
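
The retriever above is the main entry point for symbol lookups on top of the language server. Below is a minimal usage sketch (not part of the repository); it assumes the classes live in `serena.symbol` and that a started `SolidLanguageServer` instance is available, which is project-specific and not shown on this page.

```python
# Hedged sketch: the import path `serena.symbol` and the availability of a started
# SolidLanguageServer instance `ls` are assumptions, not taken from this page.
from serena.symbol import LanguageServerSymbolRetriever
from solidlsp import SolidLanguageServer


def print_symbol_and_references(ls: SolidLanguageServer, name_path: str, relative_file_path: str) -> None:
    retriever = LanguageServerSymbolRetriever(ls)

    # locate the definition(s) matching the name path within the given file
    for symbol in retriever.find_by_name(name_path, within_relative_path=relative_file_path):
        print(symbol.to_dict(kind=True, location=True, depth=0))

    # list the places that reference the (first matching) symbol
    for ref in retriever.find_referencing_symbols(name_path, relative_file_path):
        print(f"referenced from {ref.get_relative_path()}:{ref.line}:{ref.character}")
```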

--------------------------------------------------------------------------------
/src/serena/agent.py:
--------------------------------------------------------------------------------

```python
"""
The Serena Model Context Protocol (MCP) Server
"""

import multiprocessing
import os
import platform
import sys
import threading
import webbrowser
from collections.abc import Callable
from concurrent.futures import Future, ThreadPoolExecutor
from logging import Logger
from pathlib import Path
from typing import TYPE_CHECKING, Any, Optional, TypeVar

from sensai.util import logging
from sensai.util.logging import LogTime

from interprompt.jinja_template import JinjaTemplate
from serena import serena_version
from serena.analytics import RegisteredTokenCountEstimator, ToolUsageStats
from serena.config.context_mode import RegisteredContext, SerenaAgentContext, SerenaAgentMode
from serena.config.serena_config import SerenaConfig, ToolInclusionDefinition, ToolSet, get_serena_managed_in_project_dir
from serena.dashboard import SerenaDashboardAPI
from serena.project import Project
from serena.prompt_factory import SerenaPromptFactory
from serena.tools import ActivateProjectTool, GetCurrentConfigTool, Tool, ToolMarker, ToolRegistry
from serena.util.inspection import iter_subclasses
from serena.util.logging import MemoryLogHandler
from solidlsp import SolidLanguageServer

if TYPE_CHECKING:
    from serena.gui_log_viewer import GuiLogViewer

log = logging.getLogger(__name__)
TTool = TypeVar("TTool", bound="Tool")
T = TypeVar("T")
SUCCESS_RESULT = "OK"


class ProjectNotFoundError(Exception):
    pass


class MemoriesManager:
    def __init__(self, project_root: str):
        self._memory_dir = Path(get_serena_managed_in_project_dir(project_root)) / "memories"
        self._memory_dir.mkdir(parents=True, exist_ok=True)

    def _get_memory_file_path(self, name: str) -> Path:
        # strip all .md from the name. Models tend to get confused, sometimes passing the .md extension and sometimes not.
        name = name.replace(".md", "")
        filename = f"{name}.md"
        return self._memory_dir / filename

    def load_memory(self, name: str) -> str:
        memory_file_path = self._get_memory_file_path(name)
        if not memory_file_path.exists():
            return f"Memory file {name} not found, consider creating it with the `write_memory` tool if you need it."
        with open(memory_file_path, encoding="utf-8") as f:
            return f.read()

    def save_memory(self, name: str, content: str) -> str:
        memory_file_path = self._get_memory_file_path(name)
        with open(memory_file_path, "w", encoding="utf-8") as f:
            f.write(content)
        return f"Memory {name} written."

    def list_memories(self) -> list[str]:
        return [f.name.replace(".md", "") for f in self._memory_dir.iterdir() if f.is_file()]

    def delete_memory(self, name: str) -> str:
        memory_file_path = self._get_memory_file_path(name)
        memory_file_path.unlink()
        return f"Memory {name} deleted."


class AvailableTools:
    def __init__(self, tools: list[Tool]):
        """
        :param tools: the list of available tools
        """
        self.tools = tools
        self.tool_names = [tool.get_name_from_cls() for tool in tools]
        self.tool_marker_names = set()
        for marker_class in iter_subclasses(ToolMarker):
            for tool in tools:
                if isinstance(tool, marker_class):
                    self.tool_marker_names.add(marker_class.__name__)

    def __len__(self) -> int:
        return len(self.tools)


class SerenaAgent:
    def __init__(
        self,
        project: str | None = None,
        project_activation_callback: Callable[[], None] | None = None,
        serena_config: SerenaConfig | None = None,
        context: SerenaAgentContext | None = None,
        modes: list[SerenaAgentMode] | None = None,
        memory_log_handler: MemoryLogHandler | None = None,
    ):
        """
        :param project: the project to load immediately or None to not load any project; may be a path to the project or a name of
            an already registered project;
        :param project_activation_callback: a callback function to be called when a project is activated.
        :param serena_config: the Serena configuration or None to read the configuration from the default location.
        :param context: the context in which the agent is operating, None for default context.
            The context may adjust prompts, tool availability, and tool descriptions.
        :param modes: list of modes in which the agent is operating (they will be combined), None for default modes.
            The modes may adjust prompts, tool availability, and tool descriptions.
        :param memory_log_handler: a MemoryLogHandler instance from which to read log messages; if None, a new one will be created
            if necessary.
        """
        # obtain serena configuration using the decoupled factory function
        self.serena_config = serena_config or SerenaConfig.from_config_file()

        # project-specific instances, which will be initialized upon project activation
        self._active_project: Project | None = None
        self.language_server: SolidLanguageServer | None = None
        self.memories_manager: MemoriesManager | None = None

        # adjust log level
        serena_log_level = self.serena_config.log_level
        if Logger.root.level > serena_log_level:
            log.info(f"Changing the root logger level to {serena_log_level}")
            Logger.root.setLevel(serena_log_level)

        def get_memory_log_handler() -> MemoryLogHandler:
            nonlocal memory_log_handler
            if memory_log_handler is None:
                memory_log_handler = MemoryLogHandler(level=serena_log_level)
                Logger.root.addHandler(memory_log_handler)
            return memory_log_handler

        # open GUI log window if enabled
        self._gui_log_viewer: Optional["GuiLogViewer"] = None
        if self.serena_config.gui_log_window_enabled:
            if platform.system() == "Darwin":
                log.warning("GUI log window is not supported on macOS")
            else:
                # the import is deferred, because on macOS even importing may fail if tkinter dependencies are unavailable
                # (this depends on the Python interpreter installation which uv used as a base, unfortunately)
                from serena.gui_log_viewer import GuiLogViewer

                self._gui_log_viewer = GuiLogViewer("dashboard", title="Serena Logs", memory_log_handler=get_memory_log_handler())
                self._gui_log_viewer.start()

        # set the agent context
        if context is None:
            context = SerenaAgentContext.load_default()
        self._context = context

        # instantiate all tool classes
        self._all_tools: dict[type[Tool], Tool] = {tool_class: tool_class(self) for tool_class in ToolRegistry().get_all_tool_classes()}
        tool_names = [tool.get_name_from_cls() for tool in self._all_tools.values()]

        # If GUI log window is enabled, set the tool names for highlighting
        if self._gui_log_viewer is not None:
            self._gui_log_viewer.set_tool_names(tool_names)

        self._tool_usage_stats: ToolUsageStats | None = None
        if self.serena_config.record_tool_usage_stats:
            token_count_estimator = RegisteredTokenCountEstimator[self.serena_config.token_count_estimator]
            log.info(f"Tool usage statistics recording is enabled with token count estimator: {token_count_estimator.name}.")
            self._tool_usage_stats = ToolUsageStats(token_count_estimator)

        # start the dashboard (web frontend), registering its log handler
        if self.serena_config.web_dashboard:
            self._dashboard_thread, port = SerenaDashboardAPI(
                get_memory_log_handler(), tool_names, agent=self, tool_usage_stats=self._tool_usage_stats
            ).run_in_thread()
            dashboard_url = f"http://127.0.0.1:{port}/dashboard/index.html"
            log.info("Serena web dashboard started at %s", dashboard_url)
            if self.serena_config.web_dashboard_open_on_launch:
                # open the dashboard URL in the default web browser (using a separate process to control
                # output redirection)
                process = multiprocessing.Process(target=self._open_dashboard, args=(dashboard_url,))
                process.start()
                process.join(timeout=1)

        # log fundamental information
        log.info(f"Starting Serena server (version={serena_version()}, process id={os.getpid()}, parent process id={os.getppid()})")
        log.info("Configuration file: %s", self.serena_config.config_file_path)
        log.info("Available projects: {}".format(", ".join(self.serena_config.project_names)))
        log.info(f"Loaded tools ({len(self._all_tools)}): {', '.join([tool.get_name_from_cls() for tool in self._all_tools.values()])}")

        self._check_shell_settings()

        # determine the base toolset defining the set of exposed tools (which e.g. the MCP shall see),
        # limited by the Serena config, the context (which is fixed for the session) and JetBrains mode
        tool_inclusion_definitions: list[ToolInclusionDefinition] = [self.serena_config, self._context]
        if self._context.name == RegisteredContext.IDE_ASSISTANT.value:
            tool_inclusion_definitions.extend(self._ide_assistant_context_tool_inclusion_definitions(project))
        if self.serena_config.jetbrains:
            tool_inclusion_definitions.append(SerenaAgentMode.from_name_internal("jetbrains"))

        self._base_tool_set = ToolSet.default().apply(*tool_inclusion_definitions)
        self._exposed_tools = AvailableTools([t for t in self._all_tools.values() if self._base_tool_set.includes_name(t.get_name())])
        log.info(f"Number of exposed tools: {len(self._exposed_tools)}")

        # create executor for starting the language server and running tools in another thread
        # This executor is used to achieve linear task execution, so it is important to use a single-threaded executor.
        self._task_executor = ThreadPoolExecutor(max_workers=1, thread_name_prefix="SerenaAgentExecutor")
        self._task_executor_lock = threading.Lock()
        self._task_executor_task_index = 1

        # Initialize the prompt factory
        self.prompt_factory = SerenaPromptFactory()
        self._project_activation_callback = project_activation_callback

        # set the active modes
        if modes is None:
            modes = SerenaAgentMode.load_default_modes()
        self._modes = modes

        self._active_tools: dict[type[Tool], Tool] = {}
        self._update_active_tools()

        # activate a project configuration (if provided or if there is only a single project available)
        if project is not None:
            try:
                self.activate_project_from_path_or_name(project)
            except Exception as e:
                log.error(f"Error activating project '{project}' at startup: {e}", exc_info=e)

    def get_context(self) -> SerenaAgentContext:
        return self._context

    def get_tool_description_override(self, tool_name: str) -> str | None:
        return self._context.tool_description_overrides.get(tool_name, None)

    def _check_shell_settings(self) -> None:
        # On Windows, Claude Code sets COMSPEC to Git-Bash (often even with a path containing spaces),
        # which causes all sorts of trouble, preventing language servers from being launched correctly.
        # So we make sure that COMSPEC is unset if it has been set to bash specifically.
        if platform.system() == "Windows":
            comspec = os.environ.get("COMSPEC", "")
            if "bash" in comspec:
                os.environ["COMSPEC"] = ""  # force use of default shell
                log.info("Adjusting COMSPEC environment variable to use the default shell instead of '%s'", comspec)

    def _ide_assistant_context_tool_inclusion_definitions(self, project_root_or_name: str | None) -> list[ToolInclusionDefinition]:
        """
        In the IDE assistant context, the agent is assumed to work on a single project, and we thus
        want to apply that project's tool exclusions/inclusions from the get-go, limiting the set
        of tools that will be exposed to the client.
        Furthermore, we disable tools that are only relevant for project activation.
        So if the project exists, we apply all the aforementioned exclusions.

        :param project_root_or_name: the project root path or project name
        :return: the list of tool inclusion definitions to apply (empty if no existing project was given)
        """
        tool_inclusion_definitions = []
        if project_root_or_name is not None:
            # Note: Auto-generation is disabled, because the result must be returned instantaneously
            #   (project generation could take too much time); MCP server startup must not be delayed,
            #   so that responses can be provided to the client immediately.
            project = self.load_project_from_path_or_name(project_root_or_name, autogenerate=False)
            if project is not None:
                tool_inclusion_definitions.append(
                    ToolInclusionDefinition(
                        excluded_tools=[ActivateProjectTool.get_name_from_cls(), GetCurrentConfigTool.get_name_from_cls()]
                    )
                )
                tool_inclusion_definitions.append(project.project_config)
        return tool_inclusion_definitions

    def record_tool_usage_if_enabled(self, input_kwargs: dict, tool_result: str | dict, tool: Tool) -> None:
        """
        Record the usage of a tool with the given input and output strings if tool usage statistics recording is enabled.
        """
        tool_name = tool.get_name()
        if self._tool_usage_stats is not None:
            input_str = str(input_kwargs)
            output_str = str(tool_result)
            log.debug(f"Recording tool usage for tool '{tool_name}'")
            self._tool_usage_stats.record_tool_usage(tool_name, input_str, output_str)
        else:
            log.debug(f"Tool usage statistics recording is disabled, not recording usage of '{tool_name}'.")

    @staticmethod
    def _open_dashboard(url: str) -> None:
        # Redirect stdout and stderr file descriptors to /dev/null,
        # making sure that nothing can be written to stdout/stderr, even by subprocesses
        null_fd = os.open(os.devnull, os.O_WRONLY)
        os.dup2(null_fd, sys.stdout.fileno())
        os.dup2(null_fd, sys.stderr.fileno())
        os.close(null_fd)

        # open the dashboard URL in the default web browser
        webbrowser.open(url)

    def get_project_root(self) -> str:
        """
        :return: the root directory of the active project (if any); raises a ValueError if there is no active project
        """
        project = self.get_active_project()
        if project is None:
            raise ValueError("Cannot get project root if no project is active.")
        return project.project_root

    def get_exposed_tool_instances(self) -> list["Tool"]:
        """
        :return: the tool instances which are exposed (e.g. to the MCP client).
            Note that the set of exposed tools is fixed for the session, as
            clients don't react to changes in the set of tools, so this is the superset
            of tools that can be offered during the session.
            If a client should attempt to use a tool that is dynamically disabled
            (e.g. because a project is activated that disables it), it will receive an error.
        """
        return list(self._exposed_tools.tools)

    def get_active_project(self) -> Project | None:
        """
        :return: the active project or None if no project is active
        """
        return self._active_project

    def get_active_project_or_raise(self) -> Project:
        """
        :return: the active project or raises an exception if no project is active
        """
        project = self.get_active_project()
        if project is None:
            raise ValueError("No active project. Please activate a project first.")
        return project

    def set_modes(self, modes: list[SerenaAgentMode]) -> None:
        """
        Set the current mode configurations.

        :param modes: List of mode names or paths to use
        """
        self._modes = modes
        self._update_active_tools()

        log.info(f"Set modes to {[mode.name for mode in modes]}")

    def get_active_modes(self) -> list[SerenaAgentMode]:
        """
        :return: the list of active modes
        """
        return list(self._modes)

    def _format_prompt(self, prompt_template: str) -> str:
        template = JinjaTemplate(prompt_template)
        return template.render(available_tools=self._exposed_tools.tool_names, available_markers=self._exposed_tools.tool_marker_names)

    def create_system_prompt(self) -> str:
        available_markers = self._exposed_tools.tool_marker_names
        log.info("Generating system prompt with available_tools=(see exposed tools), available_markers=%s", available_markers)
        system_prompt = self.prompt_factory.create_system_prompt(
            context_system_prompt=self._format_prompt(self._context.prompt),
            mode_system_prompts=[self._format_prompt(mode.prompt) for mode in self._modes],
            available_tools=self._exposed_tools.tool_names,
            available_markers=available_markers,
        )
        log.info("System prompt:\n%s", system_prompt)
        return system_prompt

    def _update_active_tools(self) -> None:
        """
        Update the active tools based on enabled modes and the active project.
        The base tool set already takes the Serena configuration and the context into account
        (as well as any internal modes that are not handled dynamically, such as JetBrains mode).
        """
        tool_set = self._base_tool_set.apply(*self._modes)
        if self._active_project is not None:
            tool_set = tool_set.apply(self._active_project.project_config)
            if self._active_project.project_config.read_only:
                tool_set = tool_set.without_editing_tools()

        self._active_tools = {
            tool_class: tool_instance
            for tool_class, tool_instance in self._all_tools.items()
            if tool_set.includes_name(tool_instance.get_name())
        }

        log.info(f"Active tools ({len(self._active_tools)}): {', '.join(self.get_active_tool_names())}")

    def issue_task(self, task: Callable[[], Any], name: str | None = None) -> Future:
        """
        Issue a task to the executor for asynchronous execution.
        It is ensured that tasks are executed in the order they are issued, one after another.

        :param task: the task to execute
        :param name: the name of the task for logging purposes; if None, use the task function's name
        :return: a Future object representing the execution of the task
        """
        with self._task_executor_lock:
            task_name = f"Task-{self._task_executor_task_index}[{name or task.__name__}]"
            self._task_executor_task_index += 1

            def task_execution_wrapper() -> Any:
                with LogTime(task_name, logger=log):
                    return task()

            log.info(f"Scheduling {task_name}")
            return self._task_executor.submit(task_execution_wrapper)

    def execute_task(self, task: Callable[[], T]) -> T:
        """
        Executes the given task synchronously via the agent's task executor.
        This is useful for tasks that need to be executed immediately and whose results are needed right away.

        :param task: the task to execute
        :return: the result of the task execution
        """
        future = self.issue_task(task)
        return future.result()

    def is_using_language_server(self) -> bool:
        """
        :return: whether this agent uses language server-based code analysis
        """
        return not self.serena_config.jetbrains

    def _activate_project(self, project: Project) -> None:
        log.info(f"Activating {project.project_name} at {project.project_root}")
        self._active_project = project
        self._update_active_tools()

        # initialize project-specific instances which do not depend on the language server
        self.memories_manager = MemoriesManager(project.project_root)

        def init_language_server() -> None:
            # start the language server
            with LogTime("Language server initialization", logger=log):
                self.reset_language_server()
                assert self.language_server is not None

        # initialize the language server in the background (if in language server mode)
        if self.is_using_language_server():
            self.issue_task(init_language_server)

        if self._project_activation_callback is not None:
            self._project_activation_callback()

    def load_project_from_path_or_name(self, project_root_or_name: str, autogenerate: bool) -> Project | None:
        """
        Get a project instance from a path or a name.

        :param project_root_or_name: the path to the project root or the name of the project
        :param autogenerate: whether to autogenerate the project in case the first argument is a directory
            which does not yet contain a Serena project configuration file
        :return: the project instance if it was found/could be created, None otherwise
        """
        project_instance: Project | None = self.serena_config.get_project(project_root_or_name)
        if project_instance is not None:
            log.info(f"Found registered project '{project_instance.project_name}' at path {project_instance.project_root}")
        elif autogenerate and os.path.isdir(project_root_or_name):
            project_instance = self.serena_config.add_project_from_path(project_root_or_name)
            log.info(f"Added new project {project_instance.project_name} for path {project_instance.project_root}")
        return project_instance

    def activate_project_from_path_or_name(self, project_root_or_name: str) -> Project:
        """
        Activate a project from a path or a name.
        If the project was already registered, it will just be activated.
        If the argument is a path at which no Serena project previously existed, the project will be created beforehand.
        Raises ProjectNotFoundError if the project could neither be found nor created.

        :return: the project instance that was activated
        """
        project_instance: Project | None = self.load_project_from_path_or_name(project_root_or_name, autogenerate=True)
        if project_instance is None:
            raise ProjectNotFoundError(
                f"Project '{project_root_or_name}' not found: Not a valid project name or directory. "
                f"Existing project names: {self.serena_config.project_names}"
            )
        self._activate_project(project_instance)
        return project_instance

    def get_active_tool_classes(self) -> list[type["Tool"]]:
        """
        :return: the list of active tool classes for the current project
        """
        return list(self._active_tools.keys())

    def get_active_tool_names(self) -> list[str]:
        """
        :return: the list of names of the active tools for the current project
        """
        return sorted([tool.get_name_from_cls() for tool in self.get_active_tool_classes()])

    def tool_is_active(self, tool_class: type["Tool"] | str) -> bool:
        """
        :param tool_class: the class or name of the tool to check
        :return: True if the tool is active, False otherwise
        """
        if isinstance(tool_class, str):
            return tool_class in self.get_active_tool_names()
        else:
            return tool_class in self.get_active_tool_classes()

    def get_current_config_overview(self) -> str:
        """
        :return: a string overview of the current configuration, including the active and available configuration options
        """
        result_str = "Current configuration:\n"
        result_str += f"Serena version: {serena_version()}\n"
        result_str += f"Loglevel: {self.serena_config.log_level}, trace_lsp_communication={self.serena_config.trace_lsp_communication}\n"
        if self._active_project is not None:
            result_str += f"Active project: {self._active_project.project_name}\n"
        else:
            result_str += "No active project\n"
        result_str += "Available projects:\n" + "\n".join(list(self.serena_config.project_names)) + "\n"
        result_str += f"Active context: {self._context.name}\n"

        # Active modes
        active_mode_names = [mode.name for mode in self.get_active_modes()]
        result_str += "Active modes: {}\n".format(", ".join(active_mode_names)) + "\n"

        # Available but not active modes
        all_available_modes = SerenaAgentMode.list_registered_mode_names()
        inactive_modes = [mode for mode in all_available_modes if mode not in active_mode_names]
        if inactive_modes:
            result_str += "Available but not active modes: {}\n".format(", ".join(inactive_modes)) + "\n"

        # Active tools
        result_str += "Active tools (after all exclusions from the project, context, and modes):\n"
        active_tool_names = self.get_active_tool_names()
        # print the tool names in chunks
        chunk_size = 4
        for i in range(0, len(active_tool_names), chunk_size):
            chunk = active_tool_names[i : i + chunk_size]
            result_str += "  " + ", ".join(chunk) + "\n"

        # Available but not active tools
        all_tool_names = sorted([tool.get_name_from_cls() for tool in self._all_tools.values()])
        inactive_tool_names = [tool for tool in all_tool_names if tool not in active_tool_names]
        if inactive_tool_names:
            result_str += "Available but not active tools:\n"
            for i in range(0, len(inactive_tool_names), chunk_size):
                chunk = inactive_tool_names[i : i + chunk_size]
                result_str += "  " + ", ".join(chunk) + "\n"

        return result_str

    def is_language_server_running(self) -> bool:
        return self.language_server is not None and self.language_server.is_running()

    def reset_language_server(self) -> None:
        """
        Starts/resets the language server for the current project
        """
        tool_timeout = self.serena_config.tool_timeout
        if tool_timeout is None or tool_timeout < 0:
            ls_timeout = None
        else:
            if tool_timeout < 10:
                raise ValueError(f"Tool timeout must be at least 10 seconds, but is {tool_timeout} seconds")
            ls_timeout = tool_timeout - 5  # the LS timeout is for a single call, it should be smaller than the tool timeout

        # stop the language server if it is running
        if self.is_language_server_running():
            assert self.language_server is not None
            log.info(f"Stopping the current language server at {self.language_server.repository_root_path} ...")
            self.language_server.stop()
            self.language_server = None

        # instantiate and start the language server
        assert self._active_project is not None
        self.language_server = self._active_project.create_language_server(
            log_level=self.serena_config.log_level,
            ls_timeout=ls_timeout,
            trace_lsp_communication=self.serena_config.trace_lsp_communication,
            ls_specific_settings=self.serena_config.ls_specific_settings,
        )
        log.info(f"Starting the language server for {self._active_project.project_name}")
        self.language_server.start()
        if not self.language_server.is_running():
            raise RuntimeError(
                f"Failed to start the language server for {self._active_project.project_name} at {self._active_project.project_root}"
            )

    def get_tool(self, tool_class: type[TTool]) -> TTool:
        return self._all_tools[tool_class]  # type: ignore

    def print_tool_overview(self) -> None:
        ToolRegistry().print_tool_overview(self._active_tools.values())

    def __del__(self) -> None:
        """
        Destructor to clean up the language server instance and GUI logger
        """
        if not hasattr(self, "_is_initialized"):
            return
        log.info("SerenaAgent is shutting down ...")
        if self.is_language_server_running():
            log.info("Stopping the language server ...")
            assert self.language_server is not None
            self.language_server.save_cache()
            self.language_server.stop()
        if self._gui_log_viewer:
            log.info("Stopping the GUI log window ...")
            self._gui_log_viewer.stop()

    def get_tool_by_name(self, tool_name: str) -> Tool:
        tool_class = ToolRegistry().get_tool_class_by_name(tool_name)
        return self.get_tool(tool_class)

```
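
For orientation, here is a minimal sketch (not part of the repository) of driving `SerenaAgent` programmatically, e.g. from a script, using only methods defined in `agent.py` above; the project path is a placeholder, and it is assumed that a default Serena configuration can be read.

```python
# Hedged sketch: "/path/to/your/project" is a placeholder, and loading the default
# Serena configuration (SerenaConfig.from_config_file()) is assumed to succeed.
from serena.agent import SerenaAgent

agent = SerenaAgent()  # no project activated yet; config is read from the default location
agent.activate_project_from_path_or_name("/path/to/your/project")

# inspect what the agent exposes after project activation
print(agent.get_current_config_overview())
print(agent.get_active_tool_names())
```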

--------------------------------------------------------------------------------
/src/solidlsp/lsp_protocol_handler/lsp_requests.py:
--------------------------------------------------------------------------------

```python
# Code generated. DO NOT EDIT.
# LSP v3.17.0
# TODO: Look into use of https://pypi.org/project/ts2python/ to generate the types for https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/

"""
This file provides the python interface corresponding to the requests and notifications defined in Typescript in the language server protocol.
This file is obtained from https://github.com/predragnikolic/OLSP under the MIT License with the following terms:

MIT License

Copyright (c) 2023 Предраг Николић

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""

from typing import Union

from solidlsp.lsp_protocol_handler import lsp_types


class LspRequest:
    def __init__(self, send_request):
        self.send_request = send_request

    async def implementation(
        self, params: lsp_types.ImplementationParams
    ) -> Union["lsp_types.Definition", list["lsp_types.LocationLink"], None]:
        """A request to resolve the implementation locations of a symbol at a given text
        document position. The request's parameter is of type [TextDocumentPositionParams]
        (#TextDocumentPositionParams) the response is of type {@link Definition} or a
        Thenable that resolves to such.
        """
        return await self.send_request("textDocument/implementation", params)

    async def type_definition(
        self, params: lsp_types.TypeDefinitionParams
    ) -> Union["lsp_types.Definition", list["lsp_types.LocationLink"], None]:
        """A request to resolve the type definition locations of a symbol at a given text
        document position. The request's parameter is of type [TextDocumentPositionParams]
        (#TextDocumentPositionParams) the response is of type {@link Definition} or a
        Thenable that resolves to such.
        """
        return await self.send_request("textDocument/typeDefinition", params)

    async def document_color(self, params: lsp_types.DocumentColorParams) -> list["lsp_types.ColorInformation"]:
        """A request to list all color symbols found in a given text document. The request's
        parameter is of type {@link DocumentColorParams} the
        response is of type {@link ColorInformation ColorInformation[]} or a Thenable
        that resolves to such.
        """
        return await self.send_request("textDocument/documentColor", params)

    async def color_presentation(self, params: lsp_types.ColorPresentationParams) -> list["lsp_types.ColorPresentation"]:
        """A request to list all presentation for a color. The request's
        parameter is of type {@link ColorPresentationParams} the
        response is of type {@link ColorInformation ColorInformation[]} or a Thenable
        that resolves to such.
        """
        return await self.send_request("textDocument/colorPresentation", params)

    async def folding_range(self, params: lsp_types.FoldingRangeParams) -> list["lsp_types.FoldingRange"] | None:
        """A request to provide folding ranges in a document. The request's
        parameter is of type {@link FoldingRangeParams}, the
        response is of type {@link FoldingRangeList} or a Thenable
        that resolves to such.
        """
        return await self.send_request("textDocument/foldingRange", params)

    async def declaration(
        self, params: lsp_types.DeclarationParams
    ) -> Union["lsp_types.Declaration", list["lsp_types.LocationLink"], None]:
        """A request to resolve the type definition locations of a symbol at a given text
        document position. The request's parameter is of type [TextDocumentPositionParams]
        (#TextDocumentPositionParams) the response is of type {@link Declaration}
        or a typed array of {@link DeclarationLink} or a Thenable that resolves
        to such.
        """
        return await self.send_request("textDocument/declaration", params)

    async def selection_range(self, params: lsp_types.SelectionRangeParams) -> list["lsp_types.SelectionRange"] | None:
        """A request to provide selection ranges in a document. The request's
        parameter is of type {@link SelectionRangeParams}, the
        response is of type {@link SelectionRange SelectionRange[]} or a Thenable
        that resolves to such.
        """
        return await self.send_request("textDocument/selectionRange", params)

    async def prepare_call_hierarchy(self, params: lsp_types.CallHierarchyPrepareParams) -> list["lsp_types.CallHierarchyItem"] | None:
        """A request to result a `CallHierarchyItem` in a document at a given position.
        Can be used as an input to an incoming or outgoing call hierarchy.

        @since 3.16.0
        """
        return await self.send_request("textDocument/prepareCallHierarchy", params)

    async def incoming_calls(
        self, params: lsp_types.CallHierarchyIncomingCallsParams
    ) -> list["lsp_types.CallHierarchyIncomingCall"] | None:
        """A request to resolve the incoming calls for a given `CallHierarchyItem`.

        @since 3.16.0
        """
        return await self.send_request("callHierarchy/incomingCalls", params)

    async def outgoing_calls(
        self, params: lsp_types.CallHierarchyOutgoingCallsParams
    ) -> list["lsp_types.CallHierarchyOutgoingCall"] | None:
        """A request to resolve the outgoing calls for a given `CallHierarchyItem`.

        @since 3.16.0
        """
        return await self.send_request("callHierarchy/outgoingCalls", params)

    async def semantic_tokens_full(self, params: lsp_types.SemanticTokensParams) -> Union["lsp_types.SemanticTokens", None]:
        """@since 3.16.0"""
        return await self.send_request("textDocument/semanticTokens/full", params)

    async def semantic_tokens_delta(
        self, params: lsp_types.SemanticTokensDeltaParams
    ) -> Union["lsp_types.SemanticTokens", "lsp_types.SemanticTokensDelta", None]:
        """@since 3.16.0"""
        return await self.send_request("textDocument/semanticTokens/full/delta", params)

    async def semantic_tokens_range(self, params: lsp_types.SemanticTokensRangeParams) -> Union["lsp_types.SemanticTokens", None]:
        """@since 3.16.0"""
        return await self.send_request("textDocument/semanticTokens/range", params)

    async def linked_editing_range(self, params: lsp_types.LinkedEditingRangeParams) -> Union["lsp_types.LinkedEditingRanges", None]:
        """A request to provide ranges that can be edited together.

        @since 3.16.0
        """
        return await self.send_request("textDocument/linkedEditingRange", params)

    async def will_create_files(self, params: lsp_types.CreateFilesParams) -> Union["lsp_types.WorkspaceEdit", None]:
        """The will create files request is sent from the client to the server before files are actually
        created as long as the creation is triggered from within the client.

        @since 3.16.0
        """
        return await self.send_request("workspace/willCreateFiles", params)

    async def will_rename_files(self, params: lsp_types.RenameFilesParams) -> Union["lsp_types.WorkspaceEdit", None]:
        """The will rename files request is sent from the client to the server before files are actually
        renamed as long as the rename is triggered from within the client.

        @since 3.16.0
        """
        return await self.send_request("workspace/willRenameFiles", params)

    async def will_delete_files(self, params: lsp_types.DeleteFilesParams) -> Union["lsp_types.WorkspaceEdit", None]:
        """The did delete files notification is sent from the client to the server when
        files were deleted from within the client.

        @since 3.16.0
        """
        return await self.send_request("workspace/willDeleteFiles", params)

    async def moniker(self, params: lsp_types.MonikerParams) -> list["lsp_types.Moniker"] | None:
        """A request to get the moniker of a symbol at a given text document position.
        The request parameter is of type {@link TextDocumentPositionParams}.
        The response is of type {@link Moniker Moniker[]} or `null`.
        """
        return await self.send_request("textDocument/moniker", params)

    async def prepare_type_hierarchy(self, params: lsp_types.TypeHierarchyPrepareParams) -> list["lsp_types.TypeHierarchyItem"] | None:
        """A request to result a `TypeHierarchyItem` in a document at a given position.
        Can be used as an input to a subtypes or supertypes type hierarchy.

        @since 3.17.0
        """
        return await self.send_request("textDocument/prepareTypeHierarchy", params)

    async def type_hierarchy_supertypes(
        self, params: lsp_types.TypeHierarchySupertypesParams
    ) -> list["lsp_types.TypeHierarchyItem"] | None:
        """A request to resolve the supertypes for a given `TypeHierarchyItem`.

        @since 3.17.0
        """
        return await self.send_request("typeHierarchy/supertypes", params)

    async def type_hierarchy_subtypes(self, params: lsp_types.TypeHierarchySubtypesParams) -> list["lsp_types.TypeHierarchyItem"] | None:
        """A request to resolve the subtypes for a given `TypeHierarchyItem`.

        @since 3.17.0
        """
        return await self.send_request("typeHierarchy/subtypes", params)

    async def inline_value(self, params: lsp_types.InlineValueParams) -> list["lsp_types.InlineValue"] | None:
        """A request to provide inline values in a document. The request's parameter is of
        type {@link InlineValueParams}, the response is of type
        {@link InlineValue InlineValue[]} or a Thenable that resolves to such.

        @since 3.17.0
        """
        return await self.send_request("textDocument/inlineValue", params)

    async def inlay_hint(self, params: lsp_types.InlayHintParams) -> list["lsp_types.InlayHint"] | None:
        """A request to provide inlay hints in a document. The request's parameter is of
        type {@link InlayHintsParams}, the response is of type
        {@link InlayHint InlayHint[]} or a Thenable that resolves to such.

        @since 3.17.0
        """
        return await self.send_request("textDocument/inlayHint", params)

    async def resolve_inlay_hint(self, params: lsp_types.InlayHint) -> "lsp_types.InlayHint":
        """A request to resolve additional properties for an inlay hint.
        The request's parameter is of type {@link InlayHint}, the response is
        of type {@link InlayHint} or a Thenable that resolves to such.

        @since 3.17.0
        """
        return await self.send_request("inlayHint/resolve", params)

    async def text_document_diagnostic(self, params: lsp_types.DocumentDiagnosticParams) -> "lsp_types.DocumentDiagnosticReport":
        """The document diagnostic request definition.

        @since 3.17.0
        """
        return await self.send_request("textDocument/diagnostic", params)

    async def workspace_diagnostic(self, params: lsp_types.WorkspaceDiagnosticParams) -> "lsp_types.WorkspaceDiagnosticReport":
        """The workspace diagnostic request definition.

        @since 3.17.0
        """
        return await self.send_request("workspace/diagnostic", params)

    async def initialize(self, params: lsp_types.InitializeParams) -> "lsp_types.InitializeResult":
        """The initialize request is sent from the client to the server.
        It is sent once as the request after starting up the server.
        The request's parameter is of type {@link InitializeParams};
        the response is of type {@link InitializeResult} or a Thenable that
        resolves to such.
        """
        return await self.send_request("initialize", params)

    async def shutdown(self) -> None:
        """A shutdown request is sent from the client to the server.
        It is sent once when the client decides to shutdown the
        server. The only notification that is sent after a shutdown request
        is the exit event.
        """
        return await self.send_request("shutdown")

    async def will_save_wait_until(self, params: lsp_types.WillSaveTextDocumentParams) -> list["lsp_types.TextEdit"] | None:
        """A document will save request is sent from the client to the server before
        the document is actually saved. The request can return an array of TextEdits
        which will be applied to the text document before it is saved. Please note that
        clients might drop results if computing the text edits took too long or if a
        server constantly fails on this request. This is done to keep the save fast and
        reliable.
        """
        return await self.send_request("textDocument/willSaveWaitUntil", params)

    async def completion(
        self, params: lsp_types.CompletionParams
    ) -> Union[list["lsp_types.CompletionItem"], "lsp_types.CompletionList", None]:
        """Request to request completion at a given text document position. The request's
        parameter is of type {@link TextDocumentPosition} the response
        is of type {@link CompletionItem CompletionItem[]} or {@link CompletionList}
        or a Thenable that resolves to such.

        The request can delay the computation of the {@link CompletionItem.detail `detail`}
        and {@link CompletionItem.documentation `documentation`} properties to the `completionItem/resolve`
        request. However, properties that are needed for the initial sorting and filtering, like `sortText`,
        `filterText`, `insertText`, and `textEdit`, must not be changed during resolve.
        """
        return await self.send_request("textDocument/completion", params)

    async def resolve_completion_item(self, params: lsp_types.CompletionItem) -> "lsp_types.CompletionItem":
        """Request to resolve additional information for a given completion item.The request's
        parameter is of type {@link CompletionItem} the response
        is of type {@link CompletionItem} or a Thenable that resolves to such.
        """
        return await self.send_request("completionItem/resolve", params)

    async def hover(self, params: lsp_types.HoverParams) -> Union["lsp_types.Hover", None]:
        """Request to request hover information at a given text document position. The request's
        parameter is of type {@link TextDocumentPosition} the response is of
        type {@link Hover} or a Thenable that resolves to such.
        """
        return await self.send_request("textDocument/hover", params)

    async def signature_help(self, params: lsp_types.SignatureHelpParams) -> Union["lsp_types.SignatureHelp", None]:
        return await self.send_request("textDocument/signatureHelp", params)

    async def definition(self, params: lsp_types.DefinitionParams) -> Union["lsp_types.Definition", list["lsp_types.LocationLink"], None]:
        """A request to resolve the definition location of a symbol at a given text
        document position. The request's parameter is of type [TextDocumentPosition]
        (#TextDocumentPosition) the response is of either type {@link Definition}
        or a typed array of {@link DefinitionLink} or a Thenable that resolves
        to such.
        """
        return await self.send_request("textDocument/definition", params)

    async def references(self, params: lsp_types.ReferenceParams) -> list["lsp_types.Location"] | None:
        """A request to resolve project-wide references for the symbol denoted
        by the given text document position. The request's parameter is of
        type {@link ReferenceParams} the response is of type
        {@link Location Location[]} or a Thenable that resolves to such.
        """
        return await self.send_request("textDocument/references", params)

    async def document_highlight(self, params: lsp_types.DocumentHighlightParams) -> list["lsp_types.DocumentHighlight"] | None:
        """Request to resolve a {@link DocumentHighlight} for a given
        text document position. The request's parameter is of type [TextDocumentPosition]
        (#TextDocumentPosition) the request response is of type [DocumentHighlight[]]
        (#DocumentHighlight) or a Thenable that resolves to such.
        """
        return await self.send_request("textDocument/documentHighlight", params)

    async def document_symbol(
        self, params: lsp_types.DocumentSymbolParams
    ) -> list["lsp_types.SymbolInformation"] | list["lsp_types.DocumentSymbol"] | None:
        """A request to list all symbols found in a given text document. The request's
        parameter is of type {@link TextDocumentIdentifier} the
        response is of type {@link SymbolInformation SymbolInformation[]} or a Thenable
        that resolves to such.
        """
        return await self.send_request("textDocument/documentSymbol", params)

    async def code_action(self, params: lsp_types.CodeActionParams) -> list[Union["lsp_types.Command", "lsp_types.CodeAction"]] | None:
        """A request to provide commands for the given text document and range."""
        return await self.send_request("textDocument/codeAction", params)

    async def resolve_code_action(self, params: lsp_types.CodeAction) -> "lsp_types.CodeAction":
        """Request to resolve additional information for a given code action.The request's
        parameter is of type {@link CodeAction} the response
        is of type {@link CodeAction} or a Thenable that resolves to such.
        """
        return await self.send_request("codeAction/resolve", params)

    async def workspace_symbol(
        self, params: lsp_types.WorkspaceSymbolParams
    ) -> list["lsp_types.SymbolInformation"] | list["lsp_types.WorkspaceSymbol"] | None:
        """A request to list project-wide symbols matching the query string given
        by the {@link WorkspaceSymbolParams}. The response is
        of type {@link SymbolInformation SymbolInformation[]} or a Thenable that
        resolves to such.

        @since 3.17.0 - support for WorkspaceSymbol in the returned data. Clients
         need to advertise support for WorkspaceSymbols via the client capability
         `workspace.symbol.resolveSupport`.
        """
        return await self.send_request("workspace/symbol", params)

    async def resolve_workspace_symbol(self, params: lsp_types.WorkspaceSymbol) -> "lsp_types.WorkspaceSymbol":
        """A request to resolve the range inside the workspace
        symbol's location.

        @since 3.17.0
        """
        return await self.send_request("workspaceSymbol/resolve", params)

    async def code_lens(self, params: lsp_types.CodeLensParams) -> list["lsp_types.CodeLens"] | None:
        """A request to provide code lens for the given text document."""
        return await self.send_request("textDocument/codeLens", params)

    async def resolve_code_lens(self, params: lsp_types.CodeLens) -> "lsp_types.CodeLens":
        """A request to resolve a command for a given code lens."""
        return await self.send_request("codeLens/resolve", params)

    async def document_link(self, params: lsp_types.DocumentLinkParams) -> list["lsp_types.DocumentLink"] | None:
        """A request to provide document links"""
        return await self.send_request("textDocument/documentLink", params)

    async def resolve_document_link(self, params: lsp_types.DocumentLink) -> "lsp_types.DocumentLink":
        """Request to resolve additional information for a given document link. The request's
        parameter is of type {@link DocumentLink} the response
        is of type {@link DocumentLink} or a Thenable that resolves to such.
        """
        return await self.send_request("documentLink/resolve", params)

    async def formatting(self, params: lsp_types.DocumentFormattingParams) -> list["lsp_types.TextEdit"] | None:
        """A request to to format a whole document."""
        return await self.send_request("textDocument/formatting", params)

    async def range_formatting(self, params: lsp_types.DocumentRangeFormattingParams) -> list["lsp_types.TextEdit"] | None:
        """A request to to format a range in a document."""
        return await self.send_request("textDocument/rangeFormatting", params)

    async def on_type_formatting(self, params: lsp_types.DocumentOnTypeFormattingParams) -> list["lsp_types.TextEdit"] | None:
        """A request to format a document on type."""
        return await self.send_request("textDocument/onTypeFormatting", params)

    async def rename(self, params: lsp_types.RenameParams) -> Union["lsp_types.WorkspaceEdit", None]:
        """A request to rename a symbol."""
        return await self.send_request("textDocument/rename", params)

    async def prepare_rename(self, params: lsp_types.PrepareRenameParams) -> Union["lsp_types.PrepareRenameResult", None]:
        """A request to test and perform the setup necessary for a rename.

        @since 3.16 - support for default behavior
        """
        return await self.send_request("textDocument/prepareRename", params)

    async def execute_command(self, params: lsp_types.ExecuteCommandParams) -> Union["lsp_types.LSPAny", None]:
        """A request send from the client to the server to execute a command. The request might return
        a workspace edit which the client will apply to the workspace.
        """
        return await self.send_request("workspace/executeCommand", params)


class LspNotification:
    def __init__(self, send_notification):
        self.send_notification = send_notification

    def did_change_workspace_folders(self, params: lsp_types.DidChangeWorkspaceFoldersParams) -> None:
        """The `workspace/didChangeWorkspaceFolders` notification is sent from the client to the server when the workspace
        folder configuration changes.
        """
        return self.send_notification("workspace/didChangeWorkspaceFolders", params)

    def cancel_work_done_progress(self, params: lsp_types.WorkDoneProgressCancelParams) -> None:
        """The `window/workDoneProgress/cancel` notification is sent from  the client to the server to cancel a progress
        initiated on the server side.
        """
        return self.send_notification("window/workDoneProgress/cancel", params)

    def did_create_files(self, params: lsp_types.CreateFilesParams) -> None:
        """The did create files notification is sent from the client to the server when
        files were created from within the client.

        @since 3.16.0
        """
        return self.send_notification("workspace/didCreateFiles", params)

    def did_rename_files(self, params: lsp_types.RenameFilesParams) -> None:
        """The did rename files notification is sent from the client to the server when
        files were renamed from within the client.

        @since 3.16.0
        """
        return self.send_notification("workspace/didRenameFiles", params)

    def did_delete_files(self, params: lsp_types.DeleteFilesParams) -> None:
        """The will delete files request is sent from the client to the server before files are actually
        deleted as long as the deletion is triggered from within the client.

        @since 3.16.0
        """
        return self.send_notification("workspace/didDeleteFiles", params)

    def did_open_notebook_document(self, params: lsp_types.DidOpenNotebookDocumentParams) -> None:
        """A notification sent when a notebook opens.

        @since 3.17.0
        """
        return self.send_notification("notebookDocument/didOpen", params)

    def did_change_notebook_document(self, params: lsp_types.DidChangeNotebookDocumentParams) -> None:
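        """A notification sent when a notebook document changes.

        @since 3.17.0
        """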
        return self.send_notification("notebookDocument/didChange", params)

    def did_save_notebook_document(self, params: lsp_types.DidSaveNotebookDocumentParams) -> None:
        """A notification sent when a notebook document is saved.

        @since 3.17.0
        """
        return self.send_notification("notebookDocument/didSave", params)

    def did_close_notebook_document(self, params: lsp_types.DidCloseNotebookDocumentParams) -> None:
        """A notification sent when a notebook closes.

        @since 3.17.0
        """
        return self.send_notification("notebookDocument/didClose", params)

    def initialized(self, params: lsp_types.InitializedParams) -> None:
        """The initialized notification is sent from the client to the
        server after the client is fully initialized and the server
        is allowed to send requests to the client.
        """
        return self.send_notification("initialized", params)

    def exit(self) -> None:
        """The exit event is sent from the client to the server to
        ask the server to exit its process.
        """
        return self.send_notification("exit")

    def workspace_did_change_configuration(self, params: lsp_types.DidChangeConfigurationParams) -> None:
        """The configuration change notification is sent from the client to the server
        when the client's configuration has changed. The notification contains
        the changed configuration as defined by the language client.
        """
        return self.send_notification("workspace/didChangeConfiguration", params)

    def did_open_text_document(self, params: lsp_types.DidOpenTextDocumentParams) -> None:
        """The document open notification is sent from the client to the server to signal
        newly opened text documents. The document's truth is now managed by the client
        and the server must not try to read the document's truth using the document's
        uri. Open in this sense means it is managed by the client. It doesn't necessarily
        mean that its content is presented in an editor. An open notification must not
        be sent more than once without a corresponding close notification sent before.
        This means open and close notifications must be balanced and the max open count
        is one.
        """
        return self.send_notification("textDocument/didOpen", params)

    def did_change_text_document(self, params: lsp_types.DidChangeTextDocumentParams) -> None:
        """The document change notification is sent from the client to the server to signal
        changes to a text document.
        """
        return self.send_notification("textDocument/didChange", params)

    def did_close_text_document(self, params: lsp_types.DidCloseTextDocumentParams) -> None:
        """The document close notification is sent from the client to the server when
        the document got closed in the client. The document's truth now exists where
        the document's uri points to (e.g. if the document's uri is a file uri the
        truth now exists on disk). As with the open notification the close notification
        is about managing the document's content. Receiving a close notification
        doesn't mean that the document was open in an editor before. A close
        notification requires a previous open notification to be sent.
        """
        return self.send_notification("textDocument/didClose", params)

    def did_save_text_document(self, params: lsp_types.DidSaveTextDocumentParams) -> None:
        """The document save notification is sent from the client to the server when
        the document got saved in the client.
        """
        return self.send_notification("textDocument/didSave", params)

    def will_save_text_document(self, params: lsp_types.WillSaveTextDocumentParams) -> None:
        """A document will save notification is sent from the client to the server before
        the document is actually saved.
        """
        return self.send_notification("textDocument/willSave", params)

    def did_change_watched_files(self, params: lsp_types.DidChangeWatchedFilesParams) -> None:
        """The watched files notification is sent from the client to the server when
        the client detects changes to files watched by the language client.
        """
        return self.send_notification("workspace/didChangeWatchedFiles", params)

    def set_trace(self, params: lsp_types.SetTraceParams) -> None:
        return self.send_notification("$/setTrace", params)

    def cancel_request(self, params: lsp_types.CancelParams) -> None:
        return self.send_notification("$/cancelRequest", params)

    def progress(self, params: lsp_types.ProgressParams) -> None:
        return self.send_notification("$/progress", params)

```
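
A minimal usage sketch of the two wrapper classes above, assuming a hypothetical `send_request` coroutine and `send_notification` callable standing in for a real JSON-RPC transport (and assuming `LspRequest`, like `LspNotification`, takes the sending callable in its constructor): open a text document, then ask the server for its symbols.

```python
import asyncio

# Hypothetical transport stubs; a real client would write JSON-RPC messages to the
# language server process and return its responses.
async def send_request(method: str, params):
    print(f"--> request {method}")
    return None

def send_notification(method: str, params=None):
    print(f"--> notification {method}")

async def main():
    request = LspRequest(send_request)          # assumed constructor, mirroring LspNotification
    notify = LspNotification(send_notification)

    uri = "file:///tmp/example.py"
    notify.did_open_text_document(
        {"textDocument": {"uri": uri, "languageId": "python", "version": 1, "text": "x = 1\n"}}
    )
    symbols = await request.document_symbol({"textDocument": {"uri": uri}})
    print(symbols)

asyncio.run(main())
```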

--------------------------------------------------------------------------------
/src/solidlsp/language_servers/rust_analyzer.py:
--------------------------------------------------------------------------------

```python
"""
Provides Rust specific instantiation of the LanguageServer class. Contains various configurations and settings specific to Rust.
"""

import logging
import os
import pathlib
import shutil
import subprocess
import threading

from overrides import override

from solidlsp.ls import SolidLanguageServer
from solidlsp.ls_config import LanguageServerConfig
from solidlsp.ls_logger import LanguageServerLogger
from solidlsp.lsp_protocol_handler.lsp_types import InitializeParams
from solidlsp.lsp_protocol_handler.server import ProcessLaunchInfo
from solidlsp.settings import SolidLSPSettings


class RustAnalyzer(SolidLanguageServer):
    """
    Provides Rust specific instantiation of the LanguageServer class. Contains various configurations and settings specific to Rust.
    """

    @staticmethod
    def _get_rustup_version():
        """Get installed rustup version or None if not found."""
        try:
            result = subprocess.run(["rustup", "--version"], capture_output=True, text=True, check=False)
            if result.returncode == 0:
                return result.stdout.strip()
        except FileNotFoundError:
            return None
        return None

    @staticmethod
    def _get_rust_analyzer_path():
        """Get rust-analyzer path via rustup or system PATH."""
        # First try rustup
        try:
            result = subprocess.run(["rustup", "which", "rust-analyzer"], capture_output=True, text=True, check=False)
            if result.returncode == 0:
                return result.stdout.strip()
        except FileNotFoundError:
            pass

        # Fallback to system PATH
        return shutil.which("rust-analyzer")

    @staticmethod
    def _ensure_rust_analyzer_installed():
        """Ensure rust-analyzer is available, install via rustup if needed."""
        path = RustAnalyzer._get_rust_analyzer_path()
        if path:
            return path

        # Check if rustup is available
        if not RustAnalyzer._get_rustup_version():
            raise RuntimeError(
                "Neither rust-analyzer nor rustup is installed.\n"
                "Please install Rust via https://rustup.rs/ or install rust-analyzer separately."
            )

        # Try to install rust-analyzer component
        result = subprocess.run(["rustup", "component", "add", "rust-analyzer"], check=False, capture_output=True, text=True)
        if result.returncode != 0:
            raise RuntimeError(f"Failed to install rust-analyzer via rustup: {result.stderr}")

        # Try again after installation
        path = RustAnalyzer._get_rust_analyzer_path()
        if not path:
            raise RuntimeError("rust-analyzer installation succeeded but binary not found in PATH")

        return path

    def __init__(
        self, config: LanguageServerConfig, logger: LanguageServerLogger, repository_root_path: str, solidlsp_settings: SolidLSPSettings
    ):
        """
        Creates a RustAnalyzer instance. This class is not meant to be instantiated directly. Use LanguageServer.create() instead.
        """
        rustanalyzer_executable_path = self._ensure_rust_analyzer_installed()
        logger.log(f"Using rust-analyzer at: {rustanalyzer_executable_path}", logging.INFO)

        super().__init__(
            config,
            logger,
            repository_root_path,
            ProcessLaunchInfo(cmd=rustanalyzer_executable_path, cwd=repository_root_path),
            "rust",
            solidlsp_settings,
        )
        self.server_ready = threading.Event()
        self.service_ready_event = threading.Event()
        self.initialize_searcher_command_available = threading.Event()
        self.resolve_main_method_available = threading.Event()

    @override
    def is_ignored_dirname(self, dirname: str) -> bool:
        return super().is_ignored_dirname(dirname) or dirname in ["target"]

    @staticmethod
    def _get_initialize_params(repository_absolute_path: str) -> InitializeParams:
        """
        Returns the initialize params for the Rust Analyzer Language Server.
        """
        root_uri = pathlib.Path(repository_absolute_path).as_uri()
        initialize_params = {
            "clientInfo": {"name": "Visual Studio Code - Insiders", "version": "1.82.0-insider"},
            "locale": "en",
            "capabilities": {
                "workspace": {
                    "applyEdit": True,
                    "workspaceEdit": {
                        "documentChanges": True,
                        "resourceOperations": ["create", "rename", "delete"],
                        "failureHandling": "textOnlyTransactional",
                        "normalizesLineEndings": True,
                        "changeAnnotationSupport": {"groupsOnLabel": True},
                    },
                    "configuration": True,
                    "didChangeWatchedFiles": {"dynamicRegistration": True, "relativePatternSupport": True},
                    "symbol": {
                        "dynamicRegistration": True,
                        "symbolKind": {"valueSet": list(range(1, 27))},
                        "tagSupport": {"valueSet": [1]},
                        "resolveSupport": {"properties": ["location.range"]},
                    },
                    "codeLens": {"refreshSupport": True},
                    "executeCommand": {"dynamicRegistration": True},
                    "didChangeConfiguration": {"dynamicRegistration": True},
                    "workspaceFolders": True,
                    "semanticTokens": {"refreshSupport": True},
                    "fileOperations": {
                        "dynamicRegistration": True,
                        "didCreate": True,
                        "didRename": True,
                        "didDelete": True,
                        "willCreate": True,
                        "willRename": True,
                        "willDelete": True,
                    },
                    "inlineValue": {"refreshSupport": True},
                    "inlayHint": {"refreshSupport": True},
                    "diagnostics": {"refreshSupport": True},
                },
                "textDocument": {
                    "publishDiagnostics": {
                        "relatedInformation": True,
                        "versionSupport": False,
                        "tagSupport": {"valueSet": [1, 2]},
                        "codeDescriptionSupport": True,
                        "dataSupport": True,
                    },
                    "synchronization": {"dynamicRegistration": True, "willSave": True, "willSaveWaitUntil": True, "didSave": True},
                    "completion": {
                        "dynamicRegistration": True,
                        "contextSupport": True,
                        "completionItem": {
                            "snippetSupport": True,
                            "commitCharactersSupport": True,
                            "documentationFormat": ["markdown", "plaintext"],
                            "deprecatedSupport": True,
                            "preselectSupport": True,
                            "tagSupport": {"valueSet": [1]},
                            "insertReplaceSupport": True,
                            "resolveSupport": {"properties": ["documentation", "detail", "additionalTextEdits"]},
                            "insertTextModeSupport": {"valueSet": [1, 2]},
                            "labelDetailsSupport": True,
                        },
                        "insertTextMode": 2,
                        "completionItemKind": {
                            "valueSet": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25]
                        },
                        "completionList": {"itemDefaults": ["commitCharacters", "editRange", "insertTextFormat", "insertTextMode"]},
                    },
                    "hover": {"dynamicRegistration": True, "contentFormat": ["markdown", "plaintext"]},
                    "signatureHelp": {
                        "dynamicRegistration": True,
                        "signatureInformation": {
                            "documentationFormat": ["markdown", "plaintext"],
                            "parameterInformation": {"labelOffsetSupport": True},
                            "activeParameterSupport": True,
                        },
                        "contextSupport": True,
                    },
                    "definition": {"dynamicRegistration": True, "linkSupport": True},
                    "references": {"dynamicRegistration": True},
                    "documentHighlight": {"dynamicRegistration": True},
                    "documentSymbol": {
                        "dynamicRegistration": True,
                        "symbolKind": {"valueSet": list(range(1, 27))},
                        "hierarchicalDocumentSymbolSupport": True,
                        "tagSupport": {"valueSet": [1]},
                        "labelSupport": True,
                    },
                    "codeAction": {
                        "dynamicRegistration": True,
                        "isPreferredSupport": True,
                        "disabledSupport": True,
                        "dataSupport": True,
                        "resolveSupport": {"properties": ["edit"]},
                        "codeActionLiteralSupport": {
                            "codeActionKind": {
                                "valueSet": [
                                    "",
                                    "quickfix",
                                    "refactor",
                                    "refactor.extract",
                                    "refactor.inline",
                                    "refactor.rewrite",
                                    "source",
                                    "source.organizeImports",
                                ]
                            }
                        },
                        "honorsChangeAnnotations": False,
                    },
                    "codeLens": {"dynamicRegistration": True},
                    "formatting": {"dynamicRegistration": True},
                    "rangeFormatting": {"dynamicRegistration": True},
                    "onTypeFormatting": {"dynamicRegistration": True},
                    "rename": {
                        "dynamicRegistration": True,
                        "prepareSupport": True,
                        "prepareSupportDefaultBehavior": 1,
                        "honorsChangeAnnotations": True,
                    },
                    "documentLink": {"dynamicRegistration": True, "tooltipSupport": True},
                    "typeDefinition": {"dynamicRegistration": True, "linkSupport": True},
                    "implementation": {"dynamicRegistration": True, "linkSupport": True},
                    "colorProvider": {"dynamicRegistration": True},
                    "foldingRange": {
                        "dynamicRegistration": True,
                        "rangeLimit": 5000,
                        "lineFoldingOnly": True,
                        "foldingRangeKind": {"valueSet": ["comment", "imports", "region"]},
                        "foldingRange": {"collapsedText": False},
                    },
                    "declaration": {"dynamicRegistration": True, "linkSupport": True},
                    "selectionRange": {"dynamicRegistration": True},
                    "callHierarchy": {"dynamicRegistration": True},
                    "semanticTokens": {
                        "dynamicRegistration": True,
                        "tokenTypes": [
                            "namespace",
                            "type",
                            "class",
                            "enum",
                            "interface",
                            "struct",
                            "typeParameter",
                            "parameter",
                            "variable",
                            "property",
                            "enumMember",
                            "event",
                            "function",
                            "method",
                            "macro",
                            "keyword",
                            "modifier",
                            "comment",
                            "string",
                            "number",
                            "regexp",
                            "operator",
                            "decorator",
                        ],
                        "tokenModifiers": [
                            "declaration",
                            "definition",
                            "readonly",
                            "static",
                            "deprecated",
                            "abstract",
                            "async",
                            "modification",
                            "documentation",
                            "defaultLibrary",
                        ],
                        "formats": ["relative"],
                        "requests": {"range": True, "full": {"delta": True}},
                        "multilineTokenSupport": False,
                        "overlappingTokenSupport": False,
                        "serverCancelSupport": True,
                        "augmentsSyntaxTokens": False,
                    },
                    "linkedEditingRange": {"dynamicRegistration": True},
                    "typeHierarchy": {"dynamicRegistration": True},
                    "inlineValue": {"dynamicRegistration": True},
                    "inlayHint": {
                        "dynamicRegistration": True,
                        "resolveSupport": {"properties": ["tooltip", "textEdits", "label.tooltip", "label.location", "label.command"]},
                    },
                    "diagnostic": {"dynamicRegistration": True, "relatedDocumentSupport": False},
                },
                "window": {
                    "showMessage": {"messageActionItem": {"additionalPropertiesSupport": True}},
                    "showDocument": {"support": True},
                    "workDoneProgress": True,
                },
                "general": {
                    "staleRequestSupport": {
                        "cancel": True,
                        "retryOnContentModified": [
                            "textDocument/semanticTokens/full",
                            "textDocument/semanticTokens/range",
                            "textDocument/semanticTokens/full/delta",
                        ],
                    },
                    "regularExpressions": {"engine": "ECMAScript", "version": "ES2020"},
                    "markdown": {
                        "parser": "marked",
                        "version": "1.1.0",
                        "allowedTags": [
                            "ul",
                            "li",
                            "p",
                            "code",
                            "blockquote",
                            "ol",
                            "h1",
                            "h2",
                            "h3",
                            "h4",
                            "h5",
                            "h6",
                            "hr",
                            "em",
                            "pre",
                            "table",
                            "thead",
                            "tbody",
                            "tr",
                            "th",
                            "td",
                            "div",
                            "del",
                            "a",
                            "strong",
                            "br",
                            "img",
                            "span",
                        ],
                    },
                    "positionEncodings": ["utf-16"],
                },
                "notebookDocument": {"synchronization": {"dynamicRegistration": True, "executionSummarySupport": True}},
                "experimental": {
                    "snippetTextEdit": True,
                    "codeActionGroup": True,
                    "hoverActions": True,
                    "serverStatusNotification": True,
                    "colorDiagnosticOutput": True,
                    "openServerLogs": True,
                    "localDocs": True,
                    "commands": {
                        "commands": [
                            "rust-analyzer.runSingle",
                            "rust-analyzer.debugSingle",
                            "rust-analyzer.showReferences",
                            "rust-analyzer.gotoLocation",
                            "editor.action.triggerParameterHints",
                        ]
                    },
                },
            },
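            # rust-analyzer specific settings, passed verbatim as initializationOptions
            # (cargo integration, diagnostics, completion snippets, inlay hints, lens, ...).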
            "initializationOptions": {
                "cargoRunner": None,
                "runnables": {"extraEnv": None, "problemMatcher": ["$rustc"], "command": None, "extraArgs": []},
                "statusBar": {"clickAction": "openLogs"},
                "server": {"path": None, "extraEnv": None},
                "trace": {"server": "verbose", "extension": False},
                "debug": {
                    "engine": "auto",
                    "sourceFileMap": {"/rustc/<id>": "${env:USERPROFILE}/.rustup/toolchains/<toolchain-id>/lib/rustlib/src/rust"},
                    "openDebugPane": False,
                    "engineSettings": {},
                },
                "restartServerOnConfigChange": False,
                "typing": {"continueCommentsOnNewline": True, "autoClosingAngleBrackets": {"enable": False}},
                "diagnostics": {
                    "previewRustcOutput": False,
                    "useRustcErrorCode": False,
                    "disabled": [],
                    "enable": True,
                    "experimental": {"enable": False},
                    "remapPrefix": {},
                    "warningsAsHint": [],
                    "warningsAsInfo": [],
                },
                "discoverProjectRunner": None,
                "showUnlinkedFileNotification": True,
                "showDependenciesExplorer": True,
                "assist": {"emitMustUse": False, "expressionFillDefault": "todo"},
                "cachePriming": {"enable": True, "numThreads": 0},
                "cargo": {
                    "autoreload": True,
                    "buildScripts": {
                        "enable": True,
                        "invocationLocation": "workspace",
                        "invocationStrategy": "per_workspace",
                        "overrideCommand": None,
                        "useRustcWrapper": True,
                    },
                    "cfgs": [],
                    "extraArgs": [],
                    "extraEnv": {},
                    "features": [],
                    "noDefaultFeatures": False,
                    "sysroot": "discover",
                    "sysrootSrc": None,
                    "target": None,
                    "unsetTest": ["core"],
                },
                "checkOnSave": True,
                "check": {
                    "allTargets": True,
                    "command": "check",
                    "extraArgs": [],
                    "extraEnv": {},
                    "features": None,
                    "ignore": [],
                    "invocationLocation": "workspace",
                    "invocationStrategy": "per_workspace",
                    "noDefaultFeatures": None,
                    "overrideCommand": None,
                    "targets": None,
                },
                "completion": {
                    "autoimport": {"enable": True},
                    "autoself": {"enable": True},
                    "callable": {"snippets": "fill_arguments"},
                    "fullFunctionSignatures": {"enable": False},
                    "limit": None,
                    "postfix": {"enable": True},
                    "privateEditable": {"enable": False},
                    "snippets": {
                        "custom": {
                            "Arc::new": {
                                "postfix": "arc",
                                "body": "Arc::new(${receiver})",
                                "requires": "std::sync::Arc",
                                "description": "Put the expression into an `Arc`",
                                "scope": "expr",
                            },
                            "Rc::new": {
                                "postfix": "rc",
                                "body": "Rc::new(${receiver})",
                                "requires": "std::rc::Rc",
                                "description": "Put the expression into an `Rc`",
                                "scope": "expr",
                            },
                            "Box::pin": {
                                "postfix": "pinbox",
                                "body": "Box::pin(${receiver})",
                                "requires": "std::boxed::Box",
                                "description": "Put the expression into a pinned `Box`",
                                "scope": "expr",
                            },
                            "Ok": {
                                "postfix": "ok",
                                "body": "Ok(${receiver})",
                                "description": "Wrap the expression in a `Result::Ok`",
                                "scope": "expr",
                            },
                            "Err": {
                                "postfix": "err",
                                "body": "Err(${receiver})",
                                "description": "Wrap the expression in a `Result::Err`",
                                "scope": "expr",
                            },
                            "Some": {
                                "postfix": "some",
                                "body": "Some(${receiver})",
                                "description": "Wrap the expression in an `Option::Some`",
                                "scope": "expr",
                            },
                        }
                    },
                },
                "files": {"excludeDirs": [], "watcher": "client"},
                "highlightRelated": {
                    "breakPoints": {"enable": True},
                    "closureCaptures": {"enable": True},
                    "exitPoints": {"enable": True},
                    "references": {"enable": True},
                    "yieldPoints": {"enable": True},
                },
                "hover": {
                    "actions": {
                        "debug": {"enable": True},
                        "enable": True,
                        "gotoTypeDef": {"enable": True},
                        "implementations": {"enable": True},
                        "references": {"enable": False},
                        "run": {"enable": True},
                    },
                    "documentation": {"enable": True, "keywords": {"enable": True}},
                    "links": {"enable": True},
                    "memoryLayout": {"alignment": "hexadecimal", "enable": True, "niches": False, "offset": "hexadecimal", "size": "both"},
                },
                "imports": {
                    "granularity": {"enforce": False, "group": "crate"},
                    "group": {"enable": True},
                    "merge": {"glob": True},
                    "preferNoStd": False,
                    "preferPrelude": False,
                    "prefix": "plain",
                },
                "inlayHints": {
                    "bindingModeHints": {"enable": False},
                    "chainingHints": {"enable": True},
                    "closingBraceHints": {"enable": True, "minLines": 25},
                    "closureCaptureHints": {"enable": False},
                    "closureReturnTypeHints": {"enable": "never"},
                    "closureStyle": "impl_fn",
                    "discriminantHints": {"enable": "never"},
                    "expressionAdjustmentHints": {"enable": "never", "hideOutsideUnsafe": False, "mode": "prefix"},
                    "lifetimeElisionHints": {"enable": "never", "useParameterNames": False},
                    "maxLength": 25,
                    "parameterHints": {"enable": True},
                    "reborrowHints": {"enable": "never"},
                    "renderColons": True,
                    "typeHints": {"enable": True, "hideClosureInitialization": False, "hideNamedConstructor": False},
                },
                "interpret": {"tests": False},
                "joinLines": {"joinAssignments": True, "joinElseIf": True, "removeTrailingComma": True, "unwrapTrivialBlock": True},
                "lens": {
                    "debug": {"enable": True},
                    "enable": True,
                    "forceCustomCommands": True,
                    "implementations": {"enable": True},
                    "location": "above_name",
                    "references": {
                        "adt": {"enable": False},
                        "enumVariant": {"enable": False},
                        "method": {"enable": False},
                        "trait": {"enable": False},
                    },
                    "run": {"enable": True},
                },
                "linkedProjects": [],
                "lru": {"capacity": None, "query": {"capacities": {}}},
                "notifications": {"cargoTomlNotFound": True},
                "numThreads": None,
                "procMacro": {"attributes": {"enable": True}, "enable": True, "ignored": {}, "server": None},
                "references": {"excludeImports": False},
                "rust": {"analyzerTargetDir": None},
                "rustc": {"source": None},
                "rustfmt": {"extraArgs": [], "overrideCommand": None, "rangeFormatting": {"enable": False}},
                "semanticHighlighting": {
                    "doc": {"comment": {"inject": {"enable": True}}},
                    "nonStandardTokens": True,
                    "operator": {"enable": True, "specialization": {"enable": False}},
                    "punctuation": {"enable": False, "separate": {"macro": {"bang": False}}, "specialization": {"enable": False}},
                    "strings": {"enable": True},
                },
                "signatureInfo": {"detail": "full", "documentation": {"enable": True}},
                "workspace": {"symbol": {"search": {"kind": "only_types", "limit": 128, "scope": "workspace"}}},
            },
            "trace": "verbose",
            "processId": os.getpid(),
            "rootPath": repository_absolute_path,
            "rootUri": root_uri,
            "workspaceFolders": [
                {
                    "uri": root_uri,
                    "name": os.path.basename(repository_absolute_path),
                }
            ],
        }
        return initialize_params

    def _start_server(self):
        """
        Starts the Rust Analyzer Language Server
        """

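        # Handlers for server-initiated requests/notifications. Most are no-ops;
        # "experimental/serverStatus" with quiescent=True signals that rust-analyzer
        # has finished its initial analysis and is ready to serve requests.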
        def register_capability_handler(params):
            assert "registrations" in params
            for registration in params["registrations"]:
                if registration["method"] == "workspace/executeCommand":
                    self.initialize_searcher_command_available.set()
                    self.resolve_main_method_available.set()
            return

        def lang_status_handler(params):
            # TODO: Should we wait for
            # server -> client: {'jsonrpc': '2.0', 'method': 'language/status', 'params': {'type': 'ProjectStatus', 'message': 'OK'}}
            # Before proceeding?
            if params["type"] == "ServiceReady" and params["message"] == "ServiceReady":
                self.service_ready_event.set()

        def execute_client_command_handler(params):
            return []

        def do_nothing(params):
            return

        def check_experimental_status(params):
            if params["quiescent"] == True:
                self.server_ready.set()

        def window_log_message(msg):
            self.logger.log(f"LSP: window/logMessage: {msg}", logging.INFO)

        self.server.on_request("client/registerCapability", register_capability_handler)
        self.server.on_notification("language/status", lang_status_handler)
        self.server.on_notification("window/logMessage", window_log_message)
        self.server.on_request("workspace/executeClientCommand", execute_client_command_handler)
        self.server.on_notification("$/progress", do_nothing)
        self.server.on_notification("textDocument/publishDiagnostics", do_nothing)
        self.server.on_notification("language/actionableNotification", do_nothing)
        self.server.on_notification("experimental/serverStatus", check_experimental_status)

        self.logger.log("Starting RustAnalyzer server process", logging.INFO)
        self.server.start()
        initialize_params = self._get_initialize_params(self.repository_root_path)

        self.logger.log(
            "Sending initialize request from LSP client to LSP server and awaiting response",
            logging.INFO,
        )
        init_response = self.server.send.initialize(initialize_params)
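        # Sanity-check the advertised capabilities: incremental text sync (change == 2)
        # and a completion provider with the expected trigger characters. These asserts
        # fail fast if the installed rust-analyzer deviates from this contract.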
        assert init_response["capabilities"]["textDocumentSync"]["change"] == 2
        assert "completionProvider" in init_response["capabilities"]
        assert init_response["capabilities"]["completionProvider"] == {
            "resolveProvider": True,
            "triggerCharacters": [":", ".", "'", "("],
            "completionItem": {"labelDetailsSupport": True},
        }
        self.server.notify.initialized({})
        self.completions_available.set()

        self.server_ready.wait()

```
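
For reference, the rust-analyzer discovery order used by `_get_rust_analyzer_path` above can be exercised standalone. This is a minimal sketch with no Serena imports, mirroring the same strategy: ask rustup first, then fall back to the system `PATH`.

```python
import shutil
import subprocess

def find_rust_analyzer() -> str | None:
    """Locate rust-analyzer the same way RustAnalyzer does: rustup first, then PATH."""
    try:
        result = subprocess.run(
            ["rustup", "which", "rust-analyzer"],
            capture_output=True, text=True, check=False,
        )
        if result.returncode == 0:
            return result.stdout.strip()
    except FileNotFoundError:
        pass  # rustup itself is not installed; fall back to a plain PATH lookup
    return shutil.which("rust-analyzer")

if __name__ == "__main__":
    path = find_rust_analyzer()
    print(path or "rust-analyzer not found; try `rustup component add rust-analyzer`")
```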
Page 8/12