This is page 6 of 14. Use http://codebase.md/oraios/serena?lines=true&page={x} to view the full context.
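
If the full export is needed locally, the paginated URL above can be scripted. A minimal sketch, assuming a plain HTTP GET on that URL returns the raw markdown for each page; the `requests` dependency and the output filename are illustrative, not part of the export itself:

```python
import requests

BASE_URL = "http://codebase.md/oraios/serena"
TOTAL_PAGES = 14  # page count as stated above; adjust if the export is regenerated

# Fetch every page of the export and concatenate them into a single file.
with open("serena_codebase.md", "w", encoding="utf-8") as out:
    for page in range(1, TOTAL_PAGES + 1):
        resp = requests.get(BASE_URL, params={"lines": "true", "page": page})
        resp.raise_for_status()
        out.write(resp.text)
        out.write("\n")
```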

# Directory Structure

```
├── .devcontainer
│   └── devcontainer.json
├── .dockerignore
├── .env.example
├── .github
│   ├── FUNDING.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── config.yml
│   │   ├── feature_request.md
│   │   └── issue--bug--performance-problem--question-.md
│   └── workflows
│       ├── codespell.yml
│       ├── docker.yml
│       ├── junie.yml
│       ├── lint_and_docs.yaml
│       ├── publish.yml
│       └── pytest.yml
├── .gitignore
├── .serena
│   ├── memories
│   │   ├── adding_new_language_support_guide.md
│   │   ├── serena_core_concepts_and_architecture.md
│   │   ├── serena_repository_structure.md
│   │   └── suggested_commands.md
│   └── project.yml
├── .vscode
│   └── settings.json
├── CHANGELOG.md
├── CLAUDE.md
├── compose.yaml
├── CONTRIBUTING.md
├── docker_build_and_run.sh
├── DOCKER.md
├── Dockerfile
├── docs
│   ├── custom_agent.md
│   └── serena_on_chatgpt.md
├── flake.lock
├── flake.nix
├── lessons_learned.md
├── LICENSE
├── llms-install.md
├── public
│   └── .gitignore
├── pyproject.toml
├── README.md
├── resources
│   ├── serena-icons.cdr
│   ├── serena-logo-dark-mode.svg
│   ├── serena-logo.cdr
│   ├── serena-logo.svg
│   └── vscode_sponsor_logo.png
├── roadmap.md
├── scripts
│   ├── agno_agent.py
│   ├── demo_run_tools.py
│   ├── gen_prompt_factory.py
│   ├── mcp_server.py
│   ├── print_mode_context_options.py
│   └── print_tool_overview.py
├── src
│   ├── interprompt
│   │   ├── __init__.py
│   │   ├── .syncCommitId.remote
│   │   ├── .syncCommitId.this
│   │   ├── jinja_template.py
│   │   ├── multilang_prompt.py
│   │   ├── prompt_factory.py
│   │   └── util
│   │       ├── __init__.py
│   │       └── class_decorators.py
│   ├── README.md
│   ├── serena
│   │   ├── __init__.py
│   │   ├── agent.py
│   │   ├── agno.py
│   │   ├── analytics.py
│   │   ├── cli.py
│   │   ├── code_editor.py
│   │   ├── config
│   │   │   ├── __init__.py
│   │   │   ├── context_mode.py
│   │   │   └── serena_config.py
│   │   ├── constants.py
│   │   ├── dashboard.py
│   │   ├── generated
│   │   │   └── generated_prompt_factory.py
│   │   ├── gui_log_viewer.py
│   │   ├── mcp.py
│   │   ├── project.py
│   │   ├── prompt_factory.py
│   │   ├── resources
│   │   │   ├── config
│   │   │   │   ├── contexts
│   │   │   │   │   ├── agent.yml
│   │   │   │   │   ├── chatgpt.yml
│   │   │   │   │   ├── codex.yml
│   │   │   │   │   ├── context.template.yml
│   │   │   │   │   ├── desktop-app.yml
│   │   │   │   │   ├── ide-assistant.yml
│   │   │   │   │   └── oaicompat-agent.yml
│   │   │   │   ├── internal_modes
│   │   │   │   │   └── jetbrains.yml
│   │   │   │   ├── modes
│   │   │   │   │   ├── editing.yml
│   │   │   │   │   ├── interactive.yml
│   │   │   │   │   ├── mode.template.yml
│   │   │   │   │   ├── no-onboarding.yml
│   │   │   │   │   ├── onboarding.yml
│   │   │   │   │   ├── one-shot.yml
│   │   │   │   │   └── planning.yml
│   │   │   │   └── prompt_templates
│   │   │   │       ├── simple_tool_outputs.yml
│   │   │   │       └── system_prompt.yml
│   │   │   ├── dashboard
│   │   │   │   ├── dashboard.js
│   │   │   │   ├── index.html
│   │   │   │   ├── jquery.min.js
│   │   │   │   ├── serena-icon-16.png
│   │   │   │   ├── serena-icon-32.png
│   │   │   │   ├── serena-icon-48.png
│   │   │   │   ├── serena-logs-dark-mode.png
│   │   │   │   └── serena-logs.png
│   │   │   ├── project.template.yml
│   │   │   └── serena_config.template.yml
│   │   ├── symbol.py
│   │   ├── text_utils.py
│   │   ├── tools
│   │   │   ├── __init__.py
│   │   │   ├── cmd_tools.py
│   │   │   ├── config_tools.py
│   │   │   ├── file_tools.py
│   │   │   ├── jetbrains_plugin_client.py
│   │   │   ├── jetbrains_tools.py
│   │   │   ├── memory_tools.py
│   │   │   ├── symbol_tools.py
│   │   │   ├── tools_base.py
│   │   │   └── workflow_tools.py
│   │   └── util
│   │       ├── class_decorators.py
│   │       ├── exception.py
│   │       ├── file_system.py
│   │       ├── general.py
│   │       ├── git.py
│   │       ├── inspection.py
│   │       ├── logging.py
│   │       ├── shell.py
│   │       └── thread.py
│   └── solidlsp
│       ├── __init__.py
│       ├── .gitignore
│       ├── language_servers
│       │   ├── al_language_server.py
│       │   ├── bash_language_server.py
│       │   ├── clangd_language_server.py
│       │   ├── clojure_lsp.py
│       │   ├── common.py
│       │   ├── csharp_language_server.py
│       │   ├── dart_language_server.py
│       │   ├── eclipse_jdtls.py
│       │   ├── elixir_tools
│       │   │   ├── __init__.py
│       │   │   ├── elixir_tools.py
│       │   │   └── README.md
│       │   ├── elm_language_server.py
│       │   ├── erlang_language_server.py
│       │   ├── gopls.py
│       │   ├── intelephense.py
│       │   ├── jedi_server.py
│       │   ├── kotlin_language_server.py
│       │   ├── lua_ls.py
│       │   ├── marksman.py
│       │   ├── nixd_ls.py
│       │   ├── omnisharp
│       │   │   ├── initialize_params.json
│       │   │   ├── runtime_dependencies.json
│       │   │   └── workspace_did_change_configuration.json
│       │   ├── omnisharp.py
│       │   ├── perl_language_server.py
│       │   ├── pyright_server.py
│       │   ├── r_language_server.py
│       │   ├── regal_server.py
│       │   ├── ruby_lsp.py
│       │   ├── rust_analyzer.py
│       │   ├── solargraph.py
│       │   ├── sourcekit_lsp.py
│       │   ├── terraform_ls.py
│       │   ├── typescript_language_server.py
│       │   ├── vts_language_server.py
│       │   └── zls.py
│       ├── ls_config.py
│       ├── ls_exceptions.py
│       ├── ls_handler.py
│       ├── ls_logger.py
│       ├── ls_request.py
│       ├── ls_types.py
│       ├── ls_utils.py
│       ├── ls.py
│       ├── lsp_protocol_handler
│       │   ├── lsp_constants.py
│       │   ├── lsp_requests.py
│       │   ├── lsp_types.py
│       │   └── server.py
│       ├── settings.py
│       └── util
│           ├── subprocess_util.py
│           └── zip.py
├── test
│   ├── __init__.py
│   ├── conftest.py
│   ├── resources
│   │   └── repos
│   │       ├── al
│   │       │   └── test_repo
│   │       │       ├── app.json
│   │       │       └── src
│   │       │           ├── Codeunits
│   │       │           │   ├── CustomerMgt.Codeunit.al
│   │       │           │   └── PaymentProcessorImpl.Codeunit.al
│   │       │           ├── Enums
│   │       │           │   └── CustomerType.Enum.al
│   │       │           ├── Interfaces
│   │       │           │   └── IPaymentProcessor.Interface.al
│   │       │           ├── Pages
│   │       │           │   ├── CustomerCard.Page.al
│   │       │           │   └── CustomerList.Page.al
│   │       │           ├── TableExtensions
│   │       │           │   └── Item.TableExt.al
│   │       │           └── Tables
│   │       │               └── Customer.Table.al
│   │       ├── bash
│   │       │   └── test_repo
│   │       │       ├── config.sh
│   │       │       ├── main.sh
│   │       │       └── utils.sh
│   │       ├── clojure
│   │       │   └── test_repo
│   │       │       ├── deps.edn
│   │       │       └── src
│   │       │           └── test_app
│   │       │               ├── core.clj
│   │       │               └── utils.clj
│   │       ├── csharp
│   │       │   └── test_repo
│   │       │       ├── .gitignore
│   │       │       ├── Models
│   │       │       │   └── Person.cs
│   │       │       ├── Program.cs
│   │       │       ├── serena.sln
│   │       │       └── TestProject.csproj
│   │       ├── dart
│   │       │   └── test_repo
│   │       │       ├── .gitignore
│   │       │       ├── lib
│   │       │       │   ├── helper.dart
│   │       │       │   ├── main.dart
│   │       │       │   └── models.dart
│   │       │       └── pubspec.yaml
│   │       ├── elixir
│   │       │   └── test_repo
│   │       │       ├── .gitignore
│   │       │       ├── lib
│   │       │       │   ├── examples.ex
│   │       │       │   ├── ignored_dir
│   │       │       │   │   └── ignored_module.ex
│   │       │       │   ├── models.ex
│   │       │       │   ├── services.ex
│   │       │       │   ├── test_repo.ex
│   │       │       │   └── utils.ex
│   │       │       ├── mix.exs
│   │       │       ├── mix.lock
│   │       │       ├── scripts
│   │       │       │   └── build_script.ex
│   │       │       └── test
│   │       │           ├── models_test.exs
│   │       │           └── test_repo_test.exs
│   │       ├── elm
│   │       │   └── test_repo
│   │       │       ├── elm.json
│   │       │       ├── Main.elm
│   │       │       └── Utils.elm
│   │       ├── erlang
│   │       │   └── test_repo
│   │       │       ├── hello.erl
│   │       │       ├── ignored_dir
│   │       │       │   └── ignored_module.erl
│   │       │       ├── include
│   │       │       │   ├── records.hrl
│   │       │       │   └── types.hrl
│   │       │       ├── math_utils.erl
│   │       │       ├── rebar.config
│   │       │       ├── src
│   │       │       │   ├── app.erl
│   │       │       │   ├── models.erl
│   │       │       │   ├── services.erl
│   │       │       │   └── utils.erl
│   │       │       └── test
│   │       │           ├── models_tests.erl
│   │       │           └── utils_tests.erl
│   │       ├── go
│   │       │   └── test_repo
│   │       │       └── main.go
│   │       ├── java
│   │       │   └── test_repo
│   │       │       ├── pom.xml
│   │       │       └── src
│   │       │           └── main
│   │       │               └── java
│   │       │                   └── test_repo
│   │       │                       ├── Main.java
│   │       │                       ├── Model.java
│   │       │                       ├── ModelUser.java
│   │       │                       └── Utils.java
│   │       ├── kotlin
│   │       │   └── test_repo
│   │       │       ├── .gitignore
│   │       │       ├── build.gradle.kts
│   │       │       └── src
│   │       │           └── main
│   │       │               └── kotlin
│   │       │                   └── test_repo
│   │       │                       ├── Main.kt
│   │       │                       ├── Model.kt
│   │       │                       ├── ModelUser.kt
│   │       │                       └── Utils.kt
│   │       ├── lua
│   │       │   └── test_repo
│   │       │       ├── .gitignore
│   │       │       ├── main.lua
│   │       │       ├── src
│   │       │       │   ├── calculator.lua
│   │       │       │   └── utils.lua
│   │       │       └── tests
│   │       │           └── test_calculator.lua
│   │       ├── markdown
│   │       │   └── test_repo
│   │       │       ├── api.md
│   │       │       ├── CONTRIBUTING.md
│   │       │       ├── guide.md
│   │       │       └── README.md
│   │       ├── nix
│   │       │   └── test_repo
│   │       │       ├── .gitignore
│   │       │       ├── default.nix
│   │       │       ├── flake.nix
│   │       │       ├── lib
│   │       │       │   └── utils.nix
│   │       │       ├── modules
│   │       │       │   └── example.nix
│   │       │       └── scripts
│   │       │           └── hello.sh
│   │       ├── perl
│   │       │   └── test_repo
│   │       │       ├── helper.pl
│   │       │       └── main.pl
│   │       ├── php
│   │       │   └── test_repo
│   │       │       ├── helper.php
│   │       │       ├── index.php
│   │       │       └── simple_var.php
│   │       ├── python
│   │       │   └── test_repo
│   │       │       ├── .gitignore
│   │       │       ├── custom_test
│   │       │       │   ├── __init__.py
│   │       │       │   └── advanced_features.py
│   │       │       ├── examples
│   │       │       │   ├── __init__.py
│   │       │       │   └── user_management.py
│   │       │       ├── ignore_this_dir_with_postfix
│   │       │       │   └── ignored_module.py
│   │       │       ├── scripts
│   │       │       │   ├── __init__.py
│   │       │       │   └── run_app.py
│   │       │       └── test_repo
│   │       │           ├── __init__.py
│   │       │           ├── complex_types.py
│   │       │           ├── models.py
│   │       │           ├── name_collisions.py
│   │       │           ├── nested_base.py
│   │       │           ├── nested.py
│   │       │           ├── overloaded.py
│   │       │           ├── services.py
│   │       │           ├── utils.py
│   │       │           └── variables.py
│   │       ├── r
│   │       │   └── test_repo
│   │       │       ├── .Rbuildignore
│   │       │       ├── DESCRIPTION
│   │       │       ├── examples
│   │       │       │   └── analysis.R
│   │       │       ├── NAMESPACE
│   │       │       └── R
│   │       │           ├── models.R
│   │       │           └── utils.R
│   │       ├── rego
│   │       │   └── test_repo
│   │       │       ├── policies
│   │       │       │   ├── authz.rego
│   │       │       │   └── validation.rego
│   │       │       └── utils
│   │       │           └── helpers.rego
│   │       ├── ruby
│   │       │   └── test_repo
│   │       │       ├── .solargraph.yml
│   │       │       ├── examples
│   │       │       │   └── user_management.rb
│   │       │       ├── lib.rb
│   │       │       ├── main.rb
│   │       │       ├── models.rb
│   │       │       ├── nested.rb
│   │       │       ├── services.rb
│   │       │       └── variables.rb
│   │       ├── rust
│   │       │   ├── test_repo
│   │       │   │   ├── Cargo.lock
│   │       │   │   ├── Cargo.toml
│   │       │   │   └── src
│   │       │   │       ├── lib.rs
│   │       │   │       └── main.rs
│   │       │   └── test_repo_2024
│   │       │       ├── Cargo.lock
│   │       │       ├── Cargo.toml
│   │       │       └── src
│   │       │           ├── lib.rs
│   │       │           └── main.rs
│   │       ├── swift
│   │       │   └── test_repo
│   │       │       ├── Package.swift
│   │       │       └── src
│   │       │           ├── main.swift
│   │       │           └── utils.swift
│   │       ├── terraform
│   │       │   └── test_repo
│   │       │       ├── data.tf
│   │       │       ├── main.tf
│   │       │       ├── outputs.tf
│   │       │       └── variables.tf
│   │       ├── typescript
│   │       │   └── test_repo
│   │       │       ├── .serena
│   │       │       │   └── project.yml
│   │       │       ├── index.ts
│   │       │       ├── tsconfig.json
│   │       │       └── use_helper.ts
│   │       └── zig
│   │           └── test_repo
│   │               ├── .gitignore
│   │               ├── build.zig
│   │               ├── src
│   │               │   ├── calculator.zig
│   │               │   ├── main.zig
│   │               │   └── math_utils.zig
│   │               └── zls.json
│   ├── serena
│   │   ├── __init__.py
│   │   ├── __snapshots__
│   │   │   └── test_symbol_editing.ambr
│   │   ├── config
│   │   │   ├── __init__.py
│   │   │   └── test_serena_config.py
│   │   ├── test_edit_marker.py
│   │   ├── test_mcp.py
│   │   ├── test_serena_agent.py
│   │   ├── test_symbol_editing.py
│   │   ├── test_symbol.py
│   │   ├── test_text_utils.py
│   │   ├── test_tool_parameter_types.py
│   │   └── util
│   │       ├── test_exception.py
│   │       └── test_file_system.py
│   └── solidlsp
│       ├── al
│       │   └── test_al_basic.py
│       ├── bash
│       │   ├── __init__.py
│       │   └── test_bash_basic.py
│       ├── clojure
│       │   ├── __init__.py
│       │   └── test_clojure_basic.py
│       ├── csharp
│       │   └── test_csharp_basic.py
│       ├── dart
│       │   ├── __init__.py
│       │   └── test_dart_basic.py
│       ├── elixir
│       │   ├── __init__.py
│       │   ├── conftest.py
│       │   ├── test_elixir_basic.py
│       │   ├── test_elixir_ignored_dirs.py
│       │   ├── test_elixir_integration.py
│       │   └── test_elixir_symbol_retrieval.py
│       ├── elm
│       │   └── test_elm_basic.py
│       ├── erlang
│       │   ├── __init__.py
│       │   ├── conftest.py
│       │   ├── test_erlang_basic.py
│       │   ├── test_erlang_ignored_dirs.py
│       │   └── test_erlang_symbol_retrieval.py
│       ├── go
│       │   └── test_go_basic.py
│       ├── java
│       │   └── test_java_basic.py
│       ├── kotlin
│       │   └── test_kotlin_basic.py
│       ├── lua
│       │   └── test_lua_basic.py
│       ├── markdown
│       │   ├── __init__.py
│       │   └── test_markdown_basic.py
│       ├── nix
│       │   └── test_nix_basic.py
│       ├── perl
│       │   └── test_perl_basic.py
│       ├── php
│       │   └── test_php_basic.py
│       ├── python
│       │   ├── test_python_basic.py
│       │   ├── test_retrieval_with_ignored_dirs.py
│       │   └── test_symbol_retrieval.py
│       ├── r
│       │   ├── __init__.py
│       │   └── test_r_basic.py
│       ├── rego
│       │   └── test_rego_basic.py
│       ├── ruby
│       │   ├── test_ruby_basic.py
│       │   └── test_ruby_symbol_retrieval.py
│       ├── rust
│       │   ├── test_rust_2024_edition.py
│       │   └── test_rust_basic.py
│       ├── swift
│       │   └── test_swift_basic.py
│       ├── terraform
│       │   └── test_terraform_basic.py
│       ├── typescript
│       │   └── test_typescript_basic.py
│       ├── util
│       │   └── test_zip.py
│       └── zig
│           └── test_zig_basic.py
└── uv.lock
```

# Files

--------------------------------------------------------------------------------
/test/solidlsp/swift/test_swift_basic.py:
--------------------------------------------------------------------------------

```python
  1 | """
  2 | Basic integration tests for the Swift language server functionality.
  3 | 
  4 | These tests validate the functionality of the language server APIs
  5 | like request_references using the Swift test repository.
  6 | """
  7 | 
  8 | import os
  9 | import platform
 10 | 
 11 | import pytest
 12 | 
 13 | from serena.project import Project
 14 | from serena.text_utils import LineType
 15 | from solidlsp import SolidLanguageServer
 16 | from solidlsp.ls_config import Language
 17 | 
 18 | # Skip Swift tests on Windows due to complex GitHub Actions configuration
 19 | WINDOWS_SKIP = platform.system() == "Windows"
 20 | WINDOWS_SKIP_REASON = "GitHub Actions configuration for Swift on Windows is complex, skipping for now."
 21 | 
 22 | pytestmark = [pytest.mark.swift, pytest.mark.skipif(WINDOWS_SKIP, reason=WINDOWS_SKIP_REASON)]
 23 | 
 24 | 
 25 | class TestSwiftLanguageServerBasics:
 26 |     """Test basic functionality of the Swift language server."""
 27 | 
 28 |     @pytest.mark.parametrize("language_server", [Language.SWIFT], indirect=True)
 29 |     def test_goto_definition_calculator_class(self, language_server: SolidLanguageServer) -> None:
 30 |         """Test goto_definition on Calculator class usage."""
 31 |         file_path = os.path.join("src", "main.swift")
 32 | 
 33 |         # Find the Calculator usage at line 5: let calculator = Calculator()
 34 |         # Position should be at the "Calculator()" call
 35 |         definitions = language_server.request_definition(file_path, 4, 23)  # Position at Calculator() call
 36 |         assert isinstance(definitions, list), "Definitions should be a list"
 37 |         assert len(definitions) > 0, "Should find definition for Calculator class"
 38 | 
 39 |         # Verify the definition points to the Calculator class definition
 40 |         calculator_def = definitions[0]
 41 |         assert calculator_def.get("uri", "").endswith("main.swift"), "Definition should be in main.swift"
 42 | 
 43 |         # The Calculator class is defined starting at line 16
 44 |         start_line = calculator_def.get("range", {}).get("start", {}).get("line")
 45 |         assert start_line == 15, f"Calculator class definition should be at line 16, got {start_line + 1}"
 46 | 
 47 |     @pytest.mark.parametrize("language_server", [Language.SWIFT], indirect=True)
 48 |     def test_goto_definition_user_struct(self, language_server: SolidLanguageServer) -> None:
 49 |         """Test goto_definition on User struct usage."""
 50 |         file_path = os.path.join("src", "main.swift")
 51 | 
 52 |         # Find the User usage at line 9: let user = User(name: "Alice", age: 30)
 53 |         # Position should be at the "User(...)" call
 54 |         definitions = language_server.request_definition(file_path, 8, 18)  # Position at User(...) call
 55 |         assert isinstance(definitions, list), "Definitions should be a list"
 56 |         assert len(definitions) > 0, "Should find definition for User struct"
 57 | 
 58 |         # Verify the definition points to the User struct definition
 59 |         user_def = definitions[0]
 60 |         assert user_def.get("uri", "").endswith("main.swift"), "Definition should be in main.swift"
 61 | 
 62 |         # The User struct is defined starting at line 26
 63 |         start_line = user_def.get("range", {}).get("start", {}).get("line")
 64 |         assert start_line == 25, f"User struct definition should be at line 26, got {start_line + 1}"
 65 | 
 66 |     @pytest.mark.parametrize("language_server", [Language.SWIFT], indirect=True)
 67 |     def test_goto_definition_calculator_method(self, language_server: SolidLanguageServer) -> None:
 68 |         """Test goto_definition on Calculator method usage."""
 69 |         file_path = os.path.join("src", "main.swift")
 70 | 
 71 |         # Find the add method usage at line 6: let result = calculator.add(5, 3)
 72 |         # Position should be at the "add" method call
 73 |         definitions = language_server.request_definition(file_path, 5, 28)  # Position at add method call
 74 |         assert isinstance(definitions, list), "Definitions should be a list"
 75 | 
 76 |         # Verify the definition points to the add method definition
 77 |         add_def = definitions[0]
 78 |         assert add_def.get("uri", "").endswith("main.swift"), "Definition should be in main.swift"
 79 | 
 80 |         # The add method is defined starting at line 17
 81 |         start_line = add_def.get("range", {}).get("start", {}).get("line")
 82 |         assert start_line == 16, f"add method definition should be at line 17, got {start_line + 1}"
 83 | 
 84 |     @pytest.mark.parametrize("language_server", [Language.SWIFT], indirect=True)
 85 |     def test_goto_definition_cross_file(self, language_server: SolidLanguageServer) -> None:
 86 |         """Test goto_definition across files - Utils struct."""
 87 |         utils_file = os.path.join("src", "utils.swift")
 88 | 
 89 |         # First, let's check if Utils is used anywhere (it might not be in this simple test)
 90 |         # We'll test goto_definition on Utils struct itself
 91 |         symbols = language_server.request_document_symbols(utils_file)
 92 |         utils_symbol = next((s for s in symbols[0] if s.get("name") == "Utils"), None)
 93 | 
 94 |         sel_start = utils_symbol["selectionRange"]["start"]
 95 |         definitions = language_server.request_definition(utils_file, sel_start["line"], sel_start["character"])
 96 |         assert isinstance(definitions, list), "Definitions should be a list"
 97 | 
 98 |         # Should find the Utils struct definition itself
 99 |         utils_def = definitions[0]
100 |         assert utils_def.get("uri", "").endswith("utils.swift"), "Definition should be in utils.swift"
101 | 
102 |     @pytest.mark.parametrize("language_server", [Language.SWIFT], indirect=True)
103 |     def test_request_references_calculator_class(self, language_server: SolidLanguageServer) -> None:
104 |         """Test request_references on the Calculator class."""
105 |         # Get references to the Calculator class in main.swift
106 |         file_path = os.path.join("src", "main.swift")
107 |         symbols = language_server.request_document_symbols(file_path)
108 | 
109 |         calculator_symbol = next((s for s in symbols[0] if s.get("name") == "Calculator"), None)
110 | 
111 |         sel_start = calculator_symbol["selectionRange"]["start"]
112 |         references = language_server.request_references(file_path, sel_start["line"], sel_start["character"])
113 |         assert isinstance(references, list), "References should be a list"
114 |         assert len(references) > 0, "Calculator class should be referenced"
115 | 
116 |         # Validate that Calculator is referenced in the main function
117 |         calculator_refs = [ref for ref in references if ref.get("uri", "").endswith("main.swift")]
118 |         assert len(calculator_refs) > 0, "Calculator class should be referenced in main.swift"
119 | 
120 |         # Check that one reference is at line 5 (let calculator = Calculator())
121 |         line_5_refs = [ref for ref in calculator_refs if ref.get("range", {}).get("start", {}).get("line") == 4]
122 |         assert len(line_5_refs) > 0, "Calculator should be referenced at line 5"
123 | 
124 |     @pytest.mark.parametrize("language_server", [Language.SWIFT], indirect=True)
125 |     def test_request_references_user_struct(self, language_server: SolidLanguageServer) -> None:
126 |         """Test request_references on the User struct."""
127 |         # Get references to the User struct in main.swift
128 |         file_path = os.path.join("src", "main.swift")
129 |         symbols = language_server.request_document_symbols(file_path)
130 | 
131 |         user_symbol = next((s for s in symbols[0] if s.get("name") == "User"), None)
132 | 
133 |         sel_start = user_symbol["selectionRange"]["start"]
134 |         references = language_server.request_references(file_path, sel_start["line"], sel_start["character"])
135 |         assert isinstance(references, list), "References should be a list"
136 | 
137 |         # Validate that User is referenced in the main function
138 |         user_refs = [ref for ref in references if ref.get("uri", "").endswith("main.swift")]
139 |         assert len(user_refs) > 0, "User struct should be referenced in main.swift"
140 | 
141 |         # Check that one reference is at line 9 (let user = User(...))
142 |         line_9_refs = [ref for ref in user_refs if ref.get("range", {}).get("start", {}).get("line") == 8]
143 |         assert len(line_9_refs) > 0, "User should be referenced at line 9"
144 | 
145 |     @pytest.mark.parametrize("language_server", [Language.SWIFT], indirect=True)
146 |     def test_request_references_utils_struct(self, language_server: SolidLanguageServer) -> None:
147 |         """Test request_references on the Utils struct."""
148 |         # Get references to the Utils struct in utils.swift
149 |         file_path = os.path.join("src", "utils.swift")
150 |         symbols = language_server.request_document_symbols(file_path)
151 |         utils_symbol = next((s for s in symbols[0] if s.get("name") == "Utils"), None)
152 |         if not utils_symbol or "selectionRange" not in utils_symbol:
153 |             raise AssertionError("Utils symbol or its selectionRange not found")
154 |         sel_start = utils_symbol["selectionRange"]["start"]
155 |         references = language_server.request_references(file_path, sel_start["line"], sel_start["character"])
156 |         assert isinstance(references, list), "References should be a list"
157 |         assert len(references) > 0, "Utils struct should be referenced"
158 | 
159 |         # Validate that Utils is referenced in main.swift
160 |         utils_refs = [ref for ref in references if ref.get("uri", "").endswith("main.swift")]
161 |         assert len(utils_refs) > 0, "Utils struct should be referenced in main.swift"
162 | 
163 |         # Check that one reference is at line 12 (Utils.calculateArea call)
164 |         line_12_refs = [ref for ref in utils_refs if ref.get("range", {}).get("start", {}).get("line") == 11]
165 |         assert len(line_12_refs) > 0, "Utils should be referenced at line 12"
166 | 
167 | 
168 | class TestSwiftProjectBasics:
169 |     @pytest.mark.parametrize("project", [Language.SWIFT], indirect=True)
170 |     def test_retrieve_content_around_line(self, project: Project) -> None:
171 |         """Test retrieve_content_around_line functionality with various scenarios."""
172 |         file_path = os.path.join("src", "main.swift")
173 | 
174 |         # Scenario 1: Find Calculator class definition
175 |         calculator_line = None
176 |         for line_num in range(1, 50):  # Search first 50 lines
177 |             try:
178 |                 line_content = project.retrieve_content_around_line(file_path, line_num)
179 |                 if line_content.lines and "class Calculator" in line_content.lines[0].line_content:
180 |                     calculator_line = line_num
181 |                     break
182 |             except:
183 |                 continue
184 | 
185 |         assert calculator_line is not None, "Calculator class not found"
186 |         line_calc = project.retrieve_content_around_line(file_path, calculator_line)
187 |         assert len(line_calc.lines) == 1
188 |         assert "class Calculator" in line_calc.lines[0].line_content
189 |         assert line_calc.lines[0].line_number == calculator_line
190 |         assert line_calc.lines[0].match_type == LineType.MATCH
191 | 
192 |         # Scenario 2: Context above and below Calculator class
193 |         with_context_around_calculator = project.retrieve_content_around_line(file_path, calculator_line, 2, 2)
194 |         assert len(with_context_around_calculator.lines) == 5
195 |         assert "class Calculator" in with_context_around_calculator.matched_lines[0].line_content
196 |         assert with_context_around_calculator.num_matched_lines == 1
197 | 
198 |         # Scenario 3: Search for struct definitions
199 |         struct_pattern = r"struct\s+\w+"
200 |         matches = project.search_source_files_for_pattern(struct_pattern)
201 |         assert len(matches) > 0, "Should find struct definitions"
202 |         # Should find User struct
203 |         user_matches = [m for m in matches if "User" in str(m)]
204 |         assert len(user_matches) > 0, "Should find User struct"
205 | 
206 |         # Scenario 4: Search for class definitions
207 |         class_pattern = r"class\s+\w+"
208 |         matches = project.search_source_files_for_pattern(class_pattern)
209 |         assert len(matches) > 0, "Should find class definitions"
210 |         # Should find Calculator and Circle classes
211 |         calculator_matches = [m for m in matches if "Calculator" in str(m)]
212 |         circle_matches = [m for m in matches if "Circle" in str(m)]
213 |         assert len(calculator_matches) > 0, "Should find Calculator class"
214 |         assert len(circle_matches) > 0, "Should find Circle class"
215 | 
216 |         # Scenario 5: Search for enum definitions
217 |         enum_pattern = r"enum\s+\w+"
218 |         matches = project.search_source_files_for_pattern(enum_pattern)
219 |         assert len(matches) > 0, "Should find enum definitions"
220 |         # Should find Status enum
221 |         status_matches = [m for m in matches if "Status" in str(m)]
222 |         assert len(status_matches) > 0, "Should find Status enum"
223 | 
```

--------------------------------------------------------------------------------
/test/resources/repos/python/test_repo/custom_test/advanced_features.py:
--------------------------------------------------------------------------------

```python
  1 | """
  2 | Advanced Python features for testing code parsing capabilities.
  3 | 
  4 | This module contains various advanced Python code patterns to ensure
  5 | that the code parser can correctly handle them.
  6 | """
  7 | 
  8 | from __future__ import annotations
  9 | 
 10 | import asyncio
 11 | import os
 12 | from abc import ABC, abstractmethod
 13 | from collections.abc import Callable, Iterable
 14 | from contextlib import contextmanager
 15 | from dataclasses import dataclass, field
 16 | from enum import Enum, Flag, IntEnum, auto
 17 | from functools import wraps
 18 | from typing import (
 19 |     Annotated,
 20 |     Any,
 21 |     ClassVar,
 22 |     Final,
 23 |     Generic,
 24 |     Literal,
 25 |     NewType,
 26 |     Protocol,
 27 |     TypedDict,
 28 |     TypeVar,
 29 | )
 30 | 
 31 | # Type variables for generics
 32 | T = TypeVar("T")
 33 | K = TypeVar("K")
 34 | V = TypeVar("V")
 35 | 
 36 | # Custom types using NewType
 37 | UserId = NewType("UserId", str)
 38 | ItemId = NewType("ItemId", int)
 39 | 
 40 | # Type aliases
 41 | PathLike = str | os.PathLike
 42 | JsonDict = dict[str, Any]
 43 | 
 44 | 
 45 | # TypedDict
 46 | class UserDict(TypedDict):
 47 |     """TypedDict representing user data."""
 48 | 
 49 |     id: str
 50 |     name: str
 51 |     email: str
 52 |     age: int
 53 |     roles: list[str]
 54 | 
 55 | 
 56 | # Enums
 57 | class Status(Enum):
 58 |     """Status enum for process states."""
 59 | 
 60 |     PENDING = "pending"
 61 |     RUNNING = "running"
 62 |     COMPLETED = "completed"
 63 |     FAILED = "failed"
 64 | 
 65 | 
 66 | class Priority(IntEnum):
 67 |     """Priority levels for tasks."""
 68 | 
 69 |     LOW = 0
 70 |     MEDIUM = 5
 71 |     HIGH = 10
 72 |     CRITICAL = auto()
 73 | 
 74 | 
 75 | class Permissions(Flag):
 76 |     """Permission flags for access control."""
 77 | 
 78 |     NONE = 0
 79 |     READ = 1
 80 |     WRITE = 2
 81 |     EXECUTE = 4
 82 |     ALL = READ | WRITE | EXECUTE
 83 | 
 84 | 
 85 | # Abstract class with various method types
 86 | class BaseProcessor(ABC):
 87 |     """Abstract base class for processors with various method patterns."""
 88 | 
 89 |     # Class variable with type annotation
 90 |     DEFAULT_TIMEOUT: ClassVar[int] = 30
 91 |     MAX_RETRIES: Final[int] = 3
 92 | 
 93 |     def __init__(self, name: str, config: dict[str, Any] | None = None):
 94 |         self.name = name
 95 |         self.config = config or {}
 96 |         self._status = Status.PENDING
 97 | 
 98 |     @property
 99 |     def status(self) -> Status:
100 |         """Status property getter."""
101 |         return self._status
102 | 
103 |     @status.setter
104 |     def status(self, value: Status) -> None:
105 |         """Status property setter."""
106 |         if not isinstance(value, Status):
107 |             raise TypeError(f"Expected Status enum, got {type(value)}")
108 |         self._status = value
109 | 
110 |     @abstractmethod
111 |     def process(self, data: Any) -> Any:
112 |         """Process the input data."""
113 | 
114 |     @classmethod
115 |     def create_from_config(cls, config: dict[str, Any]) -> BaseProcessor:
116 |         """Factory classmethod."""
117 |         name = config.get("name", "default")
118 |         return cls(name=name, config=config)
119 | 
120 |     @staticmethod
121 |     def validate_config(config: dict[str, Any]) -> bool:
122 |         """Static method for config validation."""
123 |         return "name" in config
124 | 
125 |     def __str__(self) -> str:
126 |         return f"{self.__class__.__name__}(name={self.name})"
127 | 
128 | 
129 | # Concrete implementation of abstract class
130 | class DataProcessor(BaseProcessor):
131 |     """Concrete implementation of BaseProcessor."""
132 | 
133 |     def __init__(self, name: str, config: dict[str, Any] | None = None, priority: Priority = Priority.MEDIUM):
134 |         super().__init__(name, config)
135 |         self.priority = priority
136 |         self.processed_count = 0
137 | 
138 |     def process(self, data: Any) -> Any:
139 |         """Process the data."""
140 | 
141 |         # Nested function definition
142 |         def transform(item: Any) -> Any:
143 |             # Nested function within a nested function
144 |             def apply_rules(x: Any) -> Any:
145 |                 return x
146 | 
147 |             return apply_rules(item)
148 | 
149 |         # Lambda function
150 |         normalize = lambda x: x / max(x) if hasattr(x, "__iter__") and len(x) > 0 else x  # noqa: F841
151 | 
152 |         result = transform(data)
153 |         self.processed_count += 1
154 |         return result
155 | 
156 |     # Method with complex type hints
157 |     def batch_process(self, items: list[str | dict[str, Any] | tuple[Any, ...]]) -> dict[str, list[Any]]:
158 |         """Process multiple items in a batch."""
159 |         results: dict[str, list[Any]] = {"success": [], "error": []}
160 | 
161 |         for item in items:
162 |             try:
163 |                 result = self.process(item)
164 |                 results["success"].append(result)
165 |             except Exception as e:
166 |                 results["error"].append((item, str(e)))
167 | 
168 |         return results
169 | 
170 |     # Generator method
171 |     def process_stream(self, data_stream: Iterable[T]) -> Iterable[T]:
172 |         """Process a stream of data, yielding results as they're processed."""
173 |         for item in data_stream:
174 |             yield self.process(item)
175 | 
176 |     # Async method
177 |     async def async_process(self, data: Any) -> Any:
178 |         """Process data asynchronously."""
179 |         await asyncio.sleep(0.1)
180 |         return self.process(data)
181 | 
182 |     # Method with function parameters
183 |     def apply_transform(self, data: Any, transform_func: Callable[[Any], Any]) -> Any:
184 |         """Apply a custom transform function to the data."""
185 |         return transform_func(data)
186 | 
187 | 
188 | # Dataclass
189 | @dataclass
190 | class Task:
191 |     """Task dataclass for tracking work items."""
192 | 
193 |     id: str
194 |     name: str
195 |     status: Status = Status.PENDING
196 |     priority: Priority = Priority.MEDIUM
197 |     metadata: dict[str, Any] = field(default_factory=dict)
198 |     dependencies: list[str] = field(default_factory=list)
199 |     created_at: float | None = None
200 | 
201 |     def __post_init__(self):
202 |         if self.created_at is None:
203 |             import time
204 | 
205 |             self.created_at = time.time()
206 | 
207 |     def has_dependencies(self) -> bool:
208 |         """Check if task has dependencies."""
209 |         return len(self.dependencies) > 0
210 | 
211 | 
212 | # Generic class
213 | class Repository(Generic[T]):
214 |     """Generic repository for managing collections of items."""
215 | 
216 |     def __init__(self):
217 |         self.items: dict[str, T] = {}
218 | 
219 |     def add(self, id: str, item: T) -> None:
220 |         """Add an item to the repository."""
221 |         self.items[id] = item
222 | 
223 |     def get(self, id: str) -> T | None:
224 |         """Get an item by id."""
225 |         return self.items.get(id)
226 | 
227 |     def remove(self, id: str) -> bool:
228 |         """Remove an item by id."""
229 |         if id in self.items:
230 |             del self.items[id]
231 |             return True
232 |         return False
233 | 
234 |     def list_all(self) -> list[T]:
235 |         """List all items."""
236 |         return list(self.items.values())
237 | 
238 | 
239 | # Type with Protocol (structural subtyping)
240 | class Serializable(Protocol):
241 |     """Protocol for objects that can be serialized to dict."""
242 | 
243 |     def to_dict(self) -> dict[str, Any]: ...
244 | 
245 | 
246 | #
247 | # Decorator function
248 | def log_execution(func: Callable) -> Callable:
249 |     """Decorator to log function execution."""
250 | 
251 |     @wraps(func)
252 |     def wrapper(*args, **kwargs):
253 |         print(f"Executing {func.__name__}")
254 |         result = func(*args, **kwargs)
255 |         print(f"Finished {func.__name__}")
256 |         return result
257 | 
258 |     return wrapper
259 | 
260 | 
261 | # Context manager
262 | @contextmanager
263 | def transaction_context(name: str = "default"):
264 |     """Context manager for transaction-like operations."""
265 |     print(f"Starting transaction: {name}")
266 |     try:
267 |         yield name
268 |         print(f"Committing transaction: {name}")
269 |     except Exception as e:
270 |         print(f"Rolling back transaction: {name}, error: {e}")
271 |         raise
272 | 
273 | 
274 | # Function with complex parameter annotations
275 | def advanced_search(
276 |     query: str,
277 |     filters: dict[str, Any] | None = None,
278 |     sort_by: str | None = None,
279 |     sort_order: Literal["asc", "desc"] = "asc",
280 |     page: int = 1,
281 |     page_size: int = 10,
282 |     include_metadata: bool = False,
283 | ) -> tuple[list[dict[str, Any]], int]:
284 |     """
285 |     Advanced search function with many parameters.
286 | 
287 |     Returns search results and total count.
288 |     """
289 |     results = []
290 |     total = 0
291 |     # Simulating search functionality
292 |     return results, total
293 | 
294 | 
295 | # Class with nested classes
296 | class OuterClass:
297 |     """Outer class with nested classes and methods."""
298 | 
299 |     class NestedClass:
300 |         """Nested class inside OuterClass."""
301 | 
302 |         def __init__(self, value: Any):
303 |             self.value = value
304 | 
305 |         def get_value(self) -> Any:
306 |             """Get the stored value."""
307 |             return self.value
308 | 
309 |         class DeeplyNestedClass:
310 |             """Deeply nested class for testing parser depth capabilities."""
311 | 
312 |             def deep_method(self) -> str:
313 |                 """Method in deeply nested class."""
314 |                 return "deep"
315 | 
316 |     def __init__(self, name: str):
317 |         self.name = name
318 |         self.nested = self.NestedClass(name)
319 | 
320 |     def get_nested(self) -> NestedClass:
321 |         """Get the nested class instance."""
322 |         return self.nested
323 | 
324 |     # Method with nested functions
325 |     def process_with_nested(self, data: Any) -> Any:
326 |         """Method demonstrating deeply nested function definitions."""
327 | 
328 |         def level1(x: Any) -> Any:
329 |             """First level nested function."""
330 | 
331 |             def level2(y: Any) -> Any:
332 |                 """Second level nested function."""
333 | 
334 |                 def level3(z: Any) -> Any:
335 |                     """Third level nested function."""
336 |                     return z
337 | 
338 |                 return level3(y)
339 | 
340 |             return level2(x)
341 | 
342 |         return level1(data)
343 | 
344 | 
345 | # Metaclass example
346 | class Meta(type):
347 |     """Metaclass example for testing advanced class handling."""
348 | 
349 |     def __new__(mcs, name, bases, attrs):
350 |         print(f"Creating class: {name}")
351 |         return super().__new__(mcs, name, bases, attrs)
352 | 
353 |     def __init__(cls, name, bases, attrs):
354 |         print(f"Initializing class: {name}")
355 |         super().__init__(name, bases, attrs)
356 | 
357 | 
358 | class WithMeta(metaclass=Meta):
359 |     """Class that uses a metaclass."""
360 | 
361 |     def __init__(self, value: str):
362 |         self.value = value
363 | 
364 | 
365 | # Factory function that creates and returns instances
366 | def create_processor(processor_type: str, name: str, config: dict[str, Any] | None = None) -> BaseProcessor:
367 |     """Factory function that creates and returns processor instances."""
368 |     if processor_type == "data":
369 |         return DataProcessor(name, config)
370 |     else:
371 |         raise ValueError(f"Unknown processor type: {processor_type}")
372 | 
373 | 
374 | # Nested decorator example
375 | def with_retry(max_retries: int = 3):
376 |     """Decorator factory that creates a retry decorator."""
377 | 
378 |     def decorator(func):
379 |         @wraps(func)
380 |         def wrapper(*args, **kwargs):
381 |             for attempt in range(max_retries):
382 |                 try:
383 |                     return func(*args, **kwargs)
384 |                 except Exception as e:
385 |                     if attempt == max_retries - 1:
386 |                         raise
387 |                     print(f"Retrying {func.__name__} after error: {e}")
388 |             return None
389 | 
390 |         return wrapper
391 | 
392 |     return decorator
393 | 
394 | 
395 | @with_retry(max_retries=5)
396 | def unreliable_operation(data: Any) -> Any:
397 |     """Function that might fail and uses the retry decorator."""
398 |     import random
399 | 
400 |     if random.random() < 0.5:
401 |         raise RuntimeError("Random failure")
402 |     return data
403 | 
404 | 
405 | # Complex type annotation with Annotated
406 | ValidatedString = Annotated[str, "A string that has been validated"]
407 | PositiveInt = Annotated[int, lambda x: x > 0]
408 | 
409 | 
410 | def process_validated_data(data: ValidatedString, count: PositiveInt) -> list[str]:
411 |     """Process data with Annotated type hints."""
412 |     return [data] * count
413 | 
414 | 
415 | # Example of forward references and string literals in type annotations
416 | class TreeNode:
417 |     """Tree node with forward reference to itself in annotations."""
418 | 
419 |     def __init__(self, value: Any):
420 |         self.value = value
421 |         self.children: list[TreeNode] = []
422 | 
423 |     def add_child(self, child: TreeNode) -> None:
424 |         """Add a child node."""
425 |         self.children.append(child)
426 | 
427 |     def traverse(self) -> list[Any]:
428 |         """Traverse the tree and return all values."""
429 |         result = [self.value]
430 |         for child in self.children:
431 |             result.extend(child.traverse())
432 |         return result
433 | 
434 | 
435 | # Main entry point for demonstration
436 | def main() -> None:
437 |     """Main function demonstrating the use of various features."""
438 |     # Create processor
439 |     processor = DataProcessor("test-processor", {"debug": True})
440 | 
441 |     # Create tasks
442 |     task1 = Task(id="task1", name="First Task")
443 |     task2 = Task(id="task2", name="Second Task", dependencies=["task1"])
444 | 
445 |     # Create repository
446 |     repo: Repository[Task] = Repository()
447 |     repo.add(task1.id, task1)
448 |     repo.add(task2.id, task2)
449 | 
450 |     # Process some data
451 |     data = [1, 2, 3, 4, 5]
452 |     result = processor.process(data)  # noqa: F841
453 | 
454 |     # Use context manager
455 |     with transaction_context("main"):
456 |         # Process more data
457 |         for task in repo.list_all():
458 |             processor.process(task.name)
459 | 
460 |     # Use advanced search
461 |     _results, _total = advanced_search(query="test", filters={"status": Status.PENDING}, sort_by="priority", page=1, include_metadata=True)
462 | 
463 |     # Create a tree
464 |     root = TreeNode("root")
465 |     child1 = TreeNode("child1")
466 |     child2 = TreeNode("child2")
467 |     root.add_child(child1)
468 |     root.add_child(child2)
469 |     child1.add_child(TreeNode("grandchild1"))
470 | 
471 |     print("Done!")
472 | 
473 | 
474 | if __name__ == "__main__":
475 |     main()
476 | 
```

--------------------------------------------------------------------------------
/src/solidlsp/ls_types.py:
--------------------------------------------------------------------------------

```python
  1 | """
  2 | Defines wrapper objects around the types returned by LSP to ensure decoupling between LSP versions and multilspy
  3 | """
  4 | 
  5 | from __future__ import annotations
  6 | 
  7 | from enum import Enum, IntEnum
  8 | from typing import NotRequired, Union
  9 | 
 10 | from typing_extensions import TypedDict
 11 | 
 12 | URI = str
 13 | DocumentUri = str
 14 | Uint = int
 15 | RegExp = str
 16 | 
 17 | 
 18 | class Position(TypedDict):
 19 |     r"""Position in a text document expressed as zero-based line and character
 20 |     offset. Prior to 3.17 the offsets were always based on a UTF-16 string
 21 |     representation. So a string of the form `a𐐀b` the character offset of the
 22 |     character `a` is 0, the character offset of `𐐀` is 1 and the character
 23 |     offset of b is 3 since `𐐀` is represented using two code units in UTF-16.
 24 |     Since 3.17 clients and servers can agree on a different string encoding
 25 |     representation (e.g. UTF-8). The client announces it's supported encoding
 26 |     via the client capability [`general.positionEncodings`](#clientCapabilities).
 27 |     The value is an array of position encodings the client supports, with
 28 |     decreasing preference (e.g. the encoding at index `0` is the most preferred
 29 |     one). To stay backwards compatible the only mandatory encoding is UTF-16
 30 |     represented via the string `utf-16`. The server can pick one of the
 31 |     encodings offered by the client and signals that encoding back to the
 32 |     client via the initialize result's property
 33 |     [`capabilities.positionEncoding`](#serverCapabilities). If the string value
 34 |     `utf-16` is missing from the client's capability `general.positionEncodings`
 35 |     servers can safely assume that the client supports UTF-16. If the server
 36 |     omits the position encoding in its initialize result the encoding defaults
 37 |     to the string value `utf-16`. Implementation considerations: since the
 38 |     conversion from one encoding into another requires the content of the
 39 |     file / line the conversion is best done where the file is read which is
 40 |     usually on the server side.
 41 | 
 42 |     Positions are line end character agnostic. So you can not specify a position
 43 |     that denotes `\r|\n` or `\n|` where `|` represents the character offset.
 44 | 
 45 |     @since 3.17.0 - support for negotiated position encoding.
 46 |     """
 47 | 
 48 |     line: Uint
 49 |     """ Line position in a document (zero-based).
 50 | 
 51 |     If a line number is greater than the number of lines in a document, it defaults back to the number of lines in the document.
 52 |     If a line number is negative, it defaults to 0. """
 53 |     character: Uint
 54 |     """ Character offset on a line in a document (zero-based).
 55 | 
 56 |     The meaning of this offset is determined by the negotiated
 57 |     `PositionEncodingKind`.
 58 | 
 59 |     If the character value is greater than the line length it defaults back to the
 60 |     line length. """
 61 | 
 62 | 
 63 | class Range(TypedDict):
 64 |     """A range in a text document expressed as (zero-based) start and end positions.
 65 | 
 66 |     If you want to specify a range that contains a line including the line ending
 67 |     character(s) then use an end position denoting the start of the next line.
 68 |     For example:
 69 |     ```ts
 70 |     {
 71 |         start: { line: 5, character: 23 }
 72 |         end : { line 6, character : 0 }
 73 |     }
 74 |     ```
 75 |     """
 76 | 
 77 |     start: Position
 78 |     """ The range's start position. """
 79 |     end: Position
 80 |     """ The range's end position. """
 81 | 
 82 | 
 83 | class Location(TypedDict):
 84 |     """Represents a location inside a resource, such as a line
 85 |     inside a text file.
 86 |     """
 87 | 
 88 |     uri: DocumentUri
 89 |     range: Range
 90 |     absolutePath: str
 91 |     relativePath: str | None
 92 | 
 93 | 
 94 | class CompletionItemKind(IntEnum):
 95 |     """The kind of a completion entry."""
 96 | 
 97 |     Text = 1
 98 |     Method = 2
 99 |     Function = 3
100 |     Constructor = 4
101 |     Field = 5
102 |     Variable = 6
103 |     Class = 7
104 |     Interface = 8
105 |     Module = 9
106 |     Property = 10
107 |     Unit = 11
108 |     Value = 12
109 |     Enum = 13
110 |     Keyword = 14
111 |     Snippet = 15
112 |     Color = 16
113 |     File = 17
114 |     Reference = 18
115 |     Folder = 19
116 |     EnumMember = 20
117 |     Constant = 21
118 |     Struct = 22
119 |     Event = 23
120 |     Operator = 24
121 |     TypeParameter = 25
122 | 
123 | 
124 | class CompletionItem(TypedDict):
125 |     """A completion item represents a text snippet that is
126 |     proposed to complete text that is being typed.
127 |     """
128 | 
129 |     completionText: str
130 |     """ The completionText of this completion item.
131 | 
132 |     The completionText property is also by default the text that
133 |     is inserted when selecting this completion."""
134 | 
135 |     kind: CompletionItemKind
136 |     """ The kind of this completion item. Based of the kind
137 |     an icon is chosen by the editor. """
138 | 
139 |     detail: NotRequired[str]
140 |     """ A human-readable string with additional information
141 |     about this item, like type or symbol information. """
142 | 
143 | 
144 | class SymbolKind(IntEnum):
145 |     """A symbol kind."""
146 | 
147 |     File = 1
148 |     Module = 2
149 |     Namespace = 3
150 |     Package = 4
151 |     Class = 5
152 |     Method = 6
153 |     Property = 7
154 |     Field = 8
155 |     Constructor = 9
156 |     Enum = 10
157 |     Interface = 11
158 |     Function = 12
159 |     Variable = 13
160 |     Constant = 14
161 |     String = 15
162 |     Number = 16
163 |     Boolean = 17
164 |     Array = 18
165 |     Object = 19
166 |     Key = 20
167 |     Null = 21
168 |     EnumMember = 22
169 |     Struct = 23
170 |     Event = 24
171 |     Operator = 25
172 |     TypeParameter = 26
173 | 
174 | 
175 | class SymbolTag(IntEnum):
176 |     """Symbol tags are extra annotations that tweak the rendering of a symbol.
177 | 
178 |     @since 3.16
179 |     """
180 | 
181 |     Deprecated = 1
182 |     """ Render a symbol as obsolete, usually using a strike-out. """
183 | 
184 | 
185 | class UnifiedSymbolInformation(TypedDict):
186 |     """Represents information about programming constructs like variables, classes,
187 |     interfaces etc.
188 |     """
189 | 
190 |     deprecated: NotRequired[bool]
191 |     """ Indicates if this symbol is deprecated.
192 | 
193 |     @deprecated Use tags instead """
194 |     location: NotRequired[Location]
195 |     """ The location of this symbol. The location's range is used by a tool
196 |     to reveal the location in the editor. If the symbol is selected in the
197 |     tool the range's start information is used to position the cursor. So
198 |     the range usually spans more than the actual symbol's name and does
199 |     normally include things like visibility modifiers.
200 | 
201 |     The range doesn't have to denote a node range in the sense of an abstract
202 |     syntax tree. It can therefore not be used to re-construct a hierarchy of
203 |     the symbols. """
204 |     name: str
205 |     """ The name of this symbol. """
206 |     kind: SymbolKind
207 |     """ The kind of this symbol. """
208 |     tags: NotRequired[list[SymbolTag]]
209 |     """ Tags for this symbol.
210 | 
211 |     @since 3.16.0 """
212 |     containerName: NotRequired[str]
213 |     """ The name of the symbol containing this symbol. This information is for
214 |     user interface purposes (e.g. to render a qualifier in the user interface
215 |     if necessary). It can't be used to re-infer a hierarchy for the document
216 |     symbols. 
217 |     
218 |     Note: within Serena, the parent attribute was added and should be used instead. 
219 |     Most LS don't provide containerName.
220 |     """
221 | 
222 |     detail: NotRequired[str]
223 |     """ More detail for this symbol, e.g the signature of a function. """
224 | 
225 |     range: NotRequired[Range]
226 |     """ The range enclosing this symbol not including leading/trailing whitespace but everything else
227 |     like comments. This information is typically used to determine if the clients cursor is
228 |     inside the symbol to reveal in the symbol in the UI. """
229 |     selectionRange: NotRequired[Range]
230 |     """ The range that should be selected and revealed when this symbol is being picked, e.g the name of a function.
231 |     Must be contained by the `range`. """
232 | 
233 |     body: NotRequired[str]
234 |     """ The body of the symbol. """
235 | 
236 |     children: list[UnifiedSymbolInformation]
237 |     """ The children of the symbol. 
238 |     Added to be compatible with `lsp_types.DocumentSymbol`, 
239 |     since it is sometimes useful to have the children of the symbol as a user-facing feature."""
240 | 
241 |     parent: NotRequired[UnifiedSymbolInformation | None]
242 |     """The parent of the symbol, if there is any. Added with Serena, not part of the LSP.
243 |     All symbols except the root packages will have a parent.
244 |     """
245 | 
246 | 
247 | class MarkupKind(Enum):
248 |     """Describes the content type that a client supports in various
249 |     result literals like `Hover`, `ParameterInfo` or `CompletionItem`.
250 | 
251 |     Please note that `MarkupKinds` must not start with a `$`. These kinds
252 |     are reserved for internal usage.
253 |     """
254 | 
255 |     PlainText = "plaintext"
256 |     """ Plain text is supported as a content format """
257 |     Markdown = "markdown"
258 |     """ Markdown is supported as a content format """
259 | 
260 | 
261 | class __MarkedString_Type_1(TypedDict):
262 |     language: str
263 |     value: str
264 | 
265 | 
266 | MarkedString = Union[str, "__MarkedString_Type_1"]
267 | """ MarkedString can be used to render human readable text. It is either a markdown string
268 | or a code-block that provides a language and a code snippet. The language identifier
269 | is semantically equal to the optional language identifier in fenced code blocks in GitHub
270 | issues. See https://help.github.com/articles/creating-and-highlighting-code-blocks/#syntax-highlighting
271 | 
272 | The pair of a language and a value is equivalent to the following markdown:
273 | ```${language}
274 | ${value}
275 | ```
276 | 
277 | Note that markdown strings will be sanitized - that means html will be escaped.
278 | @deprecated use MarkupContent instead. """
279 | 
280 | 
281 | class MarkupContent(TypedDict):
282 |     r"""A `MarkupContent` literal represents a string value which content is interpreted base on its
283 |     kind flag. Currently the protocol supports `plaintext` and `markdown` as markup kinds.
284 | 
285 |     If the kind is `markdown` then the value can contain fenced code blocks like in GitHub issues.
286 |     See https://help.github.com/articles/creating-and-highlighting-code-blocks/#syntax-highlighting
287 | 
288 |     Here is an example how such a string can be constructed using JavaScript / TypeScript:
289 |     ```ts
290 |     let markdown: MarkdownContent = {
291 |      kind: MarkupKind.Markdown,
292 |      value: [
293 |        '# Header',
294 |        'Some text',
295 |        '```typescript',
296 |        'someCode();',
297 |        '```'
298 |      ].join('\n')
299 |     };
300 |     ```
301 | 
302 |     *Please Note* that clients might sanitize the returned markdown. A client could decide to
303 |     remove HTML from the markdown to avoid script execution.
304 |     """
305 | 
306 |     kind: MarkupKind
307 |     """ The type of the Markup """
308 |     value: str
309 |     """ The content itself """
310 | 
311 | 
312 | class Hover(TypedDict):
313 |     """The result of a hover request."""
314 | 
315 |     contents: MarkupContent | MarkedString | list[MarkedString]
316 |     """ The hover's content """
317 |     range: NotRequired[Range]
318 |     """ An optional range inside the text document that is used to
319 |     visualize the hover, e.g. by changing the background color. """
320 | 
321 | 
322 | class DiagnosticsSeverity(IntEnum):
323 |     ERROR = 1
324 |     WARNING = 2
325 |     INFORMATION = 3
326 |     HINT = 4
327 | 
328 | 
329 | class TextDocumentIdentifier(TypedDict):
330 |     """A literal to identify a text document in the client."""
331 | 
332 |     uri: DocumentUri
333 |     """ The text document's uri. """
334 | 
335 | 
336 | class TextEdit(TypedDict):
337 |     """A textual edit applicable to a text document."""
338 | 
339 |     range: Range
340 |     """ The range of the text document to be manipulated. """
341 |     newText: str
342 |     """ The string to be inserted. For delete operations use an empty string. """
343 | 
344 | 
345 | class WorkspaceEdit(TypedDict):
346 |     """A workspace edit represents changes to many resources managed in the workspace."""
347 | 
348 |     changes: NotRequired[dict[DocumentUri, list[TextEdit]]]
349 |     """ Holds changes to existing resources. """
350 |     documentChanges: NotRequired[list]
351 |     """ Document changes array for versioned edits. """
352 | 
353 | 
354 | class RenameParams(TypedDict):
355 |     """The parameters of a RenameRequest."""
356 | 
357 |     textDocument: TextDocumentIdentifier
358 |     """ The document to rename. """
359 |     position: Position
360 |     """ The position at which this request was sent. """
361 |     newName: str
362 |     """ The new name of the symbol. """
363 | 
364 | 
365 | class Diagnostic(TypedDict):
366 |     """Diagnostic information for a text document."""
367 | 
368 |     uri: DocumentUri
369 |     """ The URI of the text document to which the diagnostics apply. """
370 |     range: Range
371 |     """ The range of the text document to which the diagnostics apply. """
372 |     severity: NotRequired[DiagnosticsSeverity]
373 |     """ The severity of the diagnostic. """
374 |     message: str
375 |     """ The diagnostic message. """
376 |     code: str
377 |     """ The code of the diagnostic. """
378 |     source: NotRequired[str]
379 |     """ The source of the diagnostic, e.g. the name of the tool that produced it. """
380 | 
381 | 
382 | def extract_text_edits(workspace_edit: WorkspaceEdit) -> dict[str, list[TextEdit]]:
383 |     """
384 |     Extracts the text changes from a WorkspaceEdit object.
385 | 
386 |     Args:
387 |         workspace_edit (WorkspaceEdit): The WorkspaceEdit object to extract text changes from.
388 | 
389 |     Returns:
390 |         dict[str, list[TextEdit]]: A dictionary mapping document URIs to lists of TextEdit objects.
391 | 
392 |     """
393 |     if "changes" in workspace_edit:
394 |         return workspace_edit["changes"]
395 |     elif "documentChanges" in workspace_edit:
396 |         changes = {}
397 |         for change in workspace_edit["documentChanges"]:
398 |             if "textDocument" in change and "edits" in change:
399 |                 uri = change["textDocument"]["uri"]
400 |                 edits = change["edits"]
401 |                 changes[uri] = edits
402 |         return changes
403 |     else:
404 |         raise ValueError(f"Invalid WorkspaceEdit (expected 'changes' or 'documentChanges' key):\n{workspace_edit}")
405 | 
```
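
As a usage sketch for `extract_text_edits` above: a `WorkspaceEdit` may carry its edits either under `changes` or under `documentChanges`, and the function normalizes both forms into a mapping from document URI to `TextEdit` lists. The import path, URI and edit values below are assumptions for illustration only.

```python
# Hedged usage sketch; assumes the module above is importable as solidlsp.ls_types.
from solidlsp.ls_types import extract_text_edits

# A WorkspaceEdit in the "documentChanges" form (hypothetical URI and edit).
workspace_edit = {
    "documentChanges": [
        {
            "textDocument": {"uri": "file:///tmp/example.py", "version": 1},
            "edits": [
                {
                    "range": {
                        "start": {"line": 0, "character": 0},
                        "end": {"line": 0, "character": 3},
                    },
                    "newText": "bar",
                }
            ],
        }
    ]
}

edits_by_uri = extract_text_edits(workspace_edit)
# Both the "changes" and the "documentChanges" form yield the same shape:
assert edits_by_uri["file:///tmp/example.py"][0]["newText"] == "bar"
```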

--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------

```markdown
  1 | # latest
  2 | Status of the `main` branch. Changes prior to the next official version change will appear here.
  3 | 
  4 | * Language support:
  5 |   * **Add support for Elm** via @elm-tooling/elm-language-server (automatically downloads if not installed; requires Elm compiler)
  6 |   * **Add support for Perl** via Perl::LanguageServer with LSP integration for .pl, .pm, and .t files
  7 |   * **Add support for AL (Application Language)** for Microsoft Dynamics 365 Business Central development. Requires VS Code AL extension (ms-dynamics-smb.al).
  8 |   * **Add support for R** via the R languageserver package with LSP integration, performance optimizations, and fallback symbol extraction
  9 |   * **Add support for Zig** via ZLS (cross-file references may not fully work on Windows)
 10 |   * **Add support for Lua** via lua-language-server
 11 |   * **Add support for Nix**: requires nixd installation (Windows not supported)
 12 |   * **Dart now officially supported**: Dart was always working, but now tests were added, and it is promoted to "officially supported"
 13 |   * **Rust now uses already installed rustup**: The rust-analyzer is no longer bundled with Serena. Instead, it uses the rust-analyzer from your Rust toolchain managed by rustup. This ensures compatibility with your Rust version and eliminates outdated bundled binaries.
 14 |   * **Kotlin now officially supported**: We now use the official Kotlin LS, the tests pass and performance is good, even though the LS is in an early development stage.
 15 |   * **Add support for Erlang** experimental, may hang or be slow, uses the recently archived [erlang_ls](https://github.com/erlang-ls/erlang_ls)
 16 |   * **Ruby dual language server support**: Added ruby-lsp as the modern primary Ruby language server. Solargraph remains available as an experimental legacy option. ruby-lsp supports both .rb and .erb files, while Solargraph supports .rb files only.
 17 | 
 18 | * Client support:
 19 |   * New mode `oaicompat-agent` and extensions to the OpenAI tool compatibility, **permitting Serena to work with llama.cpp**
 20 | 
 21 | * General:
 22 |   * Various fixes related to indexing, special paths and determination of ignored paths
 23 |   * Decreased `TOOL_DEFAULT_MAX_ANSWER_LENGTH` to be in accordance with (below) typical max-tokens configurations
 24 |   * Allow passing language server specific settings through `ls_specific_settings` field (in `serena_config.yml`)
 25 | 
 26 | # 0.1.4
 27 | 
 28 | ## Summary
 29 | 
 30 | This is likely the last release before the stable version 1.0.0, which will come together with the JetBrains IDE extension.
 31 | We release it for users who install Serena from a tag, since the last tag cannot be installed due to a breaking change in the mcp dependency (see #381).
 32 | 
 33 | Since the last release, several new languages were supported, and the Serena CLI and configurability were significantly extended.
 34 | We thank all external contributors who made a lot of the improvements possible!
 35 | 
 36 | * General:
 37 |   * **Initial instructions no longer need to be loaded by the user**
 38 |   * Significantly extended CLI
 39 |   * Removed `replace_regex` tool from `ide-assistant` and `codex` contexts.
 40 |     The current string replacement tool in Claude Code seems to be sufficiently efficient and is better
 41 |     integrated with the IDE. Users who want to enable `replace_regex` can do so by customizing the context.
 42 | 
 43 | * Configuration:
 44 |   * Simplify customization of modes and contexts, including CLI support.
 45 |   * Possibility to customize the system prompt and outputs of simple tools, including CLI support.
 46 |   * Possibility to override tool descriptions through the context YAML.
 47 |   * Prompt templates are now automatically adapted to the enabled tools.
 48 |   * Several tools are now excluded by default, need to be included explicitly.
 49 |   * New context for ChatGPT
 50 | 
 51 | * Language servers:
 52 |   * Reliably detect language server termination and propagate the respective error all the way
 53 |     back to the tool application, where an unexpected termination is handled by restarting the language server
 54 |     and subsequently retrying the tool application.
 55 |   * **Add support for Swift**
 56 |   * **Add support for Bash**
 57 |   * Enhance Solargraph (Ruby) integration
 58 |     * Automatic Rails project detection via config/application.rb, Rakefile, and Gemfile analysis
 59 |     * Ruby/Rails-specific exclude patterns for improved indexing performance (vendor/, .bundle/, tmp/, log/, coverage/)
 60 |     * Enhanced error handling with detailed diagnostics and Ruby manager-specific installation instructions (rbenv, RVM, asdf)
 61 |     * Improved LSP capability negotiation and analysis completion detection
 62 |     * Better Bundler and Solargraph installation error messages with clear resolution steps
 63 | 
 64 | Fixes:
 65 | * Ignore `.git` in check for ignored paths and improve performance of `find_all_non_ignored_files`
 66 | * Fix language server startup issues on Windows when using Claude Code (which was due to
 67 |   default shell reconfiguration imposed by Claude Code)
 68 | * Additional wait for initialization in C# language server before requesting references, allowing cross-file references to be found.
 69 | 
 70 | # 0.1.3
 71 | 
 72 | ## Summary
 73 | 
 74 | This is the first release of Serena to pypi. Since the last release, we have greatly improved 
 75 | stability and performance, as well as extended functionality, improved editing tools and included support for several new languages. 
 76 | 
 77 | * **Reduce the use of asyncio to a minimum**, improving stability and reducing the need for workarounds
 78 |    * Switch to newly developed fully synchronous LSP library `solidlsp` (derived from `multilspy`),
 79 |      removing our fork of `multilspy` (src/multilspy)
 80 |    * Switch from fastapi (which uses asyncio) to Flask in the Serena dashboard
 81 |    * The MCP server is the only asyncio-based component now, which resolves cross-component loop contamination,
 82 |      such that process isolation is no longer required.
 83 |      Neither are non-graceful shutdowns on Windows.
 84 | * **Improved editing tools**: The editing logic was simplified and improved, making it more robust.
 85 |    * The "minimal indentation" logic was removed, because LLMs did not understand it.
 86 |    * The logic for the insertion of empty lines was improved (mostly controlled by the LLM now)
 87 | * Add a task queue for the agent, which is executed in a separate thread and
 88 |    * allows the language server to be initialized in the background, making the MCP server respond to requests
 89 |      immediately upon startup,
 90 |    * ensures that all tool executions are fully synchronized (executed linearly).
 91 | * `SearchForPatternTool`: Better default, extended parameters and description for restricting the search
 92 | * Language support:
 93 |    * Better support for C# by switching from `omnisharp` to Microsoft's official C# language server.
 94 |    * **Add support for Clojure, Elixir and Terraform. New language servers for C# and typescript.**
 95 |    * Experimental language server implementations can now be accessed by users through configuring the `language` field
 96 | * Configuration:
 97 |    * Add option `web_dashboard_open_on_launch` (allowing the dashboard to be enabled without opening a browser window) 
 98 |    * Add options `record_tool_usage_stats` and `token_count_estimator`
 99 |    * Serena config, modes and contexts can now be adjusted from the user's home directory.
100 |    * Extended CLI to help with configuration
101 | * Dashboard:
102 |   * Displaying tool usage statistics if enabled in the config
103 | 
104 | Fixes:
105 | * Fix `ExecuteShellCommandTool` and `GetCurrentConfigTool` hanging on Windows
106 | * Fix project activation by name via `--project` not working (was broken in previous release) 
107 | * Improve handling of indentation and newlines in symbolic editing tools
108 | * Fix `InsertAfterSymbolTool` failing for insertions at the end of a file that did not end with a newline
109 | * Fix `InsertBeforeSymbolTool` inserting in the wrong place in the absence of empty lines above the reference symbol
110 | * Fix `ReplaceSymbolBodyTool` changing whitespace before/after the symbol
111 | * Fix repository indexing not following links and catch exceptions during indexing, allowing indexing
112 |   to continue even if unexpected errors occur for individual files.
113 | * Fix `ImportError` in Ruby language server.
114 | * Fix some issues with gitignore matching and interpreting of regexes in `search_for_pattern` tool.
115 | 
116 | # 2025-06-20
117 | 
118 | * **Overhaul and major improvement of editing tools!**
119 |   This represents a very important change in Serena. Symbols can now be addressed by their `name_path` (including nested ones)
120 |   and we introduced a regex-based replacement tool. We tuned the prompts and tested the new editing mechanism.
121 |   It is much more reliable, flexible, and at the same time uses fewer tokens.
122 |   The line-replacement tools are disabled by default and deprecated, we will likely remove them soon.
123 | * **Better multi-project support and zero-config setup**: We significantly simplified the config setup, you no longer need to manually
124 |   create `project.yaml` for each project. Project activation is now always available. 
125 |   Any project can now be activated by just asking the LLM to do so and passing the path to a repo.
126 | * Dashboard as web app and possibility to shut down Serena from it (or the old log GUI).
127 | * Possibility to index your project beforehand, accelerating Serena's tools.
128 | * Initial prompt for project supported (has to be added manually for the moment)
129 | * Massive performance improvement of pattern search tool
130 | * Use **process isolation** to fix stability issues and deadlocks (see #170). 
131 |   This uses separate processes for the MCP server, the Serena agent and the dashboard in order to fix asyncio-related issues.
132 | 
133 | # 2025-05-24
134 | 
135 | * Important new feature: **configurability of mode and context**, allowing better integration in a variety of clients.
136 |   See corresponding section in readme - Serena can now be integrated in IDE assistants in a more productive way. 
137 |   You can now also do things like switching to one-shot planning mode, ask to plan something (which will create a memory),
138 |   then switch to interactive editing mode in the next conversation and work through the plan read from the memory.
139 | * Some improvements to prompts.
140 | 
141 | # 2025-05-21
142 | 
143 | **Significant improvement in symbol finding!**
144 | 
145 | * Serena core:
146 |     * `FindSymbolTool` now can look for symbols by specifying paths to them, not just the symbol name
147 | * Language Servers:
148 |     * Fixed `gopls` initialization
149 |     * Symbols retrieved through the symbol tree or through overview methods now are linked to their parents
150 | 
151 | 
152 | # 2025-05-19
153 | 
154 | * Serena core:
155 |     * Bugfix in `FindSymbolTool` (a bug fixed in LS)
156 |     * Fix in `ListDirTool`: Do not ignore files with extensions not understood by the language server, only skip ignored directories
157 |       (error introduced in previous version)
158 |     * Merged the two overview tools (for directories and files) into a single one: `GetSymbolsOverviewTool`
159 |     * One-click setup for Cline enabled
160 |     * `SearchForPatternTool` can now (optionally) search in the entire project
161 |     * New tool `RestartLanguageServerTool` for restarting the language server (in case of other sources of editing apart from Serena)
162 |     * Fix `CheckOnboardingPerformedTool`:
163 |         * Tool description was incompatible with project change
164 |         * Returned result was not as useful as it could be (now added list of memories)
165 | 
166 | * Language Servers:
167 |     * Add further file extensions considered by the language servers for Python (.pyi), JavaScript (.jsx) and TypeScript (.tsx, .jsx)
168 |     * Updated multilspy, adding support for Kotlin, Dart and C/C++, along with several improvements.
169 |     * Added support for PHP
170 |     
171 | 
172 | # 2025-04-07
173 | 
174 | > **Breaking Config Changes**: make sure to set `ignore_all_files_in_gitignore`, remove `ignore_dirs`
175 | >  and (optionally) set `ignore_paths` in your project configs. See [updated config template](myproject.template.yml)
176 | 
177 | * Serena core:
178 |     * New tool: FindReferencingCodeSnippets
179 |     * Adjusted prompt in CreateTextFileTool to prevent writing partial content (see [here](https://www.reddit.com/r/ClaudeAI/comments/1jpavtm/comment/mloek1x/?utm_source=share&utm_medium=web3x&utm_name=web3xcss&utm_term=1&utm_content=share_button)).
180 |     * FindSymbolTool: allow passing a file for restricting search, not just a directory (Gemini was too dumb to pass directories)
181 |     * Native support for gitignore files for configuring files to be ignored by serena. See also
182 |       in *Language Servers* section below.
183 |     * **Major Feature**: Allow Serena to switch between projects (project activation)
184 |         * Add central Serena configuration in `serena_config.yml`, which 
185 |             * contains the list of available projects
186 |             * allows configuring whether project activation is enabled
187 |             * now contains the GUI logging configuration (project configurations no longer do)
188 |         * Add new tools `activate_project` and `get_active_project`
189 |         * Providing a project configuration file in the launch parameters is now optional
190 | * Logging:
191 |     * Improve error reporting in case of initialization failure: 
192 |       open a new GUI log window showing the error or ensure that the existing log window remains visible for some time
193 | * Language Servers:
194 |     * Fix C# language server initialization issue when the project path contains spaces
195 |     * Native support for gitignore in overview, document-tree and find_references operations.
196 |       This is an **important** addition, since previously things like `venv` and `node_modules` were scanned
197 |       and were likely responsible for slowness of tools and even server crashes (presumably due to OOM errors).
198 | * Agno: 
199 |     * Fix Agno reloading mechanism causing failures when initializing the sqlite memory database #8
200 |     * Fix Serena GUI log window not capturing logs after initialization
201 | 
202 | # 2025-04-01
203 | 
204 | Initial public version
205 | 
```

--------------------------------------------------------------------------------
/test/solidlsp/csharp/test_csharp_basic.py:
--------------------------------------------------------------------------------

```python
  1 | import os
  2 | import tempfile
  3 | from pathlib import Path
  4 | from typing import cast
  5 | from unittest.mock import Mock, patch
  6 | 
  7 | import pytest
  8 | 
  9 | from solidlsp import SolidLanguageServer
 10 | from solidlsp.language_servers.csharp_language_server import (
 11 |     CSharpLanguageServer,
 12 |     breadth_first_file_scan,
 13 |     find_solution_or_project_file,
 14 | )
 15 | from solidlsp.ls_config import Language, LanguageServerConfig
 16 | from solidlsp.ls_utils import SymbolUtils
 17 | from solidlsp.settings import SolidLSPSettings
 18 | 
 19 | 
 20 | @pytest.mark.csharp
 21 | class TestCSharpLanguageServer:
 22 |     @pytest.mark.parametrize("language_server", [Language.CSHARP], indirect=True)
 23 |     def test_find_symbol(self, language_server: SolidLanguageServer) -> None:
 24 |         """Test finding symbols in the full symbol tree."""
 25 |         symbols = language_server.request_full_symbol_tree()
 26 |         assert SymbolUtils.symbol_tree_contains_name(symbols, "Program"), "Program class not found in symbol tree"
 27 |         assert SymbolUtils.symbol_tree_contains_name(symbols, "Calculator"), "Calculator class not found in symbol tree"
 28 |         assert SymbolUtils.symbol_tree_contains_name(symbols, "Add"), "Add method not found in symbol tree"
 29 | 
 30 |     @pytest.mark.parametrize("language_server", [Language.CSHARP], indirect=True)
 31 |     def test_get_document_symbols(self, language_server: SolidLanguageServer) -> None:
 32 |         """Test getting document symbols from a C# file."""
 33 |         file_path = os.path.join("Program.cs")
 34 |         symbols = language_server.request_document_symbols(file_path)
 35 | 
 36 |         # Check that we have symbols
 37 |         assert len(symbols) > 0
 38 | 
 39 |         # Flatten the symbols if they're nested
 40 |         if isinstance(symbols[0], list):
 41 |             symbols = symbols[0]
 42 | 
 43 |         # Look for expected classes
 44 |         class_names = [s.get("name") for s in symbols if s.get("kind") == 5]  # 5 is class
 45 |         assert "Program" in class_names
 46 |         assert "Calculator" in class_names
 47 | 
 48 |     @pytest.mark.parametrize("language_server", [Language.CSHARP], indirect=True)
 49 |     def test_find_referencing_symbols(self, language_server: SolidLanguageServer) -> None:
 50 |         """Test finding references using symbol selection range."""
 51 |         file_path = os.path.join("Program.cs")
 52 |         symbols = language_server.request_document_symbols(file_path)
 53 |         add_symbol = None
 54 |         # Handle nested symbol structure
 55 |         symbol_list = symbols[0] if symbols and isinstance(symbols[0], list) else symbols
 56 |         for sym in symbol_list:
 57 |             if sym.get("name") == "Add":
 58 |                 add_symbol = sym
 59 |                 break
 60 |         assert add_symbol is not None, "Could not find 'Add' method symbol in Program.cs"
 61 |         sel_start = add_symbol["selectionRange"]["start"]
 62 |         refs = language_server.request_references(file_path, sel_start["line"], sel_start["character"] + 1)
 63 |         assert any(
 64 |             "Program.cs" in ref.get("relativePath", "") for ref in refs
 65 |         ), "Program.cs should reference Add method (tried all positions in selectionRange)"
 66 | 
 67 |     @pytest.mark.parametrize("language_server", [Language.CSHARP], indirect=True)
 68 |     def test_nested_namespace_symbols(self, language_server: SolidLanguageServer) -> None:
 69 |         """Test getting symbols from nested namespace."""
 70 |         file_path = os.path.join("Models", "Person.cs")
 71 |         symbols = language_server.request_document_symbols(file_path)
 72 | 
 73 |         # Check that we have symbols
 74 |         assert len(symbols) > 0
 75 | 
 76 |         # Flatten the symbols if they're nested
 77 |         if isinstance(symbols[0], list):
 78 |             symbols = symbols[0]
 79 | 
 80 |         # Check that we have the Person class
 81 |         assert any(s.get("name") == "Person" and s.get("kind") == 5 for s in symbols)
 82 | 
 83 |         # Check for properties and methods
 84 |         symbol_names = [s.get("name") for s in symbols]
 85 |         assert "Name" in symbol_names
 86 |         assert "Age" in symbol_names
 87 |         assert "Email" in symbol_names
 88 |         assert "ToString" in symbol_names
 89 |         assert "IsAdult" in symbol_names
 90 | 
 91 |     @pytest.mark.parametrize("language_server", [Language.CSHARP], indirect=True)
 92 |     def test_find_referencing_symbols_across_files(self, language_server: SolidLanguageServer) -> None:
 93 |         """Test finding references to Calculator.Subtract method across files."""
 94 |         # First, find the Subtract method in Program.cs
 95 |         file_path = os.path.join("Program.cs")
 96 |         symbols = language_server.request_document_symbols(file_path)
 97 | 
 98 |         # Flatten the symbols if they're nested
 99 |         symbol_list = symbols[0] if symbols and isinstance(symbols[0], list) else symbols
100 | 
101 |         subtract_symbol = None
102 |         for sym in symbol_list:
103 |             if sym.get("name") == "Subtract":
104 |                 subtract_symbol = sym
105 |                 break
106 | 
107 |         assert subtract_symbol is not None, "Could not find 'Subtract' method symbol in Program.cs"
108 | 
109 |         # Get references to the Subtract method
110 |         sel_start = subtract_symbol["selectionRange"]["start"]
111 |         refs = language_server.request_references(file_path, sel_start["line"], sel_start["character"] + 1)
112 | 
113 |         # Should find references in both Program.cs and Models/Person.cs
114 |         ref_files = cast(list[str], [ref.get("relativePath", "") for ref in refs])
115 |         print(f"Found references: {refs}")
116 |         print(f"Reference files: {ref_files}")
117 | 
118 |         # Check that we have references from both files
119 |         assert any("Program.cs" in ref_file for ref_file in ref_files), "Should find reference in Program.cs"
120 |         assert any(
121 |             os.path.join("Models", "Person.cs") in ref_file for ref_file in ref_files
122 |         ), "Should find reference in Models/Person.cs where Calculator.Subtract is called"
123 | 
124 |         # check for a second time, since the first call may trigger initialization and change the state of the LS
125 |         refs_second_call = language_server.request_references(file_path, sel_start["line"], sel_start["character"] + 1)
126 |         assert refs_second_call == refs, "Second call to request_references should return the same results"
127 | 
128 | 
129 | @pytest.mark.csharp
130 | class TestCSharpSolutionProjectOpening:
131 |     """Test C# language server solution and project opening functionality."""
132 | 
133 |     def test_breadth_first_file_scan(self):
134 |         """Test that breadth_first_file_scan finds files in breadth-first order."""
135 |         with tempfile.TemporaryDirectory() as temp_dir:
136 |             temp_path = Path(temp_dir)
137 | 
138 |             # Create test directory structure
139 |             (temp_path / "file1.txt").touch()
140 |             (temp_path / "subdir1").mkdir()
141 |             (temp_path / "subdir1" / "file2.txt").touch()
142 |             (temp_path / "subdir2").mkdir()
143 |             (temp_path / "subdir2" / "file3.txt").touch()
144 |             (temp_path / "subdir1" / "subdir3").mkdir()
145 |             (temp_path / "subdir1" / "subdir3" / "file4.txt").touch()
146 | 
147 |             # Scan files
148 |             files = list(breadth_first_file_scan(str(temp_path)))
149 |             filenames = [os.path.basename(f) for f in files]
150 | 
151 |             # Should find all files
152 |             assert len(files) == 4
153 |             assert "file1.txt" in filenames
154 |             assert "file2.txt" in filenames
155 |             assert "file3.txt" in filenames
156 |             assert "file4.txt" in filenames
157 | 
158 |             # file1.txt should be found first (breadth-first)
159 |             assert filenames[0] == "file1.txt"
160 | 
161 |     def test_find_solution_or_project_file_with_solution(self):
162 |         """Test that find_solution_or_project_file prefers .sln files."""
163 |         with tempfile.TemporaryDirectory() as temp_dir:
164 |             temp_path = Path(temp_dir)
165 | 
166 |             # Create both .sln and .csproj files
167 |             solution_file = temp_path / "MySolution.sln"
168 |             project_file = temp_path / "MyProject.csproj"
169 |             solution_file.touch()
170 |             project_file.touch()
171 | 
172 |             result = find_solution_or_project_file(str(temp_path))
173 | 
174 |             # Should prefer .sln file
175 |             assert result == str(solution_file)
176 | 
177 |     def test_find_solution_or_project_file_with_project_only(self):
178 |         """Test that find_solution_or_project_file falls back to .csproj files."""
179 |         with tempfile.TemporaryDirectory() as temp_dir:
180 |             temp_path = Path(temp_dir)
181 | 
182 |             # Create only .csproj file
183 |             project_file = temp_path / "MyProject.csproj"
184 |             project_file.touch()
185 | 
186 |             result = find_solution_or_project_file(str(temp_path))
187 | 
188 |             # Should return .csproj file
189 |             assert result == str(project_file)
190 | 
191 |     def test_find_solution_or_project_file_with_nested_files(self):
192 |         """Test that find_solution_or_project_file finds files in subdirectories."""
193 |         with tempfile.TemporaryDirectory() as temp_dir:
194 |             temp_path = Path(temp_dir)
195 | 
196 |             # Create nested structure
197 |             (temp_path / "src").mkdir()
198 |             solution_file = temp_path / "src" / "MySolution.sln"
199 |             solution_file.touch()
200 | 
201 |             result = find_solution_or_project_file(str(temp_path))
202 | 
203 |             # Should find nested .sln file
204 |             assert result == str(solution_file)
205 | 
206 |     def test_find_solution_or_project_file_returns_none_when_no_files(self):
207 |         """Test that find_solution_or_project_file returns None when no .sln or .csproj files exist."""
208 |         with tempfile.TemporaryDirectory() as temp_dir:
209 |             temp_path = Path(temp_dir)
210 | 
211 |             # Create some other files
212 |             (temp_path / "readme.txt").touch()
213 |             (temp_path / "other.cs").touch()
214 | 
215 |             result = find_solution_or_project_file(str(temp_path))
216 | 
217 |             # Should return None
218 |             assert result is None
219 | 
220 |     def test_find_solution_or_project_file_prefers_solution_breadth_first(self):
221 |         """Test that solution files are preferred even when deeper in the tree."""
222 |         with tempfile.TemporaryDirectory() as temp_dir:
223 |             temp_path = Path(temp_dir)
224 | 
225 |             # Create .csproj at root and .sln in subdirectory
226 |             project_file = temp_path / "MyProject.csproj"
227 |             project_file.touch()
228 | 
229 |             (temp_path / "src").mkdir()
230 |             solution_file = temp_path / "src" / "MySolution.sln"
231 |             solution_file.touch()
232 | 
233 |             result = find_solution_or_project_file(str(temp_path))
234 | 
235 |             # Should still prefer .sln file even though it's deeper
236 |             assert result == str(solution_file)
237 | 
238 |     @patch("solidlsp.language_servers.csharp_language_server.CSharpLanguageServer._ensure_server_installed")
239 |     @patch("solidlsp.language_servers.csharp_language_server.CSharpLanguageServer._start_server")
240 |     def test_csharp_language_server_logs_solution_discovery(self, mock_start_server, mock_ensure_server_installed):
241 |         """Test that CSharpLanguageServer logs solution/project discovery during initialization."""
242 |         mock_ensure_server_installed.return_value = ("/usr/bin/dotnet", "/path/to/server.dll")
243 | 
244 |         # Create test directory with solution file
245 |         with tempfile.TemporaryDirectory() as temp_dir:
246 |             temp_path = Path(temp_dir)
247 |             solution_file = temp_path / "TestSolution.sln"
248 |             solution_file.touch()
249 | 
250 |             # Mock logger to capture log messages
251 |             mock_logger = Mock()
252 |             mock_config = Mock(spec=LanguageServerConfig)
253 |             mock_config.ignored_paths = []
254 | 
255 |             # Create CSharpLanguageServer instance
256 |             mock_settings = Mock(spec=SolidLSPSettings)
257 |             mock_settings.ls_resources_dir = "/tmp/test_ls_resources"
258 |             mock_settings.project_data_relative_path = "project_data"
259 |             CSharpLanguageServer(mock_config, mock_logger, str(temp_path), mock_settings)
260 | 
261 |             # Verify that logger was called with solution file discovery
262 |             mock_logger.log.assert_any_call(f"Found solution/project file: {solution_file}", 20)  # logging.INFO
263 | 
264 |     @patch("solidlsp.language_servers.csharp_language_server.CSharpLanguageServer._ensure_server_installed")
265 |     @patch("solidlsp.language_servers.csharp_language_server.CSharpLanguageServer._start_server")
266 |     def test_csharp_language_server_logs_no_solution_warning(self, mock_start_server, mock_ensure_server_installed):
267 |         """Test that CSharpLanguageServer logs warning when no solution/project files are found."""
268 |         # Mock the server installation
269 |         mock_ensure_server_installed.return_value = ("/usr/bin/dotnet", "/path/to/server.dll")
270 | 
271 |         # Create empty test directory
272 |         with tempfile.TemporaryDirectory() as temp_dir:
273 |             temp_path = Path(temp_dir)
274 | 
275 |             # Mock logger to capture log messages
276 |             mock_logger = Mock()
277 |             mock_config = Mock(spec=LanguageServerConfig)
278 |             mock_config.ignored_paths = []
279 | 
280 |             # Create CSharpLanguageServer instance
281 |             mock_settings = Mock(spec=SolidLSPSettings)
282 |             mock_settings.ls_resources_dir = "/tmp/test_ls_resources"
283 |             mock_settings.project_data_relative_path = "project_data"
284 |             CSharpLanguageServer(mock_config, mock_logger, str(temp_path), mock_settings)
285 | 
286 |             # Verify that logger was called with warning about no solution/project files
287 |             mock_logger.log.assert_any_call(
288 |                 "No .sln or .csproj file found, language server will attempt auto-discovery", 30  # logging.WARNING
289 |             )
290 | 
291 |     def test_solution_and_project_opening_with_real_test_repo(self):
292 |         """Test solution and project opening with the actual C# test repository."""
293 |         # Get the C# test repo path
294 |         test_repo_path = Path(__file__).parent.parent.parent / "resources" / "repos" / "csharp" / "test_repo"
295 | 
296 |         if not test_repo_path.exists():
297 |             pytest.skip("C# test repository not found")
298 | 
299 |         # Test solution/project discovery in the real test repo
300 |         result = find_solution_or_project_file(str(test_repo_path))
301 | 
302 |         # Should find either .sln or .csproj file
303 |         assert result is not None
304 |         assert result.endswith((".sln", ".csproj"))
305 | 
306 |         # Verify the file actually exists
307 |         assert os.path.exists(result)
308 | 
```

--------------------------------------------------------------------------------
/test/solidlsp/python/test_python_basic.py:
--------------------------------------------------------------------------------

```python
  1 | """
  2 | Basic integration tests for the language server functionality.
  3 | 
  4 | These tests validate the functionality of the language server APIs
  5 | like request_references using the test repository.
  6 | """
  7 | 
  8 | import os
  9 | 
 10 | import pytest
 11 | 
 12 | from serena.project import Project
 13 | from serena.text_utils import LineType
 14 | from solidlsp import SolidLanguageServer
 15 | from solidlsp.ls_config import Language
 16 | 
 17 | 
 18 | @pytest.mark.python
 19 | class TestLanguageServerBasics:
 20 |     """Test basic functionality of the language server."""
 21 | 
 22 |     @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
 23 |     def test_request_references_user_class(self, language_server: SolidLanguageServer) -> None:
 24 |         """Test request_references on the User class."""
 25 |         # Get references to the User class in models.py
 26 |         file_path = os.path.join("test_repo", "models.py")
 27 |         # Line 31 contains the User class definition
 28 |         # Use selectionRange only
 29 |         symbols = language_server.request_document_symbols(file_path)
 30 |         user_symbol = next((s for s in symbols[0] if s.get("name") == "User"), None)
 31 |         if not user_symbol or "selectionRange" not in user_symbol:
 32 |             raise AssertionError("User symbol or its selectionRange not found")
 33 |         sel_start = user_symbol["selectionRange"]["start"]
 34 |         references = language_server.request_references(file_path, sel_start["line"], sel_start["character"])
 35 |         assert len(references) > 1, "User class should be referenced in multiple files (using selectionRange if present)"
 36 | 
 37 |     @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
 38 |     def test_request_references_item_class(self, language_server: SolidLanguageServer) -> None:
 39 |         """Test request_references on the Item class."""
 40 |         # Get references to the Item class in models.py
 41 |         file_path = os.path.join("test_repo", "models.py")
 42 |         # Line 56 contains the Item class definition
 43 |         # Use selectionRange only
 44 |         symbols = language_server.request_document_symbols(file_path)
 45 |         item_symbol = next((s for s in symbols[0] if s.get("name") == "Item"), None)
 46 |         if not item_symbol or "selectionRange" not in item_symbol:
 47 |             raise AssertionError("Item symbol or its selectionRange not found")
 48 |         sel_start = item_symbol["selectionRange"]["start"]
 49 |         references = language_server.request_references(file_path, sel_start["line"], sel_start["character"])
 50 |         services_references = [ref for ref in references if "services.py" in ref["uri"]]
 51 |         assert len(services_references) > 0, "At least one reference should be in services.py (using selectionRange if present)"
 52 | 
 53 |     @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
 54 |     def test_request_references_function_parameter(self, language_server: SolidLanguageServer) -> None:
 55 |         """Test request_references on a function parameter."""
 56 |         # Get references to the id parameter in get_user method
 57 |         file_path = os.path.join("test_repo", "services.py")
 58 |         # Line 24 contains the get_user method with id parameter
 59 |         # Use selectionRange only
 60 |         symbols = language_server.request_document_symbols(file_path)
 61 |         get_user_symbol = next((s for s in symbols[0] if s.get("name") == "get_user"), None)
 62 |         if not get_user_symbol or "selectionRange" not in get_user_symbol:
 63 |             raise AssertionError("get_user symbol or its selectionRange not found")
 64 |         sel_start = get_user_symbol["selectionRange"]["start"]
 65 |         references = language_server.request_references(file_path, sel_start["line"], sel_start["character"])
 66 |         assert len(references) > 0, "id parameter should be referenced within the method (using selectionRange if present)"
 67 | 
 68 |     @pytest.mark.parametrize("language_server", [Language.PYTHON], indirect=True)
 69 |     def test_request_references_create_user_method(self, language_server: SolidLanguageServer) -> None:
 70 |         # Get references to the create_user method in UserService
 71 |         file_path = os.path.join("test_repo", "services.py")
 72 |         # Line 15 contains the create_user method definition
 73 |         # Use selectionRange only
 74 |         symbols = language_server.request_document_symbols(file_path)
 75 |         create_user_symbol = next((s for s in symbols[0] if s.get("name") == "create_user"), None)
 76 |         if not create_user_symbol or "selectionRange" not in create_user_symbol:
 77 |             raise AssertionError("create_user symbol or its selectionRange not found")
 78 |         sel_start = create_user_symbol["selectionRange"]["start"]
 79 |         references = language_server.request_references(file_path, sel_start["line"], sel_start["character"])
 80 |         assert len(references) > 1, "Should get valid references for create_user (using selectionRange if present)"
 81 | 
 82 | 
 83 | class TestProjectBasics:
 84 |     @pytest.mark.parametrize("project", [Language.PYTHON], indirect=True)
 85 |     def test_retrieve_content_around_line(self, project: Project) -> None:
 86 |         """Test retrieve_content_around_line functionality with various scenarios."""
 87 |         file_path = os.path.join("test_repo", "models.py")
 88 | 
 89 |         # Scenario 1: Just a single line (User class definition)
 90 |         line_31 = project.retrieve_content_around_line(file_path, 31)
 91 |         assert len(line_31.lines) == 1
 92 |         assert "class User(BaseModel):" in line_31.lines[0].line_content
 93 |         assert line_31.lines[0].line_number == 31
 94 |         assert line_31.lines[0].match_type == LineType.MATCH
 95 | 
 96 |         # Scenario 2: Context above and below
 97 |         with_context_around_user = project.retrieve_content_around_line(file_path, 31, 2, 2)
 98 |         assert len(with_context_around_user.lines) == 5
 99 |         # Check line content
100 |         assert "class User(BaseModel):" in with_context_around_user.matched_lines[0].line_content
101 |         assert with_context_around_user.num_matched_lines == 1
102 |         assert "    User model representing a system user." in with_context_around_user.lines[4].line_content
103 |         # Check line numbers
104 |         assert with_context_around_user.lines[0].line_number == 29
105 |         assert with_context_around_user.lines[1].line_number == 30
106 |         assert with_context_around_user.lines[2].line_number == 31
107 |         assert with_context_around_user.lines[3].line_number == 32
108 |         assert with_context_around_user.lines[4].line_number == 33
109 |         # Check match types
110 |         assert with_context_around_user.lines[0].match_type == LineType.BEFORE_MATCH
111 |         assert with_context_around_user.lines[1].match_type == LineType.BEFORE_MATCH
112 |         assert with_context_around_user.lines[2].match_type == LineType.MATCH
113 |         assert with_context_around_user.lines[3].match_type == LineType.AFTER_MATCH
114 |         assert with_context_around_user.lines[4].match_type == LineType.AFTER_MATCH
115 | 
116 |         # Scenario 3a: Only context above
117 |         with_context_above = project.retrieve_content_around_line(file_path, 31, 3, 0)
118 |         assert len(with_context_above.lines) == 4
119 |         assert "return cls(id=id, name=name)" in with_context_above.lines[0].line_content
120 |         assert "class User(BaseModel):" in with_context_above.matched_lines[0].line_content
121 |         assert with_context_above.num_matched_lines == 1
122 |         # Check line numbers
123 |         assert with_context_above.lines[0].line_number == 28
124 |         assert with_context_above.lines[1].line_number == 29
125 |         assert with_context_above.lines[2].line_number == 30
126 |         assert with_context_above.lines[3].line_number == 31
127 |         # Check match types
128 |         assert with_context_above.lines[0].match_type == LineType.BEFORE_MATCH
129 |         assert with_context_above.lines[1].match_type == LineType.BEFORE_MATCH
130 |         assert with_context_above.lines[2].match_type == LineType.BEFORE_MATCH
131 |         assert with_context_above.lines[3].match_type == LineType.MATCH
132 | 
133 |         # Scenario 3b: Only context below
134 |         with_context_below = project.retrieve_content_around_line(file_path, 31, 0, 3)
135 |         assert len(with_context_below.lines) == 4
136 |         assert "class User(BaseModel):" in with_context_below.matched_lines[0].line_content
137 |         assert with_context_below.num_matched_lines == 1
138 |         assert with_context_below.lines[0].line_number == 31
139 |         assert with_context_below.lines[1].line_number == 32
140 |         assert with_context_below.lines[2].line_number == 33
141 |         assert with_context_below.lines[3].line_number == 34
142 |         # Check match types
143 |         assert with_context_below.lines[0].match_type == LineType.MATCH
144 |         assert with_context_below.lines[1].match_type == LineType.AFTER_MATCH
145 |         assert with_context_below.lines[2].match_type == LineType.AFTER_MATCH
146 |         assert with_context_below.lines[3].match_type == LineType.AFTER_MATCH
147 | 
148 |         # Scenario 4a: Edge case - context above but line is at 0
149 |         first_line_with_context_around = project.retrieve_content_around_line(file_path, 0, 2, 1)
150 |         assert len(first_line_with_context_around.lines) <= 4  # Should have at most 4 lines (line 0 + 1 below + up to 2 above)
151 |         assert first_line_with_context_around.lines[0].line_number <= 2  # First line should be at most line 2
152 |         # Check match type for the target line
153 |         for line in first_line_with_context_around.lines:
154 |             if line.line_number == 0:
155 |                 assert line.match_type == LineType.MATCH
156 |             elif line.line_number < 0:
157 |                 assert line.match_type == LineType.BEFORE_MATCH
158 |             else:
159 |                 assert line.match_type == LineType.AFTER_MATCH
160 | 
161 |         # Scenario 4b: Edge case - context above but line is at 1
162 |         second_line_with_context_above = project.retrieve_content_around_line(file_path, 1, 3, 1)
163 |         assert len(second_line_with_context_above.lines) <= 5  # Should have at most 5 lines (line 1 + 1 below + up to 3 above)
164 |         assert second_line_with_context_above.lines[0].line_number <= 1  # First line should be at most line 1
165 |         # Check match type for the target line
166 |         for line in second_line_with_context_above.lines:
167 |             if line.line_number == 1:
168 |                 assert line.match_type == LineType.MATCH
169 |             elif line.line_number < 1:
170 |                 assert line.match_type == LineType.BEFORE_MATCH
171 |             else:
172 |                 assert line.match_type == LineType.AFTER_MATCH
173 | 
174 |         # Scenario 4c: Edge case - context below but line is at the end of file
175 |         # First get the total number of lines in the file
176 |         all_content = project.read_file(file_path)
177 |         total_lines = len(all_content.split("\n"))
178 | 
179 |         last_line_with_context_around = project.retrieve_content_around_line(file_path, total_lines - 1, 1, 3)
180 |         assert len(last_line_with_context_around.lines) <= 5  # Should have at most 5 lines (last line + 1 above + up to 3 below)
181 |         assert last_line_with_context_around.lines[-1].line_number >= total_lines - 4  # Last line should be at least total_lines - 4
182 |         # Check match type for the target line
183 |         for line in last_line_with_context_around.lines:
184 |             if line.line_number == total_lines - 1:
185 |                 assert line.match_type == LineType.MATCH
186 |             elif line.line_number < total_lines - 1:
187 |                 assert line.match_type == LineType.BEFORE_MATCH
188 |             else:
189 |                 assert line.match_type == LineType.AFTER_MATCH
190 | 
191 |     @pytest.mark.parametrize("project", [Language.PYTHON], indirect=True)
192 |     def test_search_files_for_pattern(self, project: Project) -> None:
193 |         """Test search_files_for_pattern with various patterns and glob filters."""
194 |         # Test 1: Search for class definitions across all files
195 |         class_pattern = r"class\s+\w+\s*(?:\([^{]*\)|:)"
196 |         matches = project.search_source_files_for_pattern(class_pattern)
197 |         assert len(matches) > 0
198 |         # Should find multiple classes like User, Item, BaseModel, etc.
199 |         assert len(matches) >= 5
200 | 
201 |         # Test 2: Search for specific class with include glob
202 |         user_class_pattern = r"class\s+User\s*(?:\([^{]*\)|:)"
203 |         matches = project.search_source_files_for_pattern(user_class_pattern, paths_include_glob="**/models.py")
204 |         assert len(matches) == 1  # Should only find User class in models.py
205 |         assert matches[0].source_file_path is not None
206 |         assert "models.py" in matches[0].source_file_path
207 | 
208 |         # Test 3: Search for method definitions with exclude glob
209 |         method_pattern = r"def\s+\w+\s*\([^)]*\):"
210 |         matches = project.search_source_files_for_pattern(method_pattern, paths_exclude_glob="**/models.py")
211 |         assert len(matches) > 0
212 |         # Should find methods in services.py but not in models.py
213 |         assert all(match.source_file_path is not None and "models.py" not in match.source_file_path for match in matches)
214 | 
215 |         # Test 4: Search for specific method with both include and exclude globs
216 |         create_user_pattern = r"def\s+create_user\s*\([^)]*\)(?:\s*->[^:]+)?:"
217 |         matches = project.search_source_files_for_pattern(
218 |             create_user_pattern, paths_include_glob="**/*.py", paths_exclude_glob="**/models.py"
219 |         )
220 |         assert len(matches) == 1  # Should only find create_user in services.py
221 |         assert matches[0].source_file_path is not None
222 |         assert "services.py" in matches[0].source_file_path
223 | 
224 |         # Test 5: Search for a pattern that should appear in multiple files
225 |         init_pattern = r"def\s+__init__\s*\([^)]*\):"
226 |         matches = project.search_source_files_for_pattern(init_pattern)
227 |         assert len(matches) > 1  # Should find __init__ in multiple classes
228 |         # Should find __init__ in both models.py and services.py
229 |         assert any(match.source_file_path is not None and "models.py" in match.source_file_path for match in matches)
230 |         assert any(match.source_file_path is not None and "services.py" in match.source_file_path for match in matches)
231 | 
232 |         # Test 6: Search with a pattern that should have no matches
233 |         no_match_pattern = r"def\s+this_method_does_not_exist\s*\([^)]*\):"
234 |         matches = project.search_source_files_for_pattern(no_match_pattern)
235 |         assert len(matches) == 0
236 | 
```

--------------------------------------------------------------------------------
/src/serena/util/file_system.py:
--------------------------------------------------------------------------------

```python
  1 | import logging
  2 | import os
  3 | from collections.abc import Callable, Iterator
  4 | from dataclasses import dataclass, field
  5 | from pathlib import Path
  6 | from typing import NamedTuple
  7 | 
  8 | import pathspec
  9 | from pathspec import PathSpec
 10 | from sensai.util.logging import LogTime
 11 | 
 12 | log = logging.getLogger(__name__)
 13 | 
 14 | 
 15 | class ScanResult(NamedTuple):
 16 |     """Result of scanning a directory."""
 17 | 
 18 |     directories: list[str]
 19 |     files: list[str]
 20 | 
 21 | 
 22 | def scan_directory(
 23 |     path: str,
 24 |     recursive: bool = False,
 25 |     relative_to: str | None = None,
 26 |     is_ignored_dir: Callable[[str], bool] | None = None,
 27 |     is_ignored_file: Callable[[str], bool] | None = None,
 28 | ) -> ScanResult:
 29 |     """
 30 |     :param path: the path to scan
 31 |     :param recursive: whether to recursively scan subdirectories
 32 |     :param relative_to: the path to which the results should be relative to; if None, provide absolute paths
 33 |     :param is_ignored_dir: a function with which to determine whether the given directory (abs. path) shall be ignored
 34 |     :param is_ignored_file: a function with which to determine whether the given file (abs. path) shall be ignored
 35 |     :return: the list of directories and files
 36 |     """
 37 |     if is_ignored_file is None:
 38 |         is_ignored_file = lambda x: False
 39 |     if is_ignored_dir is None:
 40 |         is_ignored_dir = lambda x: False
 41 | 
 42 |     files = []
 43 |     directories = []
 44 | 
 45 |     abs_path = os.path.abspath(path)
 46 |     rel_base = os.path.abspath(relative_to) if relative_to else None
 47 | 
 48 |     try:
 49 |         with os.scandir(abs_path) as entries:
 50 |             for entry in entries:
 51 |                 try:
 52 |                     entry_path = entry.path
 53 | 
 54 |                     if rel_base:
 55 |                         result_path = os.path.relpath(entry_path, rel_base)
 56 |                     else:
 57 |                         result_path = entry_path
 58 | 
 59 |                     if entry.is_file():
 60 |                         if not is_ignored_file(entry_path):
 61 |                             files.append(result_path)
 62 |                     elif entry.is_dir():
 63 |                         if not is_ignored_dir(entry_path):
 64 |                             directories.append(result_path)
 65 |                             if recursive:
 66 |                                 sub_result = scan_directory(
 67 |                                     entry_path,
 68 |                                     recursive=True,
 69 |                                     relative_to=relative_to,
 70 |                                     is_ignored_dir=is_ignored_dir,
 71 |                                     is_ignored_file=is_ignored_file,
 72 |                                 )
 73 |                                 files.extend(sub_result.files)
 74 |                                 directories.extend(sub_result.directories)
 75 |                 except PermissionError as ex:
 76 |                     # Skip files/directories that cannot be accessed due to permission issues
 77 |                     log.debug(f"Skipping entry due to permission error: {entry.path}", exc_info=ex)
 78 |                     continue
 79 |     except PermissionError as ex:
 80 |         # Skip the entire directory if it cannot be accessed
 81 |         log.debug(f"Skipping directory due to permission error: {abs_path}", exc_info=ex)
 82 |         return ScanResult([], [])
 83 | 
 84 |     return ScanResult(directories, files)
 85 | 
 86 | 
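# Illustrative usage sketch (editorial addition, not part of the original module):
# scan_directory can be driven with ad-hoc ignore callbacks, e.g. to list only Python
# files while skipping hidden directories. The callbacks receive absolute paths.
def _example_scan_python_files(root: str) -> ScanResult:
    return scan_directory(
        root,
        recursive=True,
        relative_to=root,  # report results relative to the scanned root
        is_ignored_dir=lambda d: os.path.basename(d).startswith("."),
        is_ignored_file=lambda f: not f.endswith(".py"),
    )

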
 87 | def find_all_non_ignored_files(repo_root: str) -> list[str]:
 88 |     """
 89 |     Find all non-ignored files in the repository, respecting all gitignore files in the repository.
 90 | 
 91 |     :param repo_root: The root directory of the repository
 92 |     :return: A list of all non-ignored files in the repository
 93 |     """
 94 |     gitignore_parser = GitignoreParser(repo_root)
 95 |     _, files = scan_directory(
 96 |         repo_root, recursive=True, is_ignored_dir=gitignore_parser.should_ignore, is_ignored_file=gitignore_parser.should_ignore
 97 |     )
 98 |     return files
 99 | 
100 | 
101 | @dataclass
102 | class GitignoreSpec:
103 |     file_path: str
104 |     """Path to the gitignore file."""
105 |     patterns: list[str] = field(default_factory=list)
106 |     """List of patterns from the gitignore file.
107 |     The patterns are adjusted based on the gitignore file location.
108 |     """
109 |     pathspec: PathSpec = field(init=False)
110 |     """Compiled PathSpec object for pattern matching."""
111 | 
112 |     def __post_init__(self) -> None:
113 |         """Initialize the PathSpec from patterns."""
114 |         self.pathspec = PathSpec.from_lines(pathspec.patterns.GitWildMatchPattern, self.patterns)
115 | 
116 |     def matches(self, relative_path: str) -> bool:
117 |         """
118 |         Check if the given path matches any pattern in this gitignore spec.
119 | 
120 |         :param relative_path: Path to check (should be relative to repo root)
121 |         :return: True if path matches any pattern
122 |         """
123 |         return match_path(relative_path, self.pathspec, root_path=os.path.dirname(self.file_path))
124 | 
125 | 
126 | class GitignoreParser:
127 |     """
128 |     Parser for gitignore files in a repository.
129 | 
130 |     This class handles parsing multiple gitignore files throughout a repository
131 |     and provides methods to check if paths should be ignored.
132 |     """
133 | 
134 |     def __init__(self, repo_root: str) -> None:
135 |         """
136 |         Initialize the parser for a repository.
137 | 
138 |         :param repo_root: Root directory of the repository
139 |         """
140 |         self.repo_root = os.path.abspath(repo_root)
141 |         self.ignore_specs: list[GitignoreSpec] = []
142 |         self._load_gitignore_files()
143 | 
144 |     def _load_gitignore_files(self) -> None:
145 |         """Load all gitignore files from the repository."""
146 |         with LogTime("Loading of .gitignore files", logger=log):
147 |             for gitignore_path in self._iter_gitignore_files():
148 |                 log.info("Processing .gitignore file: %s", gitignore_path)
149 |                 spec = self._create_ignore_spec(gitignore_path)
150 |                 if spec.patterns:  # Only add non-empty specs
151 |                     self.ignore_specs.append(spec)
152 | 
153 |     def _iter_gitignore_files(self, follow_symlinks: bool = False) -> Iterator[str]:
154 |         """
155 |         Iteratively discover .gitignore files in a top-down fashion, starting from the repository root.
156 |         Directory paths are skipped if they match any already loaded ignore patterns.
157 | 
158 |         :return: an iterator yielding paths to .gitignore files (top-down)
159 |         """
160 |         queue: list[str] = [self.repo_root]
161 | 
162 |         def scan(abs_path: str | None) -> Iterator[str]:
163 |             for entry in os.scandir(abs_path):
164 |                 if entry.is_dir(follow_symlinks=follow_symlinks):
165 |                     queue.append(entry.path)
166 |                 elif entry.is_file(follow_symlinks=follow_symlinks) and entry.name == ".gitignore":
167 |                     yield entry.path
168 | 
169 |         while queue:
170 |             next_abs_path = queue.pop(0)
171 |             if next_abs_path != self.repo_root:
172 |                 rel_path = os.path.relpath(next_abs_path, self.repo_root)
173 |                 if self.should_ignore(rel_path):
174 |                     continue
175 |             yield from scan(next_abs_path)
176 | 
177 |     def _create_ignore_spec(self, gitignore_file_path: str) -> GitignoreSpec:
178 |         """
179 |         Create a GitignoreSpec from a single gitignore file.
180 | 
181 |         :param gitignore_file_path: Path to the .gitignore file
182 |         :return: GitignoreSpec object for the gitignore patterns
183 |         """
184 |         try:
185 |             with open(gitignore_file_path, encoding="utf-8") as f:
186 |                 content = f.read()
187 |         except (OSError, UnicodeDecodeError):
188 |             # If we can't read the file, return an empty spec
189 |             return GitignoreSpec(gitignore_file_path, [])
190 | 
191 |         gitignore_dir = os.path.dirname(gitignore_file_path)
192 |         patterns = self._parse_gitignore_content(content, gitignore_dir)
193 | 
194 |         return GitignoreSpec(gitignore_file_path, patterns)
195 | 
196 |     def _parse_gitignore_content(self, content: str, gitignore_dir: str) -> list[str]:
197 |         """
198 |         Parse gitignore content and adjust patterns based on the gitignore file location.
199 | 
200 |         :param content: Content of the .gitignore file
201 |         :param gitignore_dir: Directory containing the .gitignore file (absolute path)
202 |         :return: List of adjusted patterns
203 |         """
204 |         patterns = []
205 | 
206 |         # Get the relative path from repo root to the gitignore directory
207 |         rel_dir = os.path.relpath(gitignore_dir, self.repo_root)
208 |         if rel_dir == ".":
209 |             rel_dir = ""
210 | 
211 |         for line in content.splitlines():
212 |             # Strip trailing whitespace (but preserve leading whitespace for now)
213 |             line = line.rstrip()
214 | 
215 |             # Skip empty lines and comments
216 |             if not line or line.lstrip().startswith("#"):
217 |                 continue
218 | 
219 |             # Store whether this is a negation pattern
220 |             is_negation = line.startswith("!")
221 |             if is_negation:
222 |                 line = line[1:]
223 | 
224 |             # Strip leading/trailing whitespace after removing negation
225 |             line = line.strip()
226 | 
227 |             if not line:
228 |                 continue
229 | 
230 |             # Handle escaped characters at the beginning
231 |             if line.startswith(("\\#", "\\!")):
232 |                 line = line[1:]
233 | 
234 |             # Determine if pattern is anchored to the gitignore directory and remove leading slash for processing
235 |             is_anchored = line.startswith("/")
236 |             if is_anchored:
237 |                 line = line[1:]
238 | 
239 |             # Adjust pattern based on gitignore file location
240 |             if rel_dir:
241 |                 if is_anchored:
242 |                     # Anchored patterns are relative to the gitignore directory
243 |                     adjusted_pattern = os.path.join(rel_dir, line)
244 |                 else:
245 |                     # Non-anchored patterns can match anywhere below the gitignore directory
246 |                     # We need to preserve this behavior
247 |                     if line.startswith("**/"):
248 |                         # Even if pattern starts with **, it should still be scoped to the subdirectory
249 |                         adjusted_pattern = os.path.join(rel_dir, line)
250 |                     else:
251 |                         # Add the directory prefix but also allow matching in subdirectories
252 |                         adjusted_pattern = os.path.join(rel_dir, "**", line)
253 |             else:
254 |                 if is_anchored:
255 |                     # Anchored patterns in root should only match at root level
256 |                     # Add leading slash back to indicate root-only matching
257 |                     adjusted_pattern = "/" + line
258 |                 else:
259 |                     # Non-anchored patterns can match anywhere
260 |                     adjusted_pattern = line
261 | 
262 |             # Re-add negation if needed
263 |             if is_negation:
264 |                 adjusted_pattern = "!" + adjusted_pattern
265 | 
266 |             # Normalize path separators to forward slashes (gitignore uses forward slashes)
267 |             adjusted_pattern = adjusted_pattern.replace(os.sep, "/")
268 | 
269 |             patterns.append(adjusted_pattern)
270 | 
271 |         return patterns
272 | 
273 |     def should_ignore(self, path: str) -> bool:
274 |         """
275 |         Check if a path should be ignored based on the gitignore rules.
276 | 
277 |         :param path: Path to check (absolute or relative to repo_root)
278 |         :return: True if the path should be ignored, False otherwise
279 |         """
280 |         # Convert to relative path from repo root
281 |         if os.path.isabs(path):
282 |             try:
283 |                 rel_path = os.path.relpath(path, self.repo_root)
284 |             except Exception as e:
285 |                 # If the path could not be converted to a relative path,
286 |                 # it is outside the repository root, so we ignore it
287 |                 log.info("Ignoring path '%s' which is outside of the repository root (%s)", path, e)
288 |                 return True
289 |         else:
290 |             rel_path = path
291 | 
292 |         # Ignore paths inside .git
293 |         rel_path_parts = Path(rel_path).parts
294 |         if rel_path_parts and rel_path_parts[0] == ".git":
295 |             return True
296 | 
297 |         abs_path = os.path.join(self.repo_root, rel_path)
298 | 
299 |         # Normalize path separators
300 |         rel_path = rel_path.replace(os.sep, "/")
301 | 
302 |         if os.path.exists(abs_path) and os.path.isdir(abs_path) and not rel_path.endswith("/"):
303 |             rel_path = rel_path + "/"
304 | 
305 |         # Check against each ignore spec
306 |         for spec in self.ignore_specs:
307 |             if spec.matches(rel_path):
308 |                 return True
309 | 
310 |         return False
311 | 
312 |     def get_ignore_specs(self) -> list[GitignoreSpec]:
313 |         """
314 |         Get all loaded gitignore specs.
315 | 
316 |         :return: List of GitignoreSpec objects
317 |         """
318 |         return self.ignore_specs
319 | 
320 |     def reload(self) -> None:
321 |         """Reload all gitignore files from the repository."""
322 |         self.ignore_specs.clear()
323 |         self._load_gitignore_files()
324 | 
325 | 
326 | def match_path(relative_path: str, path_spec: PathSpec, root_path: str = "") -> bool:
327 |     """
328 |     Match a relative path against a given pathspec. pathspec.match_file() alone is not sufficient;
329 |     the path needs some massaging to work around issues with pathspec matching.
330 | 
331 |     :param relative_path: relative path to match against the pathspec
332 |     :param path_spec: the pathspec to match against
333 |     :param root_path: the root path from which the relative path is derived
334 |     :return: True if the path matches the pathspec, False otherwise
335 |     """
336 |     normalized_path = str(relative_path).replace(os.path.sep, "/")
337 | 
338 |     # We can have patterns like /src/..., which would only match corresponding paths from the repo root
339 |     # Unfortunately, pathspec can't know whether a relative path is relative to the repo root or not,
340 |     # so it will never match src/...
341 |     # The fix is to just always assume that the input path is relative to the repo root and to
342 |     # prefix it with /.
343 |     if not normalized_path.startswith("/"):
344 |         normalized_path = "/" + normalized_path
345 | 
346 |     # pathspec can't handle the matching of directories if they don't end with a slash!
347 |     # see https://github.com/cpburnz/python-pathspec/issues/89
348 |     abs_path = os.path.abspath(os.path.join(root_path, relative_path))
349 |     if os.path.isdir(abs_path) and not normalized_path.endswith("/"):
350 |         normalized_path = normalized_path + "/"
351 |     return path_spec.match_file(normalized_path)
352 | 
```
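
As a quick illustration of how the utilities above fit together, here is a minimal usage sketch (not part of the repository). It assumes the module path `serena.util.file_system`, which is how this module is imported elsewhere in the codebase, and uses a placeholder repository path; `find_all_non_ignored_files` is essentially the lower-level `scan_directory` call shown below it.

```python
from serena.util.file_system import GitignoreParser, find_all_non_ignored_files, scan_directory

repo_root = "/path/to/some/checkout"  # placeholder path, replace with a real repository root

# Convenience wrapper: respects every .gitignore file found in the repository
files = find_all_non_ignored_files(repo_root)

# Equivalent lower-level call: reuse the parser's should_ignore check for both callbacks
parser = GitignoreParser(repo_root)
directories, files_again = scan_directory(
    repo_root,
    recursive=True,
    relative_to=repo_root,  # return paths relative to the repository root
    is_ignored_dir=parser.should_ignore,
    is_ignored_file=parser.should_ignore,
)
print(len(files), len(directories))
```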

--------------------------------------------------------------------------------
/.github/workflows/pytest.yml:
--------------------------------------------------------------------------------

```yaml
  1 | name: Tests on CI
  2 | 
  3 | on:
  4 |   pull_request:
  5 |   push:
  6 |     branches:
  7 |       - main
  8 | 
  9 | concurrency:
 10 |   group: ci-${{ github.workflow }}-${{ github.ref }}
 11 |   cancel-in-progress: true
 12 | 
 13 | jobs:
 14 |   cpu:
 15 |     name: Tests on ${{ matrix.os }}
 16 |     runs-on: ${{ matrix.os }}
 17 |     strategy:
 18 |       fail-fast: false
 19 |       matrix:
 20 |         os: [ubuntu-latest, windows-latest, macos-latest]
 21 |         python-version: ["3.11"]
 22 |     steps:
 23 |       - uses: actions/checkout@v3
 24 |       - name: Set up Python ${{ matrix.python-version }}
 25 |         uses: actions/setup-python@v4
 26 |         with:
 27 |           python-version: "${{ matrix.python-version }}"
 28 |       - uses: actions/setup-go@v5
 29 |         with:
 30 |           go-version: ">=1.17.0"
 31 |       - name: Set up Node.js
 32 |         uses: actions/setup-node@v4
 33 |         with:
 34 |           node-version: '20.x'
 35 |       - name: Ensure cached directories exist before calling cache-related actions
 36 |         shell: bash
 37 |         run: |
 38 |           mkdir -p $HOME/.serena/language_servers/static
 39 |           mkdir -p $HOME/.cache/go-build
 40 |           mkdir -p $HOME/go/bin
 41 |       # Add Go bin directory to PATH for this workflow
 42 |       # GITHUB_PATH is a special file that GitHub Actions uses to modify PATH
 43 |       # Writing to this file adds the directory to the PATH for subsequent steps
 44 |       - name: Cache Go binaries
 45 |         id: cache-go-binaries
 46 |         uses: actions/cache@v3
 47 |         with:
 48 |           path: |
 49 |             ~/go/bin
 50 |             ~/.cache/go-build
 51 |           key: go-binaries-${{ runner.os }}-gopls-latest
 52 |       - name: Install gopls
 53 |         if: steps.cache-go-binaries.outputs.cache-hit != 'true'
 54 |         shell: bash
 55 |         run: go install golang.org/x/tools/gopls@latest
 56 |       - name: Set up Elixir
 57 |         if: runner.os != 'Windows'
 58 |         uses: erlef/setup-beam@v1
 59 |         with:
 60 |           elixir-version: "1.18.4"
 61 |           otp-version: "26.1"
 62 | #      Erlang currently not tested in CI, random hangs on macOS, always hangs on Ubuntu
 63 | #      In local tests, Erlang seems to work, though
 64 | #      - name: Install Erlang Language Server
 65 | #        if: runner.os != 'Windows'
 66 | #        shell: bash
 67 | #        run: |
 68 | #          # Install rebar3 if not already available
 69 | #          which rebar3 || (curl -fsSL https://github.com/erlang/rebar3/releases/download/3.23.0/rebar3 -o /tmp/rebar3 && chmod +x /tmp/rebar3 && sudo mv /tmp/rebar3 /usr/local/bin/rebar3)
 70 | #          # Clone and build erlang_ls
 71 | #          git clone https://github.com/erlang-ls/erlang_ls.git /tmp/erlang_ls
 72 | #          cd /tmp/erlang_ls
 73 | #          make install PREFIX=/usr/local
 74 | #          # Ensure erlang_ls is in PATH
 75 | #          echo "$HOME/.local/bin" >> $GITHUB_PATH
 76 |       - name: Install clojure tools
 77 |         uses: DeLaGuardo/setup-clojure@13.0
 78 |         with:
 79 |           cli: latest
 80 |       - name: Setup Java (for JVM based languages)
 81 |         uses: actions/setup-java@v4
 82 |         with:
 83 |           distribution: 'temurin'
 84 |           java-version: '17'
 85 |       - name: Install Terraform
 86 |         uses: hashicorp/setup-terraform@v3
 87 |         with:
 88 |           terraform_version: "1.5.0"
 89 |           terraform_wrapper: false
 90 |       # - name: Install swift
 91 |       #   if: runner.os != 'Windows'
 92 |       #   uses: swift-actions/setup-swift@v2
 93 |       # Installation of swift with the action screws with installation of ruby on macOS for some reason
 94 |       # We can try again when version 3 of the action is released, where they will also use swiftly
 95 |       # Until then, we use custom code to install swift. Sourcekit-lsp is installed automatically with swift
 96 |       - name: Install Swift with swiftly (macOS)
 97 |         if: runner.os == 'macOS'
 98 |         run: |
 99 |           echo "=== Installing swiftly on macOS ==="
100 |           curl -O https://download.swift.org/swiftly/darwin/swiftly.pkg && \
101 |           installer -pkg swiftly.pkg -target CurrentUserHomeDirectory && \
102 |           ~/.swiftly/bin/swiftly init --quiet-shell-followup && \
103 |           . "${SWIFTLY_HOME_DIR:-$HOME/.swiftly}/env.sh" && \
104 |           hash -r
105 |           swiftly install --use 6.1.2
106 |           swiftly use 6.1.2
107 |           echo "~/.swiftly/bin" >> $GITHUB_PATH
108 |           echo "Swiftly installed successfully"
109 |           # Verify sourcekit-lsp is working before proceeding
110 |           echo "=== Verifying sourcekit-lsp installation ==="
111 |           which sourcekit-lsp || echo "Warning: sourcekit-lsp not found in PATH"
112 |           sourcekit-lsp --help || echo "Warning: sourcekit-lsp not responding"
113 |       - name: Install Swift with swiftly (Ubuntu)
114 |         if: runner.os == 'Linux'
115 |         run: |
116 |           echo "=== Installing swiftly on Ubuntu ==="
117 |           # Install dependencies BEFORE Swift to avoid exit code 1
118 |           sudo apt-get update
119 |           sudo apt-get -y install libcurl4-openssl-dev
120 |           curl -O https://download.swift.org/swiftly/linux/swiftly-$(uname -m).tar.gz && \
121 |           tar zxf swiftly-$(uname -m).tar.gz && \
122 |           ./swiftly init --quiet-shell-followup && \
123 |           . "${SWIFTLY_HOME_DIR:-$HOME/.local/share/swiftly}/env.sh" && \
124 |           hash -r
125 |           swiftly install --use 6.1.2
126 |           swiftly use 6.1.2
127 |           echo "=== Adding Swift toolchain to PATH ==="
128 |           echo "$HOME/.local/share/swiftly/bin" >> $GITHUB_PATH
129 |           echo "Swiftly installed successfully!"
130 |           # Verify sourcekit-lsp is working before proceeding
131 |           echo "=== Verifying sourcekit-lsp installation ==="
132 |           which sourcekit-lsp || echo "Warning: sourcekit-lsp not found in PATH"
133 |           sourcekit-lsp --help || echo "Warning: sourcekit-lsp not responding"
134 |       - name: Install Ruby
135 |         uses: ruby/setup-ruby@v1
136 |         with:
137 |           ruby-version: '3.4'
138 |       - name: Install Ruby language server
139 |         shell: bash
140 |         run: gem install ruby-lsp
141 |       - name: Install R
142 |         uses: r-lib/actions/setup-r@v2
143 |         with:
144 |           r-version: '4.4.2'
145 |           use-public-rspm: true
146 |       - name: Install R language server
147 |         shell: bash
148 |         run: |
149 |           Rscript -e "install.packages('languageserver', repos='https://cloud.r-project.org')"
150 |       - name: Install Zig
151 |         uses: goto-bus-stop/setup-zig@v2
152 |         with:
153 |           version: 0.14.1
154 |       - name: Install ZLS (Zig Language Server)
155 |         shell: bash
156 |         run: |
157 |           if [[ "${{ runner.os }}" == "Linux" ]]; then
158 |             wget https://github.com/zigtools/zls/releases/download/0.14.0/zls-x86_64-linux.tar.xz
159 |             tar -xf zls-x86_64-linux.tar.xz
160 |             sudo mv zls /usr/local/bin/
161 |             rm zls-x86_64-linux.tar.xz
162 |           elif [[ "${{ runner.os }}" == "macOS" ]]; then
163 |             wget https://github.com/zigtools/zls/releases/download/0.14.0/zls-x86_64-macos.tar.xz
164 |             tar -xf zls-x86_64-macos.tar.xz
165 |             sudo mv zls /usr/local/bin/
166 |             rm zls-x86_64-macos.tar.xz
167 |           elif [[ "${{ runner.os }}" == "Windows" ]]; then
168 |             curl -L -o zls.zip https://github.com/zigtools/zls/releases/download/0.14.0/zls-x86_64-windows.zip
169 |             unzip -o zls.zip
170 |             mkdir -p "$HOME/bin"
171 |             mv zls.exe "$HOME/bin/"
172 |             echo "$HOME/bin" >> $GITHUB_PATH
173 |             rm zls.zip
174 |           fi
175 |       - name: Install Lua Language Server
176 |         shell: bash
177 |         run: |
178 |           LUA_LS_VERSION="3.15.0"
179 |           LUA_LS_DIR="$HOME/.serena/language_servers/lua"
180 |           mkdir -p "$LUA_LS_DIR"
181 |           
182 |           if [[ "${{ runner.os }}" == "Linux" ]]; then
183 |             if [[ "$(uname -m)" == "x86_64" ]]; then
184 |               wget https://github.com/LuaLS/lua-language-server/releases/download/${LUA_LS_VERSION}/lua-language-server-${LUA_LS_VERSION}-linux-x64.tar.gz
185 |               tar -xzf lua-language-server-${LUA_LS_VERSION}-linux-x64.tar.gz -C "$LUA_LS_DIR"
186 |             else
187 |               wget https://github.com/LuaLS/lua-language-server/releases/download/${LUA_LS_VERSION}/lua-language-server-${LUA_LS_VERSION}-linux-arm64.tar.gz
188 |               tar -xzf lua-language-server-${LUA_LS_VERSION}-linux-arm64.tar.gz -C "$LUA_LS_DIR"
189 |             fi
190 |             chmod +x "$LUA_LS_DIR/bin/lua-language-server"
191 |             # Create wrapper script instead of symlink to ensure supporting files are found
192 |             echo '#!/bin/bash' | sudo tee /usr/local/bin/lua-language-server > /dev/null
193 |             echo 'cd "${HOME}/.serena/language_servers/lua/bin"' | sudo tee -a /usr/local/bin/lua-language-server > /dev/null
194 |             echo 'exec ./lua-language-server "$@"' | sudo tee -a /usr/local/bin/lua-language-server > /dev/null
195 |             sudo chmod +x /usr/local/bin/lua-language-server
196 |             rm lua-language-server-*.tar.gz
197 |           elif [[ "${{ runner.os }}" == "macOS" ]]; then
198 |             if [[ "$(uname -m)" == "x86_64" ]]; then
199 |               wget https://github.com/LuaLS/lua-language-server/releases/download/${LUA_LS_VERSION}/lua-language-server-${LUA_LS_VERSION}-darwin-x64.tar.gz
200 |               tar -xzf lua-language-server-${LUA_LS_VERSION}-darwin-x64.tar.gz -C "$LUA_LS_DIR"
201 |             else
202 |               wget https://github.com/LuaLS/lua-language-server/releases/download/${LUA_LS_VERSION}/lua-language-server-${LUA_LS_VERSION}-darwin-arm64.tar.gz
203 |               tar -xzf lua-language-server-${LUA_LS_VERSION}-darwin-arm64.tar.gz -C "$LUA_LS_DIR"
204 |             fi
205 |             chmod +x "$LUA_LS_DIR/bin/lua-language-server"
206 |             # Create wrapper script instead of symlink to ensure supporting files are found
207 |             echo '#!/bin/bash' | sudo tee /usr/local/bin/lua-language-server > /dev/null
208 |             echo 'cd "${HOME}/.serena/language_servers/lua/bin"' | sudo tee -a /usr/local/bin/lua-language-server > /dev/null
209 |             echo 'exec ./lua-language-server "$@"' | sudo tee -a /usr/local/bin/lua-language-server > /dev/null
210 |             sudo chmod +x /usr/local/bin/lua-language-server
211 |             rm lua-language-server-*.tar.gz
212 |           elif [[ "${{ runner.os }}" == "Windows" ]]; then
213 |             curl -L -o lua-ls.zip https://github.com/LuaLS/lua-language-server/releases/download/${LUA_LS_VERSION}/lua-language-server-${LUA_LS_VERSION}-win32-x64.zip
214 |             unzip -o lua-ls.zip -d "$LUA_LS_DIR"
215 |             # For Windows, we'll add the bin directory directly to PATH
216 |             # The lua-language-server.exe can find its supporting files relative to its location
217 |             echo "$LUA_LS_DIR/bin" >> $GITHUB_PATH
218 |             rm lua-ls.zip
219 |           fi
220 |       - name: Install Perl::LanguageServer
221 |         if: runner.os != 'Windows'
222 |         shell: bash
223 |         run: |
224 |           if [[ "${{ runner.os }}" == "Linux" ]]; then
225 |             sudo apt-get update
226 |             sudo apt-get install -y cpanminus build-essential libanyevent-perl libio-aio-perl
227 |           elif [[ "${{ runner.os }}" == "macOS" ]]; then
228 |             brew install cpanminus
229 |           fi
230 |           PERL_MM_USE_DEFAULT=1 cpanm --notest --force Perl::LanguageServer
231 |           # Set up Perl local::lib environment for subsequent steps
232 |           echo "PERL5LIB=$HOME/perl5/lib/perl5${PERL5LIB:+:${PERL5LIB}}" >> $GITHUB_ENV
233 |           echo "PERL_LOCAL_LIB_ROOT=$HOME/perl5${PERL_LOCAL_LIB_ROOT:+:${PERL_LOCAL_LIB_ROOT}}" >> $GITHUB_ENV
234 |           echo "PERL_MB_OPT=--install_base \"$HOME/perl5\"" >> $GITHUB_ENV
235 |           echo "PERL_MM_OPT=INSTALL_BASE=$HOME/perl5" >> $GITHUB_ENV
236 |           echo "$HOME/perl5/bin" >> $GITHUB_PATH
237 |       - name: Install Elm
238 |         shell: bash
239 |         run: npm install -g elm
240 |       - name: Install Nix
241 |         if: runner.os != 'Windows'  # Nix doesn't support Windows natively
242 |         uses: cachix/install-nix-action@v30
243 |         with:
244 |           nix_path: nixpkgs=channel:nixos-unstable
245 |       - name: Install nixd (Nix Language Server)
246 |         if: runner.os != 'Windows'  # Skip on Windows since Nix isn't available
247 |         shell: bash
248 |         run: |
249 |           # Install nixd using nix
250 |           nix profile install github:nix-community/nixd
251 | 
252 |           # Verify nixd is installed and working
253 |           if ! command -v nixd &> /dev/null; then
254 |             echo "nixd installation failed or not in PATH"
255 |             exit 1
256 |           fi
257 | 
258 |           echo "$HOME/.nix-profile/bin" >> $GITHUB_PATH
259 |       - name: Verify Nix package build
260 |         if: runner.os != 'Windows'  # Nix only supported on Linux/macOS
261 |         shell: bash
262 |         run: |
263 |           # Verify the flake builds successfully
264 |           nix build --no-link
265 |       - name: Install Regal (Rego Language Server)
266 |         shell: bash
267 |         run: |
268 |           REGAL_VERSION="0.36.1"
269 | 
270 |           if [[ "${{ runner.os }}" == "Linux" ]]; then
271 |             if [[ "$(uname -m)" == "x86_64" ]]; then
272 |               curl -L -o regal https://github.com/StyraInc/regal/releases/download/v${REGAL_VERSION}/regal_Linux_x86_64
273 |             else
274 |               curl -L -o regal https://github.com/StyraInc/regal/releases/download/v${REGAL_VERSION}/regal_Linux_arm64
275 |             fi
276 |             chmod +x regal
277 |             sudo mv regal /usr/local/bin/
278 |           elif [[ "${{ runner.os }}" == "macOS" ]]; then
279 |             if [[ "$(uname -m)" == "x86_64" ]]; then
280 |               curl -L -o regal https://github.com/StyraInc/regal/releases/download/v${REGAL_VERSION}/regal_Darwin_x86_64
281 |             else
282 |               curl -L -o regal https://github.com/StyraInc/regal/releases/download/v${REGAL_VERSION}/regal_Darwin_arm64
283 |             fi
284 |             chmod +x regal
285 |             sudo mv regal /usr/local/bin/
286 |           elif [[ "${{ runner.os }}" == "Windows" ]]; then
287 |             curl -L -o regal.exe https://github.com/StyraInc/regal/releases/download/v${REGAL_VERSION}/regal_Windows_x86_64.exe
288 |             mkdir -p "$HOME/bin"
289 |             mv regal.exe "$HOME/bin/"
290 |             echo "$HOME/bin" >> $GITHUB_PATH
291 |           fi
292 |       - name: Install uv
293 |         shell: bash
294 |         run: curl -LsSf https://astral.sh/uv/install.sh | sh
295 |       - name: Cache uv virtualenv
296 |         id: cache-uv
297 |         uses: actions/cache@v3
298 |         with:
299 |           path: .venv
300 |           key: uv-venv-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('uv.lock') }}
301 |       - name: Cache language servers
302 |         id: cache-language-servers
303 |         uses: actions/cache@v3
304 |         with:
305 |           path: ~/.serena/language_servers/static
306 |           key: language-servers-${{ runner.os }}-v1
307 |           restore-keys: |
308 |             language-servers-${{ runner.os }}-
309 |       - name: Create virtual environment
310 |         shell: bash
311 |         run: |
312 |           if [ ! -d ".venv" ]; then
313 |             uv venv
314 |           fi
315 |       - name: Install dependencies
316 |         shell: bash
317 |         run: uv pip install -e ".[dev]"
318 |       - name: Check formatting
319 |         shell: bash
320 |         run: uv run poe lint
321 |       - name: Test with pytest
322 |         shell: bash
323 |         run: uv run poe test
324 | 
```
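
The final steps of this workflow (create the virtual environment, install dependencies, lint, test) can also be run locally. The sketch below is purely illustrative and not part of the repository; it assumes `uv` is on the PATH and that it is executed from the repository root, mirroring the last four steps above.

```python
"""Rough local equivalent of the workflow's venv/deps/lint/test steps (illustrative only)."""
import subprocess
from pathlib import Path


def run(cmd: list[str]) -> None:
    """Print and run a command, aborting on the first failure."""
    print("+", " ".join(cmd))
    subprocess.run(cmd, check=True)


if __name__ == "__main__":
    if not Path(".venv").is_dir():
        run(["uv", "venv"])  # "Create virtual environment"
    run(["uv", "pip", "install", "-e", ".[dev]"])  # "Install dependencies"
    run(["uv", "run", "poe", "lint"])  # "Check formatting"
    run(["uv", "run", "poe", "test"])  # "Test with pytest"
```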

--------------------------------------------------------------------------------
/src/serena/gui_log_viewer.py:
--------------------------------------------------------------------------------

```python
  1 | # mypy: ignore-errors
  2 | import logging
  3 | import os
  4 | import queue
  5 | import sys
  6 | import threading
  7 | import tkinter as tk
  8 | import traceback
  9 | from enum import Enum, auto
 10 | from pathlib import Path
 11 | from typing import Literal
 12 | 
 13 | from serena import constants
 14 | from serena.util.logging import MemoryLogHandler
 15 | 
 16 | log = logging.getLogger(__name__)
 17 | 
 18 | 
 19 | class LogLevel(Enum):
 20 |     DEBUG = auto()
 21 |     INFO = auto()
 22 |     WARNING = auto()
 23 |     ERROR = auto()
 24 |     DEFAULT = auto()
 25 | 
 26 | 
 27 | class GuiLogViewer:
 28 |     """
 29 |     A class that creates a Tkinter GUI for displaying log messages in a separate thread.
 30 |     The log viewer supports coloring based on log levels (DEBUG, INFO, WARNING, ERROR).
 31 |     It can also highlight tool names in boldface when they appear in log messages.
 32 |     """
 33 | 
 34 |     def __init__(
 35 |         self,
 36 |         mode: Literal["dashboard", "error"],
 37 |         title="Log Viewer",
 38 |         memory_log_handler: MemoryLogHandler | None = None,
 39 |         width=800,
 40 |         height=600,
 41 |     ):
 42 |         """
 43 |         :param mode: the mode; if "dashboard", run a dashboard with logs and some control options; if "error", run
 44 |             a simple error log viewer (for fatal exceptions)
 45 |         :param title: the window title
 46 |         :param memory_log_handler: an optional log handler from which to obtain log messages; if not provided,
 47 |             the instance must be passed to a `GuiLogViewerHandler` in order to receive log messages.
 48 |         :param width: the initial window width
 49 |         :param height: the initial window height
 50 |         """
 51 |         self.mode = mode
 52 |         self.title = title
 53 |         self.width = width
 54 |         self.height = height
 55 |         self.message_queue = queue.Queue()
 56 |         self.running = False
 57 |         self.log_thread = None
 58 |         self.tool_names = []  # List to store tool names for highlighting
 59 | 
 60 |         # Define colors for different log levels
 61 |         self.log_colors = {
 62 |             LogLevel.DEBUG: "#808080",  # Gray
 63 |             LogLevel.INFO: "#000000",  # Black
 64 |             LogLevel.WARNING: "#FF8C00",  # Dark Orange
 65 |             LogLevel.ERROR: "#FF0000",  # Red
 66 |             LogLevel.DEFAULT: "#000000",  # Black
 67 |         }
 68 | 
 69 |         if memory_log_handler is not None:
 70 |             for msg in memory_log_handler.get_log_messages():
 71 |                 self.message_queue.put(msg)
 72 |             memory_log_handler.add_emit_callback(lambda msg: self.message_queue.put(msg))
 73 | 
 74 |     def start(self):
 75 |         """Start the log viewer in a separate thread."""
 76 |         if not self.running:
 77 |             self.log_thread = threading.Thread(target=self.run_gui)
 78 |             self.log_thread.daemon = True
 79 |             self.log_thread.start()
 80 |             return True
 81 |         return False
 82 | 
 83 |     def stop(self):
 84 |         """Stop the log viewer."""
 85 |         if self.running:
 86 |             # Add a sentinel value to the queue to signal the GUI to exit
 87 |             self.message_queue.put(None)
 88 |             return True
 89 |         return False
 90 | 
 91 |     def set_tool_names(self, tool_names):
 92 |         """
 93 |         Set or update the list of tool names to be highlighted in log messages.
 94 | 
 95 |         Args:
 96 |             tool_names (list): A list of tool name strings to highlight
 97 | 
 98 |         """
 99 |         self.tool_names = tool_names
100 | 
101 |     def add_log(self, message):
102 |         """
103 |         Add a log message to the viewer.
104 | 
105 |         Args:
106 |             message (str): The log message to display
107 | 
108 |         """
109 |         self.message_queue.put(message)
110 | 
111 |     def _determine_log_level(self, message):
112 |         """
113 |         Determine the log level from the message.
114 | 
115 |         Args:
116 |             message (str): The log message
117 | 
118 |         Returns:
119 |             LogLevel: The determined log level
120 | 
121 |         """
122 |         message_upper = message.upper()
123 |         if message_upper.startswith("DEBUG"):
124 |             return LogLevel.DEBUG
125 |         elif message_upper.startswith("INFO"):
126 |             return LogLevel.INFO
127 |         elif message_upper.startswith("WARNING"):
128 |             return LogLevel.WARNING
129 |         elif message_upper.startswith("ERROR"):
130 |             return LogLevel.ERROR
131 |         else:
132 |             return LogLevel.DEFAULT
133 | 
134 |     def _process_queue(self):
135 |         """Process messages from the queue and update the text widget."""
136 |         try:
137 |             while not self.message_queue.empty():
138 |                 message = self.message_queue.get_nowait()
139 | 
140 |                 # Check for sentinel value to exit
141 |                 if message is None:
142 |                     self.root.quit()
143 |                     return
144 | 
145 |                 # Check if scrollbar is at the bottom before adding new text
146 |                 # Get current scroll position
147 |                 current_position = self.text_widget.yview()
148 |                 # If near the bottom (allowing for small floating point differences)
149 |                 was_at_bottom = current_position[1] > 0.99
150 | 
151 |                 log_level = self._determine_log_level(message)
152 | 
153 |                 # Insert the message at the end of the text with appropriate log level tag
154 |                 self.text_widget.configure(state=tk.NORMAL)
155 | 
156 |                 # Find tool names in the message and highlight them
157 |                 if self.tool_names:
158 |                     # Capture start position (before insertion)
159 |                     start_index = self.text_widget.index("end-1c")
160 | 
161 |                     # Insert the message
162 |                     self.text_widget.insert(tk.END, message + "\n", log_level.name)
163 | 
164 |                     # Convert start index to line/char format
165 |                     line, char = map(int, start_index.split("."))
166 | 
167 |                     # Search for tool names in the message string directly
168 |                     for tool_name in self.tool_names:
169 |                         start_offset = 0
170 |                         while True:
171 |                             found_at = message.find(tool_name, start_offset)
172 |                             if found_at == -1:
173 |                                 break
174 | 
175 |                             # Calculate line/column from offset
176 |                             offset_line = line
177 |                             offset_char = char
178 |                             for c in message[:found_at]:
179 |                                 if c == "\n":
180 |                                     offset_line += 1
181 |                                     offset_char = 0
182 |                                 else:
183 |                                     offset_char += 1
184 | 
185 |                             # Construct index positions
186 |                             start_pos = f"{offset_line}.{offset_char}"
187 |                             end_pos = f"{offset_line}.{offset_char + len(tool_name)}"
188 | 
189 |                             # Add tag to highlight the tool name
190 |                             self.text_widget.tag_add("TOOL_NAME", start_pos, end_pos)
191 | 
192 |                             start_offset = found_at + len(tool_name)
193 | 
194 |                 else:
195 |                     # No tool names to highlight, just insert the message
196 |                     self.text_widget.insert(tk.END, message + "\n", log_level.name)
197 | 
198 |                 self.text_widget.configure(state=tk.DISABLED)
199 | 
200 |                 # Auto-scroll to the bottom only if it was already at the bottom
201 |                 if was_at_bottom:
202 |                     self.text_widget.see(tk.END)
203 | 
204 |             # Schedule to check the queue again
205 |             if self.running:
206 |                 self.root.after(100, self._process_queue)
207 | 
208 |         except Exception as e:
209 |             print(f"Error processing message queue: {e}", file=sys.stderr)
210 |             if self.running:
211 |                 self.root.after(100, self._process_queue)
212 | 
213 |     def run_gui(self):
214 |         """Run the GUI"""
215 |         self.running = True
216 |         try:
217 |             # Set app id (avoid app being lumped together with other Python-based apps in Windows taskbar)
218 |             if sys.platform == "win32":
219 |                 import ctypes
220 | 
221 |                 ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID("oraios.serena")
222 | 
223 |             self.root = tk.Tk()
224 |             self.root.title(self.title)
225 |             self.root.geometry(f"{self.width}x{self.height}")
226 | 
227 |             # Make the window resizable
228 |             self.root.columnconfigure(0, weight=1)
229 |             # We now have two rows - one for logo and one for text
230 |             self.root.rowconfigure(0, weight=0)  # Logo row
231 |             self.root.rowconfigure(1, weight=1)  # Text content row
232 | 
233 |             dashboard_path = Path(constants.SERENA_DASHBOARD_DIR)
234 | 
235 |             # Load and display the logo image
236 |             try:
237 |                 # construct path relative to path of this file
238 |                 image_path = dashboard_path / "serena-logs.png"
239 |                 self.logo_image = tk.PhotoImage(file=image_path)
240 | 
241 |                 # Create a label to display the logo
242 |                 self.logo_label = tk.Label(self.root, image=self.logo_image)
243 |                 self.logo_label.grid(row=0, column=0, sticky="ew")
244 |             except Exception as e:
245 |                 print(f"Error loading logo image: {e}", file=sys.stderr)
246 | 
247 |             # Create frame to hold text widget and scrollbars
248 |             frame = tk.Frame(self.root)
249 |             frame.grid(row=1, column=0, sticky="nsew")
250 |             frame.columnconfigure(0, weight=1)
251 |             frame.rowconfigure(0, weight=1)
252 | 
253 |             # Create horizontal scrollbar
254 |             h_scrollbar = tk.Scrollbar(frame, orient=tk.HORIZONTAL)
255 |             h_scrollbar.grid(row=1, column=0, sticky="ew")
256 | 
257 |             # Create vertical scrollbar
258 |             v_scrollbar = tk.Scrollbar(frame, orient=tk.VERTICAL)
259 |             v_scrollbar.grid(row=0, column=1, sticky="ns")
260 | 
261 |             # Create text widget with horizontal scrolling
262 |             self.text_widget = tk.Text(
263 |                 frame, wrap=tk.NONE, width=self.width, height=self.height, xscrollcommand=h_scrollbar.set, yscrollcommand=v_scrollbar.set
264 |             )
265 |             self.text_widget.grid(row=0, column=0, sticky="nsew")
266 |             self.text_widget.configure(state=tk.DISABLED)  # Make it read-only
267 | 
268 |             # Configure scrollbars
269 |             h_scrollbar.config(command=self.text_widget.xview)
270 |             v_scrollbar.config(command=self.text_widget.yview)
271 | 
272 |             # Configure tags for different log levels with appropriate colors
273 |             for level, color in self.log_colors.items():
274 |                 self.text_widget.tag_configure(level.name, foreground=color)
275 | 
276 |             # Configure tag for tool names
277 |             self.text_widget.tag_configure("TOOL_NAME", background="#ffff00")
278 | 
279 |             # Set up the queue processing
280 |             self.root.after(100, self._process_queue)
281 | 
282 |             # Handle window close event depending on mode
283 |             if self.mode == "dashboard":
284 |                 self.root.protocol("WM_DELETE_WINDOW", lambda: self.root.iconify())
285 |             else:
286 |                 self.root.protocol("WM_DELETE_WINDOW", self.stop)
287 | 
288 |             # Create menu bar
289 |             if self.mode == "dashboard":
290 |                 menubar = tk.Menu(self.root)
291 |                 server_menu = tk.Menu(menubar, tearoff=0)
292 |                 server_menu.add_command(label="Shutdown", command=self._shutdown_server)  # type: ignore
293 |                 menubar.add_cascade(label="Server", menu=server_menu)
294 |                 self.root.config(menu=menubar)
295 | 
296 |             # Configure icons
297 |             icon_16 = tk.PhotoImage(file=dashboard_path / "serena-icon-16.png")
298 |             icon_32 = tk.PhotoImage(file=dashboard_path / "serena-icon-32.png")
299 |             icon_48 = tk.PhotoImage(file=dashboard_path / "serena-icon-48.png")
300 |             self.root.iconphoto(False, icon_48, icon_32, icon_16)
301 | 
302 |             # Start the Tkinter event loop
303 |             self.root.mainloop()
304 | 
305 |         except Exception as e:
306 |             print(f"Error in GUI thread: {e}", file=sys.stderr)
307 |         finally:
308 |             self.running = False
309 | 
310 |     def _shutdown_server(self) -> None:
311 |         log.info("Shutting down Serena")
312 |         # noinspection PyUnresolvedReferences
313 |         # noinspection PyProtectedMember
314 |         os._exit(0)
315 | 
316 | 
317 | class GuiLogViewerHandler(logging.Handler):
318 |     """
319 |     A logging handler that sends log records to a GuiLogViewer instance.
320 |     This handler can be integrated with Python's standard logging module
321 |     to direct log entries to a GUI log viewer.
322 |     """
323 | 
324 |     def __init__(
325 |         self,
326 |         log_viewer: GuiLogViewer,
327 |         level=logging.NOTSET,
328 |         format_string: str | None = "%(levelname)-5s %(asctime)-15s %(name)s:%(funcName)s:%(lineno)d - %(message)s",
329 |     ):
330 |         """
331 |         Initialize the handler with a GuiLogViewer instance.
332 | 
333 |         Args:
334 |             log_viewer: A GuiLogViewer instance that will display the logs
335 |             level: The logging level (default: NOTSET which captures all logs)
336 |             format_string: the format string
337 | 
338 |         """
339 |         super().__init__(level)
340 |         self.log_viewer = log_viewer
341 |         self.formatter = logging.Formatter(format_string)
342 | 
343 |         # Start the log viewer if it's not already running
344 |         if not self.log_viewer.running:
345 |             self.log_viewer.start()
346 | 
347 |     @classmethod
348 |     def is_instance_registered(cls) -> bool:
349 |         for h in logging.Logger.root.handlers:
350 |             if isinstance(h, cls):
351 |                 return True
352 |         return False
353 | 
354 |     def emit(self, record):
355 |         """
356 |         Emit a log record to the GuiLogViewer.
357 | 
358 |         Args:
359 |             record: The log record to emit
360 | 
361 |         """
362 |         try:
363 |             # Format the record according to the formatter
364 |             msg = self.format(record)
365 | 
366 |             # Convert the level name to a standard format for the viewer
367 |             level_prefix = record.levelname
368 | 
369 |             # Add the appropriate prefix if it's not already there
370 |             if not msg.startswith(level_prefix):
371 |                 msg = f"{level_prefix}: {msg}"
372 | 
373 |             self.log_viewer.add_log(msg)
374 | 
375 |         except Exception:
376 |             self.handleError(record)
377 | 
378 |     def close(self):
379 |         """
380 |         Close the handler and optionally stop the log viewer.
381 |         """
382 |         # We don't automatically stop the log viewer here as it might
383 |         # be used by other handlers or directly by the application
384 |         super().close()
385 | 
386 |     def stop_viewer(self):
387 |         """
388 |         Explicitly stop the associated log viewer.
389 |         """
390 |         if self.log_viewer.running:
391 |             self.log_viewer.stop()
392 | 
393 | 
394 | def show_fatal_exception(e: Exception):
395 |     """
396 |     Displays the given exception in a newly created GUI log viewer window,
397 |     blocking until the user closes it.
398 | 
399 |     :param e: the exception to display
400 |     """
401 |     # show in new window in main thread (user must close it)
402 |     log_viewer = GuiLogViewer("error")
403 |     exc_info = "".join(traceback.format_exception(type(e), e, e.__traceback__))
404 |     log_viewer.add_log(f"ERROR Fatal exception: {e}\n{exc_info}")
405 |     log_viewer.run_gui()
406 | 
```
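
For context, here is a minimal wiring sketch (not part of the repository) showing how `GuiLogViewerHandler` can be attached to the standard logging setup; the tool names passed to `set_tool_names` are hypothetical placeholders.

```python
import logging
import time

from serena.gui_log_viewer import GuiLogViewer, GuiLogViewerHandler

# Create the viewer in dashboard mode; the handler starts its GUI thread if it is not running yet.
viewer = GuiLogViewer("dashboard", title="Serena Logs")
viewer.set_tool_names(["read_file", "find_symbol"])  # hypothetical tool names to highlight

handler = GuiLogViewerHandler(viewer, level=logging.INFO)
logging.getLogger().addHandler(handler)
logging.getLogger(__name__).info("Tool read_file finished")  # shows up in the Tk window

# The GUI runs in a daemon thread, so keep the main thread alive long enough to inspect the output.
time.sleep(30)
```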

--------------------------------------------------------------------------------
/src/serena/project.py:
--------------------------------------------------------------------------------

```python
  1 | import logging
  2 | import os
  3 | from pathlib import Path
  4 | from typing import Any
  5 | 
  6 | import pathspec
  7 | 
  8 | from serena.config.serena_config import DEFAULT_TOOL_TIMEOUT, ProjectConfig
  9 | from serena.constants import SERENA_MANAGED_DIR_IN_HOME, SERENA_MANAGED_DIR_NAME
 10 | from serena.text_utils import MatchedConsecutiveLines, search_files
 11 | from serena.util.file_system import GitignoreParser, match_path
 12 | from solidlsp import SolidLanguageServer
 13 | from solidlsp.ls_config import Language, LanguageServerConfig
 14 | from solidlsp.ls_logger import LanguageServerLogger
 15 | from solidlsp.settings import SolidLSPSettings
 16 | 
 17 | log = logging.getLogger(__name__)
 18 | 
 19 | 
 20 | class Project:
 21 |     def __init__(self, project_root: str, project_config: ProjectConfig, is_newly_created: bool = False):
 22 |         self.project_root = project_root
 23 |         self.project_config = project_config
 24 |         self.is_newly_created = is_newly_created
 25 | 
 26 |         # create .gitignore file in the project's Serena data folder if not yet present
 27 |         serena_data_gitignore_path = os.path.join(self.path_to_serena_data_folder(), ".gitignore")
 28 |         if not os.path.exists(serena_data_gitignore_path):
 29 |             os.makedirs(os.path.dirname(serena_data_gitignore_path), exist_ok=True)
 30 |             log.info(f"Creating .gitignore file in {serena_data_gitignore_path}")
 31 |             with open(serena_data_gitignore_path, "w", encoding="utf-8") as f:
 32 |                 f.write(f"/{SolidLanguageServer.CACHE_FOLDER_NAME}\n")
 33 | 
 34 |         # gather ignored paths from the project configuration and gitignore files
 35 |         ignored_patterns = project_config.ignored_paths
 36 |         if len(ignored_patterns) > 0:
 37 |             log.info(f"Using {len(ignored_patterns)} ignored paths from the explicit project configuration.")
 38 |             log.debug(f"Ignored paths: {ignored_patterns}")
 39 |         if project_config.ignore_all_files_in_gitignore:
 40 |             gitignore_parser = GitignoreParser(self.project_root)
 41 |             for spec in gitignore_parser.get_ignore_specs():
 42 |                 log.debug(f"Adding {len(spec.patterns)} patterns from {spec.file_path} to the ignored paths.")
 43 |                 ignored_patterns.extend(spec.patterns)
 44 |         self._ignored_patterns = ignored_patterns
 45 | 
 46 |         # Set up the pathspec matcher for the ignored paths
 47 |         # for all absolute paths in ignored_paths, convert them to relative paths
 48 |         processed_patterns = []
 49 |         for pattern in set(ignored_patterns):
 50 |             # Normalize separators (pathspec expects forward slashes)
 51 |             pattern = pattern.replace(os.path.sep, "/")
 52 |             processed_patterns.append(pattern)
 53 |         log.debug(f"Processing {len(processed_patterns)} ignored paths")
 54 |         self._ignore_spec = pathspec.PathSpec.from_lines(pathspec.patterns.GitWildMatchPattern, processed_patterns)
 55 | 
 56 |     @property
 57 |     def project_name(self) -> str:
 58 |         return self.project_config.project_name
 59 | 
 60 |     @property
 61 |     def language(self) -> Language:
 62 |         return self.project_config.language
 63 | 
 64 |     @classmethod
 65 |     def load(cls, project_root: str | Path, autogenerate: bool = True) -> "Project":
 66 |         project_root = Path(project_root).resolve()
 67 |         if not project_root.exists():
 68 |             raise FileNotFoundError(f"Project root not found: {project_root}")
 69 |         project_config = ProjectConfig.load(project_root, autogenerate=autogenerate)
 70 |         return Project(project_root=str(project_root), project_config=project_config)
 71 | 
 72 |     def path_to_serena_data_folder(self) -> str:
 73 |         return os.path.join(self.project_root, SERENA_MANAGED_DIR_NAME)
 74 | 
 75 |     def path_to_project_yml(self) -> str:
 76 |         return os.path.join(self.project_root, self.project_config.rel_path_to_project_yml())
 77 | 
 78 |     def read_file(self, relative_path: str) -> str:
 79 |         """
 80 |         Reads a file relative to the project root.
 81 | 
 82 |         :param relative_path: the path to the file relative to the project root
 83 |         :return: the content of the file
 84 |         """
 85 |         abs_path = Path(self.project_root) / relative_path
 86 |         if not abs_path.exists():
 87 |             raise FileNotFoundError(f"File not found: {abs_path}")
 88 |         return abs_path.read_text(encoding=self.project_config.encoding)
 89 | 
 90 |     def get_ignore_spec(self) -> pathspec.PathSpec:
 91 |         """
 92 |         :return: the pathspec matcher for the paths that were configured to be ignored,
 93 |             either explicitly or implicitly through .gitignore files.
 94 |         """
 95 |         return self._ignore_spec
 96 | 
 97 |     def _is_ignored_relative_path(self, relative_path: str | Path, ignore_non_source_files: bool = True) -> bool:
 98 |         """
 99 |         Determine whether an existing path should be ignored based on file type and ignore patterns.
100 |         Raises `FileNotFoundError` if the path does not exist.
101 | 
102 |         :param relative_path: Relative path to check
103 |         :param ignore_non_source_files: whether files that are not source files (according to the file masks
104 |             determined by the project's programming language) shall be ignored
105 | 
106 |         :return: whether the path should be ignored
107 |         """
108 |         # special case, never ignore the project root itself
109 |         # If the user ignores hidden files, "." might match against the corresponding PathSpec pattern.
110 |         # The empty string also points to the project root and should never be ignored.
111 |         if str(relative_path) in [".", ""]:
112 |             return False
113 | 
114 |         abs_path = os.path.join(self.project_root, relative_path)
115 |         if not os.path.exists(abs_path):
116 |             raise FileNotFoundError(f"File {abs_path} not found, the ignore check cannot be performed")
117 | 
118 |         # Check file extension if it's a file
119 |         is_file = os.path.isfile(abs_path)
120 |         if is_file and ignore_non_source_files:
121 |             fn_matcher = self.language.get_source_fn_matcher()
122 |             if not fn_matcher.is_relevant_filename(abs_path):
123 |                 return True
124 | 
125 |         # Create normalized path for consistent handling
126 |         rel_path = Path(relative_path)
127 | 
128 |         # always ignore paths inside .git
129 |         if len(rel_path.parts) > 0 and rel_path.parts[0] == ".git":
130 |             return True
131 | 
132 |         return match_path(str(relative_path), self.get_ignore_spec(), root_path=self.project_root)
133 | 
134 |     def is_ignored_path(self, path: str | Path, ignore_non_source_files: bool = False) -> bool:
135 |         """
136 |         Checks whether the given path is ignored
137 | 
138 |         :param path: the path to check, can be absolute or relative
139 |         :param ignore_non_source_files: whether to ignore files that are not source files
140 |             (according to the file masks determined by the project's programming language)
141 |         """
142 |         path = Path(path)
143 |         if path.is_absolute():
144 |             try:
145 |                 relative_path = path.relative_to(self.project_root)
146 |             except ValueError:
147 |                 # If the path is not relative to the project root, we consider it as an absolute path outside the project
148 |                 # (which we ignore)
149 |                 log.warning(f"Path {path} is not relative to the project root {self.project_root} and was therefore ignored")
150 |                 return True
151 |         else:
152 |             relative_path = path
153 | 
154 |         return self._is_ignored_relative_path(str(relative_path), ignore_non_source_files=ignore_non_source_files)
155 | 
156 |     def is_path_in_project(self, path: str | Path) -> bool:
157 |         """
158 |         Checks if the given (absolute or relative) path is inside the project directory.
159 |         Note that even relative paths may be outside if they contain ".." or point to symlinks.
160 |         """
161 |         path = Path(path)
162 |         _proj_root = Path(self.project_root)
163 |         if not path.is_absolute():
164 |             path = _proj_root / path
165 | 
166 |         path = path.resolve()
167 |         return path.is_relative_to(_proj_root)
168 | 
169 |     def relative_path_exists(self, relative_path: str) -> bool:
170 |         """
171 |         Checks if the given relative path exists in the project directory.
172 | 
173 |         :param relative_path: the path to check, relative to the project root
174 |         :return: True if the path exists, False otherwise
175 |         """
176 |         abs_path = Path(self.project_root) / relative_path
177 |         return abs_path.exists()
178 | 
179 |     def validate_relative_path(self, relative_path: str, require_not_ignored: bool = False) -> None:
180 |         """
181 |         Validates that the given relative path to an existing file/dir is safe to read or edit,
182 |         meaning it's inside the project directory.
183 | 
184 |         Passing a path to a non-existing file will lead to a `FileNotFoundError`.
185 | 
186 |         :param relative_path: the path to validate, relative to the project root
187 |         :param require_not_ignored: if True, the path must not be ignored according to the project's ignore settings
188 |         """
189 |         if not self.is_path_in_project(relative_path):
190 |             raise ValueError(f"{relative_path=} points to path outside of the repository root; cannot access for safety reasons")
191 | 
192 |         if require_not_ignored:
193 |             if self.is_ignored_path(relative_path):
194 |                 raise ValueError(f"Path {relative_path} is ignored; cannot access for safety reasons")
195 | 
196 |     def gather_source_files(self, relative_path: str = "") -> list[str]:
197 |         """Retrieves relative paths of all source files, optionally limited to the given path
198 | 
199 |         :param relative_path: if provided, restrict search to this path
200 |         """
201 |         rel_file_paths = []
202 |         start_path = os.path.join(self.project_root, relative_path)
203 |         if not os.path.exists(start_path):
204 |             raise FileNotFoundError(f"Path {start_path} not found.")
205 |         if os.path.isfile(start_path):
206 |             return [relative_path]
207 |         else:
208 |             for root, dirs, files in os.walk(start_path, followlinks=True):
209 |                 # prevent recursion into ignored directories
210 |                 dirs[:] = [d for d in dirs if not self.is_ignored_path(os.path.join(root, d))]
211 | 
212 |                 # collect non-ignored files
213 |                 for file in files:
214 |                     abs_file_path = os.path.join(root, file)
215 |                     try:
216 |                         if not self.is_ignored_path(abs_file_path, ignore_non_source_files=True):
217 |                             try:
218 |                                 rel_file_path = os.path.relpath(abs_file_path, start=self.project_root)
219 |                             except Exception:
220 |                                 log.warning(
221 |                                     "Ignoring path '%s' because it appears to be outside of the project root (%s)",
222 |                                     abs_file_path,
223 |                                     self.project_root,
224 |                                 )
225 |                                 continue
226 |                             rel_file_paths.append(rel_file_path)
227 |                     except FileNotFoundError:
228 |                         log.warning(
229 |                             f"File {abs_file_path} not found (possibly due it being a symlink), skipping it in request_parsed_files",
230 |                         )
231 |             return rel_file_paths
232 | 
233 |     def search_source_files_for_pattern(
234 |         self,
235 |         pattern: str,
236 |         relative_path: str = "",
237 |         context_lines_before: int = 0,
238 |         context_lines_after: int = 0,
239 |         paths_include_glob: str | None = None,
240 |         paths_exclude_glob: str | None = None,
241 |     ) -> list[MatchedConsecutiveLines]:
242 |         """
243 |         Search for a pattern across all (non-ignored) source files
244 | 
245 |         :param pattern: Regular expression pattern (given as a string) to search for
246 |         :param relative_path: If provided, restrict the search to files under this path
247 |         :param context_lines_before: Number of lines of context to include before each match
248 |         :param context_lines_after: Number of lines of context to include after each match
249 |         :param paths_include_glob: Glob pattern to filter which files to include in the search
250 |         :param paths_exclude_glob: Glob pattern to filter which files to exclude from the search. Takes precedence over paths_include_glob.
251 |         :return: List of matched consecutive lines with context
252 |         """
253 |         relative_file_paths = self.gather_source_files(relative_path=relative_path)
254 |         return search_files(
255 |             relative_file_paths,
256 |             pattern,
257 |             root_path=self.project_root,
258 |             context_lines_before=context_lines_before,
259 |             context_lines_after=context_lines_after,
260 |             paths_include_glob=paths_include_glob,
261 |             paths_exclude_glob=paths_exclude_glob,
262 |         )
263 | 
264 |     def retrieve_content_around_line(
265 |         self, relative_file_path: str, line: int, context_lines_before: int = 0, context_lines_after: int = 0
266 |     ) -> MatchedConsecutiveLines:
267 |         """
268 |         Retrieve the content of the given file around the given line.
269 | 
270 |         :param relative_file_path: The relative path of the file to retrieve the content from
271 |         :param line: The line number to retrieve the content around
272 |         :param context_lines_before: The number of lines to retrieve before the given line
273 |         :param context_lines_after: The number of lines to retrieve after the given line
274 | 
275 |         :return MatchedConsecutiveLines: A container with the desired lines.
276 |         """
277 |         file_contents = self.read_file(relative_file_path)
278 |         return MatchedConsecutiveLines.from_file_contents(
279 |             file_contents,
280 |             line=line,
281 |             context_lines_before=context_lines_before,
282 |             context_lines_after=context_lines_after,
283 |             source_file_path=relative_file_path,
284 |         )
285 | 
286 |     def create_language_server(
287 |         self,
288 |         log_level: int = logging.INFO,
289 |         ls_timeout: float | None = DEFAULT_TOOL_TIMEOUT - 5,
290 |         trace_lsp_communication: bool = False,
291 |         ls_specific_settings: dict[Language, Any] | None = None,
292 |     ) -> SolidLanguageServer:
293 |         """
294 |         Create a language server for a project. Note that you will have to start it
295 |         before performing any LS operations.
296 | 
297 |         The server is configured for this project's root directory, language and
298 |         ignore settings.
299 |         :param log_level: the log level for the language server
300 |         :param ls_timeout: the timeout for the language server
301 |         :param trace_lsp_communication: whether to trace LSP communication
302 |         :param ls_specific_settings: optional LS specific configuration of the language server,
303 |             see docstrings in the inits of subclasses of SolidLanguageServer to see what values may be passed.
304 |         :return: the language server
305 |         """
306 |         ls_config = LanguageServerConfig(
307 |             code_language=self.language,
308 |             ignored_paths=self._ignored_patterns,
309 |             trace_lsp_communication=trace_lsp_communication,
310 |         )
311 |         ls_logger = LanguageServerLogger(log_level=log_level)
312 | 
313 |         log.info(f"Creating language server instance for {self.project_root}.")
314 |         return SolidLanguageServer.create(
315 |             ls_config,
316 |             ls_logger,
317 |             self.project_root,
318 |             timeout=ls_timeout,
319 |             solidlsp_settings=SolidLSPSettings(
320 |                 solidlsp_dir=SERENA_MANAGED_DIR_IN_HOME,
321 |                 project_data_relative_path=SERENA_MANAGED_DIR_NAME,
322 |                 ls_specific_settings=ls_specific_settings or {},
323 |             ),
324 |         )
325 | 
```

--------------------------------------------------------------------------------
/src/solidlsp/ls_utils.py:
--------------------------------------------------------------------------------

```python
  1 | """
  2 | This file contains various utility functions like I/O operations, handling paths, etc.
  3 | """
  4 | 
  5 | import gzip
  6 | import logging
  7 | import os
  8 | import platform
  9 | import shutil
 10 | import subprocess
 11 | import uuid
 12 | import zipfile
 13 | from enum import Enum
 14 | from pathlib import Path, PurePath
 15 | 
 16 | import requests
 17 | 
 18 | from solidlsp.ls_exceptions import SolidLSPException
 19 | from solidlsp.ls_logger import LanguageServerLogger
 20 | from solidlsp.ls_types import UnifiedSymbolInformation
 21 | 
 22 | 
 23 | class InvalidTextLocationError(Exception):
 24 |     pass
 25 | 
 26 | 
 27 | class TextUtils:
 28 |     """
 29 |     Utilities for text operations.
 30 |     """
 31 | 
 32 |     @staticmethod
 33 |     def get_line_col_from_index(text: str, index: int) -> tuple[int, int]:
 34 |         """
 35 |         Returns the zero-indexed line and column number of the given index in the given text
 36 |         """
 37 |         l = 0
 38 |         c = 0
 39 |         idx = 0
 40 |         while idx < index:
 41 |             if text[idx] == "\n":
 42 |                 l += 1
 43 |                 c = 0
 44 |             else:
 45 |                 c += 1
 46 |             idx += 1
 47 | 
 48 |         return l, c
 49 | 
 50 |     @staticmethod
 51 |     def get_index_from_line_col(text: str, line: int, col: int) -> int:
 52 |         """
 53 |         Returns the index of the given zero-indexed line and column number in the given text
 54 |         """
 55 |         idx = 0
 56 |         while line > 0:
 57 |             if idx >= len(text):
 58 |                 raise InvalidTextLocationError
 59 |             if text[idx] == "\n":
 60 |                 line -= 1
 61 |             idx += 1
 62 |         idx += col
 63 |         return idx
 64 | 
 65 |     @staticmethod
 66 |     def _get_updated_position_from_line_and_column_and_edit(l: int, c: int, text_to_be_inserted: str) -> tuple[int, int]:
 67 |         """
 68 |         Utility function to get the position of the cursor after inserting text at a given line and column.
 69 |         """
 70 |         num_newlines_in_gen_text = text_to_be_inserted.count("\n")
 71 |         if num_newlines_in_gen_text > 0:
 72 |             l += num_newlines_in_gen_text
 73 |             c = len(text_to_be_inserted.split("\n")[-1])
 74 |         else:
 75 |             c += len(text_to_be_inserted)
 76 |         return (l, c)
 77 | 
 78 |     @staticmethod
 79 |     def delete_text_between_positions(text: str, start_line: int, start_col: int, end_line: int, end_col: int) -> tuple[str, str]:
 80 |         """
 81 |         Deletes the text between the given start and end positions.
 82 |         Returns the modified text and the deleted text.
 83 |         """
 84 |         del_start_idx = TextUtils.get_index_from_line_col(text, start_line, start_col)
 85 |         del_end_idx = TextUtils.get_index_from_line_col(text, end_line, end_col)
 86 | 
 87 |         deleted_text = text[del_start_idx:del_end_idx]
 88 |         new_text = text[:del_start_idx] + text[del_end_idx:]
 89 |         return new_text, deleted_text
 90 | 
 91 |     @staticmethod
 92 |     def insert_text_at_position(text: str, line: int, col: int, text_to_be_inserted: str) -> tuple[str, int, int]:
 93 |         """
 94 |         Inserts the given text at the given line and column.
 95 |         Returns the modified text and the new line and column.
 96 |         """
 97 |         try:
 98 |             change_index = TextUtils.get_index_from_line_col(text, line, col)
 99 |         except InvalidTextLocationError:
100 |             num_lines_in_text = text.count("\n") + 1
101 |             max_line = num_lines_in_text - 1
102 |             if line == max_line + 1 and col == 0:  # trying to insert at new line after full text
103 |                 # insert at end, adding missing newline
104 |                 change_index = len(text)
105 |                 text_to_be_inserted = "\n" + text_to_be_inserted
106 |             else:
107 |                 raise
108 |         new_text = text[:change_index] + text_to_be_inserted + text[change_index:]
109 |         new_l, new_c = TextUtils._get_updated_position_from_line_and_column_and_edit(line, col, text_to_be_inserted)
110 |         return new_text, new_l, new_c
111 | 
112 | 
113 | class PathUtils:
114 |     """
115 |     Utilities for platform-agnostic path operations.
116 |     """
117 | 
118 |     @staticmethod
119 |     def uri_to_path(uri: str) -> str:
120 |         """
121 |         Converts a URI to a file path. Works on both Linux and Windows.
122 | 
123 |         This method was obtained from https://stackoverflow.com/a/61922504
124 |         """
125 |         try:
126 |             from urllib.parse import unquote, urlparse
127 |             from urllib.request import url2pathname
128 |         except ImportError:
129 |             # backwards compatibility
130 |             from urllib import unquote, url2pathname
131 | 
132 |             from urlparse import urlparse
133 |         parsed = urlparse(uri)
134 |         host = f"{os.path.sep}{os.path.sep}{parsed.netloc}{os.path.sep}"
135 |         path = os.path.normpath(os.path.join(host, url2pathname(unquote(parsed.path))))
136 |         return path
137 | 
138 |     @staticmethod
139 |     def path_to_uri(path: str) -> str:
140 |         """
141 |         Converts a file path to a file URI (file:///...).
142 |         """
143 |         return str(Path(path).absolute().as_uri())
144 | 
145 |     @staticmethod
146 |     def is_glob_pattern(pattern: str) -> bool:
147 |         """Check if a pattern contains glob-specific characters."""
148 |         return any(c in pattern for c in "*?[]!")
149 | 
150 |     @staticmethod
151 |     def get_relative_path(path: str, base_path: str) -> str | None:
152 |         """
153 |         Gets the relative path if possible (both paths must be on the same drive);
154 |         returns `None` otherwise.
155 |         """
156 |         if PurePath(path).drive == PurePath(base_path).drive:
157 |             rel_path = str(PurePath(os.path.relpath(path, base_path)))
158 |             return rel_path
159 |         return None
160 | 
161 | 
162 | class FileUtils:
163 |     """
164 |     Utility functions for file operations.
165 |     """
166 | 
167 |     @staticmethod
168 |     def read_file(logger: LanguageServerLogger, file_path: str) -> str:
169 |         """
170 |         Reads the file at the given path and returns the contents as a string.
171 |         """
172 |         if not os.path.exists(file_path):
173 |             logger.log(f"File read '{file_path}' failed: File does not exist.", logging.ERROR)
174 |             raise SolidLSPException(f"File read '{file_path}' failed: File does not exist.")
175 |         try:
176 |             with open(file_path, encoding="utf-8") as inp_file:
177 |                 return inp_file.read()
178 |         except Exception as exc:
179 |             logger.log(f"File read '{file_path}' failed to read with encoding 'utf-8': {exc}", logging.ERROR)
180 |             raise SolidLSPException("File read failed.") from None
181 | 
182 |     @staticmethod
183 |     def download_file(logger: LanguageServerLogger, url: str, target_path: str) -> None:
184 |         """
185 |         Downloads the file from the given URL to the given {target_path}
186 |         """
187 |         os.makedirs(os.path.dirname(target_path), exist_ok=True)
188 |         try:
189 |             response = requests.get(url, stream=True, timeout=60)
190 |             if response.status_code != 200:
191 |                 logger.log(f"Error downloading file '{url}': {response.status_code} {response.text}", logging.ERROR)
192 |                 raise SolidLSPException("Error downloading file.")
193 |             with open(target_path, "wb") as f:
194 |                 shutil.copyfileobj(response.raw, f)
195 |         except Exception as exc:
196 |             logger.log(f"Error downloading file '{url}': {exc}", logging.ERROR)
197 |             raise SolidLSPException("Error downloading file.") from None
198 | 
199 |     @staticmethod
200 |     def download_and_extract_archive(logger: LanguageServerLogger, url: str, target_path: str, archive_type: str) -> None:
201 |         """
202 |         Downloads the archive in the given {archive_type} format from the given URL and extracts it to {target_path}
203 |         """
204 |         try:
205 |             tmp_files = []
206 |             tmp_file_name = str(PurePath(os.path.expanduser("~"), "multilspy_tmp", uuid.uuid4().hex))
207 |             tmp_files.append(tmp_file_name)
208 |             os.makedirs(os.path.dirname(tmp_file_name), exist_ok=True)
209 |             FileUtils.download_file(logger, url, tmp_file_name)
210 |             if archive_type in ["tar", "gztar", "bztar", "xztar"]:
211 |                 os.makedirs(target_path, exist_ok=True)
212 |                 shutil.unpack_archive(tmp_file_name, target_path, archive_type)
213 |             elif archive_type == "zip":
214 |                 os.makedirs(target_path, exist_ok=True)
215 |                 with zipfile.ZipFile(tmp_file_name, "r") as zip_ref:
216 |                     for zip_info in zip_ref.infolist():
217 |                         extracted_path = zip_ref.extract(zip_info, target_path)
218 |                         ZIP_SYSTEM_UNIX = 3  # zip file created on Unix system
219 |                         if zip_info.create_system != ZIP_SYSTEM_UNIX:
220 |                             continue
221 |                         # extractall() does not preserve permissions
222 |                         # see. https://github.com/python/cpython/issues/59999
223 |                         attrs = (zip_info.external_attr >> 16) & 0o777
224 |                         if attrs:
225 |                             os.chmod(extracted_path, attrs)
226 |             elif archive_type == "zip.gz":
227 |                 os.makedirs(target_path, exist_ok=True)
228 |                 tmp_file_name_ungzipped = tmp_file_name + ".zip"
229 |                 tmp_files.append(tmp_file_name_ungzipped)
230 |                 with gzip.open(tmp_file_name, "rb") as f_in, open(tmp_file_name_ungzipped, "wb") as f_out:
231 |                     shutil.copyfileobj(f_in, f_out)
232 |                 shutil.unpack_archive(tmp_file_name_ungzipped, target_path, "zip")
233 |             elif archive_type == "gz":
234 |                 with gzip.open(tmp_file_name, "rb") as f_in, open(target_path, "wb") as f_out:
235 |                     shutil.copyfileobj(f_in, f_out)
236 |             elif archive_type == "binary":
237 |                 # For single binary files, just move to target without extraction
238 |                 shutil.move(tmp_file_name, target_path)
239 |             else:
240 |                 logger.log(f"Unknown archive type '{archive_type}' for extraction", logging.ERROR)
241 |                 raise SolidLSPException(f"Unknown archive type '{archive_type}'")
242 |         except Exception as exc:
243 |             logger.log(f"Error extracting archive '{tmp_file_name}' obtained from '{url}': {exc}", logging.ERROR)
244 |             raise SolidLSPException("Error extracting archive.") from exc
245 |         finally:
246 |             for tmp_file_name in tmp_files:
247 |                 if os.path.exists(tmp_file_name):
248 |                     Path.unlink(Path(tmp_file_name))
249 | 
250 | 
251 | class PlatformId(str, Enum):
252 |     """
253 |     multilspy supported platforms
254 |     """
255 | 
256 |     WIN_x86 = "win-x86"
257 |     WIN_x64 = "win-x64"
258 |     WIN_arm64 = "win-arm64"
259 |     OSX = "osx"
260 |     OSX_x64 = "osx-x64"
261 |     OSX_arm64 = "osx-arm64"
262 |     LINUX_x86 = "linux-x86"
263 |     LINUX_x64 = "linux-x64"
264 |     LINUX_arm64 = "linux-arm64"
265 |     LINUX_MUSL_x64 = "linux-musl-x64"
266 |     LINUX_MUSL_arm64 = "linux-musl-arm64"
267 | 
268 |     def is_windows(self):
269 |         return self.value.startswith("win")
270 | 
271 | 
272 | class DotnetVersion(str, Enum):
273 |     """
274 |     multilspy supported dotnet versions
275 |     """
276 | 
277 |     V4 = "4"
278 |     V6 = "6"
279 |     V7 = "7"
280 |     V8 = "8"
281 |     V9 = "9"
282 |     VMONO = "mono"
283 | 
284 | 
285 | class PlatformUtils:
286 |     """
287 |     This class provides utilities for platform detection and identification.
288 |     """
289 | 
290 |     @classmethod
291 |     def get_platform_id(cls) -> PlatformId:
292 |         """
293 |         Returns the platform id for the current system
294 |         """
295 |         system = platform.system()
296 |         machine = platform.machine()
297 |         bitness = platform.architecture()[0]
298 |         if system == "Windows" and machine == "":
299 |             machine = cls._determine_windows_machine_type()
300 |         system_map = {"Windows": "win", "Darwin": "osx", "Linux": "linux"}
301 |         machine_map = {
302 |             "AMD64": "x64",
303 |             "x86_64": "x64",
304 |             "i386": "x86",
305 |             "i686": "x86",
306 |             "aarch64": "arm64",
307 |             "arm64": "arm64",
308 |             "ARM64": "arm64",
309 |         }
310 |         if system in system_map and machine in machine_map:
311 |             platform_id = system_map[system] + "-" + machine_map[machine]
312 |             if system == "Linux" and bitness == "64bit":
313 |                 libc = platform.libc_ver()[0]
314 |                 if libc != "glibc":
315 |                     platform_id += "-" + libc
316 |             return PlatformId(platform_id)
317 |         else:
318 |             raise SolidLSPException(f"Unknown platform: {system=}, {machine=}, {bitness=}")
319 | 
320 |     @staticmethod
321 |     def _determine_windows_machine_type():
322 |         import ctypes
323 |         from ctypes import wintypes
324 | 
325 |         class SYSTEM_INFO(ctypes.Structure):
326 |             class _U(ctypes.Union):
327 |                 class _S(ctypes.Structure):
328 |                     _fields_ = [("wProcessorArchitecture", wintypes.WORD), ("wReserved", wintypes.WORD)]
329 | 
330 |                 _fields_ = [("dwOemId", wintypes.DWORD), ("s", _S)]
331 |                 _anonymous_ = ("s",)
332 | 
333 |             _fields_ = [
334 |                 ("u", _U),
335 |                 ("dwPageSize", wintypes.DWORD),
336 |                 ("lpMinimumApplicationAddress", wintypes.LPVOID),
337 |                 ("lpMaximumApplicationAddress", wintypes.LPVOID),
338 |                 ("dwActiveProcessorMask", wintypes.LPVOID),
339 |                 ("dwNumberOfProcessors", wintypes.DWORD),
340 |                 ("dwProcessorType", wintypes.DWORD),
341 |                 ("dwAllocationGranularity", wintypes.DWORD),
342 |                 ("wProcessorLevel", wintypes.WORD),
343 |                 ("wProcessorRevision", wintypes.WORD),
344 |             ]
345 |             _anonymous_ = ("u",)
346 | 
347 |         sys_info = SYSTEM_INFO()
348 |         ctypes.windll.kernel32.GetNativeSystemInfo(ctypes.byref(sys_info))
349 | 
350 |         arch_map = {
351 |             9: "AMD64",
352 |             5: "ARM",
353 |             12: "arm64",
354 |             6: "Intel Itanium-based",
355 |             0: "i386",
356 |         }
357 | 
358 |         return arch_map.get(sys_info.wProcessorArchitecture, f"Unknown ({sys_info.wProcessorArchitecture})")
359 | 
360 |     @staticmethod
361 |     def get_dotnet_version() -> DotnetVersion:
362 |         """
363 |         Returns the dotnet version for the current system
364 |         """
365 |         try:
366 |             result = subprocess.run(["dotnet", "--list-runtimes"], capture_output=True, check=True)
367 |             available_version_cmd_output = []
368 |             for line in result.stdout.decode("utf-8").split("\n"):
369 |                 if line.startswith("Microsoft.NETCore.App"):
370 |                     version_cmd_output = line.split(" ")[1]
371 |                     available_version_cmd_output.append(version_cmd_output)
372 | 
373 |             if not available_version_cmd_output:
374 |                 raise SolidLSPException("dotnet not found on the system")
375 | 
376 |             # Check for supported versions in order of preference (latest first)
377 |             for version_cmd_output in available_version_cmd_output:
378 |                 if version_cmd_output.startswith("9"):
379 |                     return DotnetVersion.V9
380 |                 if version_cmd_output.startswith("8"):
381 |                     return DotnetVersion.V8
382 |                 if version_cmd_output.startswith("7"):
383 |                     return DotnetVersion.V7
384 |                 if version_cmd_output.startswith("6"):
385 |                     return DotnetVersion.V6
386 |                 if version_cmd_output.startswith("4"):
387 |                     return DotnetVersion.V4
388 | 
389 |             # If no supported version found, raise exception with all available versions
390 |             raise SolidLSPException(
391 |                 f"No supported dotnet version found. Available versions: {', '.join(available_version_cmd_output)}. Supported versions: 4, 6, 7, 8"
392 |             )
393 |         except (FileNotFoundError, subprocess.CalledProcessError):
394 |             try:
395 |                 result = subprocess.run(["mono", "--version"], capture_output=True, check=True)
396 |                 return DotnetVersion.VMONO
397 |             except (FileNotFoundError, subprocess.CalledProcessError):
398 |                 raise SolidLSPException("dotnet or mono not found on the system")
399 | 
400 | 
401 | class SymbolUtils:
402 |     @staticmethod
403 |     def symbol_tree_contains_name(roots: list[UnifiedSymbolInformation], name: str) -> bool:
404 |         for symbol in roots:
405 |             if symbol["name"] == name:
406 |                 return True
407 |             if SymbolUtils.symbol_tree_contains_name(symbol["children"], name):
408 |                 return True
409 |         return False
410 | 
```