This is page 13 of 18. Use http://codebase.md/shashankss1205/codegraphcontext?lines=true&page={x} to view the full context.

# Directory Structure

```
├── .github
│   └── workflows
│       ├── e2e-tests.yml
│       ├── post_discord_invite.yml
│       ├── test.yml
│       └── update-contributors.yml
├── .gitignore
├── CONTRIBUTING.md
├── contributors.md
├── docs
│   ├── docs
│   │   ├── architecture.md
│   │   ├── cli.md
│   │   ├── contributing_languages.md
│   │   ├── contributing.md
│   │   ├── cookbook.md
│   │   ├── core.md
│   │   ├── future_work.md
│   │   ├── images
│   │   │   ├── 1.png
│   │   │   ├── 11.png
│   │   │   ├── 12.png
│   │   │   ├── 13.png
│   │   │   ├── 14.png
│   │   │   ├── 16.png
│   │   │   ├── 19.png
│   │   │   ├── 2.png
│   │   │   ├── 20.png
│   │   │   ├── 21.png
│   │   │   ├── 22.png
│   │   │   ├── 23.png
│   │   │   ├── 24.png
│   │   │   ├── 26.png
│   │   │   ├── 28.png
│   │   │   ├── 29.png
│   │   │   ├── 3.png
│   │   │   ├── 30.png
│   │   │   ├── 31.png
│   │   │   ├── 32.png
│   │   │   ├── 33.png
│   │   │   ├── 34.png
│   │   │   ├── 35.png
│   │   │   ├── 36.png
│   │   │   ├── 38.png
│   │   │   ├── 39.png
│   │   │   ├── 4.png
│   │   │   ├── 40.png
│   │   │   ├── 41.png
│   │   │   ├── 42.png
│   │   │   ├── 43.png
│   │   │   ├── 44.png
│   │   │   ├── 5.png
│   │   │   ├── 6.png
│   │   │   ├── 7.png
│   │   │   ├── 8.png
│   │   │   ├── 9.png
│   │   │   ├── Indexing.gif
│   │   │   ├── tool_images
│   │   │   │   ├── 1.png
│   │   │   │   ├── 2.png
│   │   │   │   └── 3.png
│   │   │   └── Usecase.gif
│   │   ├── index.md
│   │   ├── installation.md
│   │   ├── license.md
│   │   ├── server.md
│   │   ├── tools.md
│   │   ├── troubleshooting.md
│   │   └── use_cases.md
│   ├── mkdocs.yml
│   └── site
│       ├── 404.html
│       ├── architecture
│       │   └── index.html
│       ├── assets
│       │   ├── images
│       │   │   └── favicon.png
│       │   ├── javascripts
│       │   │   ├── bundle.f55a23d4.min.js
│       │   │   ├── bundle.f55a23d4.min.js.map
│       │   │   ├── lunr
│       │   │   │   ├── min
│       │   │   │   │   ├── lunr.ar.min.js
│       │   │   │   │   ├── lunr.da.min.js
│       │   │   │   │   ├── lunr.de.min.js
│       │   │   │   │   ├── lunr.du.min.js
│       │   │   │   │   ├── lunr.el.min.js
│       │   │   │   │   ├── lunr.es.min.js
│       │   │   │   │   ├── lunr.fi.min.js
│       │   │   │   │   ├── lunr.fr.min.js
│       │   │   │   │   ├── lunr.he.min.js
│       │   │   │   │   ├── lunr.hi.min.js
│       │   │   │   │   ├── lunr.hu.min.js
│       │   │   │   │   ├── lunr.hy.min.js
│       │   │   │   │   ├── lunr.it.min.js
│       │   │   │   │   ├── lunr.ja.min.js
│       │   │   │   │   ├── lunr.jp.min.js
│       │   │   │   │   ├── lunr.kn.min.js
│       │   │   │   │   ├── lunr.ko.min.js
│       │   │   │   │   ├── lunr.multi.min.js
│       │   │   │   │   ├── lunr.nl.min.js
│       │   │   │   │   ├── lunr.no.min.js
│       │   │   │   │   ├── lunr.pt.min.js
│       │   │   │   │   ├── lunr.ro.min.js
│       │   │   │   │   ├── lunr.ru.min.js
│       │   │   │   │   ├── lunr.sa.min.js
│       │   │   │   │   ├── lunr.stemmer.support.min.js
│       │   │   │   │   ├── lunr.sv.min.js
│       │   │   │   │   ├── lunr.ta.min.js
│       │   │   │   │   ├── lunr.te.min.js
│       │   │   │   │   ├── lunr.th.min.js
│       │   │   │   │   ├── lunr.tr.min.js
│       │   │   │   │   ├── lunr.vi.min.js
│       │   │   │   │   └── lunr.zh.min.js
│       │   │   │   ├── tinyseg.js
│       │   │   │   └── wordcut.js
│       │   │   └── workers
│       │   │       ├── search.973d3a69.min.js
│       │   │       └── search.973d3a69.min.js.map
│       │   └── stylesheets
│       │       ├── main.2a3383ac.min.css
│       │       ├── main.2a3383ac.min.css.map
│       │       ├── palette.06af60db.min.css
│       │       └── palette.06af60db.min.css.map
│       ├── cli
│       │   └── index.html
│       ├── contributing
│       │   └── index.html
│       ├── contributing_languages
│       │   └── index.html
│       ├── cookbook
│       │   └── index.html
│       ├── core
│       │   └── index.html
│       ├── future_work
│       │   └── index.html
│       ├── images
│       │   ├── 1.png
│       │   ├── 11.png
│       │   ├── 12.png
│       │   ├── 13.png
│       │   ├── 14.png
│       │   ├── 16.png
│       │   ├── 19.png
│       │   ├── 2.png
│       │   ├── 20.png
│       │   ├── 21.png
│       │   ├── 22.png
│       │   ├── 23.png
│       │   ├── 24.png
│       │   ├── 26.png
│       │   ├── 28.png
│       │   ├── 29.png
│       │   ├── 3.png
│       │   ├── 30.png
│       │   ├── 31.png
│       │   ├── 32.png
│       │   ├── 33.png
│       │   ├── 34.png
│       │   ├── 35.png
│       │   ├── 36.png
│       │   ├── 38.png
│       │   ├── 39.png
│       │   ├── 4.png
│       │   ├── 40.png
│       │   ├── 41.png
│       │   ├── 42.png
│       │   ├── 43.png
│       │   ├── 44.png
│       │   ├── 5.png
│       │   ├── 6.png
│       │   ├── 7.png
│       │   ├── 8.png
│       │   ├── 9.png
│       │   ├── Indexing.gif
│       │   ├── tool_images
│       │   │   ├── 1.png
│       │   │   ├── 2.png
│       │   │   └── 3.png
│       │   └── Usecase.gif
│       ├── index.html
│       ├── installation
│       │   └── index.html
│       ├── license
│       │   └── index.html
│       ├── search
│       │   └── search_index.json
│       ├── server
│       │   └── index.html
│       ├── sitemap.xml
│       ├── sitemap.xml.gz
│       ├── tools
│       │   └── index.html
│       ├── troubleshooting
│       │   └── index.html
│       └── use_cases
│           └── index.html
├── images
│   ├── 1.png
│   ├── 11.png
│   ├── 12.png
│   ├── 13.png
│   ├── 14.png
│   ├── 16.png
│   ├── 19.png
│   ├── 2.png
│   ├── 20.png
│   ├── 21.png
│   ├── 22.png
│   ├── 23.png
│   ├── 24.png
│   ├── 26.png
│   ├── 28.png
│   ├── 29.png
│   ├── 3.png
│   ├── 30.png
│   ├── 31.png
│   ├── 32.png
│   ├── 33.png
│   ├── 34.png
│   ├── 35.png
│   ├── 36.png
│   ├── 38.png
│   ├── 39.png
│   ├── 4.png
│   ├── 40.png
│   ├── 41.png
│   ├── 42.png
│   ├── 43.png
│   ├── 44.png
│   ├── 5.png
│   ├── 6.png
│   ├── 7.png
│   ├── 8.png
│   ├── 9.png
│   ├── Indexing.gif
│   ├── tool_images
│   │   ├── 1.png
│   │   ├── 2.png
│   │   └── 3.png
│   └── Usecase.gif
├── LICENSE
├── MANIFEST.in
├── organizer
│   ├── CONTRIBUTING_LANGUAGES.md
│   ├── cookbook.md
│   ├── docs.md
│   ├── language_specific_nodes.md
│   ├── Tools_Exploration.md
│   └── troubleshoot.md
├── package-lock.json
├── pyproject.toml
├── README.md
├── scripts
│   ├── generate_lang_contributors.py
│   └── post_install_fix.sh
├── SECURITY.md
├── src
│   └── codegraphcontext
│       ├── __init__.py
│       ├── __main__.py
│       ├── cli
│       │   ├── __init__.py
│       │   ├── cli_helpers.py
│       │   ├── main.py
│       │   ├── setup_macos.py
│       │   └── setup_wizard.py
│       ├── core
│       │   ├── __init__.py
│       │   ├── database.py
│       │   ├── jobs.py
│       │   └── watcher.py
│       ├── prompts.py
│       ├── server.py
│       ├── tools
│       │   ├── __init__.py
│       │   ├── advanced_language_query_tool.py
│       │   ├── code_finder.py
│       │   ├── graph_builder.py
│       │   ├── languages
│       │   │   ├── c.py
│       │   │   ├── cpp.py
│       │   │   ├── go.py
│       │   │   ├── java.py
│       │   │   ├── javascript.py
│       │   │   ├── python.py
│       │   │   ├── ruby.py
│       │   │   ├── rust.py
│       │   │   └── typescript.py
│       │   ├── package_resolver.py
│       │   ├── query_tool_languages
│       │   │   ├── c_toolkit.py
│       │   │   ├── cpp_toolkit.py
│       │   │   ├── go_toolkit.py
│       │   │   ├── java_toolkit.py
│       │   │   ├── javascript_toolkit.py
│       │   │   ├── python_toolkit.py
│       │   │   ├── ruby_toolkit.py
│       │   │   ├── rust_toolkit.py
│       │   │   └── typescript_toolkit.py
│       │   └── system.py
│       └── utils
│           └── debug_log.py
├── tests
│   ├── __init__.py
│   ├── conftest.py
│   ├── sample_project
│   │   ├── advanced_calls.py
│   │   ├── advanced_classes.py
│   │   ├── advanced_classes2.py
│   │   ├── advanced_functions.py
│   │   ├── advanced_imports.py
│   │   ├── async_features.py
│   │   ├── callbacks_decorators.py
│   │   ├── circular1.py
│   │   ├── circular2.py
│   │   ├── class_instantiation.py
│   │   ├── cli_and_dunder.py
│   │   ├── complex_classes.py
│   │   ├── comprehensions_generators.py
│   │   ├── context_managers.py
│   │   ├── control_flow.py
│   │   ├── datatypes.py
│   │   ├── dynamic_dispatch.py
│   │   ├── dynamic_imports.py
│   │   ├── edge_cases
│   │   │   ├── comments_only.py
│   │   │   ├── docstring_only.py
│   │   │   ├── empty.py
│   │   │   ├── hardcoded_secrets.py
│   │   │   ├── long_functions.py
│   │   │   └── syntax_error.py
│   │   ├── function_chains.py
│   │   ├── generators.py
│   │   ├── import_reexports.py
│   │   ├── mapping_calls.py
│   │   ├── module_a.py
│   │   ├── module_b.py
│   │   ├── module_c
│   │   │   ├── __init__.py
│   │   │   ├── submodule1.py
│   │   │   └── submodule2.py
│   │   ├── namespace_pkg
│   │   │   └── ns_module.py
│   │   ├── pattern_matching.py
│   │   └── typing_examples.py
│   ├── sample_project_c
│   │   ├── cgc_sample
│   │   ├── include
│   │   │   ├── config.h
│   │   │   ├── math
│   │   │   │   └── vec.h
│   │   │   ├── module.h
│   │   │   ├── platform.h
│   │   │   └── util.h
│   │   ├── Makefile
│   │   ├── README.md
│   │   └── src
│   │       ├── main.c
│   │       ├── math
│   │       │   └── vec.c
│   │       ├── module.c
│   │       └── util.c
│   ├── sample_project_cpp
│   │   ├── class_features.cpp
│   │   ├── classes.cpp
│   │   ├── control_flow.cpp
│   │   ├── edge_cases.cpp
│   │   ├── enum_struct_union.cpp
│   │   ├── exceptions.cpp
│   │   ├── file_io.cpp
│   │   ├── function_chain.cpp
│   │   ├── function_chain.h
│   │   ├── function_types.cpp
│   │   ├── main.cpp
│   │   ├── main.exe
│   │   ├── namespaces.cpp
│   │   ├── raii_example.cpp
│   │   ├── README.md
│   │   ├── sample_project.exe
│   │   ├── stl_usage.cpp
│   │   ├── templates.cpp
│   │   └── types_variable_assignments.cpp
│   ├── sample_project_go
│   │   ├── advanced_types.go
│   │   ├── basic_functions.go
│   │   ├── embedded_composition.go
│   │   ├── error_handling.go
│   │   ├── generics.go
│   │   ├── go.mod
│   │   ├── goroutines_channels.go
│   │   ├── interfaces.go
│   │   ├── packages_imports.go
│   │   ├── README.md
│   │   ├── structs_methods.go
│   │   └── util
│   │       └── helpers.go
│   ├── sample_project_java
│   │   ├── out
│   │   │   └── com
│   │   │       └── example
│   │   │           └── app
│   │   │               ├── annotations
│   │   │               │   └── Logged.class
│   │   │               ├── Main.class
│   │   │               ├── misc
│   │   │               │   ├── Outer.class
│   │   │               │   └── Outer$Inner.class
│   │   │               ├── model
│   │   │               │   ├── Role.class
│   │   │               │   └── User.class
│   │   │               ├── service
│   │   │               │   ├── AbstractGreeter.class
│   │   │               │   ├── GreetingService.class
│   │   │               │   └── impl
│   │   │               │       └── GreetingServiceImpl.class
│   │   │               └── util
│   │   │                   ├── CollectionUtils.class
│   │   │                   └── IOHelper.class
│   │   ├── README.md
│   │   ├── sources.txt
│   │   └── src
│   │       └── com
│   │           └── example
│   │               └── app
│   │                   ├── annotations
│   │                   │   └── Logged.java
│   │                   ├── Main.java
│   │                   ├── misc
│   │                   │   └── Outer.java
│   │                   ├── model
│   │                   │   ├── Role.java
│   │                   │   └── User.java
│   │                   ├── service
│   │                   │   ├── AbstractGreeter.java
│   │                   │   ├── GreetingService.java
│   │                   │   └── impl
│   │                   │       └── GreetingServiceImpl.java
│   │                   └── util
│   │                       ├── CollectionUtils.java
│   │                       └── IOHelper.java
│   ├── sample_project_javascript
│   │   ├── arrays.js
│   │   ├── asyncAwait.js
│   │   ├── classes.js
│   │   ├── dom.js
│   │   ├── errorHandling.js
│   │   ├── events.js
│   │   ├── exporter.js
│   │   ├── fetchAPI.js
│   │   ├── fixtures
│   │   │   └── js
│   │   │       └── accessors.js
│   │   ├── functions.js
│   │   ├── importer.js
│   │   ├── objects.js
│   │   ├── promises.js
│   │   ├── README.md
│   │   └── variables.js
│   ├── sample_project_misc
│   │   ├── index.html
│   │   ├── README.md
│   │   ├── styles.css
│   │   ├── tables.css
│   │   └── tables.html
│   ├── sample_project_php
│   │   ├── classes_objects.php
│   │   ├── database.php
│   │   ├── edgecases.php
│   │   ├── error_handling.php
│   │   ├── file_handling.php
│   │   ├── functions.php
│   │   ├── generators_iterators.php
│   │   ├── globals_superglobals.php
│   │   ├── Inheritance.php
│   │   ├── interface_traits.php
│   │   └── README.md
│   ├── sample_project_ruby
│   │   ├── class_example.rb
│   │   ├── enumerables.rb
│   │   ├── error_handling.rb
│   │   ├── file_io.rb
│   │   ├── inheritance_example.rb
│   │   ├── main.rb
│   │   ├── metaprogramming.rb
│   │   ├── mixins_example.rb
│   │   ├── module_example.rb
│   │   └── tests
│   │       ├── test_mixins.py
│   │       └── test_sample.rb
│   ├── sample_project_rust
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   └── src
│   │       ├── basic_functions.rs
│   │       ├── concurrency.rs
│   │       ├── error_handling.rs
│   │       ├── generics.rs
│   │       ├── iterators_closures.rs
│   │       ├── lib.rs
│   │       ├── lifetimes_references.rs
│   │       ├── modules.rs
│   │       ├── smart_pointers.rs
│   │       ├── structs_enums.rs
│   │       └── traits.rs
│   ├── sample_project_typescript
│   │   ├── package.json
│   │   ├── README.md
│   │   ├── src
│   │   │   ├── advanced-types.ts
│   │   │   ├── async-promises.ts
│   │   │   ├── classes-inheritance.ts
│   │   │   ├── decorators-metadata.ts
│   │   │   ├── error-validation.ts
│   │   │   ├── functions-generics.ts
│   │   │   ├── index.ts
│   │   │   ├── modules-namespaces.ts
│   │   │   ├── types-interfaces.ts
│   │   │   └── utilities-helpers.ts
│   │   └── tsconfig.json
│   ├── test_cpp_parser.py
│   ├── test_database_validation.py
│   ├── test_end_to_end.py
│   ├── test_graph_indexing_js.py
│   ├── test_graph_indexing.py
│   ├── test_tree_sitter
│   │   ├── __init__.py
│   │   ├── class_instantiation.py
│   │   ├── complex_classes.py
│   │   └── test_file.py
│   └── test_typescript_parser.py
└── website
    ├── .example.env
    ├── .gitignore
    ├── api
    │   └── pypi.ts
    ├── bun.lockb
    ├── components.json
    ├── eslint.config.js
    ├── index.html
    ├── package-lock.json
    ├── package.json
    ├── postcss.config.js
    ├── public
    │   ├── favicon.ico
    │   ├── placeholder.svg
    │   └── robots.txt
    ├── README.md
    ├── src
    │   ├── App.css
    │   ├── App.tsx
    │   ├── assets
    │   │   ├── function-calls.png
    │   │   ├── graph-total.png
    │   │   ├── hero-graph.jpg
    │   │   └── hierarchy.png
    │   ├── components
    │   │   ├── ComparisonTable.tsx
    │   │   ├── CookbookSection.tsx
    │   │   ├── DemoSection.tsx
    │   │   ├── ExamplesSection.tsx
    │   │   ├── FeaturesSection.tsx
    │   │   ├── Footer.tsx
    │   │   ├── HeroSection.tsx
    │   │   ├── InstallationSection.tsx
    │   │   ├── MoveToTop.tsx
    │   │   ├── ShowDownloads.tsx
    │   │   ├── ShowStarGraph.tsx
    │   │   ├── TestimonialSection.tsx
    │   │   ├── ThemeProvider.tsx
    │   │   ├── ThemeToggle.tsx
    │   │   └── ui
    │   │       ├── accordion.tsx
    │   │       ├── alert-dialog.tsx
    │   │       ├── alert.tsx
    │   │       ├── aspect-ratio.tsx
    │   │       ├── avatar.tsx
    │   │       ├── badge.tsx
    │   │       ├── breadcrumb.tsx
    │   │       ├── button.tsx
    │   │       ├── calendar.tsx
    │   │       ├── card.tsx
    │   │       ├── carousel.tsx
    │   │       ├── chart.tsx
    │   │       ├── checkbox.tsx
    │   │       ├── collapsible.tsx
    │   │       ├── command.tsx
    │   │       ├── context-menu.tsx
    │   │       ├── dialog.tsx
    │   │       ├── drawer.tsx
    │   │       ├── dropdown-menu.tsx
    │   │       ├── form.tsx
    │   │       ├── hover-card.tsx
    │   │       ├── input-otp.tsx
    │   │       ├── input.tsx
    │   │       ├── label.tsx
    │   │       ├── menubar.tsx
    │   │       ├── navigation-menu.tsx
    │   │       ├── orbiting-circles.tsx
    │   │       ├── pagination.tsx
    │   │       ├── popover.tsx
    │   │       ├── progress.tsx
    │   │       ├── radio-group.tsx
    │   │       ├── resizable.tsx
    │   │       ├── scroll-area.tsx
    │   │       ├── select.tsx
    │   │       ├── separator.tsx
    │   │       ├── sheet.tsx
    │   │       ├── sidebar.tsx
    │   │       ├── skeleton.tsx
    │   │       ├── slider.tsx
    │   │       ├── sonner.tsx
    │   │       ├── switch.tsx
    │   │       ├── table.tsx
    │   │       ├── tabs.tsx
    │   │       ├── textarea.tsx
    │   │       ├── toast.tsx
    │   │       ├── toaster.tsx
    │   │       ├── toggle-group.tsx
    │   │       ├── toggle.tsx
    │   │       ├── tooltip.tsx
    │   │       └── use-toast.ts
    │   ├── hooks
    │   │   ├── use-mobile.tsx
    │   │   └── use-toast.ts
    │   ├── index.css
    │   ├── lib
    │   │   └── utils.ts
    │   ├── main.tsx
    │   ├── pages
    │   │   ├── Index.tsx
    │   │   └── NotFound.tsx
    │   └── vite-env.d.ts
    ├── tailwind.config.ts
    ├── tsconfig.app.json
    ├── tsconfig.json
    ├── tsconfig.node.json
    ├── vercel.json
    └── vite.config.ts
```

# Files

--------------------------------------------------------------------------------
/src/codegraphcontext/cli/setup_wizard.py:
--------------------------------------------------------------------------------

```python
  1 | from InquirerPy import prompt
  2 | from rich.console import Console
  3 | import subprocess
  4 | import platform
  5 | import os
  6 | from pathlib import Path
  7 | import time
  8 | import json
  9 | import sys
 10 | import shutil
 11 | import yaml 
 12 | from codegraphcontext.core.database import DatabaseManager
 13 | 
 14 | console = Console()
 15 | 
 16 | # Constants for Docker Neo4j setup
 17 | DEFAULT_NEO4J_URI = "neo4j://localhost:7687"
 18 | DEFAULT_NEO4J_USERNAME = "neo4j"
 19 | DEFAULT_NEO4J_BOLT_PORT = 7687
 20 | DEFAULT_NEO4J_HTTP_PORT = 7474
 21 | 
 22 | def _generate_mcp_json(creds):
 23 |     """Generates and prints the MCP JSON configuration."""
 24 |     cgc_path = shutil.which("cgc") or sys.executable
 25 | 
 26 |     if "python" in Path(cgc_path).name:
 27 |         # fallback to running as module if no cgc binary is found
 28 |         command = cgc_path
 29 |         args = ["-m", "codegraphcontext", "start"]
 30 |     else:
 31 |         command = cgc_path
 32 |         args = ["start"]
 33 | 
 34 |     mcp_config = {
 35 |         "mcpServers": {
 36 |             "CodeGraphContext": {
 37 |                 "command": command,
 38 |                 "args": args,
 39 |                 "env": {
 40 |                     "NEO4J_URI": creds.get("uri", ""),
 41 |                     "NEO4J_USERNAME": creds.get("username", "neo4j"),
 42 |                     "NEO4J_PASSWORD": creds.get("password", "")
 43 |                 },
 44 |                 "tools": {
 45 |                     "alwaysAllow": [
 46 |                         "add_code_to_graph", "add_package_to_graph",
 47 |                         "check_job_status", "list_jobs", "find_code",
 48 |                         "analyze_code_relationships", "watch_directory",
 49 |                         "find_dead_code", "execute_cypher_query",
 50 |                         "calculate_cyclomatic_complexity", "find_most_complex_functions",
 51 |                         "list_indexed_repositories", "delete_repository", "list_watched_paths", 
 52 |                         "unwatch_directory", "visualize_graph_query"
 53 |                     ],
 54 |                     "disabled": False
 55 |                 },
 56 |                 "disabled": False,
 57 |                 "alwaysAllow": []
 58 |             }
 59 |         }
 60 |     }
 61 | 
 62 |     console.print("\n[bold green]Configuration successful![/bold green]")
 63 |     console.print("Copy the following JSON and add it to your MCP server configuration file:")
 64 |     console.print(json.dumps(mcp_config, indent=2))
 65 | 
 66 |     # Also save to a file for convenience
 67 |     mcp_file = Path.cwd() / "mcp.json"
 68 |     with open(mcp_file, "w") as f:
 69 |         json.dump(mcp_config, f, indent=2)
 70 |     console.print(f"\n[cyan]For your convenience, the configuration has also been saved to: {mcp_file}[/cyan]")
 71 | 
 72 |     # Also save to a .env file for convenience
 73 |     env_file = Path.home() / ".codegraphcontext" / ".env"
 74 |     env_file.parent.mkdir(parents=True, exist_ok=True)
 75 |     with open(env_file, "w") as f:
 76 |         f.write(f"NEO4J_URI={creds.get('uri', '')}\n")
 77 |         f.write(f"NEO4J_USERNAME={creds.get('username', 'neo4j')}\n")
 78 |         f.write(f"NEO4J_PASSWORD={creds.get('password', '')}\n")
 79 | 
 80 |     console.print(f"[cyan]Neo4j credentials also saved to: {env_file}[/cyan]")
 81 |     _configure_ide(mcp_config)
 82 | 
 83 | 
 84 | def find_jetbrains_mcp_config():
 85 |     bases = [
 86 |         Path.home() / ".config" / "JetBrains",
 87 |         Path.home() / "Library/Application Support/JetBrains",
 88 |         Path.home() / "AppData/Roaming/JetBrains"
 89 |     ]
 90 |     configs = []
 91 |     for base in bases:
 92 |         if base.exists():
 93 |             for folder in base.iterdir():  # each IDE/version
 94 |                 options = folder / "options"
 95 |                 mcp_file = options / "mcpServer.xml"
 96 |                 if mcp_file.exists():
 97 |                     configs.append(mcp_file)
 98 |                     console.print(mcp_file)
 99 |     return configs  # always return a list so callers can iterate safely
100 | 
101 | 
102 | def convert_mcp_json_to_yaml():
103 |     json_path = Path.cwd() / "mcp.json"
104 |     yaml_path = Path.cwd() / "devfile.yaml"
105 |     if json_path.exists():
106 |         with open(json_path, "r") as json_file:
107 |             mcp_config = json.load(json_file)
108 |         with open(yaml_path, "w") as yaml_file:
109 |             yaml.dump(mcp_config, yaml_file, default_flow_style=False)
110 |         console.print(f"[green]Generated devfile.yaml for Amazon Q Developer at {yaml_path}[/green]")
111 | 
112 | def _configure_ide(mcp_config):
113 |     """Asks user for their IDE and configures it automatically."""
114 |     questions = [
115 |         {
116 |             "type": "confirm",
117 |             "message": "Automatically configure your IDE/CLI (VS Code, Cursor, Windsurf, Claude, Gemini, Cline, RooCode, ChatGPT Codex, Amazon Q Developer, Aider)?",
118 |             "name": "configure_ide",
119 |             "default": True,
120 |         }
121 |     ]
122 |     result = prompt(questions)
123 |     if not result or not result.get("configure_ide"):
124 |         console.print("\n[cyan]Skipping automatic IDE configuration. You can add the MCP server manually.[/cyan]")
125 |         return
126 | 
127 |     ide_questions = [
128 |         {
129 |             "type": "list",
130 |             "message": "Choose your IDE/CLI to configure:",
131 |             "choices": ["VS Code", "Cursor", "Windsurf", "Claude code", "Gemini CLI", "ChatGPT Codex", "Cline", "RooCode", "Amazon Q Developer", "JetBrainsAI", "Aider", "None of the above"],
132 |             "name": "ide_choice",
133 |         }
134 |     ]
135 |     ide_result = prompt(ide_questions)
136 |     ide_choice = ide_result.get("ide_choice")
137 | 
138 |     if not ide_choice or ide_choice == "None of the above":
139 |         console.print("\n[cyan]You can add the MCP server manually to your IDE/CLI.[/cyan]")
140 |         return
141 | 
142 | 
143 |     if ide_choice in ["VS Code", "Cursor", "Windsurf", "Claude code", "Gemini CLI", "ChatGPT Codex", "Cline", "RooCode", "Amazon Q Developer", "JetBrainsAI", "Aider"]:
144 |         console.print(f"\n[bold cyan]Configuring for {ide_choice}...[/bold cyan]")
145 | 
146 |         if ide_choice == "Amazon Q Developer":
147 |             convert_mcp_json_to_yaml()
148 |             return  
149 |         
150 |         config_paths = {
151 |             "VS Code": [
152 |                 Path.home() / ".config" / "Code" / "User" / "settings.json",
153 |                 Path.home() / "Library" / "Application Support" / "Code" / "User" / "settings.json",
154 |                 Path.home() / "AppData" / "Roaming" / "Code" / "User" / "settings.json"
155 |             ],
156 |             "Cursor": [
157 |                 Path.home() / ".cursor" / "settings.json",
158 |                 Path.home() / ".config" / "cursor" / "settings.json",
159 |                 Path.home() / "Library" / "Application Support" / "cursor" / "settings.json",
160 |                 Path.home() / "AppData" / "Roaming" / "cursor" / "settings.json",
161 |                 Path.home() / ".config" / "Cursor" / "User" / "settings.json",
162 |             ],
163 |             "Windsurf": [
164 |                 Path.home() / ".windsurf" / "settings.json",
165 |                 Path.home() / ".config" / "windsurf" / "settings.json",
166 |                 Path.home() / "Library" / "Application Support" / "windsurf" / "settings.json",
167 |                 Path.home() / "AppData" / "Roaming" / "windsurf" / "settings.json",
168 |                 Path.home() / ".config" / "Windsurf" / "User" / "settings.json",
169 |             ],
170 |             "Claude code": [
171 |                 Path.home() / ".claude.json"
172 |             ],
173 |             "Gemini CLI": [
174 |                 Path.home() / ".gemini" / "settings.json"
175 |             ],
176 |             "ChatGPT Codex": [
177 |                 Path.home() / ".openai" / "mcp_settings.json",
178 |                 Path.home() / ".config" / "openai" / "settings.json",
179 |                 Path.home() / "AppData" / "Roaming" / "OpenAI" / "settings.json"
180 |             ],
181 |             "Cline": [
182 |                 Path.home() / ".config" / "Code" / "User" / "globalStorage" / "saoudrizwan.claude-dev" / "settings" / "cline_mcp_settings.json",
183 |                 Path.home() / ".config" / "Code - OSS" / "User" / "globalStorage" / "saoudrizwan.claude-dev" / "settings" / "cline_mcp_settings.json",
184 |                 Path.home() / "Library" / "Application Support" / "Code" / "User" / "globalStorage" / "saoudrizwan.claude-dev" / "settings" / "cline_mcp_settings.json",
185 |                 Path.home() / "AppData" / "Roaming" / "Code" / "User" / "globalStorage" / "saoudrizwan.claude-dev" / "settings" / "cline_mcp_settings.json"
186 |             ],
187 | 
188 |             "JetBrainsAI": find_jetbrains_mcp_config(),  # only for JetBrains IDEs
189 | 
190 |             "RooCode": [
191 |                 Path.home() / ".config" / "Code" / "User" / "settings.json",   # Linux 
192 |                 Path.home() / "AppData" / "Roaming" / "Code" / "User" / "settings.json",  # Windows
193 |                 Path.home() / "Library" / "Application Support" / "Code" / "User" / "settings.json"  # macOS
194 |             ],
195 |             "Aider": [
196 |                 Path.home() / ".aider" / "settings.json",
197 |                 Path.home() / ".config" / "aider" / "settings.json",
198 |                 Path.home() / "Library" / "Application Support" / "aider" / "settings.json",
199 |                 Path.home() / "AppData" / "Roaming" / "aider" / "settings.json",
200 |                 Path.home() / ".config" / "Aider" / "User" / "settings.json",
201 |             ]
202 |         }
203 | 
204 |         target_path = None
205 |         paths_to_check = config_paths.get(ide_choice, [])
206 |         for path in paths_to_check:
207 |             if path.exists():
208 |                 target_path = path
209 |                 break
210 |         
211 |         if not target_path:
212 |             # If file doesn't exist, check if parent directory exists
213 |             for path in paths_to_check:
214 |                 if path.parent.exists():
215 |                     target_path = path
216 |                     break
217 |         
218 |         if not target_path:
219 |             console.print(f"[yellow]Could not automatically find or create the configuration directory for {ide_choice}.[/yellow]")
220 |             console.print("Please add the MCP configuration manually from the `mcp.json` file generated above.")
221 |             return
222 | 
223 |         console.print(f"Using configuration file at: {target_path}")
224 |         
225 |         try:
226 |             with open(target_path, "r") as f:
227 |                 try:
228 |                     settings = json.load(f)
229 |                 except json.JSONDecodeError:
230 |                     settings = {}
231 |         except FileNotFoundError:
232 |             settings = {}
233 | 
234 |         if not isinstance(settings, dict):
235 |             console.print(f"[red]Error: Configuration file at {target_path} is not a valid JSON object.[/red]")
236 |             return
237 | 
238 |         if "mcpServers" not in settings:
239 |             settings["mcpServers"] = {}
240 |         
241 |         settings["mcpServers"].update(mcp_config["mcpServers"])
242 | 
243 |         try:
244 |             with open(target_path, "w") as f:
245 |                 json.dump(settings, f, indent=2)
246 |             console.print(f"[green]Successfully updated {ide_choice} configuration.[/green]")
247 |         except Exception as e:
248 |             console.print(f"[red]Failed to write to configuration file: {e}[/red]")
249 | 
250 | 
251 | 
252 | 
253 | def get_project_root() -> Path:
254 |     """Always return the directory where the user runs `cgc` (CWD)."""
255 |     return Path.cwd()
256 | 
257 | def run_command(command, console, shell=False, check=True, input_text=None):
258 |     """
259 |     Runs a command, captures its output, and handles execution.
260 |     Returns the completed process object on success, None on failure.
261 |     """
262 |     cmd_str = command if isinstance(command, str) else ' '.join(command)
263 |     console.print(f"[cyan]$ {cmd_str}[/cyan]")
264 |     try:
265 |         process = subprocess.run(
266 |             command,
267 |             shell=shell,
268 |             check=check,
269 |             capture_output=True,  # Always capture to control what gets displayed
270 |             text=True,
271 |             timeout=300,
272 |             input=input_text
273 |         )
274 |         return process
275 |     except subprocess.CalledProcessError as e:
276 |         console.print(f"[bold red]Error executing command:[/bold red] {cmd_str}")
277 |         if e.stdout:
278 |             console.print(f"[red]STDOUT: {e.stdout}[/red]")
279 |         if e.stderr:
280 |             console.print(f"[red]STDERR: {e.stderr}[/red]")
281 |         return None
282 |     except subprocess.TimeoutExpired:
283 |         console.print(f"[bold red]Command timed out:[/bold red] {cmd_str}")
284 |         return None
285 | 
286 | def run_setup_wizard():
287 |     """Guides the user through setting up CodeGraphContext."""
288 |     console.print("[bold cyan]Welcome to the CodeGraphContext Setup Wizard![/bold cyan]")
289 |     
290 |     questions = [
291 |         {
292 |             "type": "list",
293 |             "message": "Where do you want to setup your Neo4j database?",
294 |             "choices": [
295 |                 "Local (Recommended: I'll help you run it on this machine)",
296 |                 "Hosted (Connect to a remote database like AuraDB)",
297 |                 "I already have an existing neo4j instance running.",
298 |             ],
299 |             "name": "db_location",
300 |         }
301 |     ]
302 |     result = prompt(questions)
303 |     db_location = result.get("db_location")
304 | 
305 |     if db_location and "Hosted" in db_location:
306 |         setup_hosted_db()
307 |     elif db_location and "Local" in db_location:
308 |         setup_local_db()
309 |     elif db_location:
310 |         setup_existing_db()
311 | 
312 | def find_latest_neo4j_creds_file():
313 |     """Finds the latest Neo4j credentials file in the Downloads folder."""
314 |     downloads_path = Path.home() / "Downloads"
315 |     if not downloads_path.exists():
316 |         return None
317 |     
318 |     cred_files = list(downloads_path.glob("Neo4j*.txt"))
319 |     if not cred_files:
320 |         return None
321 |         
322 |     latest_file = max(cred_files, key=lambda f: f.stat().st_mtime)
323 |     return latest_file
324 | 
325 | 
326 | def setup_existing_db():
327 |     """Guides user to configure an existing Neo4j instance."""
328 |     console.print("\nTo connect to an existing Neo4j database, you'll need your connection credentials.")
329 |     console.print("If you don't have credentials for the database, you can create a new one using 'Local' installation in the previous menu.")
330 |     
331 |     questions = [
332 | 
333 |         {
334 |             "type": "list",
335 |             "message": "How would you like to add your Neo4j credentials?",
336 |             "choices": ["Add credentials from file", "Add credentials manually"],
337 |             "name": "cred_method",
338 |         }
339 |     ]
340 |     result = prompt(questions)
341 |     cred_method = result.get("cred_method")
342 | 
343 |     creds = {}
344 |     if cred_method and "file" in cred_method:
345 |         latest_file = find_latest_neo4j_creds_file()
346 |         file_to_parse = None
347 |         if latest_file:
348 |             confirm_questions = [
349 |                 {
350 |                     "type": "confirm",
351 |                     "message": f"Found a credentials file: {latest_file}. Use this file?",
352 |                     "name": "use_latest",
353 |                     "default": True,
354 |                 }
355 |             ]
356 |             if prompt(confirm_questions).get("use_latest"):
357 |                 file_to_parse = latest_file
358 | 
359 |         if not file_to_parse:
360 |             path_questions = [
361 |                 {"type": "input", "message": "Please enter the path to your credentials file:", "name": "cred_file_path"}
362 |             ]
363 |             file_path_str = prompt(path_questions).get("cred_file_path", "")
364 |             file_path = Path(file_path_str.strip())
365 |             if file_path.exists() and file_path.is_file():
366 |                 file_to_parse = file_path
367 |             else:
368 |                 console.print("[red]❌ The specified file path does not exist or is not a file.[/red]")
369 |                 return
370 | 
371 |         if file_to_parse:
372 |             try:
373 |                 with open(file_to_parse, "r") as f:
374 |                     for line in f:
375 |                         if "=" in line:
376 |                             key, value = line.strip().split("=", 1)
377 |                             if key == "NEO4J_URI":
378 |                                 creds["uri"] = value
379 |                             elif key == "NEO4J_USERNAME":
380 |                                 creds["username"] = value
381 |                             elif key == "NEO4J_PASSWORD":
382 |                                 creds["password"] = value
383 |             except Exception as e:
384 |                 console.print(f"[red]❌ Failed to parse credentials file: {e}[/red]")
385 |                 return
386 | 
387 |     elif cred_method: # Manual entry
388 |         console.print("Please enter your Neo4j connection details.")
389 |         
390 |         # Loop until valid credentials are provided
391 |         while True:
392 |             questions = [
393 |                 {"type": "input", "message": "URI (e.g., 'neo4j://localhost:7687'):", "name": "uri", "default": "neo4j://localhost:7687"},
394 |                 {"type": "input", "message": "Username:", "name": "username", "default": "neo4j"},
395 |                 {"type": "password", "message": "Password:", "name": "password"},
396 |             ]
397 |             
398 |             manual_creds = prompt(questions)
399 |             if not manual_creds: 
400 |                 return # User cancelled
401 |             
402 |             # Validate the user input
403 |             console.print("\n[cyan]🔍 Validating configuration...[/cyan]")
404 |             is_valid, validation_error = DatabaseManager.validate_config(
405 |                 manual_creds.get("uri", ""),
406 |                 manual_creds.get("username", ""),
407 |                 manual_creds.get("password", "")
408 |             )
409 |             
410 |             if not is_valid:
411 |                 console.print(validation_error)
412 |                 console.print("\n[red]❌ Invalid configuration. Please try again.[/red]\n")
413 |                 continue  # Ask for input again
414 |             
415 |             console.print("[green]✅ Configuration format is valid[/green]")
416 |             
417 |             # Test the connection
418 |             console.print("\n[cyan]🔗 Testing connection...[/cyan]")
419 |             is_connected, error_msg = DatabaseManager.test_connection(
420 |                 manual_creds.get("uri", ""),
421 |                 manual_creds.get("username", ""),
422 |                 manual_creds.get("password", "")
423 |             )
424 |             
425 |             if not is_connected:
426 |                 console.print(error_msg)
427 |                 retry = prompt([{"type": "confirm", "message": "Connection failed. Try again with different credentials?", "name": "retry", "default": True}])
428 |                 if not retry.get("retry"):
429 |                     return
430 |                 continue  # Ask for input again
431 |             
432 |             console.print("[green]✅ Connection successful![/green]")
433 |             creds = manual_creds
434 |             break  # Exit loop with valid credentials
435 | 
436 | 
437 |     if creds.get("uri") and creds.get("password"):
438 |         _generate_mcp_json(creds)
439 |     else:
440 |         console.print("[red]❌ Incomplete credentials. Please try again.[/red]")
441 | 
442 | 
443 | def setup_hosted_db():
444 |     """Guides user to configure a remote Neo4j instance."""
445 |     console.print("\nTo connect to a hosted Neo4j database, you'll need your connection credentials.")
446 |     console.print("[yellow]Warning: You are configuring to connect to a remote/hosted Neo4j database. Ensure your credentials are secure.[/yellow]")
447 |     console.print("If you don't have a hosted database, you can create a free one at [bold blue]https://neo4j.com/product/auradb/[/bold blue] (click 'Start free').")
448 |     
449 |     questions = [
450 | 
451 |         {
452 |             "type": "list",
453 |             "message": "How would you like to add your Neo4j credentials?",
454 |             "choices": ["Add credentials from file", "Add credentials manually"],
455 |             "name": "cred_method",
456 |         }
457 |     ]
458 |     result = prompt(questions)
459 |     cred_method = result.get("cred_method")
460 | 
461 |     creds = {}
462 |     if cred_method and "file" in cred_method:
463 |         latest_file = find_latest_neo4j_creds_file()
464 |         file_to_parse = None
465 |         if latest_file:
466 |             confirm_questions = [
467 |                 {
468 |                     "type": "confirm",
469 |                     "message": f"Found a credentials file: {latest_file}. Use this file?",
470 |                     "name": "use_latest",
471 |                     "default": True,
472 |                 }
473 |             ]
474 |             if prompt(confirm_questions).get("use_latest"):
475 |                 file_to_parse = latest_file
476 | 
477 |         if not file_to_parse:
478 |             path_questions = [
479 |                 {"type": "input", "message": "Please enter the path to your credentials file:", "name": "cred_file_path"}
480 |             ]
481 |             file_path_str = prompt(path_questions).get("cred_file_path", "")
482 |             file_path = Path(file_path_str.strip())
483 |             if file_path.exists() and file_path.is_file():
484 |                 file_to_parse = file_path
485 |             else:
486 |                 console.print("[red]❌ The specified file path does not exist or is not a file.[/red]")
487 |                 return
488 | 
489 |         if file_to_parse:
490 |             try:
491 |                 with open(file_to_parse, "r") as f:
492 |                     for line in f:
493 |                         if "=" in line:
494 |                             key, value = line.strip().split("=", 1)
495 |                             if key == "NEO4J_URI":
496 |                                 creds["uri"] = value
497 |                             elif key == "NEO4J_USERNAME":
498 |                                 creds["username"] = value
499 |                             elif key == "NEO4J_PASSWORD":
500 |                                 creds["password"] = value
501 |             except Exception as e:
502 |                 console.print(f"[red]❌ Failed to parse credentials file: {e}[/red]")
503 |                 return
504 | 
505 |     elif cred_method: # Manual entry
506 |         console.print("Please enter your remote Neo4j connection details.")
507 |         
508 |         # Loop until valid credentials are provided
509 |         while True:
510 |             questions = [
511 |                 {"type": "input", "message": "URI (e.g., neo4j+s://xxxx.databases.neo4j.io):", "name": "uri"},
512 |                 {"type": "input", "message": "Username:", "name": "username", "default": "neo4j"},
513 |                 {"type": "password", "message": "Password:", "name": "password"},
514 |             ]
515 |             
516 |             manual_creds = prompt(questions)
517 |             if not manual_creds:
518 |                 return # User cancelled
519 |             
520 |             # Validate the user input
521 |             console.print("\n[cyan]🔍 Validating configuration...[/cyan]")
522 |             is_valid, validation_error = DatabaseManager.validate_config(
523 |                 manual_creds.get("uri", ""),
524 |                 manual_creds.get("username", ""),
525 |                 manual_creds.get("password", "")
526 |             )
527 |             
528 |             if not is_valid:
529 |                 console.print(validation_error)
530 |                 console.print("\n[red]❌ Invalid configuration. Please try again.[/red]\n")
531 |                 continue  # Ask for input again
532 |             
533 |             console.print("[green]✅ Configuration format is valid[/green]")
534 |             
535 |             # Test the connection
536 |             console.print("\n[cyan]🔗 Testing connection...[/cyan]")
537 |             is_connected, error_msg = DatabaseManager.test_connection(
538 |                 manual_creds.get("uri", ""),
539 |                 manual_creds.get("username", ""),
540 |                 manual_creds.get("password", "")
541 |             )
542 |             
543 |             if not is_connected:
544 |                 console.print(error_msg)
545 |                 retry = prompt([{"type": "confirm", "message": "Connection failed. Try again with different credentials?", "name": "retry", "default": True}])
546 |                 if not retry.get("retry"):
547 |                     return
548 |                 continue  # Ask for input again
549 |             
550 |             console.print("[green]✅ Connection successful![/green]")
551 |             creds = manual_creds
552 |             break  
553 | 
554 | 
555 |     if creds.get("uri") and creds.get("password"):
556 |         _generate_mcp_json(creds)
557 |     else:
558 |         console.print("[red]❌ Incomplete credentials. Please try again.[/red]")
559 | 
560 | def setup_local_db():
561 |     """Guides user to set up a local Neo4j instance."""
562 |     questions = [
563 |         {
564 |             "type": "list",
565 |             "message": "How would you like to run Neo4j locally?",
566 |             "choices": ["Docker (Easiest)", "Local Binary (Advanced)"],
567 |             "name": "local_method",
568 |         }
569 |     ]
570 |     result = prompt(questions)
571 |     local_method = result.get("local_method")
572 | 
573 |     if local_method and "Docker" in local_method:
574 |         setup_docker()
575 |     elif local_method:
576 |         if platform.system() == "Darwin":
577 |             # lazy import to avoid circular import
578 |             from .setup_macos import setup_macos_binary
579 |             setup_macos_binary(console, prompt, run_command, _generate_mcp_json)
580 |         else:
581 |             setup_local_binary()
582 | 
583 | def setup_docker():
584 |     """Creates Docker files and runs docker-compose for Neo4j."""
585 |     console.print("\n[bold cyan]Setting up Neo4j with Docker...[/bold cyan]")
586 | 
587 |     # Prompt for password first
588 |     console.print("Please set a secure password for your Neo4j database:")
589 |     password_questions = [
590 |         {"type": "password", "message": "Enter Neo4j password:", "name": "password"},
591 |         {"type": "password", "message": "Confirm password:", "name": "password_confirm"},
592 |     ]
593 |     
594 |     while True:
595 |         passwords = prompt(password_questions)
596 |         if not passwords:
597 |             return  # User cancelled
598 |         
599 |         password = passwords.get("password", "")
600 |         if password and password == passwords.get("password_confirm"):
601 |             break
602 |         console.print("[red]Passwords do not match or are empty. Please try again.[/red]")
603 | 
604 |     # Create data directories
605 |     neo4j_dir = Path.cwd() / "neo4j_data"
606 |     for subdir in ["data", "logs", "conf", "plugins"]:
607 |         (neo4j_dir / subdir).mkdir(parents=True, exist_ok=True)
608 | 
609 |     # Fixed docker-compose.yml content
610 |     docker_compose_content = f"""
611 | services:
612 |   neo4j:
613 |     image: neo4j:5.21
614 |     container_name: neo4j-cgc
615 |     restart: unless-stopped
616 |     ports:
617 |       - "7474:7474"
618 |       - "7687:7687"
619 |     environment:
620 |       - NEO4J_AUTH=neo4j/{password}
621 |       - NEO4J_ACCEPT_LICENSE_AGREEMENT=yes
622 |     volumes:
623 |       - neo4j_data:/data
624 |       - neo4j_logs:/logs
625 | 
626 | volumes:
627 |   neo4j_data:
628 |   neo4j_logs:
629 | """
630 | 
631 |     # Write docker-compose.yml
632 |     compose_file = Path.cwd() / "docker-compose.yml"
633 |     with open(compose_file, "w") as f:
634 |         f.write(docker_compose_content)
635 | 
636 |     console.print("[green]✅ docker-compose.yml created with secure password.[/green]")
637 | 
638 |     # Validate configuration format before attempting Docker operations
639 |     console.print("\n[cyan]🔍 Validating configuration...[/cyan]")
640 |     is_valid, validation_error = DatabaseManager.validate_config(
641 |         DEFAULT_NEO4J_URI, 
642 |         DEFAULT_NEO4J_USERNAME, 
643 |         password
644 |     )
645 | 
646 |     if not is_valid:
647 |         console.print(validation_error)
648 |         console.print("\n[red]❌ Configuration validation failed. Please fix the issues and try again.[/red]")
649 |         return
650 | 
651 |     console.print("[green]✅ Configuration format is valid[/green]")
652 | 
653 |     # Check if Docker is running
654 |     docker_check = run_command(["docker", "--version"], console, check=False)
655 |     if not docker_check:
656 |         console.print("[red]❌ Docker is not installed or not running. Please install Docker first.[/red]")
657 |         return
658 | 
659 |     # Check if docker-compose is available
660 |     compose_check = run_command(["docker", "compose", "version"], console, check=False)
661 |     if not compose_check:
662 |         console.print("[red]❌ Docker Compose is not available. Please install Docker Compose.[/red]")
663 |         return
664 | 
665 |     confirm_q = [{"type": "confirm", "message": "Ready to launch Neo4j in Docker?", "name": "proceed", "default": True}]
666 |     if not prompt(confirm_q).get("proceed"):
667 |         return
668 | 
669 |     try:
670 |         # Pull the image first
671 |         console.print("[cyan]Pulling Neo4j Docker image...[/cyan]")
672 |         pull_process = run_command(["docker", "pull", "neo4j:5.21"], console, check=True)
673 |         if not pull_process:
674 |             console.print("[yellow]⚠️ Could not pull image, but continuing anyway...[/yellow]")
675 | 
676 |         # Start containers
677 |         console.print("[cyan]Starting Neo4j container...[/cyan]")
678 |         docker_process = run_command(["docker", "compose", "up", "-d"], console, check=True)
679 |         
680 |         if docker_process:
681 |             console.print("[bold green]🚀 Neo4j Docker container started successfully![/bold green]")
682 |             
683 |             # Wait for Neo4j to be ready
684 |             console.print("[cyan]Waiting for Neo4j to be ready (this may take 30-60 seconds)...[/cyan]")
685 |             
686 |             connection_successful = False  # try to connect for up to 2 minutes
687 |             max_attempts = 24  # 24 * 5 seconds = 2 minutes
688 |             for attempt in range(max_attempts):
689 |                 time.sleep(5)
690 |                 
691 |                 # Check if container is still running
692 |                 status_check = run_command(["docker", "compose", "ps", "-q", "neo4j"], console, check=False)
693 |                 if not status_check or not status_check.stdout.strip():
694 |                     console.print("[red]❌ Neo4j container stopped unexpectedly. Check logs with: docker compose logs neo4j[/red]")
695 |                     return
696 |                 
697 |                 # updated test_connection method
698 |                 console.print(f"[yellow]Testing connection... (attempt {attempt + 1}/{max_attempts})[/yellow]")
699 |                 is_connected, error_msg = DatabaseManager.test_connection(DEFAULT_NEO4J_URI, DEFAULT_NEO4J_USERNAME, password)
700 |                 
701 |                 if is_connected:
702 |                     console.print("[bold green]✅ Neo4j is ready and accepting connections![/bold green]")
703 |                     connection_successful = True
704 |                     break
705 |                 
706 |                 else:
707 |                     # Only show detailed error on last attempt
708 |                     if attempt == max_attempts - 1:
709 |                         console.print("\n[red]❌ Neo4j did not become ready within 2 minutes.[/red]")
710 |                         console.print(error_msg)
711 |                         console.print("\n[cyan]Troubleshooting:[/cyan]")
712 |                         console.print("  • Check logs: docker compose logs neo4j")
713 |                         console.print("  • Verify container is running: docker ps")
714 |                         console.print("  • Try restarting: docker compose restart")
715 |                         return
716 |             
717 |             if not connection_successful:
718 |                 return
719 | 
720 |             # Generate MCP configuration
721 |             creds = {
722 |                 "uri": DEFAULT_NEO4J_URI,
723 |                 "username": DEFAULT_NEO4J_USERNAME,
724 |                 "password": password
725 |             }
726 | 
727 |             _generate_mcp_json(creds)
728 |             
729 |             console.print("\n[bold green]🎉 Setup complete![/bold green]")
730 |             console.print("Neo4j is running at:")
731 |             console.print("  • Web interface: http://localhost:7474")
732 |             console.print("  • Bolt connection: neo4j://localhost:7687")
733 |             console.print("\n[cyan]Useful commands:[/cyan]")
734 |             console.print("  • Stop: docker compose down")
735 |             console.print("  • Restart: docker compose restart")
736 |             console.print("  • View logs: docker compose logs neo4j")
737 |             
738 |     except Exception as e:
739 |         console.print(f"[bold red]❌ Failed to start Neo4j Docker container:[/bold red] {e}")
740 |         console.print("[cyan]Try checking the logs with: docker compose logs neo4j[/cyan]")
741 | 
742 | def setup_local_binary():
743 |     """Automates the installation and configuration of Neo4j on Ubuntu/Debian."""
744 |     os_name = platform.system()
745 |     console.print(f"Detected Operating System: [bold yellow]{os_name}[/bold yellow]")
746 | 
747 |     if os_name != "Linux" or not os.path.exists("/etc/debian_version"):
748 |         console.print("[yellow]Automated installer is designed for Debian-based systems (like Ubuntu).[/yellow]")
749 |         console.print(f"For other systems, please follow the manual installation guide: [bold blue]https://neo4j.com/docs/operations-manual/current/installation/[/bold blue]")
750 |         return
751 | 
752 |     console.print("[bold]Starting automated Neo4j installation for Ubuntu/Debian.[/bold]")
753 |     console.print("[yellow]This will run several commands with 'sudo'. You will be prompted for your password.[/yellow]")
754 |     confirm_q = [{"type": "confirm", "message": "Do you want to proceed?", "name": "proceed", "default": True}]
755 |     if not prompt(confirm_q).get("proceed"):
756 |         return
757 | 
758 |     NEO4J_VERSION = "1:5.21.0" 
759 | 
760 |     install_commands = [
761 |         ("Creating keyring directory", ["sudo", "mkdir", "-p", "/etc/apt/keyrings"]),
762 |         ("Adding Neo4j GPG key", "wget -qO- https://debian.neo4j.com/neotechnology.gpg.key | sudo gpg --dearmor --yes -o /etc/apt/keyrings/neotechnology.gpg", True),
763 |         ("Adding Neo4j repository", "echo 'deb [signed-by=/etc/apt/keyrings/neotechnology.gpg] https://debian.neo4j.com stable 5' | sudo tee /etc/apt/sources.list.d/neo4j.list > /dev/null", True),
764 |         ("Updating apt sources", ["sudo", "apt-get", "-qq", "update"]),
765 |         (f"Installing Neo4j ({NEO4J_VERSION}) and Cypher Shell", ["sudo", "apt-get", "install", "-qq", "-y", f"neo4j={NEO4J_VERSION}", "cypher-shell"])
766 |     ]
767 | 
768 |     for desc, cmd, use_shell in [(c[0], c[1], c[2] if len(c) > 2 else False) for c in install_commands]:
769 |         console.print(f"\n[bold]Step: {desc}...[/bold]")
770 |         if not run_command(cmd, console, shell=use_shell):
771 |             console.print(f"[bold red]Failed on step: {desc}. Aborting installation.[/bold red]")
772 |             return
773 |             
774 |     console.print("\n[bold green]Neo4j installed successfully![/bold green]")
775 |     
776 |     console.print("\n[bold]Please set the initial password for the 'neo4j' user.[/bold]")
777 |     
778 |     new_password = ""
779 |     while True:
780 |         questions = [
781 |             {"type": "password", "message": "Enter a new password for Neo4j:", "name": "password"},
782 |             {"type": "password", "message": "Confirm the new password:", "name": "password_confirm"},
783 |         ]
784 |         passwords = prompt(questions)
785 |         if not passwords: return # User cancelled
786 |         new_password = passwords.get("password")
787 |         if new_password and new_password == passwords.get("password_confirm"):
788 |             break
789 |         console.print("[red]Passwords do not match or are empty. Please try again.[/red]")
790 | 
791 |     console.print("\n[bold]Stopping Neo4j to set the password...[/bold]")
792 |     if not run_command(["sudo", "systemctl", "stop", "neo4j"], console):
793 |         console.print("[bold red]Could not stop Neo4j service. Aborting.[/bold red]")
794 |         return
795 |         
796 |     console.print("\n[bold]Setting initial password using neo4j-admin...[/bold]")
797 |     pw_command = ["sudo", "-u", "neo4j", "neo4j-admin", "dbms", "set-initial-password", new_password]
798 |     if not run_command(pw_command, console, check=True):
799 |         console.print("[bold red]Failed to set the initial password. Please check the logs.[/bold red]")
800 |         run_command(["sudo", "systemctl", "start", "neo4j"], console)
801 |         return
802 |     
803 |     console.print("\n[bold]Starting Neo4j service...[/bold]")
804 |     if not run_command(["sudo", "systemctl", "start", "neo4j"], console):
805 |         console.print("[bold red]Failed to start Neo4j service after setting password.[/bold red]")
806 |         return
807 | 
808 |     console.print("\n[bold]Enabling Neo4j service to start on boot...[/bold]")
809 |     if not run_command(["sudo", "systemctl", "enable", "neo4j"], console):
810 |         console.print("[bold yellow]Could not enable Neo4j service. You may need to start it manually after reboot.[/bold yellow]")
811 | 
812 |     console.print("[bold green]Password set and service started.[/bold green]")
813 |     
814 |     console.print("\n[yellow]Waiting 10 seconds for the database to become available...[/yellow]")
815 |     time.sleep(10)
816 | 
817 |     creds = {
818 |         "uri": "neo4j://localhost:7687",
819 |         "username": "neo4j",
820 |         "password": new_password
821 |     }
822 |     _generate_mcp_json(creds)
823 |     console.print("\n[bold green]All done! Your local Neo4j instance is ready to use.[/bold green]")
824 | 
```
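
Before handing these credentials to other tools, it can be worth confirming that the freshly installed server actually accepts them. Below is a minimal smoke-test sketch using the official `neo4j` Python driver (an editorial addition, not part of the script above; assumes `pip install neo4j` and a recent 5.x driver; the password string is a placeholder for whatever was chosen during setup):

```python
# Minimal post-install smoke test -- illustrative sketch, not part of the setup script.
from neo4j import GraphDatabase

URI = "neo4j://localhost:7687"
AUTH = ("neo4j", "your-password-here")  # placeholder: the password set during installation

with GraphDatabase.driver(URI, auth=AUTH) as driver:
    driver.verify_connectivity()  # raises if the server is unreachable or auth fails
    result = driver.execute_query("RETURN 1 AS ok")  # execute_query needs neo4j driver >= 5.5
    print("Neo4j answered:", result.records[0]["ok"])
```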

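Once the instance is up, the graph built by `graph_builder.py` (shown below) can be searched through the `code_search_index` full-text index that `GraphBuilder.create_schema` creates. An illustrative query sketch, again with a hypothetical password placeholder:

```python
# Example full-text search against the code_search_index created by GraphBuilder (below).
from neo4j import GraphDatabase

with GraphDatabase.driver("neo4j://localhost:7687", auth=("neo4j", "your-password-here")) as driver:
    records, summary, keys = driver.execute_query(
        """
        CALL db.index.fulltext.queryNodes('code_search_index', $q)
        YIELD node, score
        RETURN labels(node) AS labels, node.name AS name, score
        ORDER BY score DESC LIMIT 5
        """,
        q="parse",  # any Lucene query string
    )
    for r in records:
        print(r["labels"], r["name"], round(r["score"], 2))
```
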
--------------------------------------------------------------------------------
/src/codegraphcontext/tools/graph_builder.py:
--------------------------------------------------------------------------------

```python
  1 | 
  2 | # src/codegraphcontext/tools/graph_builder.py
  3 | import asyncio
  4 | import pathspec
  5 | from pathlib import Path
  6 | from typing import Any, Coroutine, Dict, Optional, Tuple
  7 | from datetime import datetime
  8 | 
  9 | from ..core.database import DatabaseManager
 10 | from ..core.jobs import JobManager, JobStatus
 11 | from ..utils.debug_log import debug_log, info_logger, error_logger, warning_logger
 12 | 
 13 | # New imports for tree-sitter
 14 | from tree_sitter import Language, Parser
 15 | from tree_sitter_languages import get_language
 16 | 
 17 | class TreeSitterParser:
 18 |     """A generic parser wrapper for a specific language using tree-sitter."""
 19 | 
 20 |     def __init__(self, language_name: str):
 21 |         self.language_name = language_name
 22 |         self.language: Language = get_language(language_name)
 23 |         self.parser = Parser()
 24 |         self.parser.set_language(self.language)
 25 | 
 26 |         self.language_specific_parser = None
 27 |         if self.language_name == 'python':
 28 |             from .languages.python import PythonTreeSitterParser
 29 |             self.language_specific_parser = PythonTreeSitterParser(self)
 30 |         elif self.language_name == 'javascript':
 31 |             from .languages.javascript import JavascriptTreeSitterParser
 32 |             self.language_specific_parser = JavascriptTreeSitterParser(self)
 33 |         elif self.language_name == 'go':
 34 |             from .languages.go import GoTreeSitterParser
 35 |             self.language_specific_parser = GoTreeSitterParser(self)
 36 |         elif self.language_name == 'typescript':
 37 |             from .languages.typescript import TypescriptTreeSitterParser
 38 |             self.language_specific_parser = TypescriptTreeSitterParser(self)
 39 |         elif self.language_name == 'cpp':
 40 |             from .languages.cpp import CppTreeSitterParser
 41 |             self.language_specific_parser = CppTreeSitterParser(self)
 42 |         elif self.language_name == 'rust':
 43 |             from .languages.rust import RustTreeSitterParser
 44 |             self.language_specific_parser = RustTreeSitterParser(self)
 45 |         elif self.language_name == 'c':
 46 |             from .languages.c import CTreeSitterParser
 47 |             self.language_specific_parser = CTreeSitterParser(self)
 48 |         elif self.language_name == 'java':
 49 |             from .languages.java import JavaTreeSitterParser
 50 |             self.language_specific_parser = JavaTreeSitterParser(self)
 51 |         elif self.language_name == 'ruby':
 52 |             from .languages.ruby import RubyTreeSitterParser
 53 |             self.language_specific_parser = RubyTreeSitterParser(self)
 54 | 
 55 | 
 56 |     def parse(self, file_path: Path, is_dependency: bool = False, **kwargs) -> Dict:
 57 |         """Dispatches parsing to the language-specific parser."""
 58 |         if self.language_specific_parser:
 59 |             return self.language_specific_parser.parse(file_path, is_dependency, **kwargs)
 60 |         else:
 61 |             raise NotImplementedError(f"No language-specific parser implemented for {self.language_name}")
 62 | 
 63 | class GraphBuilder:
 64 |     """Module for building and managing the Neo4j code graph."""
 65 | 
 66 |     def __init__(self, db_manager: DatabaseManager, job_manager: JobManager, loop: asyncio.AbstractEventLoop):
 67 |         self.db_manager = db_manager
 68 |         self.job_manager = job_manager
 69 |         self.loop = loop
 70 |         self.driver = self.db_manager.get_driver()
 71 |         self.parsers = {
 72 |             '.py': TreeSitterParser('python'),
 73 |             '.ipynb': TreeSitterParser('python'),
 74 |             '.js': TreeSitterParser('javascript'),
 75 |             '.jsx': TreeSitterParser('javascript'),
 76 |             '.mjs': TreeSitterParser('javascript'),
 77 |             '.cjs': TreeSitterParser('javascript'),
 78 |             '.go': TreeSitterParser('go'),
 79 |             '.ts': TreeSitterParser('typescript'),
 80 |             '.tsx': TreeSitterParser('typescript'),
 81 |             '.cpp': TreeSitterParser('cpp'),
 82 |             '.h': TreeSitterParser('cpp'),
 83 |             '.hpp': TreeSitterParser('cpp'),
 84 |             '.rs': TreeSitterParser('rust'),
 85 |             '.c': TreeSitterParser('c'),
 86 |             # '.h': TreeSitterParser('c'), # Need to write an algo for distinguishing C vs C++ headers
 87 |             '.java': TreeSitterParser('java'),
 88 |             '.rb': TreeSitterParser('ruby')
 89 |         }
 90 |         self.create_schema()
 91 | 
 92 |     # A general schema creation based on common features across languages
 93 |     def create_schema(self):
 94 |         """Create constraints and indexes in Neo4j."""
 95 |         # When adding a new node type with a unique key, add its constraint here.
 96 |         with self.driver.session() as session:
 97 |             try:
 98 |                 session.run("CREATE CONSTRAINT repository_path IF NOT EXISTS FOR (r:Repository) REQUIRE r.path IS UNIQUE")
 99 |                 session.run("CREATE CONSTRAINT file_path IF NOT EXISTS FOR (f:File) REQUIRE f.path IS UNIQUE")
100 |                 session.run("CREATE CONSTRAINT directory_path IF NOT EXISTS FOR (d:Directory) REQUIRE d.path IS UNIQUE")
101 |                 session.run("CREATE CONSTRAINT function_unique IF NOT EXISTS FOR (f:Function) REQUIRE (f.name, f.file_path, f.line_number) IS UNIQUE")
102 |                 session.run("CREATE CONSTRAINT class_unique IF NOT EXISTS FOR (c:Class) REQUIRE (c.name, c.file_path, c.line_number) IS UNIQUE")
103 |                 session.run("CREATE CONSTRAINT trait_unique IF NOT EXISTS FOR (t:Trait) REQUIRE (t.name, t.file_path, t.line_number) IS UNIQUE") # Added trait constraint
104 |                 session.run("CREATE CONSTRAINT interface_unique IF NOT EXISTS FOR (i:Interface) REQUIRE (i.name, i.file_path, i.line_number) IS UNIQUE")
105 |                 session.run("CREATE CONSTRAINT macro_unique IF NOT EXISTS FOR (m:Macro) REQUIRE (m.name, m.file_path, m.line_number) IS UNIQUE")
106 |                 session.run("CREATE CONSTRAINT variable_unique IF NOT EXISTS FOR (v:Variable) REQUIRE (v.name, v.file_path, v.line_number) IS UNIQUE")
107 |                 session.run("CREATE CONSTRAINT module_name IF NOT EXISTS FOR (m:Module) REQUIRE m.name IS UNIQUE")
108 |                 session.run("CREATE CONSTRAINT struct_cpp IF NOT EXISTS FOR (cstruct: Struct) REQUIRE (cstruct.name, cstruct.file_path, cstruct.line_number) IS UNIQUE")
109 |                 session.run("CREATE CONSTRAINT enum_cpp IF NOT EXISTS FOR (cenum: Enum) REQUIRE (cenum.name, cenum.file_path, cenum.line_number) IS UNIQUE")
110 |                 session.run("CREATE CONSTRAINT union_cpp IF NOT EXISTS FOR (cunion: Union) REQUIRE (cunion.name, cunion.file_path, cunion.line_number) IS UNIQUE")
111 |                 session.run("CREATE CONSTRAINT annotation_unique IF NOT EXISTS FOR (a:Annotation) REQUIRE (a.name, a.file_path, a.line_number) IS UNIQUE")
112 |                 
113 |                 # Indexes for language attribute
114 |                 session.run("CREATE INDEX function_lang IF NOT EXISTS FOR (f:Function) ON (f.lang)")
115 |                 session.run("CREATE INDEX class_lang IF NOT EXISTS FOR (c:Class) ON (c.lang)")
116 |                 session.run("CREATE INDEX annotation_lang IF NOT EXISTS FOR (a:Annotation) ON (a.lang)")
117 |                 session.run("""
118 |                     CREATE FULLTEXT INDEX code_search_index IF NOT EXISTS 
119 |                     FOR (n:Function|Class|Variable) 
120 |                     ON EACH [n.name, n.source, n.docstring]
121 |                 """ )
122 |                 
123 |                 info_logger("Database schema verified/created successfully")
124 |             except Exception as e:
125 |                 warning_logger(f"Schema creation warning: {e}")
126 | 
127 | 
128 |     def _pre_scan_for_imports(self, files: list[Path]) -> dict:
129 |         """Runs the pre-scan for every language present in the file set and merges the results."""
130 |         imports_map = {}
131 |         
132 |         # Group files by language/extension
133 |         files_by_lang = {}
134 |         for file in files:
135 |             if file.suffix in self.parsers:
136 |                 lang_ext = file.suffix
137 |                 if lang_ext not in files_by_lang:
138 |                     files_by_lang[lang_ext] = []
139 |                 files_by_lang[lang_ext].append(file)
140 | 
141 |         if '.py' in files_by_lang:  # plain 'if' (not 'elif') so every language present gets pre-scanned
142 |             from .languages import python as python_lang_module
143 |             imports_map.update(python_lang_module.pre_scan_python(files_by_lang['.py'], self.parsers['.py']))
144 |         if '.ipynb' in files_by_lang:
145 |             from .languages import python as python_lang_module
146 |             imports_map.update(python_lang_module.pre_scan_python(files_by_lang['.ipynb'], self.parsers['.ipynb']))
147 |         if '.js' in files_by_lang:
148 |             from .languages import javascript as js_lang_module
149 |             imports_map.update(js_lang_module.pre_scan_javascript(files_by_lang['.js'], self.parsers['.js']))
150 |         if '.jsx' in files_by_lang:
151 |             from .languages import javascript as js_lang_module
152 |             imports_map.update(js_lang_module.pre_scan_javascript(files_by_lang['.jsx'], self.parsers['.jsx']))
153 |         if '.mjs' in files_by_lang:
154 |             from .languages import javascript as js_lang_module
155 |             imports_map.update(js_lang_module.pre_scan_javascript(files_by_lang['.mjs'], self.parsers['.mjs']))
156 |         if '.cjs' in files_by_lang:
157 |             from .languages import javascript as js_lang_module
158 |             imports_map.update(js_lang_module.pre_scan_javascript(files_by_lang['.cjs'], self.parsers['.cjs']))
159 |         if '.go' in files_by_lang:
160 |             from .languages import go as go_lang_module
161 |             imports_map.update(go_lang_module.pre_scan_go(files_by_lang['.go'], self.parsers['.go']))
162 |         if '.ts' in files_by_lang:
163 |             from .languages import typescript as ts_lang_module
164 |             imports_map.update(ts_lang_module.pre_scan_typescript(files_by_lang['.ts'], self.parsers['.ts']))
165 |         if '.tsx' in files_by_lang:
166 |             from .languages import typescript as ts_lang_module
167 |             imports_map.update(ts_lang_module.pre_scan_typescript(files_by_lang['.tsx'], self.parsers['.tsx']))
168 |         if '.cpp' in files_by_lang:
169 |             from .languages import cpp as cpp_lang_module
170 |             imports_map.update(cpp_lang_module.pre_scan_cpp(files_by_lang['.cpp'], self.parsers['.cpp']))
171 |         if '.h' in files_by_lang:
172 |             from .languages import cpp as cpp_lang_module
173 |             imports_map.update(cpp_lang_module.pre_scan_cpp(files_by_lang['.h'], self.parsers['.h']))
174 |         if '.hpp' in files_by_lang:
175 |             from .languages import cpp as cpp_lang_module
176 |             imports_map.update(cpp_lang_module.pre_scan_cpp(files_by_lang['.hpp'], self.parsers['.hpp']))
177 |         if '.rs' in files_by_lang:
178 |             from .languages import rust as rust_lang_module
179 |             imports_map.update(rust_lang_module.pre_scan_rust(files_by_lang['.rs'], self.parsers['.rs']))
180 |         if '.c' in files_by_lang:
181 |             from .languages import c as c_lang_module
182 |             imports_map.update(c_lang_module.pre_scan_c(files_by_lang['.c'], self.parsers['.c']))
183 |         if '.java' in files_by_lang:
184 |             from .languages import java as java_lang_module
185 |             imports_map.update(java_lang_module.pre_scan_java(files_by_lang['.java'], self.parsers['.java']))
186 |         if '.rb' in files_by_lang:
187 |             from .languages import ruby as ruby_lang_module
188 |             imports_map.update(ruby_lang_module.pre_scan_ruby(files_by_lang['.rb'], self.parsers['.rb']))
189 |             
190 |         return imports_map
191 | 
192 |     # Language-agnostic method
193 |     def add_repository_to_graph(self, repo_path: Path, is_dependency: bool = False):
194 |         """Adds a repository node using its absolute path as the unique key."""
195 |         repo_name = repo_path.name
196 |         repo_path_str = str(repo_path.resolve())
197 |         with self.driver.session() as session:
198 |             session.run(
199 |                 """
200 |                 MERGE (r:Repository {path: $path})
201 |                 SET r.name = $name, r.is_dependency = $is_dependency
202 |                 """,
203 |                 path=repo_path_str,
204 |                 name=repo_name,
205 |                 is_dependency=is_dependency,
206 |             )
207 | 
208 |     # First pass to add file and its contents
209 |     def add_file_to_graph(self, file_data: Dict, repo_name: str, imports_map: dict):
210 |         """Adds a file and its contents within a single, unified session."""
211 |         info_logger(f"Adding file to graph: {file_data.get('file_path')}")
212 |         file_path_str = str(Path(file_data['file_path']).resolve())
213 |         file_name = Path(file_path_str).name
214 |         is_dependency = file_data.get('is_dependency', False)
215 | 
216 |         with self.driver.session() as session:
217 |             try:
218 |                 # Match repository by path, not name, to avoid conflicts with same-named folders at different locations
219 |                 repo_result = session.run("MATCH (r:Repository {path: $repo_path}) RETURN r.path as path", repo_path=str(Path(file_data['repo_path']).resolve())).single()
220 |                 relative_path = str(Path(file_path_str).relative_to(Path(repo_result['path']))) if repo_result else file_name
221 |             except ValueError:
222 |                 relative_path = file_name
223 | 
224 |             session.run("""
225 |                 MERGE (f:File {path: $path})
226 |                 SET f.name = $name, f.relative_path = $relative_path, f.is_dependency = $is_dependency
227 |             """, path=file_path_str, name=file_name, relative_path=relative_path, is_dependency=is_dependency)
228 | 
229 |             file_path_obj = Path(file_path_str)
230 |             if repo_result is None:
231 |                 return  # No matching Repository node; cannot build the directory hierarchy
232 |             repo_path_obj = Path(repo_result['path'])
233 |             relative_path_to_file = file_path_obj.relative_to(repo_path_obj)
234 |             parent_path = str(repo_path_obj)
235 |             parent_label = 'Repository'
236 | 
237 |             for part in relative_path_to_file.parts[:-1]:
238 |                 current_path = Path(parent_path) / part
239 |                 current_path_str = str(current_path)
240 |                 
241 |                 session.run(f"""
242 |                     MATCH (p:{parent_label} {{path: $parent_path}})
243 |                     MERGE (d:Directory {{path: $current_path}})
244 |                     SET d.name = $part
245 |                     MERGE (p)-[:CONTAINS]->(d)
246 |                 """, parent_path=parent_path, current_path=current_path_str, part=part)
247 | 
248 |                 parent_path = current_path_str
249 |                 parent_label = 'Directory'
250 | 
251 |             session.run(f"""
252 |                 MATCH (p:{parent_label} {{path: $parent_path}})
253 |                 MATCH (f:File {{path: $file_path}})
254 |                 MERGE (p)-[:CONTAINS]->(f)
255 |             """, parent_path=parent_path, file_path=file_path_str)
256 | 
257 |             # CONTAINS relationships for functions, classes, and variables
258 |             # To add a new language-specific node type (e.g., 'Trait' for Rust):
259 |             # 1. Ensure your language-specific parser returns a list under a unique key (e.g., 'traits': [...] ).
260 |             # 2. Add a new constraint for the new label in the `create_schema` method.
261 |             # 3. Add a new entry to the `item_mappings` list below (e.g., (file_data.get('traits', []), 'Trait') ).
262 |             item_mappings = [
263 |                 (file_data.get('functions', []), 'Function'),
264 |                 (file_data.get('classes', []), 'Class'),
265 |                 (file_data.get('traits', []), 'Trait'), # <-- Added trait mapping
266 |                 (file_data.get('variables', []), 'Variable'),
267 |                 (file_data.get('interfaces', []), 'Interface'),
268 |                 (file_data.get('macros', []), 'Macro'),
269 |                 (file_data.get('structs',[]), 'Struct'),
270 |                 (file_data.get('enums',[]), 'Enum'),
271 |                 (file_data.get('unions',[]), 'Union'),
272 |             ]
273 |             for item_data, label in item_mappings:
274 |                 for item in item_data:
275 |                     # Ensure cyclomatic_complexity is set for functions
276 |                     if label == 'Function' and 'cyclomatic_complexity' not in item:
277 |                         item['cyclomatic_complexity'] = 1 # Default value
278 | 
279 |                     query = f"""
280 |                         MATCH (f:File {{path: $file_path}})
281 |                         MERGE (n:{label} {{name: $name, file_path: $file_path, line_number: $line_number}})
282 |                         SET n += $props
283 |                         MERGE (f)-[:CONTAINS]->(n)
284 |                     """
285 |                     session.run(query, file_path=file_path_str, name=item['name'], line_number=item['line_number'], props=item)
286 |                     
287 |                     if label == 'Function':
288 |                         for arg_name in item.get('args', []):
289 |                             session.run("""
290 |                                 MATCH (fn:Function {name: $func_name, file_path: $file_path, line_number: $line_number})
291 |                                 MERGE (p:Parameter {name: $arg_name, file_path: $file_path, function_line_number: $line_number})
292 |                                 MERGE (fn)-[:HAS_PARAMETER]->(p)
293 |                             """, func_name=item['name'], file_path=file_path_str, line_number=item['line_number'], arg_name=arg_name)
294 | 
295 |             # --- NEW: persist Ruby Modules ---
296 |             for m in file_data.get('modules', []):
297 |                 session.run("""
298 |                     MERGE (mod:Module {name: $name})
299 |                     ON CREATE SET mod.lang = $lang
300 |                     ON MATCH  SET mod.lang = coalesce(mod.lang, $lang)
301 |                 """, name=m["name"], lang=file_data.get("lang"))
302 | 
303 |             # Create CONTAINS relationships for nested functions
304 |             for item in file_data.get('functions', []):
305 |                 if item.get("context_type") == "function_definition":
306 |                     session.run("""
307 |                         MATCH (outer:Function {name: $context, file_path: $file_path})
308 |                         MATCH (inner:Function {name: $name, file_path: $file_path, line_number: $line_number})
309 |                         MERGE (outer)-[:CONTAINS]->(inner)
310 |                     """, context=item["context"], file_path=file_path_str, name=item["name"], line_number=item["line_number"])
311 | 
312 |             # Handle imports and create IMPORTS relationships
313 |             for imp in file_data.get('imports', []):
314 |                 info_logger(f"Processing import: {imp}")
315 |                 lang = file_data.get('lang')
316 |                 if lang == 'javascript':
317 |                     # New, correct logic for JS
318 |                     module_name = imp.get('source')
319 |                     if not module_name: continue
320 | 
321 |                     # Use a map for relationship properties to handle optional alias
322 |                     rel_props = {'imported_name': imp.get('name', '*')}
323 |                     if imp.get('alias'):
324 |                         rel_props['alias'] = imp.get('alias')
325 | 
326 |                     session.run("""
327 |                         MATCH (f:File {path: $file_path})
328 |                         MERGE (m:Module {name: $module_name})
329 |                         MERGE (f)-[r:IMPORTS]->(m)
330 |                         SET r += $props
331 |                     """, file_path=file_path_str, module_name=module_name, props=rel_props)
332 |                 else:
333 |                     # Existing logic for Python (and other languages)
334 |                     set_clauses = ["m.alias = $alias"]
335 |                     if 'full_import_name' in imp:
336 |                         set_clauses.append("m.full_import_name = $full_import_name")
337 |                     set_clause_str = ", ".join(set_clauses)
338 | 
339 |                     session.run(f"""
340 |                         MATCH (f:File {{path: $file_path}})
341 |                         MERGE (m:Module {{name: $name}})
342 |                         SET {set_clause_str}
343 |                         MERGE (f)-[:IMPORTS]->(m)
344 |                     """, file_path=file_path_str, **imp)
345 | 
346 |             # Handle CONTAINS relationship between class to their children like variables
347 |             for func in file_data.get('functions', []):
348 |                 if func.get('class_context'):
349 |                     session.run("""
350 |                         MATCH (c:Class {name: $class_name, file_path: $file_path})
351 |                         MATCH (fn:Function {name: $func_name, file_path: $file_path, line_number: $func_line})
352 |                         MERGE (c)-[:CONTAINS]->(fn)
353 |                     """, 
354 |                     class_name=func['class_context'],
355 |                     file_path=file_path_str,
356 |                     func_name=func['name'],
357 |                     func_line=func['line_number'])
358 | 
359 |             # --- NEW: Class INCLUDES Module (Ruby mixins) ---
360 |             for inc in file_data.get('module_inclusions', []):
361 |                 session.run("""
362 |                     MATCH (c:Class {name: $class_name, file_path: $file_path})
363 |                     MERGE (m:Module {name: $module_name})
364 |                     MERGE (c)-[:INCLUDES]->(m)
365 |                 """,
366 |                 class_name=inc["class"],
367 |                 file_path=file_path_str,
368 |                 module_name=inc["module"])
369 | 
370 |             # Class inheritance is handled in a separate pass after all files are processed.
371 |             # Function calls are also handled in a separate pass after all files are processed.
372 | 
373 |     # Second pass to create relationships that depend on all files being present like call functions and class inheritance
374 |     def _create_function_calls(self, session, file_data: Dict, imports_map: dict):
375 |         """Create CALLS relationships with a unified, prioritized logic flow for all call types."""
376 |         caller_file_path = str(Path(file_data['file_path']).resolve())
377 |         local_function_names = {func['name'] for func in file_data.get('functions', [])}
378 |         local_imports = {imp.get('alias') or imp['name'].split('.')[-1]: imp['name'] 
379 |                         for imp in file_data.get('imports', [])}
380 |         
381 |         for call in file_data.get('function_calls', []):
382 |             called_name = call['name']
383 |             if called_name in (__builtins__ if isinstance(__builtins__, dict) else vars(__builtins__)): continue  # __builtins__ may be a dict or a module
384 | 
385 |             resolved_path = None
386 |             
387 |             if call.get('inferred_obj_type'):
388 |                 obj_type = call['inferred_obj_type']
389 |                 possible_paths = imports_map.get(obj_type, [])
390 |                 if len(possible_paths) > 0:
391 |                     resolved_path = possible_paths[0]
392 |             
393 |             else:
394 |                 lookup_name = call['full_name'].split('.')[0] if '.' in call['full_name'] else called_name
395 |                 possible_paths = imports_map.get(lookup_name, [])
396 | 
397 |                 if lookup_name in local_function_names:
398 |                     resolved_path = caller_file_path
399 |                 elif len(possible_paths) == 1:
400 |                     resolved_path = possible_paths[0]
401 |                 elif len(possible_paths) > 1 and lookup_name in local_imports:
402 |                     full_import_name = local_imports[lookup_name]
403 |                     for path in possible_paths:
404 |                         if full_import_name.replace('.', '/') in path:
405 |                             resolved_path = path
406 |                             break
407 |             
408 |             if not resolved_path:
409 |                 if called_name in imports_map and imports_map[called_name]:
410 |                     resolved_path = imports_map[called_name][0]
411 |                 else:
412 |                     resolved_path = caller_file_path
413 | 
414 |             caller_context = call.get('context')
415 |             if caller_context and len(caller_context) == 3 and caller_context[0] is not None:
416 |                 caller_name, _, caller_line_number = caller_context
417 |                 session.run("""
418 |                     MATCH (caller:Function {name: $caller_name, file_path: $caller_file_path, line_number: $caller_line_number})
419 |                     MATCH (called:Function {name: $called_name, file_path: $called_file_path})
420 |                     MERGE (caller)-[:CALLS {line_number: $line_number, args: $args, full_call_name: $full_call_name}]->(called)
421 |                 """,
422 |                 caller_name=caller_name,
423 |                 caller_file_path=caller_file_path,
424 |                 caller_line_number=caller_line_number,
425 |                 called_name=called_name,
426 |                 called_file_path=resolved_path,
427 |                 line_number=call['line_number'],
428 |                 args=call.get('args', []),
429 |                 full_call_name=call.get('full_name', called_name))
430 |             else:
431 |                 session.run("""
432 |                     MATCH (caller:File {path: $caller_file_path})
433 |                     MATCH (called:Function {name: $called_name, file_path: $called_file_path})
434 |                     MERGE (caller)-[:CALLS {line_number: $line_number, args: $args, full_call_name: $full_call_name}]->(called)
435 |                 """,
436 |                 caller_file_path=caller_file_path,
437 |                 called_name=called_name,
438 |                 called_file_path=resolved_path,
439 |                 line_number=call['line_number'],
440 |                 args=call.get('args', []),
441 |                 full_call_name=call.get('full_name', called_name))
442 | 
443 |     def _create_all_function_calls(self, all_file_data: list[Dict], imports_map: dict):
444 |         """Create CALLS relationships for all functions after all files have been processed."""
445 |         with self.driver.session() as session:
446 |             for file_data in all_file_data:
447 |                 self._create_function_calls(session, file_data, imports_map)
448 | 
449 |     def _create_inheritance_links(self, session, file_data: Dict, imports_map: dict):
450 |         """Create INHERITS relationships with a more robust resolution logic."""
451 |         caller_file_path = str(Path(file_data['file_path']).resolve())
452 |         local_class_names = {c['name'] for c in file_data.get('classes', [])}
453 |         # Create a map of local import aliases/names to full import names
454 |         local_imports = {imp.get('alias') or imp['name'].split('.')[-1]: imp['name']
455 |                          for imp in file_data.get('imports', [])}
456 | 
457 |         for class_item in file_data.get('classes', []):
458 |             if not class_item.get('bases'):
459 |                 continue
460 | 
461 |             for base_class_str in class_item['bases']:
462 |                 if base_class_str == 'object':
463 |                     continue
464 | 
465 |                 resolved_path = None
466 |                 target_class_name = base_class_str.split('.')[-1]
467 | 
468 |                 # Handle qualified names like module.Class or alias.Class
469 |                 if '.' in base_class_str:
470 |                     lookup_name = base_class_str.split('.')[0]
471 |                     
472 |                     # Case 1: The prefix is a known import
473 |                     if lookup_name in local_imports:
474 |                         full_import_name = local_imports[lookup_name]
475 |                         possible_paths = imports_map.get(target_class_name, [])
476 |                         # Find the path that corresponds to the imported module
477 |                         for path in possible_paths:
478 |                             if full_import_name.replace('.', '/') in path:
479 |                                 resolved_path = path
480 |                                 break
481 |                 # Handle simple names
482 |                 else:
483 |                     lookup_name = base_class_str
484 |                     # Case 2: The base class is in the same file
485 |                     if lookup_name in local_class_names:
486 |                         resolved_path = caller_file_path
487 |                     # Case 3: The base class was imported directly (e.g., from module import Parent)
488 |                     elif lookup_name in local_imports:
489 |                         full_import_name = local_imports[lookup_name]
490 |                         possible_paths = imports_map.get(target_class_name, [])
491 |                         for path in possible_paths:
492 |                             if full_import_name.replace('.', '/') in path:
493 |                                 resolved_path = path
494 |                                 break
495 |                     # Case 4: Fallback to global map (less reliable)
496 |                     elif lookup_name in imports_map:
497 |                         possible_paths = imports_map[lookup_name]
498 |                         if len(possible_paths) == 1:
499 |                             resolved_path = possible_paths[0]
500 |                 
501 |                 # If a path was found, create the relationship
502 |                 if resolved_path:
503 |                     session.run("""
504 |                         MATCH (child:Class {name: $child_name, file_path: $file_path})
505 |                         MATCH (parent:Class {name: $parent_name, file_path: $resolved_parent_file_path})
506 |                         MERGE (child)-[:INHERITS]->(parent)
507 |                     """,
508 |                     child_name=class_item['name'],
509 |                     file_path=caller_file_path,
510 |                     parent_name=target_class_name,
511 |                     resolved_parent_file_path=resolved_path)
512 | 
513 |     def _create_all_inheritance_links(self, all_file_data: list[Dict], imports_map: dict):
514 |         """Create INHERITS relationships for all classes after all files have been processed."""
515 |         with self.driver.session() as session:
516 |             for file_data in all_file_data:
517 |                 self._create_inheritance_links(session, file_data, imports_map)
518 |                 
519 |     def delete_file_from_graph(self, file_path: str):
520 |         """Deletes a file and all its contained elements and relationships."""
521 |         file_path_str = str(Path(file_path).resolve())
522 |         with self.driver.session() as session:
523 |             parents_res = session.run("""
524 |                 MATCH (f:File {path: $path})<-[:CONTAINS*]-(d:Directory)
525 |                 RETURN d.path as path ORDER BY d.path DESC
526 |             """, path=file_path_str)
527 |             parent_paths = [record["path"] for record in parents_res]
528 | 
529 |             session.run(
530 |                 """
531 |                 MATCH (f:File {path: $path})
532 |                 OPTIONAL MATCH (f)-[:CONTAINS]->(element)
533 |                 DETACH DELETE f, element
534 |                 """,
535 |                 path=file_path_str,
536 |             )
537 |             info_logger(f"Deleted file and its elements from graph: {file_path_str}")
538 | 
539 |             for path in parent_paths:
540 |                 session.run("""
541 |                     MATCH (d:Directory {path: $path})
542 |                     WHERE NOT (d)-[:CONTAINS]->()
543 |                     DETACH DELETE d
544 |                 """, path=path)
545 | 
546 |     def delete_repository_from_graph(self, repo_path: str):
547 |         """Deletes a repository and all its contents from the graph."""
548 |         repo_path_str = str(Path(repo_path).resolve())
549 |         with self.driver.session() as session:
550 |             session.run("""MATCH (r:Repository {path: $path})
551 |                           OPTIONAL MATCH (r)-[:CONTAINS*]->(e)
552 |                           DETACH DELETE r, e""", path=repo_path_str)
553 |             info_logger(f"Deleted repository and its contents from graph: {repo_path_str}")
554 | 
555 |     def update_file_in_graph(self, file_path: Path, repo_path: Path, imports_map: dict):
556 |         """Updates a single file's nodes in the graph."""
557 |         file_path_str = str(file_path.resolve())
558 |         repo_name = repo_path.name
559 |         
560 |         self.delete_file_from_graph(file_path_str)
561 | 
562 |         if file_path.exists():
563 |             file_data = self.parse_file(repo_path, file_path)
564 |             
565 |             if "error" not in file_data:
566 |                 self.add_file_to_graph(file_data, repo_name, imports_map)
567 |                 return file_data
568 |             else:
569 |                 error_logger(f"Skipping graph add for {file_path_str} due to parsing error: {file_data['error']}")
570 |                 return None
571 |         else:
572 |             return {"deleted": True, "path": file_path_str}
573 | 
574 |     def parse_file(self, repo_path: Path, file_path: Path, is_dependency: bool = False) -> Dict:
575 |         """Parses a file with the appropriate language parser and extracts code elements."""
576 |         parser = self.parsers.get(file_path.suffix)
577 |         if not parser:
578 |             warning_logger(f"No parser found for file extension {file_path.suffix}. Skipping {file_path}")
579 |             return {"file_path": str(file_path), "error": f"No parser for {file_path.suffix}"}
580 | 
581 |         debug_log(f"[parse_file] Starting parsing for: {file_path} with {parser.language_name} parser")
582 |         try:
583 |             if parser.language_name == 'python':
584 |                 is_notebook = file_path.suffix == '.ipynb'
585 |                 file_data = parser.parse(file_path, is_dependency, is_notebook=is_notebook)
586 |             else:
587 |                 file_data = parser.parse(file_path, is_dependency)
588 |             file_data['repo_path'] = str(repo_path)
589 |             debug_log(f"[parse_file] Successfully parsed: {file_path}")
590 |             return file_data
591 |             
592 |         except Exception as e:
593 |             error_logger(f"Error parsing {file_path} with {parser.language_name} parser: {e}")
594 |             debug_log(f"[parse_file] Error parsing {file_path}: {e}")
595 |             return {"file_path": str(file_path), "error": str(e)}
596 | 
597 |     def estimate_processing_time(self, path: Path) -> Optional[Tuple[int, float]]:
598 |         """Estimate processing time and file count"""
599 |         try:
600 |             supported_extensions = self.parsers.keys()
601 |             if path.is_file():
602 |                 if path.suffix in supported_extensions:
603 |                     files = [path]
604 |                 else:
605 |                     return 0, 0.0 # Not a supported file type
606 |             else:
607 |                 all_files = path.rglob("*")
608 |                 files = [f for f in all_files if f.is_file() and f.suffix in supported_extensions]
609 |             
610 |             total_files = len(files)
611 |             estimated_time = total_files * 0.05 # tree-sitter is faster
612 |             return total_files, estimated_time
613 |         except Exception as e:
614 |             error_logger(f"Could not estimate processing time for {path}: {e}")
615 |             return None
616 | 
617 |     async def build_graph_from_path_async(
618 |         self, path: Path, is_dependency: bool = False, job_id: str = None
619 |     ):
620 |         """Builds graph from a directory or file path."""
621 |         try:
622 |             if job_id:
623 |                 self.job_manager.update_job(job_id, status=JobStatus.RUNNING)
624 |             
625 |             self.add_repository_to_graph(path, is_dependency)
626 |             repo_name = path.name
627 | 
628 |             cgcignore_path = path / ".cgcignore"
629 |             if cgcignore_path.exists():
630 |                 with open(cgcignore_path) as f:
631 |                     ignore_patterns = f.read().splitlines()
632 |                 spec = pathspec.PathSpec.from_lines('gitwildmatch', ignore_patterns)
633 |             else:
634 |                 spec = None
635 | 
636 |             supported_extensions = self.parsers.keys()
637 |             all_files = path.rglob("*") if path.is_dir() else [path]
638 |             files = [f for f in all_files if f.is_file() and f.suffix in supported_extensions]
639 |             if spec:
640 |                 files = [f for f in files if not spec.match_file(str(f.relative_to(path)))]
641 |             if job_id:
642 |                 self.job_manager.update_job(job_id, total_files=len(files))
643 |             
644 |             debug_log("Starting pre-scan to build imports map...")
645 |             imports_map = self._pre_scan_for_imports(files)
646 |             debug_log(f"Pre-scan complete. Found {len(imports_map)} definitions.")
647 | 
648 |             all_file_data = []
649 | 
650 |             processed_count = 0
651 |             for file in files:
652 |                 if file.is_file():
653 |                     if job_id:
654 |                         self.job_manager.update_job(job_id, current_file=str(file))
655 |                     repo_path = path.resolve() if path.is_dir() else file.parent.resolve()
656 |                     file_data = self.parse_file(repo_path, file, is_dependency)
657 |                     if "error" not in file_data:
658 |                         self.add_file_to_graph(file_data, repo_name, imports_map)
659 |                         all_file_data.append(file_data)
660 |                     processed_count += 1
661 |                     if job_id:
662 |                         self.job_manager.update_job(job_id, processed_files=processed_count)
663 |                     await asyncio.sleep(0.01)
664 | 
665 |             self._create_all_inheritance_links(all_file_data, imports_map)
666 |             self._create_all_function_calls(all_file_data, imports_map)
667 |             
668 |             if job_id:
669 |                 self.job_manager.update_job(job_id, status=JobStatus.COMPLETED, end_time=datetime.now())
670 |         except Exception as e:
671 |             error_message = str(e)
672 |             error_logger(f"Failed to build graph for path {path}: {error_message}")
673 |             if job_id:
674 |                 # If the repository disappeared mid-run, mark the job cancelled rather than failed
675 |                 if "no such file found" in error_message or "deleted" in error_message or "not found" in error_message:
676 |                     status = JobStatus.CANCELLED
677 |                 else:
678 |                     status = JobStatus.FAILED
679 | 
680 |                 self.job_manager.update_job(
681 |                     job_id, status=status, end_time=datetime.now(), errors=[error_message]
682 |                 )
683 | 
```
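
A note on the second-pass call resolution above: the priority order in `_create_function_calls` is easiest to see on concrete data. The sketch below restates its path-resolution logic (omitting the `inferred_obj_type` branch) against a hand-made `imports_map`; every name and path here is invented for illustration:

```python
# Illustrative restatement of the CALLS resolution order from _create_function_calls.
# All names, paths, and the imports_map below are made up for demonstration.

def resolve_call(called_name, full_name, local_functions, local_imports, imports_map, caller_file):
    lookup = full_name.split('.')[0] if '.' in full_name else called_name
    candidates = imports_map.get(lookup, [])
    if lookup in local_functions:                  # 1. defined in the calling file itself
        return caller_file
    if len(candidates) == 1:                       # 2. exactly one known definition anywhere
        return candidates[0]
    if len(candidates) > 1 and lookup in local_imports:
        module_path = local_imports[lookup].replace('.', '/')
        for path in candidates:                    # 3. disambiguate via this file's imports
            if module_path in path:
                return path
    # 4. fall back to any definition of the bare name, else assume the same file
    return (imports_map.get(called_name) or [caller_file])[0]

imports_map = {
    "helper": ["/repo/util.py"],
    "parse": ["/repo/pkg/utils.py", "/repo/legacy/parser.py"],
}
print(resolve_call("helper", "helper", set(), {}, imports_map, "/repo/main.py"))
# -> /repo/util.py (rule 2: unique global match)
print(resolve_call("parse", "parse", set(), {"parse": "pkg.utils"}, imports_map, "/repo/main.py"))
# -> /repo/pkg/utils.py (rule 3: two candidates, picked via the local import)
```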

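`build_graph_from_path_async` honours a `.cgcignore` file of gitignore-style patterns via `pathspec`, exactly as in the source above. The matching semantics in isolation (patterns and paths here are examples, not project defaults):

```python
# How .cgcignore filtering behaves; patterns and paths are illustrative only.
import pathspec

spec = pathspec.PathSpec.from_lines("gitwildmatch", ["tests/", "*.min.js", "build/**"])

for rel_path in ["src/app.py", "tests/test_app.py", "docs/site/search.min.js"]:
    print(rel_path, "->", "ignored" if spec.match_file(rel_path) else "indexed")
# src/app.py -> indexed
# tests/test_app.py -> ignored
# docs/site/search.min.js -> ignored
```
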
--------------------------------------------------------------------------------
/docs/site/assets/javascripts/workers/search.973d3a69.min.js:
--------------------------------------------------------------------------------

```javascript
 1 | "use strict";(()=>{var xe=Object.create;var U=Object.defineProperty,ve=Object.defineProperties,Se=Object.getOwnPropertyDescriptor,Te=Object.getOwnPropertyDescriptors,Qe=Object.getOwnPropertyNames,J=Object.getOwnPropertySymbols,Ee=Object.getPrototypeOf,Z=Object.prototype.hasOwnProperty,be=Object.prototype.propertyIsEnumerable;var K=Math.pow,X=(t,e,r)=>e in t?U(t,e,{enumerable:!0,configurable:!0,writable:!0,value:r}):t[e]=r,A=(t,e)=>{for(var r in e||(e={}))Z.call(e,r)&&X(t,r,e[r]);if(J)for(var r of J(e))be.call(e,r)&&X(t,r,e[r]);return t},G=(t,e)=>ve(t,Te(e));var Le=(t,e)=>()=>(e||t((e={exports:{}}).exports,e),e.exports);var we=(t,e,r,n)=>{if(e&&typeof e=="object"||typeof e=="function")for(let i of Qe(e))!Z.call(t,i)&&i!==r&&U(t,i,{get:()=>e[i],enumerable:!(n=Se(e,i))||n.enumerable});return t};var Pe=(t,e,r)=>(r=t!=null?xe(Ee(t)):{},we(e||!t||!t.__esModule?U(r,"default",{value:t,enumerable:!0}):r,t));var B=(t,e,r)=>new Promise((n,i)=>{var s=u=>{try{a(r.next(u))}catch(c){i(c)}},o=u=>{try{a(r.throw(u))}catch(c){i(c)}},a=u=>u.done?n(u.value):Promise.resolve(u.value).then(s,o);a((r=r.apply(t,e)).next())});var re=Le((ee,te)=>{/**
 2 |  * lunr - http://lunrjs.com - A bit like Solr, but much smaller and not as bright - 2.3.9
 3 |  * Copyright (C) 2020 Oliver Nightingale
 4 |  * @license MIT
 5 |  */(function(){var t=function(e){var r=new t.Builder;return r.pipeline.add(t.trimmer,t.stopWordFilter,t.stemmer),r.searchPipeline.add(t.stemmer),e.call(r,r),r.build()};t.version="2.3.9";/*!
 6 |  * lunr.utils
 7 |  * Copyright (C) 2020 Oliver Nightingale
 8 |  */t.utils={},t.utils.warn=(function(e){return function(r){e.console&&console.warn&&console.warn(r)}})(this),t.utils.asString=function(e){return e==null?"":e.toString()},t.utils.clone=function(e){if(e==null)return e;for(var r=Object.create(null),n=Object.keys(e),i=0;i<n.length;i++){var s=n[i],o=e[s];if(Array.isArray(o)){r[s]=o.slice();continue}if(typeof o=="string"||typeof o=="number"||typeof o=="boolean"){r[s]=o;continue}throw new TypeError("clone is not deep and does not support nested objects")}return r},t.FieldRef=function(e,r,n){this.docRef=e,this.fieldName=r,this._stringValue=n},t.FieldRef.joiner="/",t.FieldRef.fromString=function(e){var r=e.indexOf(t.FieldRef.joiner);if(r===-1)throw"malformed field ref string";var n=e.slice(0,r),i=e.slice(r+1);return new t.FieldRef(i,n,e)},t.FieldRef.prototype.toString=function(){return this._stringValue==null&&(this._stringValue=this.fieldName+t.FieldRef.joiner+this.docRef),this._stringValue};/*!
 9 |  * lunr.Set
10 |  * Copyright (C) 2020 Oliver Nightingale
11 |  */t.Set=function(e){if(this.elements=Object.create(null),e){this.length=e.length;for(var r=0;r<this.length;r++)this.elements[e[r]]=!0}else this.length=0},t.Set.complete={intersect:function(e){return e},union:function(){return this},contains:function(){return!0}},t.Set.empty={intersect:function(){return this},union:function(e){return e},contains:function(){return!1}},t.Set.prototype.contains=function(e){return!!this.elements[e]},t.Set.prototype.intersect=function(e){var r,n,i,s=[];if(e===t.Set.complete)return this;if(e===t.Set.empty)return e;this.length<e.length?(r=this,n=e):(r=e,n=this),i=Object.keys(r.elements);for(var o=0;o<i.length;o++){var a=i[o];a in n.elements&&s.push(a)}return new t.Set(s)},t.Set.prototype.union=function(e){return e===t.Set.complete?t.Set.complete:e===t.Set.empty?this:new t.Set(Object.keys(this.elements).concat(Object.keys(e.elements)))},t.idf=function(e,r){var n=0;for(var i in e)i!="_index"&&(n+=Object.keys(e[i]).length);var s=(r-n+.5)/(n+.5);return Math.log(1+Math.abs(s))},t.Token=function(e,r){this.str=e||"",this.metadata=r||{}},t.Token.prototype.toString=function(){return this.str},t.Token.prototype.update=function(e){return this.str=e(this.str,this.metadata),this},t.Token.prototype.clone=function(e){return e=e||function(r){return r},new t.Token(e(this.str,this.metadata),this.metadata)};/*!
12 |  * lunr.tokenizer
13 |  * Copyright (C) 2020 Oliver Nightingale
14 |  */t.tokenizer=function(e,r){if(e==null||e==null)return[];if(Array.isArray(e))return e.map(function(g){return new t.Token(t.utils.asString(g).toLowerCase(),t.utils.clone(r))});for(var n=e.toString().toLowerCase(),i=n.length,s=[],o=0,a=0;o<=i;o++){var u=n.charAt(o),c=o-a;if(u.match(t.tokenizer.separator)||o==i){if(c>0){var f=t.utils.clone(r)||{};f.position=[a,c],f.index=s.length,s.push(new t.Token(n.slice(a,o),f))}a=o+1}}return s},t.tokenizer.separator=/[\s\-]+/;/*!
15 |  * lunr.Pipeline
16 |  * Copyright (C) 2020 Oliver Nightingale
17 |  */t.Pipeline=function(){this._stack=[]},t.Pipeline.registeredFunctions=Object.create(null),t.Pipeline.registerFunction=function(e,r){r in this.registeredFunctions&&t.utils.warn("Overwriting existing registered function: "+r),e.label=r,t.Pipeline.registeredFunctions[e.label]=e},t.Pipeline.warnIfFunctionNotRegistered=function(e){var r=e.label&&e.label in this.registeredFunctions;r||t.utils.warn(`Function is not registered with pipeline. This may cause problems when serialising the index.
18 | `,e)},t.Pipeline.load=function(e){var r=new t.Pipeline;return e.forEach(function(n){var i=t.Pipeline.registeredFunctions[n];if(i)r.add(i);else throw new Error("Cannot load unregistered function: "+n)}),r},t.Pipeline.prototype.add=function(){var e=Array.prototype.slice.call(arguments);e.forEach(function(r){t.Pipeline.warnIfFunctionNotRegistered(r),this._stack.push(r)},this)},t.Pipeline.prototype.after=function(e,r){t.Pipeline.warnIfFunctionNotRegistered(r);var n=this._stack.indexOf(e);if(n==-1)throw new Error("Cannot find existingFn");n=n+1,this._stack.splice(n,0,r)},t.Pipeline.prototype.before=function(e,r){t.Pipeline.warnIfFunctionNotRegistered(r);var n=this._stack.indexOf(e);if(n==-1)throw new Error("Cannot find existingFn");this._stack.splice(n,0,r)},t.Pipeline.prototype.remove=function(e){var r=this._stack.indexOf(e);r!=-1&&this._stack.splice(r,1)},t.Pipeline.prototype.run=function(e){for(var r=this._stack.length,n=0;n<r;n++){for(var i=this._stack[n],s=[],o=0;o<e.length;o++){var a=i(e[o],o,e);if(!(a==null||a===""))if(Array.isArray(a))for(var u=0;u<a.length;u++)s.push(a[u]);else s.push(a)}e=s}return e},t.Pipeline.prototype.runString=function(e,r){var n=new t.Token(e,r);return this.run([n]).map(function(i){return i.toString()})},t.Pipeline.prototype.reset=function(){this._stack=[]},t.Pipeline.prototype.toJSON=function(){return this._stack.map(function(e){return t.Pipeline.warnIfFunctionNotRegistered(e),e.label})};/*!
19 |  * lunr.Vector
20 |  * Copyright (C) 2020 Oliver Nightingale
21 |  */t.Vector=function(e){this._magnitude=0,this.elements=e||[]},t.Vector.prototype.positionForIndex=function(e){if(this.elements.length==0)return 0;for(var r=0,n=this.elements.length/2,i=n-r,s=Math.floor(i/2),o=this.elements[s*2];i>1&&(o<e&&(r=s),o>e&&(n=s),o!=e);)i=n-r,s=r+Math.floor(i/2),o=this.elements[s*2];if(o==e||o>e)return s*2;if(o<e)return(s+1)*2},t.Vector.prototype.insert=function(e,r){this.upsert(e,r,function(){throw"duplicate index"})},t.Vector.prototype.upsert=function(e,r,n){this._magnitude=0;var i=this.positionForIndex(e);this.elements[i]==e?this.elements[i+1]=n(this.elements[i+1],r):this.elements.splice(i,0,e,r)},t.Vector.prototype.magnitude=function(){if(this._magnitude)return this._magnitude;for(var e=0,r=this.elements.length,n=1;n<r;n+=2){var i=this.elements[n];e+=i*i}return this._magnitude=Math.sqrt(e)},t.Vector.prototype.dot=function(e){for(var r=0,n=this.elements,i=e.elements,s=n.length,o=i.length,a=0,u=0,c=0,f=0;c<s&&f<o;)a=n[c],u=i[f],a<u?c+=2:a>u?f+=2:a==u&&(r+=n[c+1]*i[f+1],c+=2,f+=2);return r},t.Vector.prototype.similarity=function(e){return this.dot(e)/this.magnitude()||0},t.Vector.prototype.toArray=function(){for(var e=new Array(this.elements.length/2),r=1,n=0;r<this.elements.length;r+=2,n++)e[n]=this.elements[r];return e},t.Vector.prototype.toJSON=function(){return this.elements};/*!
22 |  * lunr.stemmer
23 |  * Copyright (C) 2020 Oliver Nightingale
24 |  * Includes code from - http://tartarus.org/~martin/PorterStemmer/js.txt
25 |  */t.stemmer=(function(){var e={ational:"ate",tional:"tion",enci:"ence",anci:"ance",izer:"ize",bli:"ble",alli:"al",entli:"ent",eli:"e",ousli:"ous",ization:"ize",ation:"ate",ator:"ate",alism:"al",iveness:"ive",fulness:"ful",ousness:"ous",aliti:"al",iviti:"ive",biliti:"ble",logi:"log"},r={icate:"ic",ative:"",alize:"al",iciti:"ic",ical:"ic",ful:"",ness:""},n="[^aeiou]",i="[aeiouy]",s=n+"[^aeiouy]*",o=i+"[aeiou]*",a="^("+s+")?"+o+s,u="^("+s+")?"+o+s+"("+o+")?$",c="^("+s+")?"+o+s+o+s,f="^("+s+")?"+i,g=new RegExp(a),l=new RegExp(c),m=new RegExp(u),x=new RegExp(f),v=/^(.+?)(ss|i)es$/,d=/^(.+?)([^s])s$/,y=/^(.+?)eed$/,b=/^(.+?)(ed|ing)$/,E=/.$/,w=/(at|bl|iz)$/,R=new RegExp("([^aeiouylsz])\\1$"),j=new RegExp("^"+s+i+"[^aeiouwxy]$"),_=/^(.+?[^aeiou])y$/,D=/^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/,N=/^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/,C=/^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/,V=/^(.+?)(s|t)(ion)$/,P=/^(.+?)e$/,z=/ll$/,$=new RegExp("^"+s+i+"[^aeiouwxy]$"),M=function(h){var S,k,L,p,T,O,F;if(h.length<3)return h;if(L=h.substr(0,1),L=="y"&&(h=L.toUpperCase()+h.substr(1)),p=v,T=d,p.test(h)?h=h.replace(p,"$1$2"):T.test(h)&&(h=h.replace(T,"$1$2")),p=y,T=b,p.test(h)){var Q=p.exec(h);p=g,p.test(Q[1])&&(p=E,h=h.replace(p,""))}else if(T.test(h)){var Q=T.exec(h);S=Q[1],T=x,T.test(S)&&(h=S,T=w,O=R,F=j,T.test(h)?h=h+"e":O.test(h)?(p=E,h=h.replace(p,"")):F.test(h)&&(h=h+"e"))}if(p=_,p.test(h)){var Q=p.exec(h);S=Q[1],h=S+"i"}if(p=D,p.test(h)){var Q=p.exec(h);S=Q[1],k=Q[2],p=g,p.test(S)&&(h=S+e[k])}if(p=N,p.test(h)){var Q=p.exec(h);S=Q[1],k=Q[2],p=g,p.test(S)&&(h=S+r[k])}if(p=C,T=V,p.test(h)){var Q=p.exec(h);S=Q[1],p=l,p.test(S)&&(h=S)}else if(T.test(h)){var Q=T.exec(h);S=Q[1]+Q[2],T=l,T.test(S)&&(h=S)}if(p=P,p.test(h)){var Q=p.exec(h);S=Q[1],p=l,T=m,O=$,(p.test(S)||T.test(S)&&!O.test(S))&&(h=S)}return p=z,T=l,p.test(h)&&T.test(h)&&(p=E,h=h.replace(p,"")),L=="y"&&(h=L.toLowerCase()+h.substr(1)),h};return function(I){return I.update(M)}})(),t.Pipeline.registerFunction(t.stemmer,"stemmer");/*!
26 |  * lunr.stopWordFilter
27 |  * Copyright (C) 2020 Oliver Nightingale
28 |  */t.generateStopWordFilter=function(e){var r=e.reduce(function(n,i){return n[i]=i,n},{});return function(n){if(n&&r[n.toString()]!==n.toString())return n}},t.stopWordFilter=t.generateStopWordFilter(["a","able","about","across","after","all","almost","also","am","among","an","and","any","are","as","at","be","because","been","but","by","can","cannot","could","dear","did","do","does","either","else","ever","every","for","from","get","got","had","has","have","he","her","hers","him","his","how","however","i","if","in","into","is","it","its","just","least","let","like","likely","may","me","might","most","must","my","neither","no","nor","not","of","off","often","on","only","or","other","our","own","rather","said","say","says","she","should","since","so","some","than","that","the","their","them","then","there","these","they","this","tis","to","too","twas","us","wants","was","we","were","what","when","where","which","while","who","whom","why","will","with","would","yet","you","your"]),t.Pipeline.registerFunction(t.stopWordFilter,"stopWordFilter");/*!
29 |  * lunr.trimmer
30 |  * Copyright (C) 2020 Oliver Nightingale
31 |  */t.trimmer=function(e){return e.update(function(r){return r.replace(/^\W+/,"").replace(/\W+$/,"")})},t.Pipeline.registerFunction(t.trimmer,"trimmer");/*!
32 |  * lunr.TokenSet
33 |  * Copyright (C) 2020 Oliver Nightingale
34 |  */t.TokenSet=function(){this.final=!1,this.edges={},this.id=t.TokenSet._nextId,t.TokenSet._nextId+=1},t.TokenSet._nextId=1,t.TokenSet.fromArray=function(e){for(var r=new t.TokenSet.Builder,n=0,i=e.length;n<i;n++)r.insert(e[n]);return r.finish(),r.root},t.TokenSet.fromClause=function(e){return"editDistance"in e?t.TokenSet.fromFuzzyString(e.term,e.editDistance):t.TokenSet.fromString(e.term)},t.TokenSet.fromFuzzyString=function(e,r){for(var n=new t.TokenSet,i=[{node:n,editsRemaining:r,str:e}];i.length;){var s=i.pop();if(s.str.length>0){var o=s.str.charAt(0),a;o in s.node.edges?a=s.node.edges[o]:(a=new t.TokenSet,s.node.edges[o]=a),s.str.length==1&&(a.final=!0),i.push({node:a,editsRemaining:s.editsRemaining,str:s.str.slice(1)})}if(s.editsRemaining!=0){if("*"in s.node.edges)var u=s.node.edges["*"];else{var u=new t.TokenSet;s.node.edges["*"]=u}if(s.str.length==0&&(u.final=!0),i.push({node:u,editsRemaining:s.editsRemaining-1,str:s.str}),s.str.length>1&&i.push({node:s.node,editsRemaining:s.editsRemaining-1,str:s.str.slice(1)}),s.str.length==1&&(s.node.final=!0),s.str.length>=1){if("*"in s.node.edges)var c=s.node.edges["*"];else{var c=new t.TokenSet;s.node.edges["*"]=c}s.str.length==1&&(c.final=!0),i.push({node:c,editsRemaining:s.editsRemaining-1,str:s.str.slice(1)})}if(s.str.length>1){var f=s.str.charAt(0),g=s.str.charAt(1),l;g in s.node.edges?l=s.node.edges[g]:(l=new t.TokenSet,s.node.edges[g]=l),s.str.length==1&&(l.final=!0),i.push({node:l,editsRemaining:s.editsRemaining-1,str:f+s.str.slice(2)})}}}return n},t.TokenSet.fromString=function(e){for(var r=new t.TokenSet,n=r,i=0,s=e.length;i<s;i++){var o=e[i],a=i==s-1;if(o=="*")r.edges[o]=r,r.final=a;else{var u=new t.TokenSet;u.final=a,r.edges[o]=u,r=u}}return n},t.TokenSet.prototype.toArray=function(){for(var e=[],r=[{prefix:"",node:this}];r.length;){var n=r.pop(),i=Object.keys(n.node.edges),s=i.length;n.node.final&&(n.prefix.charAt(0),e.push(n.prefix));for(var o=0;o<s;o++){var a=i[o];r.push({prefix:n.prefix.concat(a),node:n.node.edges[a]})}}return e},t.TokenSet.prototype.toString=function(){if(this._str)return this._str;for(var e=this.final?"1":"0",r=Object.keys(this.edges).sort(),n=r.length,i=0;i<n;i++){var s=r[i],o=this.edges[s];e=e+s+o.id}return e},t.TokenSet.prototype.intersect=function(e){for(var r=new t.TokenSet,n=void 0,i=[{qNode:e,output:r,node:this}];i.length;){n=i.pop();for(var s=Object.keys(n.qNode.edges),o=s.length,a=Object.keys(n.node.edges),u=a.length,c=0;c<o;c++)for(var f=s[c],g=0;g<u;g++){var l=a[g];if(l==f||f=="*"){var m=n.node.edges[l],x=n.qNode.edges[f],v=m.final&&x.final,d=void 0;l in n.output.edges?(d=n.output.edges[l],d.final=d.final||v):(d=new t.TokenSet,d.final=v,n.output.edges[l]=d),i.push({qNode:x,output:d,node:m})}}}return r},t.TokenSet.Builder=function(){this.previousWord="",this.root=new t.TokenSet,this.uncheckedNodes=[],this.minimizedNodes={}},t.TokenSet.Builder.prototype.insert=function(e){var r,n=0;if(e<this.previousWord)throw new Error("Out of order word insertion");for(var i=0;i<e.length&&i<this.previousWord.length&&e[i]==this.previousWord[i];i++)n++;this.minimize(n),this.uncheckedNodes.length==0?r=this.root:r=this.uncheckedNodes[this.uncheckedNodes.length-1].child;for(var i=n;i<e.length;i++){var s=new t.TokenSet,o=e[i];r.edges[o]=s,this.uncheckedNodes.push({parent:r,char:o,child:s}),r=s}r.final=!0,this.previousWord=e},t.TokenSet.Builder.prototype.finish=function(){this.minimize(0)},t.TokenSet.Builder.prototype.minimize=function(e){for(var r=this.uncheckedNodes.length-1;r>=e;r--){var 
n=this.uncheckedNodes[r],i=n.child.toString();i in this.minimizedNodes?n.parent.edges[n.char]=this.minimizedNodes[i]:(n.child._str=i,this.minimizedNodes[i]=n.child),this.uncheckedNodes.pop()}};/*!
35 |  * lunr.Index
36 |  * Copyright (C) 2020 Oliver Nightingale
37 |  */t.Index=function(e){this.invertedIndex=e.invertedIndex,this.fieldVectors=e.fieldVectors,this.tokenSet=e.tokenSet,this.fields=e.fields,this.pipeline=e.pipeline},t.Index.prototype.search=function(e){return this.query(function(r){var n=new t.QueryParser(e,r);n.parse()})},t.Index.prototype.query=function(e){for(var r=new t.Query(this.fields),n=Object.create(null),i=Object.create(null),s=Object.create(null),o=Object.create(null),a=Object.create(null),u=0;u<this.fields.length;u++)i[this.fields[u]]=new t.Vector;e.call(r,r);for(var u=0;u<r.clauses.length;u++){var c=r.clauses[u],f=null,g=t.Set.empty;c.usePipeline?f=this.pipeline.runString(c.term,{fields:c.fields}):f=[c.term];for(var l=0;l<f.length;l++){var m=f[l];c.term=m;var x=t.TokenSet.fromClause(c),v=this.tokenSet.intersect(x).toArray();if(v.length===0&&c.presence===t.Query.presence.REQUIRED){for(var d=0;d<c.fields.length;d++){var y=c.fields[d];o[y]=t.Set.empty}break}for(var b=0;b<v.length;b++)for(var E=v[b],w=this.invertedIndex[E],R=w._index,d=0;d<c.fields.length;d++){var y=c.fields[d],j=w[y],_=Object.keys(j),D=E+"/"+y,N=new t.Set(_);if(c.presence==t.Query.presence.REQUIRED&&(g=g.union(N),o[y]===void 0&&(o[y]=t.Set.complete)),c.presence==t.Query.presence.PROHIBITED){a[y]===void 0&&(a[y]=t.Set.empty),a[y]=a[y].union(N);continue}if(i[y].upsert(R,c.boost,function(ye,me){return ye+me}),!s[D]){for(var C=0;C<_.length;C++){var V=_[C],P=new t.FieldRef(V,y),z=j[V],$;($=n[P])===void 0?n[P]=new t.MatchData(E,y,z):$.add(E,y,z)}s[D]=!0}}}if(c.presence===t.Query.presence.REQUIRED)for(var d=0;d<c.fields.length;d++){var y=c.fields[d];o[y]=o[y].intersect(g)}}for(var M=t.Set.complete,I=t.Set.empty,u=0;u<this.fields.length;u++){var y=this.fields[u];o[y]&&(M=M.intersect(o[y])),a[y]&&(I=I.union(a[y]))}var h=Object.keys(n),S=[],k=Object.create(null);if(r.isNegated()){h=Object.keys(this.fieldVectors);for(var u=0;u<h.length;u++){var P=h[u],L=t.FieldRef.fromString(P);n[P]=new t.MatchData}}for(var u=0;u<h.length;u++){var L=t.FieldRef.fromString(h[u]),p=L.docRef;if(M.contains(p)&&!I.contains(p)){var T=this.fieldVectors[L],O=i[L.fieldName].similarity(T),F;if((F=k[p])!==void 0)F.score+=O,F.matchData.combine(n[L]);else{var Q={ref:p,score:O,matchData:n[L]};k[p]=Q,S.push(Q)}}}return S.sort(function(pe,ge){return ge.score-pe.score})},t.Index.prototype.toJSON=function(){var e=Object.keys(this.invertedIndex).sort().map(function(n){return[n,this.invertedIndex[n]]},this),r=Object.keys(this.fieldVectors).map(function(n){return[n,this.fieldVectors[n].toJSON()]},this);return{version:t.version,fields:this.fields,fieldVectors:r,invertedIndex:e,pipeline:this.pipeline.toJSON()}},t.Index.load=function(e){var r={},n={},i=e.fieldVectors,s=Object.create(null),o=e.invertedIndex,a=new t.TokenSet.Builder,u=t.Pipeline.load(e.pipeline);e.version!=t.version&&t.utils.warn("Version mismatch when loading serialised index. Current version of lunr '"+t.version+"' does not match serialized index '"+e.version+"'");for(var c=0;c<i.length;c++){var f=i[c],g=f[0],l=f[1];n[g]=new t.Vector(l)}for(var c=0;c<o.length;c++){var f=o[c],m=f[0],x=f[1];a.insert(m),s[m]=x}return a.finish(),r.fields=e.fields,r.fieldVectors=n,r.invertedIndex=s,r.tokenSet=a.root,r.pipeline=u,new t.Index(r)};/*!
38 |  * lunr.Builder
39 |  * Copyright (C) 2020 Oliver Nightingale
40 |  */t.Builder=function(){this._ref="id",this._fields=Object.create(null),this._documents=Object.create(null),this.invertedIndex=Object.create(null),this.fieldTermFrequencies={},this.fieldLengths={},this.tokenizer=t.tokenizer,this.pipeline=new t.Pipeline,this.searchPipeline=new t.Pipeline,this.documentCount=0,this._b=.75,this._k1=1.2,this.termIndex=0,this.metadataWhitelist=[]},t.Builder.prototype.ref=function(e){this._ref=e},t.Builder.prototype.field=function(e,r){if(/\//.test(e))throw new RangeError("Field '"+e+"' contains illegal character '/'");this._fields[e]=r||{}},t.Builder.prototype.b=function(e){e<0?this._b=0:e>1?this._b=1:this._b=e},t.Builder.prototype.k1=function(e){this._k1=e},t.Builder.prototype.add=function(e,r){var n=e[this._ref],i=Object.keys(this._fields);this._documents[n]=r||{},this.documentCount+=1;for(var s=0;s<i.length;s++){var o=i[s],a=this._fields[o].extractor,u=a?a(e):e[o],c=this.tokenizer(u,{fields:[o]}),f=this.pipeline.run(c),g=new t.FieldRef(n,o),l=Object.create(null);this.fieldTermFrequencies[g]=l,this.fieldLengths[g]=0,this.fieldLengths[g]+=f.length;for(var m=0;m<f.length;m++){var x=f[m];if(l[x]==null&&(l[x]=0),l[x]+=1,this.invertedIndex[x]==null){var v=Object.create(null);v._index=this.termIndex,this.termIndex+=1;for(var d=0;d<i.length;d++)v[i[d]]=Object.create(null);this.invertedIndex[x]=v}this.invertedIndex[x][o][n]==null&&(this.invertedIndex[x][o][n]=Object.create(null));for(var y=0;y<this.metadataWhitelist.length;y++){var b=this.metadataWhitelist[y],E=x.metadata[b];this.invertedIndex[x][o][n][b]==null&&(this.invertedIndex[x][o][n][b]=[]),this.invertedIndex[x][o][n][b].push(E)}}}},t.Builder.prototype.calculateAverageFieldLengths=function(){for(var e=Object.keys(this.fieldLengths),r=e.length,n={},i={},s=0;s<r;s++){var o=t.FieldRef.fromString(e[s]),a=o.fieldName;i[a]||(i[a]=0),i[a]+=1,n[a]||(n[a]=0),n[a]+=this.fieldLengths[o]}for(var u=Object.keys(this._fields),s=0;s<u.length;s++){var c=u[s];n[c]=n[c]/i[c]}this.averageFieldLength=n},t.Builder.prototype.createFieldVectors=function(){for(var e={},r=Object.keys(this.fieldTermFrequencies),n=r.length,i=Object.create(null),s=0;s<n;s++){for(var o=t.FieldRef.fromString(r[s]),a=o.fieldName,u=this.fieldLengths[o],c=new t.Vector,f=this.fieldTermFrequencies[o],g=Object.keys(f),l=g.length,m=this._fields[a].boost||1,x=this._documents[o.docRef].boost||1,v=0;v<l;v++){var d=g[v],y=f[d],b=this.invertedIndex[d]._index,E,w,R;i[d]===void 0?(E=t.idf(this.invertedIndex[d],this.documentCount),i[d]=E):E=i[d],w=E*((this._k1+1)*y)/(this._k1*(1-this._b+this._b*(u/this.averageFieldLength[a]))+y),w*=m,w*=x,R=Math.round(w*1e3)/1e3,c.insert(b,R)}e[o]=c}this.fieldVectors=e},t.Builder.prototype.createTokenSet=function(){this.tokenSet=t.TokenSet.fromArray(Object.keys(this.invertedIndex).sort())},t.Builder.prototype.build=function(){return this.calculateAverageFieldLengths(),this.createFieldVectors(),this.createTokenSet(),new t.Index({invertedIndex:this.invertedIndex,fieldVectors:this.fieldVectors,tokenSet:this.tokenSet,fields:Object.keys(this._fields),pipeline:this.searchPipeline})},t.Builder.prototype.use=function(e){var r=Array.prototype.slice.call(arguments,1);r.unshift(this),e.apply(this,r)},t.MatchData=function(e,r,n){for(var i=Object.create(null),s=Object.keys(n||{}),o=0;o<s.length;o++){var a=s[o];i[a]=n[a].slice()}this.metadata=Object.create(null),e!==void 0&&(this.metadata[e]=Object.create(null),this.metadata[e][r]=i)},t.MatchData.prototype.combine=function(e){for(var r=Object.keys(e.metadata),n=0;n<r.length;n++){var 
i=r[n],s=Object.keys(e.metadata[i]);this.metadata[i]==null&&(this.metadata[i]=Object.create(null));for(var o=0;o<s.length;o++){var a=s[o],u=Object.keys(e.metadata[i][a]);this.metadata[i][a]==null&&(this.metadata[i][a]=Object.create(null));for(var c=0;c<u.length;c++){var f=u[c];this.metadata[i][a][f]==null?this.metadata[i][a][f]=e.metadata[i][a][f]:this.metadata[i][a][f]=this.metadata[i][a][f].concat(e.metadata[i][a][f])}}}},t.MatchData.prototype.add=function(e,r,n){if(!(e in this.metadata)){this.metadata[e]=Object.create(null),this.metadata[e][r]=n;return}if(!(r in this.metadata[e])){this.metadata[e][r]=n;return}for(var i=Object.keys(n),s=0;s<i.length;s++){var o=i[s];o in this.metadata[e][r]?this.metadata[e][r][o]=this.metadata[e][r][o].concat(n[o]):this.metadata[e][r][o]=n[o]}},t.Query=function(e){this.clauses=[],this.allFields=e},t.Query.wildcard=new String("*"),t.Query.wildcard.NONE=0,t.Query.wildcard.LEADING=1,t.Query.wildcard.TRAILING=2,t.Query.presence={OPTIONAL:1,REQUIRED:2,PROHIBITED:3},t.Query.prototype.clause=function(e){return"fields"in e||(e.fields=this.allFields),"boost"in e||(e.boost=1),"usePipeline"in e||(e.usePipeline=!0),"wildcard"in e||(e.wildcard=t.Query.wildcard.NONE),e.wildcard&t.Query.wildcard.LEADING&&e.term.charAt(0)!=t.Query.wildcard&&(e.term="*"+e.term),e.wildcard&t.Query.wildcard.TRAILING&&e.term.slice(-1)!=t.Query.wildcard&&(e.term=""+e.term+"*"),"presence"in e||(e.presence=t.Query.presence.OPTIONAL),this.clauses.push(e),this},t.Query.prototype.isNegated=function(){for(var e=0;e<this.clauses.length;e++)if(this.clauses[e].presence!=t.Query.presence.PROHIBITED)return!1;return!0},t.Query.prototype.term=function(e,r){if(Array.isArray(e))return e.forEach(function(i){this.term(i,t.utils.clone(r))},this),this;var n=r||{};return n.term=e.toString(),this.clause(n),this},t.QueryParseError=function(e,r,n){this.name="QueryParseError",this.message=e,this.start=r,this.end=n},t.QueryParseError.prototype=new Error,t.QueryLexer=function(e){this.lexemes=[],this.str=e,this.length=e.length,this.pos=0,this.start=0,this.escapeCharPositions=[]},t.QueryLexer.prototype.run=function(){for(var e=t.QueryLexer.lexText;e;)e=e(this)},t.QueryLexer.prototype.sliceString=function(){for(var e=[],r=this.start,n=this.pos,i=0;i<this.escapeCharPositions.length;i++)n=this.escapeCharPositions[i],e.push(this.str.slice(r,n)),r=n+1;return e.push(this.str.slice(r,this.pos)),this.escapeCharPositions.length=0,e.join("")},t.QueryLexer.prototype.emit=function(e){this.lexemes.push({type:e,str:this.sliceString(),start:this.start,end:this.pos}),this.start=this.pos},t.QueryLexer.prototype.escapeCharacter=function(){this.escapeCharPositions.push(this.pos-1),this.pos+=1},t.QueryLexer.prototype.next=function(){if(this.pos>=this.length)return t.QueryLexer.EOS;var e=this.str.charAt(this.pos);return this.pos+=1,e},t.QueryLexer.prototype.width=function(){return this.pos-this.start},t.QueryLexer.prototype.ignore=function(){this.start==this.pos&&(this.pos+=1),this.start=this.pos},t.QueryLexer.prototype.backup=function(){this.pos-=1},t.QueryLexer.prototype.acceptDigitRun=function(){var e,r;do e=this.next(),r=e.charCodeAt(0);while(r>47&&r<58);e!=t.QueryLexer.EOS&&this.backup()},t.QueryLexer.prototype.more=function(){return this.pos<this.length},t.QueryLexer.EOS="EOS",t.QueryLexer.FIELD="FIELD",t.QueryLexer.TERM="TERM",t.QueryLexer.EDIT_DISTANCE="EDIT_DISTANCE",t.QueryLexer.BOOST="BOOST",t.QueryLexer.PRESENCE="PRESENCE",t.QueryLexer.lexField=function(e){return 
e.backup(),e.emit(t.QueryLexer.FIELD),e.ignore(),t.QueryLexer.lexText},t.QueryLexer.lexTerm=function(e){if(e.width()>1&&(e.backup(),e.emit(t.QueryLexer.TERM)),e.ignore(),e.more())return t.QueryLexer.lexText},t.QueryLexer.lexEditDistance=function(e){return e.ignore(),e.acceptDigitRun(),e.emit(t.QueryLexer.EDIT_DISTANCE),t.QueryLexer.lexText},t.QueryLexer.lexBoost=function(e){return e.ignore(),e.acceptDigitRun(),e.emit(t.QueryLexer.BOOST),t.QueryLexer.lexText},t.QueryLexer.lexEOS=function(e){e.width()>0&&e.emit(t.QueryLexer.TERM)},t.QueryLexer.termSeparator=t.tokenizer.separator,t.QueryLexer.lexText=function(e){for(;;){var r=e.next();if(r==t.QueryLexer.EOS)return t.QueryLexer.lexEOS;if(r.charCodeAt(0)==92){e.escapeCharacter();continue}if(r==":")return t.QueryLexer.lexField;if(r=="~")return e.backup(),e.width()>0&&e.emit(t.QueryLexer.TERM),t.QueryLexer.lexEditDistance;if(r=="^")return e.backup(),e.width()>0&&e.emit(t.QueryLexer.TERM),t.QueryLexer.lexBoost;if(r=="+"&&e.width()===1||r=="-"&&e.width()===1)return e.emit(t.QueryLexer.PRESENCE),t.QueryLexer.lexText;if(r.match(t.QueryLexer.termSeparator))return t.QueryLexer.lexTerm}},t.QueryParser=function(e,r){this.lexer=new t.QueryLexer(e),this.query=r,this.currentClause={},this.lexemeIdx=0},t.QueryParser.prototype.parse=function(){this.lexer.run(),this.lexemes=this.lexer.lexemes;for(var e=t.QueryParser.parseClause;e;)e=e(this);return this.query},t.QueryParser.prototype.peekLexeme=function(){return this.lexemes[this.lexemeIdx]},t.QueryParser.prototype.consumeLexeme=function(){var e=this.peekLexeme();return this.lexemeIdx+=1,e},t.QueryParser.prototype.nextClause=function(){var e=this.currentClause;this.query.clause(e),this.currentClause={}},t.QueryParser.parseClause=function(e){var r=e.peekLexeme();if(r!=null)switch(r.type){case t.QueryLexer.PRESENCE:return t.QueryParser.parsePresence;case t.QueryLexer.FIELD:return t.QueryParser.parseField;case t.QueryLexer.TERM:return t.QueryParser.parseTerm;default:var n="expected either a field or a term, found "+r.type;throw r.str.length>=1&&(n+=" with value '"+r.str+"'"),new t.QueryParseError(n,r.start,r.end)}},t.QueryParser.parsePresence=function(e){var r=e.consumeLexeme();if(r!=null){switch(r.str){case"-":e.currentClause.presence=t.Query.presence.PROHIBITED;break;case"+":e.currentClause.presence=t.Query.presence.REQUIRED;break;default:var n="unrecognised presence operator'"+r.str+"'";throw new t.QueryParseError(n,r.start,r.end)}var i=e.peekLexeme();if(i==null){var n="expecting term or field, found nothing";throw new t.QueryParseError(n,r.start,r.end)}switch(i.type){case t.QueryLexer.FIELD:return t.QueryParser.parseField;case t.QueryLexer.TERM:return t.QueryParser.parseTerm;default:var n="expecting term or field, found '"+i.type+"'";throw new t.QueryParseError(n,i.start,i.end)}}},t.QueryParser.parseField=function(e){var r=e.consumeLexeme();if(r!=null){if(e.query.allFields.indexOf(r.str)==-1){var n=e.query.allFields.map(function(o){return"'"+o+"'"}).join(", "),i="unrecognised field '"+r.str+"', possible fields: "+n;throw new t.QueryParseError(i,r.start,r.end)}e.currentClause.fields=[r.str];var s=e.peekLexeme();if(s==null){var i="expecting term, found nothing";throw new t.QueryParseError(i,r.start,r.end)}switch(s.type){case t.QueryLexer.TERM:return t.QueryParser.parseTerm;default:var i="expecting term, found '"+s.type+"'";throw new t.QueryParseError(i,s.start,s.end)}}},t.QueryParser.parseTerm=function(e){var 
r=e.consumeLexeme();if(r!=null){e.currentClause.term=r.str.toLowerCase(),r.str.indexOf("*")!=-1&&(e.currentClause.usePipeline=!1);var n=e.peekLexeme();if(n==null){e.nextClause();return}switch(n.type){case t.QueryLexer.TERM:return e.nextClause(),t.QueryParser.parseTerm;case t.QueryLexer.FIELD:return e.nextClause(),t.QueryParser.parseField;case t.QueryLexer.EDIT_DISTANCE:return t.QueryParser.parseEditDistance;case t.QueryLexer.BOOST:return t.QueryParser.parseBoost;case t.QueryLexer.PRESENCE:return e.nextClause(),t.QueryParser.parsePresence;default:var i="Unexpected lexeme type '"+n.type+"'";throw new t.QueryParseError(i,n.start,n.end)}}},t.QueryParser.parseEditDistance=function(e){var r=e.consumeLexeme();if(r!=null){var n=parseInt(r.str,10);if(isNaN(n)){var i="edit distance must be numeric";throw new t.QueryParseError(i,r.start,r.end)}e.currentClause.editDistance=n;var s=e.peekLexeme();if(s==null){e.nextClause();return}switch(s.type){case t.QueryLexer.TERM:return e.nextClause(),t.QueryParser.parseTerm;case t.QueryLexer.FIELD:return e.nextClause(),t.QueryParser.parseField;case t.QueryLexer.EDIT_DISTANCE:return t.QueryParser.parseEditDistance;case t.QueryLexer.BOOST:return t.QueryParser.parseBoost;case t.QueryLexer.PRESENCE:return e.nextClause(),t.QueryParser.parsePresence;default:var i="Unexpected lexeme type '"+s.type+"'";throw new t.QueryParseError(i,s.start,s.end)}}},t.QueryParser.parseBoost=function(e){var r=e.consumeLexeme();if(r!=null){var n=parseInt(r.str,10);if(isNaN(n)){var i="boost must be numeric";throw new t.QueryParseError(i,r.start,r.end)}e.currentClause.boost=n;var s=e.peekLexeme();if(s==null){e.nextClause();return}switch(s.type){case t.QueryLexer.TERM:return e.nextClause(),t.QueryParser.parseTerm;case t.QueryLexer.FIELD:return e.nextClause(),t.QueryParser.parseField;case t.QueryLexer.EDIT_DISTANCE:return t.QueryParser.parseEditDistance;case t.QueryLexer.BOOST:return t.QueryParser.parseBoost;case t.QueryLexer.PRESENCE:return e.nextClause(),t.QueryParser.parsePresence;default:var i="Unexpected lexeme type '"+s.type+"'";throw new t.QueryParseError(i,s.start,s.end)}}},(function(e,r){typeof define=="function"&&define.amd?define(r):typeof ee=="object"?te.exports=r():e.lunr=r()})(this,function(){return t})})()});var Y=Pe(re());function ne(t,e=document){let r=ke(t,e);if(typeof r=="undefined")throw new ReferenceError(`Missing element: expected "${t}" to be present`);return r}function ke(t,e=document){return e.querySelector(t)||void 0}Object.entries||(Object.entries=function(t){let e=[];for(let r of Object.keys(t))e.push([r,t[r]]);return e});Object.values||(Object.values=function(t){let e=[];for(let r of Object.keys(t))e.push(t[r]);return e});typeof Element!="undefined"&&(Element.prototype.scrollTo||(Element.prototype.scrollTo=function(t,e){typeof t=="object"?(this.scrollLeft=t.left,this.scrollTop=t.top):(this.scrollLeft=t,this.scrollTop=e)}),Element.prototype.replaceWith||(Element.prototype.replaceWith=function(...t){let e=this.parentNode;if(e){t.length===0&&e.removeChild(this);for(let r=t.length-1;r>=0;r--){let n=t[r];typeof n=="string"?n=document.createTextNode(n):n.parentNode&&n.parentNode.removeChild(n),r?e.insertBefore(this.previousSibling,n):e.replaceChild(n,this)}}}));function ie(t){let e=new Map;for(let r of t){let[n]=r.location.split("#"),i=e.get(n);typeof i=="undefined"?e.set(n,r):(e.set(r.location,r),r.parent=i)}return e}function W(t,e,r){var s;e=new RegExp(e,"g");let n,i=0;do{n=e.exec(t);let o=(s=n==null?void 
0:n.index)!=null?s:t.length;if(i<o&&r(i,o),n){let[a]=n;i=n.index+a.length,a.length===0&&(e.lastIndex=n.index+1)}}while(n)}function se(t,e){let r=0,n=0,i=0;for(let s=0;i<t.length;i++)t.charAt(i)==="<"&&i>n?e(r,1,n,n=i):t.charAt(i)===">"&&(t.charAt(n+1)==="/"?--s===0&&e(r++,2,n,i+1):t.charAt(i-1)!=="/"&&s++===0&&e(r,0,n,i+1),n=i+1);i>n&&e(r,1,n,i)}function oe(t,e,r,n=!1){return q([t],e,r,n).pop()}function q(t,e,r,n=!1){let i=[0];for(let s=1;s<e.length;s++){let o=e[s-1],a=e[s],u=o[o.length-1]>>>2&1023,c=a[0]>>>12;i.push(+(u>c)+i[i.length-1])}return t.map((s,o)=>{let a=0,u=new Map;for(let f of r.sort((g,l)=>g-l)){let g=f&1048575,l=f>>>20;if(i[l]!==o)continue;let m=u.get(l);typeof m=="undefined"&&u.set(l,m=[]),m.push(g)}if(u.size===0)return s;let c=[];for(let[f,g]of u){let l=e[f],m=l[0]>>>12,x=l[l.length-1]>>>12,v=l[l.length-1]>>>2&1023;n&&m>a&&c.push(s.slice(a,m));let d=s.slice(m,x+v);for(let y of g.sort((b,E)=>E-b)){let b=(l[y]>>>12)-m,E=(l[y]>>>2&1023)+b;d=[d.slice(0,b),"<mark>",d.slice(b,E),"</mark>",d.slice(E)].join("")}if(a=x+v,c.push(d)===2)break}return n&&a<s.length&&c.push(s.slice(a)),c.join("")})}function ae(t){let e=[];if(typeof t=="undefined")return e;let r=Array.isArray(t)?t:[t];for(let n=0;n<r.length;n++){let i=lunr.tokenizer.table,s=i.length;se(r[n],(o,a,u,c)=>{var f;switch(i[f=o+=s]||(i[f]=[]),a){case 0:case 2:i[o].push(u<<12|c-u<<2|a);break;case 1:let g=r[n].slice(u,c);W(g,lunr.tokenizer.separator,(l,m)=>{if(typeof lunr.segmenter!="undefined"){let x=g.slice(l,m);if(/^[MHIK]$/.test(lunr.segmenter.ctype_(x))){let v=lunr.segmenter.segment(x);for(let d=0,y=0;d<v.length;d++)i[o]||(i[o]=[]),i[o].push(u+l+y<<12|v[d].length<<2|a),e.push(new lunr.Token(v[d].toLowerCase(),{position:o<<20|i[o].length-1})),y+=v[d].length;return}}i[o].push(u+l<<12|m-l<<2|a),e.push(new lunr.Token(g.slice(l,m).toLowerCase(),{position:o<<20|i[o].length-1}))})}})}return e}function ue(t,e=r=>r){return t.trim().split(/"([^"]+)"/g).map((r,n)=>n&1?r.replace(/^\b|^(?![^\x00-\x7F]|$)|\s+/g," +"):r).join("").replace(/"|(?:^|\s+)[*+\-:^~]+(?=\s+|$)/g,"").split(/\s+/g).reduce((r,n)=>{let i=e(n);return[...r,...Array.isArray(i)?i:[i]]},[]).map(r=>/([~^]$)/.test(r)?`${r}1`:r).map(r=>/(^[+-]|[~^]\d+$)/.test(r)?r:`${r}*`).join(" ")}function ce(t){return ue(t,e=>{let r=[],n=new lunr.QueryLexer(e);n.run();for(let{type:i,str:s,start:o,end:a}of n.lexemes)switch(i){case"FIELD":["title","text","tags"].includes(s)||(e=[e.slice(0,a)," ",e.slice(a+1)].join(""));break;case"TERM":W(s,lunr.tokenizer.separator,(...u)=>{r.push([e.slice(0,o),s.slice(...u),e.slice(a)].join(""))})}return r})}function le(t){let e=new lunr.Query(["title","text","tags"]);new lunr.QueryParser(t,e).parse();for(let n of e.clauses)n.usePipeline=!0,n.term.startsWith("*")&&(n.wildcard=lunr.Query.wildcard.LEADING,n.term=n.term.slice(1)),n.term.endsWith("*")&&(n.wildcard=lunr.Query.wildcard.TRAILING,n.term=n.term.slice(0,-1));return e.clauses}function he(t,e){var i;let r=new Set(t),n={};for(let s=0;s<e.length;s++)for(let o of r)e[s].startsWith(o.term)&&(n[o.term]=!0,r.delete(o));for(let s of r)(i=lunr.stopWordFilter)!=null&&i.call(lunr,s.term)&&(n[s.term]=!1);return n}function fe(t,e){let r=new Set,n=new Uint16Array(t.length);for(let s=0;s<t.length;s++)for(let o=s+1;o<t.length;o++)t.slice(s,o)in e&&(n[s]=o-s);let i=[0];for(let s=i.length;s>0;){let o=i[--s];for(let u=1;u<n[o];u++)n[o+u]>n[o]-u&&(r.add(t.slice(o,o+u)),i[s++]=o+u);let a=o+n[o];n[a]&&a<t.length-1&&(i[s++]=a),r.add(t.slice(o,a))}return r.has("")?new Set([t]):r}function Oe(t){return e=>r=>{if(typeof 
r[e]=="undefined")return;let n=[r.location,e].join(":");return t.set(n,lunr.tokenizer.table=[]),r[e]}}function Re(t,e){let[r,n]=[new Set(t),new Set(e)];return[...new Set([...r].filter(i=>!n.has(i)))]}var H=class{constructor({config:e,docs:r,options:n}){let i=Oe(this.table=new Map);this.map=ie(r),this.options=n,this.index=lunr(function(){this.metadataWhitelist=["position"],this.b(0),e.lang.length===1&&e.lang[0]!=="en"?this.use(lunr[e.lang[0]]):e.lang.length>1&&this.use(lunr.multiLanguage(...e.lang)),this.tokenizer=ae,lunr.tokenizer.separator=new RegExp(e.separator),lunr.segmenter="TinySegmenter"in lunr?new lunr.TinySegmenter:void 0;let s=Re(["trimmer","stopWordFilter","stemmer"],e.pipeline);for(let o of e.lang.map(a=>a==="en"?lunr:lunr[a]))for(let a of s)this.pipeline.remove(o[a]),this.searchPipeline.remove(o[a]);this.ref("location"),this.field("title",{boost:1e3,extractor:i("title")}),this.field("text",{boost:1,extractor:i("text")}),this.field("tags",{boost:1e6,extractor:i("tags")});for(let o of r)this.add(o,{boost:o.boost})})}search(e){if(e=e.replace(new RegExp("\\p{sc=Han}+","gu"),s=>[...fe(s,this.index.invertedIndex)].join("* ")),e=ce(e),!e)return{items:[]};let r=le(e).filter(s=>s.presence!==lunr.Query.presence.PROHIBITED),n=this.index.search(e).reduce((s,{ref:o,score:a,matchData:u})=>{let c=this.map.get(o);if(typeof c!="undefined"){c=A({},c),c.tags&&(c.tags=[...c.tags]);let f=he(r,Object.keys(u.metadata));for(let l of this.index.fields){if(typeof c[l]=="undefined")continue;let m=[];for(let d of Object.values(u.metadata))typeof d[l]!="undefined"&&m.push(...d[l].position);if(!m.length)continue;let x=this.table.get([c.location,l].join(":")),v=Array.isArray(c[l])?q:oe;c[l]=v(c[l],x,m,l!=="text")}let g=+!c.parent+Object.values(f).filter(l=>l).length/Object.keys(f).length;s.push(G(A({},c),{score:a*(1+K(g,2)),terms:f}))}return s},[]).sort((s,o)=>o.score-s.score).reduce((s,o)=>{let a=this.map.get(o.location);if(typeof a!="undefined"){let u=a.parent?a.parent.location:a.location;s.set(u,[...s.get(u)||[],o])}return s},new Map);for(let[s,o]of n)if(!o.find(a=>a.location===s)){let a=this.map.get(s);o.push(G(A({},a),{score:0,terms:{}}))}let i;if(this.options.suggest){let s=this.index.query(o=>{for(let a of r)o.term(a.term,{fields:["title"],presence:lunr.Query.presence.REQUIRED,wildcard:lunr.Query.wildcard.TRAILING})});i=s.length?Object.keys(s[0].matchData.metadata):[]}return A({items:[...n.values()]},typeof i!="undefined"&&{suggest:i})}};var de;function Ie(t){return B(this,null,function*(){let e="../lunr";if(typeof parent!="undefined"&&"IFrameWorker"in parent){let n=ne("script[src]"),[i]=n.src.split("/worker");e=e.replace("..",i)}let r=[];for(let n of t.lang){switch(n){case"ja":r.push(`${e}/tinyseg.js`);break;case"hi":case"th":r.push(`${e}/wordcut.js`);break}n!=="en"&&r.push(`${e}/min/lunr.${n}.min.js`)}t.lang.length>1&&r.push(`${e}/min/lunr.multi.min.js`),r.length&&(yield importScripts(`${e}/min/lunr.stemmer.support.min.js`,...r))})}function Fe(t){return B(this,null,function*(){switch(t.type){case 0:return yield Ie(t.data.config),de=new H(t.data),{type:1};case 2:let e=t.data;try{return{type:3,data:de.search(e)}}catch(r){return console.warn(`Invalid query: ${e} \u2013 see https://bit.ly/2s3ChXG`),console.warn(r),{type:3,data:{items:[]}}}default:throw new TypeError("Invalid message type")}})}self.lunr=Y.default;Y.default.utils.warn=console.warn;addEventListener("message",t=>B(null,null,function*(){postMessage(yield Fe(t.data))}));})();
41 | //# sourceMappingURL=search.973d3a69.min.js.map
42 | 
43 | 
```

--------------------------------------------------------------------------------
/src/codegraphcontext/server.py:
--------------------------------------------------------------------------------

```python
  1 | # src/codegraphcontext/server.py
  2 | import urllib.parse
  3 | import asyncio
  4 | import json
  5 | import importlib
  6 | import stdlibs
  7 | import sys
  8 | import traceback
  9 | import os
 10 | import re
 11 | from datetime import datetime
 12 | from pathlib import Path
 13 | from neo4j.exceptions import CypherSyntaxError
 14 | from dataclasses import asdict
 15 | 
 16 | from typing import Any, Dict, Coroutine, Optional
 17 | 
 18 | from .prompts import LLM_SYSTEM_PROMPT
 19 | from .core.database import DatabaseManager
 20 | from .core.jobs import JobManager, JobStatus
 21 | from .core.watcher import CodeWatcher
 22 | from .tools.graph_builder import GraphBuilder
 23 | from .tools.code_finder import CodeFinder
 24 | from .tools.package_resolver import get_local_package_path
 25 | from .utils.debug_log import debug_log, info_logger, error_logger, warning_logger, debug_logger
 26 | 
 27 | DEFAULT_EDIT_DISTANCE = 2
 28 | DEFAULT_FUZZY_SEARCH = False
 29 | 
 30 | class MCPServer:
 31 |     """
 32 |     The main MCP Server class.
 33 |     
 34 |     This class orchestrates all the major components of the application, including:
 35 |     - Database connection management (`DatabaseManager`)
 36 |     - Background job tracking (`JobManager`)
 37 |     - File system watching for live updates (`CodeWatcher`)
 38 |     - Tool handlers for graph building, code searching, etc.
 39 |     - The main JSON-RPC communication loop for interacting with an AI assistant.
 40 |     """
 41 | 
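    # A minimal construction sketch (hypothetical; the real startup wiring
    # lives in the CLI entry point, which is not shown in this file):
    #
    #   loop = asyncio.new_event_loop()
    #   server = MCPServer(loop=loop)
    #   print(server.get_database_status())  # {"connected": True} on success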
 42 |     def __init__(self, loop=None):
 43 |         """
 44 |         Initializes the MCP server and its components. 
 45 |         
 46 |         Args:
 47 |             loop: The asyncio event loop to use. If not provided, it gets the current
 48 |                   running loop or creates a new one.
 49 |         """
 50 |         try:
 51 |             # Initialize the database manager and establish a connection early
 52 |             # to fail fast if credentials are wrong.
 53 |             self.db_manager = DatabaseManager()
 54 |             self.db_manager.get_driver() 
 55 |         except ValueError as e:
 56 |             raise ValueError(f"Database configuration error: {e}")
 57 | 
 58 |         # Initialize managers for jobs and file watching.
 59 |         self.job_manager = JobManager()
 60 |         
 61 |         # Get the current event loop to pass to thread-sensitive components like the graph builder.
 62 |         if loop is None:
 63 |             try:
 64 |                 loop = asyncio.get_running_loop()
 65 |             except RuntimeError:
 66 |                 loop = asyncio.new_event_loop()
 67 |                 asyncio.set_event_loop(loop)
 68 |         self.loop = loop
 69 | 
 70 |         # Initialize all the tool handlers, passing them the necessary managers and the event loop.
 71 |         self.graph_builder = GraphBuilder(self.db_manager, self.job_manager, loop)
 72 |         self.code_finder = CodeFinder(self.db_manager)
 73 |         self.code_watcher = CodeWatcher(self.graph_builder, self.job_manager)
 74 |         
 75 |         # Define the tool manifest that will be exposed to the AI assistant.
 76 |         self._init_tools()
 77 | 
 78 |     def _init_tools(self):
 79 |         """
 80 |         Defines the complete tool manifest for the LLM.
 81 |         This dictionary contains the schema for every tool the AI can call,
 82 |         including its name, description, and input parameters.
 83 |         """
 84 |         self.tools = {
 85 |             "add_code_to_graph": {
 86 |                 "name": "add_code_to_graph",
 87 |                 "description": "Performs a one-time scan of a local folder to add its code to the graph. Ideal for indexing libraries, dependencies, or projects not being actively modified. Returns a job ID for background processing.",
 88 |                 "inputSchema": {
 89 |                     "type": "object",
 90 |                     "properties": {
 91 |                         "path": {"type": "string", "description": "Path to the directory or file to add."},
 92 |                         "is_dependency": {"type": "boolean", "description": "Whether this code is a dependency.", "default": False}
 93 |                     },
 94 |                     "required": ["path"]
 95 |                 }
 96 |             },
 97 |             "check_job_status": {
 98 |                 "name": "check_job_status",
 99 |                 "description": "Check the status and progress of a background job.",
100 |                 "inputSchema": {
101 |                     "type": "object",
102 |                     "properties": { "job_id": {"type": "string", "description": "Job ID from a previous tool call"} },
103 |                     "required": ["job_id"]
104 |                 }
105 |             },
106 |             "list_jobs": {
107 |                 "name": "list_jobs",
108 |                 "description": "List all background jobs and their current status.",
109 |                 "inputSchema": {"type": "object", "properties": {}}
110 |             },
111 |            "find_code": {
112 |                 "name": "find_code",
113 |                 "description": "Find relevant code snippets related to a keyword (e.g., function name, class name, or content).",
114 |                 "inputSchema": {
115 |                     "type": "object",
116 |                     "properties": { "query": {"type": "string", "description": "Keyword or phrase to search for"}, "fuzzy_search": {"type": "boolean", "description": "Whether to use fuzzy search", "default": False}, "edit_distance": {"type": "number", "description": "Edit distance for fuzzy search (between 0 and 2)", "default": 2}},
117 |                     "required": ["query"]
118 |                 }
119 |             },
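            # Illustration of the fuzzy options above (hypothetical call, not
            # part of the manifest): with fuzzy_search=True and
            # edit_distance=2, a query like "authentiction" would still match
            # a function named "authentication", which is one edit away.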
120 | 
121 |             "analyze_code_relationships": {
122 |                 "name": "analyze_code_relationships",
123 |                 "description": "Analyze code relationships like 'who calls this function' or 'class hierarchy'. Supported query types include: find_callers, find_callees, find_all_callers, find_all_callees, find_importers, who_modifies, class_hierarchy, overrides, dead_code, call_chain, module_deps, variable_scope, find_complexity, find_functions_by_argument, find_functions_by_decorator.",
124 |                 "inputSchema": {
125 |                     "type": "object",
126 |                     "properties": {
127 |                         "query_type": {"type": "string", "description": "Type of relationship query to run.", "enum": ["find_callers", "find_callees", "find_all_callers", "find_all_callees", "find_importers", "who_modifies", "class_hierarchy", "overrides", "dead_code", "call_chain", "module_deps", "variable_scope", "find_complexity", "find_functions_by_argument", "find_functions_by_decorator"]},
128 |                         "target": {"type": "string", "description": "The function, class, or module to analyze."},
129 |                         "context": {"type": "string", "description": "Optional: specific file path for precise results."} 
130 |                     },
131 |                     "required": ["query_type", "target"]
132 |                 }
133 |             },
134 |             "watch_directory": {
135 |                 "name": "watch_directory",
136 |                 "description": "Performs an initial scan of a directory and then continuously monitors it for changes, automatically keeping the graph up-to-date. Ideal for projects under active development. Returns a job ID for the initial scan.",
137 |                 "inputSchema": {
138 |                     "type": "object",
139 |                     "properties": { "path": {"type": "string", "description": "Path to directory to watch"} },
140 |                     "required": ["path"]
141 |                 }
142 |             },
143 |             "execute_cypher_query": {
144 |                 "name": "execute_cypher_query",
145 |                 "description": "Fallback tool to run a direct, read-only Cypher query against the code graph. Use this for complex questions not covered by other tools. The graph contains nodes representing code structures and relationships between them. **Schema Overview:**\n- **Nodes:** `Repository`, `File`, `Module`, `Class`, `Function`.\n- **Properties:** Nodes have properties like `name`, `path`, `cyclomatic_complexity` (on Function nodes), and `code`.\n- **Relationships:** `CONTAINS` (e.g., File-[:CONTAINS]->Function), `CALLS` (Function-[:CALLS]->Function or File-[:CALLS]->Function), `IMPORTS` (File-[:IMPORTS]->Module), `INHERITS` (Class-[:INHERITS]->Class).",
146 |                 "inputSchema": {
147 |                     "type": "object",
148 |                     "properties": { "cypher_query": {"type": "string", "description": "The read-only Cypher query to execute."} },
149 |                     "required": ["cypher_query"]
150 |                 }
151 |             },
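            # A sketch of the kind of read-only query this schema supports
            # (illustrative only; the labels, relationship types, and the
            # cyclomatic_complexity property come from the description above):
            #   MATCH (f:File)-[:CONTAINS]->(fn:Function)
            #   WHERE fn.cyclomatic_complexity > 10
            #   RETURN f.path, fn.name, fn.cyclomatic_complexity
            #   ORDER BY fn.cyclomatic_complexity DESC LIMIT 5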
152 |             "add_package_to_graph": {
153 |                 "name": "add_package_to_graph",
154 |                 "description": "Add a package to the graph by discovering its location. Supports multiple languages. Returns immediately with a job ID.",
155 |                 "inputSchema": {
156 |                     "type": "object",
157 |                     "properties": {
158 |                         "package_name": {"type": "string", "description": "Name of the package to add (e.g., 'requests', 'express', 'moment', 'lodash')."},
159 |                         "language": {"type": "string", "description": "The programming language of the package.", "enum": ["python", "javascript", "typescript", "java", "c", "go", "ruby", "php", "cpp"]},
160 |                         "is_dependency": {"type": "boolean", "description": "Mark as a dependency.", "default": True}
161 |                     },
162 |                     "required": ["package_name", "language"]
163 |                 }
164 |             },
165 |             "find_dead_code": {
166 |                 "name": "find_dead_code",
167 |                 "description": "Find potentially unused functions (dead code) across the entire indexed codebase, optionally excluding functions with specific decorators.",
168 |                 "inputSchema": {
169 |                     "type": "object",
170 |                     "properties": {
171 |                         "exclude_decorated_with": {"type": "array", "items": {"type": "string"}, "description": "Optional: A list of decorator names (e.g., '@app.route') to exclude from dead code detection.", "default": []}
172 |                     }
173 |                 }
174 |             },
175 |             "calculate_cyclomatic_complexity": {
176 |                 "name": "calculate_cyclomatic_complexity",
177 |                 "description": "Calculate the cyclomatic complexity of a specific function to measure its complexity.",
178 |                 "inputSchema": {
179 |                     "type": "object",
180 |                     "properties": {
181 |                         "function_name": {"type": "string", "description": "The name of the function to analyze."},
182 |                         "file_path": {"type": "string", "description": "Optional: The full path to the file containing the function for a more specific query."} 
183 |                     },
184 |                     "required": ["function_name"]
185 |                 }
186 |             },
187 |             "find_most_complex_functions": {
188 |                 "name": "find_most_complex_functions",
189 |                 "description": "Find the most complex functions in the codebase based on cyclomatic complexity.",
190 |                 "inputSchema": {
191 |                     "type": "object",
192 |                     "properties": {
193 |                         "limit": {"type": "integer", "description": "The maximum number of complex functions to return.", "default": 10}
194 |                     }
195 |                 }
196 |             },
197 |             "list_indexed_repositories": {
198 |                 "name": "list_indexed_repositories",
199 |                 "description": "List all indexed repositories.",
200 |                 "inputSchema": {
201 |                     "type": "object",
202 |                     "properties": {}
203 |                 }
204 |             },
205 |             "delete_repository": {
206 |                 "name": "delete_repository",
207 |                 "description": "Delete an indexed repository from the graph.",
208 |                 "inputSchema": {
209 |                     "type": "object",
210 |                     "properties": {
211 |                         "repo_path": {"type": "string", "description": "The path of the repository to delete."} 
212 |                     },
213 |                     "required": ["repo_path"]
214 |                 }
215 |             },
216 |             "visualize_graph_query": {
217 |                 "name": "visualize_graph_query",
218 |                 "description": "Generates a URL to visualize the results of a Cypher query in the Neo4j Browser. The user can open this URL in their web browser to see the graph visualization.",
219 |                 "inputSchema": {
220 |                     "type": "object",
221 |                     "properties": {
222 |                         "cypher_query": {"type": "string", "description": "The Cypher query to visualize."}
223 |                     },
224 |                     "required": ["cypher_query"]
225 |                 }
226 |             },
227 |             "list_watched_paths": {
228 |                 "name": "list_watched_paths",
229 |                 "description": "Lists all directories currently being watched for live file changes.",
230 |                 "inputSchema": {"type": "object", "properties": {}}
231 |             },
232 |             "unwatch_directory": {
233 |                 "name": "unwatch_directory",
234 |                 "description": "Stops watching a directory for live file changes.",
235 |                 "inputSchema": {
236 |                     "type": "object",
237 |                     "properties": {
238 |                         "path": {"type": "string", "description": "The absolute path of the directory to stop watching."}
239 |                     },
240 |                     "required": ["path"]
241 |                 }
242 |             }
243 |         }    
244 | 
245 |     def get_database_status(self) -> dict:
246 |         """Returns the current connection status of the Neo4j database."""
247 |         return {"connected": self.db_manager.is_connected()}
248 |         
249 | 
250 |     def execute_cypher_query_tool(self, **args) -> Dict[str, Any]:
251 |         """
252 |         Tool implementation for executing a read-only Cypher query.
253 |         
254 |         Important: Includes a safety check to prevent any database modification
255 |         by disallowing keywords like CREATE, MERGE, DELETE, etc.
256 |         """
257 |         cypher_query = args.get("cypher_query")
258 |         if not cypher_query:
259 |             return {"error": "Cypher query cannot be empty."}
260 | 
261 |         # Safety Check: Prevent any write operations to the database.
262 |         # This check first removes all string literals and then checks for forbidden keywords.
263 |         forbidden_keywords = ['CREATE', 'MERGE', 'DELETE', 'SET', 'REMOVE', 'DROP', 'CALL apoc']
264 |         
265 |         # Regex to match single or double quoted strings, handling escaped quotes.
266 |         string_literal_pattern = r'"(?:\\.|[^"\\])*"|\'(?:\\.|[^\'\\])*\''
267 |         
268 |         # Remove all string literals from the query.
269 |         query_without_strings = re.sub(string_literal_pattern, '', cypher_query)
270 |         
271 |         # Now, check for forbidden keywords in the query without strings.
272 |         for keyword in forbidden_keywords:
273 |             if re.search(r'\b' + keyword + r'\b', query_without_strings, re.IGNORECASE):
274 |                 return {
275 |                     "error": "This tool only supports read-only queries. Prohibited keywords like CREATE, MERGE, DELETE, SET, etc., are not allowed."
276 |                 }
277 | 
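        # Illustrative effect of the check above (hypothetical queries):
        #   MATCH (f:Function) WHERE f.code CONTAINS 'DELETE n' RETURN f.name
        # passes, because the quoted 'DELETE n' is stripped before keyword
        # matching, whereas:
        #   MATCH (n) DETACH DELETE n
        # is rejected: DELETE appears outside any string literal.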
278 |         try:
279 |             debug_log(f"Executing Cypher query: {cypher_query}")
280 |             with self.db_manager.get_driver().session() as session:
281 |                 result = session.run(cypher_query)
282 |                 # Convert results to a list of dictionaries for clean JSON serialization.
283 |                 records = [record.data() for record in result]
284 |                 
285 |                 return {
286 |                     "success": True,
287 |                     "query": cypher_query,
288 |                     "record_count": len(records),
289 |                     "results": records
290 |                 }
291 |         
292 |         except CypherSyntaxError as e:
293 |             debug_log(f"Cypher syntax error: {str(e)}")
294 |             return {
295 |                 "error": "Cypher syntax error.",
296 |                 "details": str(e),
297 |                 "query": cypher_query
298 |             }
299 |         except Exception as e:
300 |             debug_log(f"Error executing Cypher query: {str(e)}")
301 |             return {
302 |                 "error": "An unexpected error occurred while executing the query.",
303 |                 "details": str(e)
304 |             }
305 |     
306 |     def find_dead_code_tool(self, **args) -> Dict[str, Any]:
307 |         """Tool to find potentially dead code across the entire project."""
308 |         exclude_decorated_with = args.get("exclude_decorated_with", [])
309 |         try:
310 |             debug_log("Finding dead code.")
311 |             results = self.code_finder.find_dead_code(exclude_decorated_with=exclude_decorated_with)
312 |             
313 |             return {
314 |                 "success": True,
315 |                 "query_type": "dead_code",
316 |                 "results": results
317 |             }
318 |         except Exception as e:
319 |             debug_log(f"Error finding dead code: {str(e)}")
320 |             return {"error": f"Failed to find dead code: {str(e)}"}
321 | 
322 |     def calculate_cyclomatic_complexity_tool(self, **args) -> Dict[str, Any]:
323 |         """Tool to calculate cyclomatic complexity for a given function."""
324 |         function_name = args.get("function_name")
325 |         file_path = args.get("file_path")
326 | 
327 |         try:
328 |             debug_log(f"Calculating cyclomatic complexity for function: {function_name}")
329 |             results = self.code_finder.get_cyclomatic_complexity(function_name, file_path)
330 |             
331 |             response = {
332 |                 "success": True,
333 |                 "function_name": function_name,
334 |                 "results": results
335 |             }
336 |             if file_path:
337 |                 response["file_path"] = file_path
338 |             
339 |             return response
340 |         except Exception as e:
341 |             debug_log(f"Error calculating cyclomatic complexity: {str(e)}")
342 |             return {"error": f"Failed to calculate cyclomatic complexity: {str(e)}"}
343 | 
344 |     def find_most_complex_functions_tool(self, **args) -> Dict[str, Any]:
345 |         """Tool to find the most complex functions."""
346 |         limit = args.get("limit", 10)
347 |         try:
348 |             debug_log(f"Finding the top {limit} most complex functions.")
349 |             results = self.code_finder.find_most_complex_functions(limit)
350 |             return {
351 |                 "success": True,
352 |                 "limit": limit,
353 |                 "results": results
354 |             }
355 |         except Exception as e:
356 |             debug_log(f"Error finding most complex functions: {str(e)}")
357 |             return {"error": f"Failed to find most complex functions: {str(e)}"}
358 | 
359 |     def list_indexed_repositories_tool(self, **args) -> Dict[str, Any]:
360 |         """Tool to list indexed repositories."""
361 |         try:
362 |             debug_log("Listing indexed repositories.")
363 |             results = self.code_finder.list_indexed_repositories()
364 |             return {
365 |                 "success": True,
366 |                 "repositories": results
367 |             }
368 |         except Exception as e:
369 |             debug_log(f"Error listing indexed repositories: {str(e)}")
370 |             return {"error": f"Failed to list indexed repositories: {str(e)}"}
371 | 
372 |     def delete_repository_tool(self, **args) -> Dict[str, Any]:
373 |         """Tool to delete a repository from the graph."""
374 |         repo_path = args.get("repo_path")
375 |         try:
376 |             debug_log(f"Deleting repository: {repo_path}")
377 |             self.graph_builder.delete_repository_from_graph(repo_path)
378 |             return {
379 |                 "success": True,
380 |                 "message": f"Repository '{repo_path}' deleted successfully."
381 |             }
382 |         except Exception as e:
383 |             debug_log(f"Error deleting repository: {str(e)}")
384 |             return {"error": f"Failed to delete repository: {str(e)}"}
385 | 
386 |     def visualize_graph_query_tool(self, **args) -> Dict[str, Any]:
387 |         """Tool to generate a Neo4j browser visualization URL for a Cypher query."""
388 |         cypher_query = args.get("cypher_query")
389 |         if not cypher_query:
390 |             return {"error": "Cypher query cannot be empty."}
391 | 
392 |         try:
393 |             encoded_query = urllib.parse.quote(cypher_query)
394 |             visualization_url = f"http://localhost:7474/browser/?cmd=edit&arg={encoded_query}"
395 |             
396 |             return {
397 |                 "success": True,
398 |                 "visualization_url": visualization_url,
399 |                 "message": "Open the URL in your browser to visualize the graph query. The query will be pre-filled for editing."
400 |             }
401 |         except Exception as e:
402 |             debug_log(f"Error generating visualization URL: {str(e)}")
403 |             return {"error": f"Failed to generate visualization URL: {str(e)}"}
404 | 
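    # For example, "MATCH (n) RETURN n LIMIT 5" is percent-encoded by
    # urllib.parse.quote into a URL like (assuming the default local
    # Neo4j Browser address used above):
    #   http://localhost:7474/browser/?cmd=edit&arg=MATCH%20%28n%29%20RETURN%20n%20LIMIT%205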
405 |     def list_watched_paths_tool(self, **args) -> Dict[str, Any]:
406 |         """Tool to list all currently watched directory paths."""
407 |         try:
408 |             paths = self.code_watcher.list_watched_paths()
409 |             return {"success": True, "watched_paths": paths}
410 |         except Exception as e:
411 |             return {"error": f"Failed to list watched paths: {str(e)}"}
412 | 
413 |     def unwatch_directory_tool(self, **args) -> Dict[str, Any]:
414 |         """Tool to stop watching a directory."""
415 |         path = args.get("path")
416 |         if not path:
417 |             return {"error": "Path is a required argument."}
418 |         # The watcher class handles the logic of checking if the path is watched
419 |         # and returns an error dictionary if not, so we can just call it.
420 |         return self.code_watcher.unwatch_directory(path)
421 | 
422 |     def watch_directory_tool(self, **args) -> Dict[str, Any]:
423 |         """
424 |         Tool implementation to start watching a directory for changes.
425 |         It validates that the path exists and checks whether the repository is already indexed, so the initial scan runs only when needed.
426 |         """
427 |         path = args.get("path")
428 |         if not path:
429 |             return {"error": "Path is a required argument."}
430 | 
431 |         path_obj = Path(path).resolve()
432 |         path_str = str(path_obj)
433 | 
434 |         # 1. Validate the path before the try...except block
435 |         if not path_obj.is_dir():
436 |             return {
437 |                 "success": True,
438 |                 "status": "path_not_found",
439 |                 "message": f"Path '{path_str}' does not exist or is not a directory."
440 |             }
441 |         try:
442 |             # Check if already watching
443 |             if path_str in self.code_watcher.watched_paths:
444 |                 return {"success": True, "message": f"Already watching directory: {path_str}"}
445 | 
446 |             # 2. Check if the repository is already indexed
447 |             indexed_repos = self.list_indexed_repositories_tool().get("repositories", [])
448 |             is_already_indexed = any(Path(repo["path"]).resolve() == path_obj for repo in indexed_repos)
449 | 
450 |             # 3. Decide whether to perform an initial scan
451 |             if is_already_indexed:
452 |                 # If already indexed, just start the watcher without a scan
453 |                 self.code_watcher.watch_directory(path_str, perform_initial_scan=False)
454 |                 return {
455 |                     "success": True,
456 |                     "message": f"Path '{path_str}' is already indexed. Now watching for live changes."
457 |                 }
458 |             else:
459 |                 # If not indexed, perform the scan AND start the watcher
460 |                 scan_job_result = self.add_code_to_graph_tool(path=path_str, is_dependency=False)
461 |                 if "error" in scan_job_result:
462 |                     return scan_job_result
463 |                 
464 |                 self.code_watcher.watch_directory(path_str, perform_initial_scan=True)
465 |                 
466 |                 return {
467 |                     "success": True,
468 |                     "message": f"Path '{path_str}' was not indexed. Started initial scan and now watching for live changes.",
469 |                     "job_id": scan_job_result.get("job_id"),
470 |                     "details": "Use check_job_status to monitor the initial scan."
471 |                 }
472 |             
473 |         except Exception as e:
474 |             error_logger(f"Failed to start watching directory {path}: {e}")
475 |             return {"error": f"Failed to start watching directory: {str(e)}"}        
476 |     
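    # Decision summary for watch_directory_tool, as implemented above:
    #   - path missing or not a directory -> status "path_not_found", nothing started
    #   - already being watched           -> informational no-op
    #   - indexed but not watched         -> start watcher, skip the rescan
    #   - not indexed                     -> background scan job + watcher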
477 |     def add_code_to_graph_tool(self, **args) -> Dict[str, Any]:
478 |         """
479 |         Tool implementation to index a directory of code.
480 | 
481 |         This creates a background job and runs the indexing asynchronously
482 |         so the AI assistant can continue to be responsive.
483 |         """
484 |         path = args.get("path")
485 |         is_dependency = args.get("is_dependency", False)
486 |         
487 |         try:
488 |             path_obj = Path(path).resolve()
489 | 
490 |             if not path_obj.exists():
491 |                 return {
492 |                     "success": True,
493 |                     "status": "path_not_found",
494 |                     "message": f"Path '{path}' does not exist."
495 |                 }
496 | 
497 |             # Prevent re-indexing the same repository.
498 |             indexed_repos = self.list_indexed_repositories_tool().get("repositories", [])
499 |             for repo in indexed_repos:
500 |                 if Path(repo["path"]).resolve() == path_obj:
501 |                     return {
502 |                         "success": False,
503 |                         "message": f"Repository '{path}' is already indexed."
504 |                     }
505 |             
506 |             # Estimate time and create a job for the user to track.
507 |             total_files, estimated_time = self.graph_builder.estimate_processing_time(path_obj)
508 |             job_id = self.job_manager.create_job(str(path_obj), is_dependency)
509 |             self.job_manager.update_job(job_id, total_files=total_files, estimated_duration=estimated_time)
510 |             
511 |             # Create the coroutine for the background task and schedule it on the main event loop.
512 |             coro = self.graph_builder.build_graph_from_path_async(
513 |                 path_obj, is_dependency, job_id
514 |             )
515 |             asyncio.run_coroutine_threadsafe(coro, self.loop)
516 |             
517 |             debug_log(f"Started background job {job_id} for path: {str(path_obj)}, is_dependency: {is_dependency}")
518 |             
519 |             return {
520 |                 "success": True, "job_id": job_id,
521 |                 "message": f"Background processing started for {str(path_obj)}",
522 |                 "estimated_files": total_files,
523 |                 "estimated_duration_seconds": round(estimated_time, 2),
524 |                 "estimated_duration_human": f"{int(estimated_time // 60)}m {int(estimated_time % 60)}s" if estimated_time >= 60 else f"{int(estimated_time)}s",
525 |                 "instructions": f"Use 'check_job_status' with job_id '{job_id}' to monitor progress"
526 |             }
527 |         
528 |         except Exception as e:
529 |             debug_log(f"Error creating background job: {str(e)}")
530 |             return {"error": f"Failed to start background processing: {str(e)}"}
531 |     
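    # Typical caller flow for this tool (a sketch with hypothetical values;
    # the actual field names come from the return dicts above):
    #
    #   result = server.add_code_to_graph_tool(path="/path/to/repo")
    #   job_id = result["job_id"]
    #   status = server.check_job_status_tool(job_id=job_id)
    #   # status["job"]["status"] moves through the JobStatus values
    #   # (e.g. running -> completed) as the background coroutine,
    #   # scheduled via asyncio.run_coroutine_threadsafe, makes progress.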
532 |     def add_package_to_graph_tool(self, **args) -> Dict[str, Any]:
533 |         """Tool to add a package to the graph by auto-discovering its location"""
534 |         package_name = args.get("package_name")
535 |         language = args.get("language")
536 |         is_dependency = args.get("is_dependency", True)
537 | 
538 |         if not language:
539 |             return {"error": "The 'language' parameter is required."}
540 | 
541 |         try:
542 |             # Check if the package is already indexed
543 |             indexed_repos = self.list_indexed_repositories_tool().get("repositories", [])
544 |             for repo in indexed_repos:
545 |                 if repo.get("is_dependency") and (repo.get("name") == package_name or repo.get("name") == f"{package_name}.py"):
546 |                     return {
547 |                         "success": False,
548 |                         "message": f"Package '{package_name}' is already indexed."
549 |                     }
550 | 
551 |             package_path = get_local_package_path(package_name, language)
552 |             
553 |             if not package_path:
554 |                 return {"error": f"Could not find package '{package_name}' for language '{language}'. Make sure it's installed."}
555 |             
556 |             if not os.path.exists(package_path):
557 |                 return {"error": f"Package path '{package_path}' does not exist"}
558 |             
559 |             path_obj = Path(package_path)
560 |             
561 |             total_files, estimated_time = self.graph_builder.estimate_processing_time(path_obj)
562 |             
563 |             job_id = self.job_manager.create_job(package_path, is_dependency)
564 |             
565 |             self.job_manager.update_job(job_id, total_files=total_files, estimated_duration=estimated_time)
566 |             
            coro = self.graph_builder.build_graph_from_path_async(
                path_obj, is_dependency, job_id
            )
            asyncio.run_coroutine_threadsafe(coro, self.loop)

            debug_log(f"Started background job {job_id} for package: {package_name} at {package_path}, is_dependency: {is_dependency}")

            return {
                "success": True, "job_id": job_id, "package_name": package_name,
                "discovered_path": package_path,
                "message": f"Background processing started for package '{package_name}'",
                "estimated_files": total_files,
                "estimated_duration_seconds": round(estimated_time, 2),
                "estimated_duration_human": f"{int(estimated_time // 60)}m {int(estimated_time % 60)}s" if estimated_time >= 60 else f"{int(estimated_time)}s",
                "instructions": f"Use 'check_job_status' with job_id '{job_id}' to monitor progress"
            }

        except Exception as e:
            debug_log(f"Error creating background job for package {package_name}: {str(e)}")
            return {"error": f"Failed to start background processing for package '{package_name}': {str(e)}"}

    def check_job_status_tool(self, **args) -> Dict[str, Any]:
        """Tool to check job status"""
        job_id = args.get("job_id")
        if not job_id:
            return {"error": "Job ID is a required argument."}

        try:
            job = self.job_manager.get_job(job_id)

            if not job:
                return {
                    "success": True,  # Return success to avoid generic error wrapper
                    "status": "not_found",
                    "message": f"Job with ID '{job_id}' not found. The ID may be incorrect or the job may have been cleared after a server restart."
                }

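            # asdict() yields a plain dict, but its datetime and Enum fields
            # are not JSON-serializable; they are normalized to strings below.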
            job_dict = asdict(job)

            if job.status == JobStatus.RUNNING:
                if job.estimated_time_remaining:
                    remaining = job.estimated_time_remaining
                    job_dict["estimated_time_remaining_human"] = (
                        f"{int(remaining // 60)}m {int(remaining % 60)}s"
                        if remaining >= 60 else f"{int(remaining)}s"
                    )

                if job.start_time:
                    elapsed = (datetime.now() - job.start_time).total_seconds()
                    job_dict["elapsed_time_human"] = (
                        f"{int(elapsed // 60)}m {int(elapsed % 60)}s"
                        if elapsed >= 60 else f"{int(elapsed)}s"
                    )

            elif job.status == JobStatus.COMPLETED and job.start_time and job.end_time:
                duration = (job.end_time - job.start_time).total_seconds()
                job_dict["actual_duration_human"] = (
                    f"{int(duration // 60)}m {int(duration % 60)}s"
                    if duration >= 60 else f"{int(duration)}s"
                )

            # Guard against a missing start_time (mirrored from the checks above).
            if job.start_time:
                job_dict["start_time"] = job.start_time.strftime("%Y-%m-%d %H:%M:%S")
            if job.end_time:
                job_dict["end_time"] = job.end_time.strftime("%Y-%m-%d %H:%M:%S")

            job_dict["status"] = job.status.value

            return {"success": True, "job": job_dict}

        except Exception as e:
            debug_log(f"Error checking job status: {str(e)}")
            return {"error": f"Failed to check job status: {str(e)}"}

    def list_jobs_tool(self) -> Dict[str, Any]:
        """Tool to list all jobs"""
        try:
            jobs = self.job_manager.list_jobs()

            jobs_data = []
            for job in jobs:
                job_dict = asdict(job)
                job_dict["status"] = job.status.value
                # Format as a string when set; fall back to "" so the sort
                # below always compares strings.
                job_dict["start_time"] = (
                    job.start_time.strftime("%Y-%m-%d %H:%M:%S") if job.start_time else ""
                )
                if job.end_time:
                    job_dict["end_time"] = job.end_time.strftime("%Y-%m-%d %H:%M:%S")
                jobs_data.append(job_dict)

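            # Newest first; the "%Y-%m-%d %H:%M:%S" format sorts
            # lexicographically in chronological order.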
            jobs_data.sort(key=lambda x: x["start_time"], reverse=True)

            return {"success": True, "jobs": jobs_data, "total_jobs": len(jobs_data)}

        except Exception as e:
            debug_log(f"Error listing jobs: {str(e)}")
            return {"error": f"Failed to list jobs: {str(e)}"}

    def analyze_code_relationships_tool(self, **args) -> Dict[str, Any]:
        """Tool to analyze code relationships"""
        query_type = args.get("query_type")
        target = args.get("target")
        context = args.get("context")

        if not query_type or not target:
            return {
                "error": "Both 'query_type' and 'target' are required",
                "supported_query_types": [
                    "find_callers", "find_callees", "find_importers", "who_modifies",
                    "class_hierarchy", "overrides", "dead_code", "call_chain",
                    "module_deps", "variable_scope", "find_complexity"
                ]
            }

        try:
            debug_log(f"Analyzing relationships: {query_type} for {target}")
            results = self.code_finder.analyze_code_relationships(query_type, target, context)

            return {
                "success": True, "query_type": query_type, "target": target,
                "context": context, "results": results
            }

        except Exception as e:
            debug_log(f"Error analyzing relationships: {str(e)}")
            return {"error": f"Failed to analyze relationships: {str(e)}"}

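    # Lowercases and de-snake-cases a search query so fuzzy matching treats
    # "my_function" and "my function" as the same term.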
    @staticmethod
    def _normalize(text: str) -> str:
        return text.lower().replace("_", " ").strip()

    def find_code_tool(self, **args) -> Dict[str, Any]:
        """Tool to find relevant code snippets"""
        query = args.get("query")
        fuzzy_search = args.get("fuzzy_search", DEFAULT_FUZZY_SEARCH)
        edit_distance = args.get("edit_distance", DEFAULT_EDIT_DISTANCE)

        if not query:
            return {"error": "The 'query' parameter is required."}

        if fuzzy_search:
            query = self._normalize(query)

        try:
            debug_log(f"Finding code for query: {query} with fuzzy_search={fuzzy_search}, edit_distance={edit_distance}")
            results = self.code_finder.find_related_code(query, fuzzy_search, edit_distance)

            return {"success": True, "query": query, "results": results}

        except Exception as e:
            debug_log(f"Error finding code: {str(e)}")
            return {"error": f"Failed to find code: {str(e)}"}

    async def handle_tool_call(self, tool_name: str, args: Dict[str, Any]) -> Dict[str, Any]:
        """
        Routes a tool call from the AI assistant to the appropriate handler function.

        Args:
            tool_name: The name of the tool to execute.
            args: A dictionary of arguments for the tool.

        Returns:
            A dictionary containing the result of the tool execution.
        """
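        # Maps tool names exposed over MCP to their synchronous handlers.
        # (The annotation assumes `Callable` is imported from `typing`
        # alongside `Dict` at the top of the module.)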
        tool_map: Dict[str, Callable[..., Dict[str, Any]]] = {
            "add_package_to_graph": self.add_package_to_graph_tool,
            "find_dead_code": self.find_dead_code_tool,
            "find_code": self.find_code_tool,
            "analyze_code_relationships": self.analyze_code_relationships_tool,
            "watch_directory": self.watch_directory_tool,
            "execute_cypher_query": self.execute_cypher_query_tool,
            "add_code_to_graph": self.add_code_to_graph_tool,
            "check_job_status": self.check_job_status_tool,
            "list_jobs": self.list_jobs_tool,
            "calculate_cyclomatic_complexity": self.calculate_cyclomatic_complexity_tool,
            "find_most_complex_functions": self.find_most_complex_functions_tool,
            "list_indexed_repositories": self.list_indexed_repositories_tool,
            "delete_repository": self.delete_repository_tool,
            "visualize_graph_query": self.visualize_graph_query_tool,
            "list_watched_paths": self.list_watched_paths_tool,
            "unwatch_directory": self.unwatch_directory_tool
        }
        handler = tool_map.get(tool_name)
        if handler:
            # Run the synchronous tool function in a separate thread to avoid
            # blocking the main asyncio event loop.
            return await asyncio.to_thread(handler, **args)
        else:
            return {"error": f"Unknown tool: {tool_name}"}

    async def run(self):
        """
        Runs the main server loop, listening for JSON-RPC requests from stdin.

        This loop continuously reads lines from stdin, parses them as JSON-RPC
        requests, and routes them to the appropriate handlers (e.g., initialize,
        tools/list, tools/call). The response is then printed to stdout.
        """
        debug_logger("MCP Server is running. Waiting for requests...")
        self.code_watcher.start()

        loop = asyncio.get_running_loop()
        while True:
            try:
                # Read a request from the standard input.
                line = await loop.run_in_executor(None, sys.stdin.readline)
                if not line:
                    debug_logger("Client disconnected (EOF received). Shutting down.")
                    break

                request = json.loads(line.strip())
                method = request.get('method')
                params = request.get('params', {})
                request_id = request.get('id')

                response = {}
                # Route the request based on the JSON-RPC method.
                if method == 'initialize':
                    response = {
                        "jsonrpc": "2.0", "id": request_id,
                        "result": {
                            "protocolVersion": "2025-03-26",
                            "serverInfo": {
                                "name": "CodeGraphContext", "version": "0.1.0",
                                "systemPrompt": LLM_SYSTEM_PROMPT
                            },
                            "capabilities": {"tools": {"listTools": True}},
                        }
                    }
                elif method == 'tools/list':
                    # Return the list of tools defined in _init_tools.
                    response = {
                        "jsonrpc": "2.0", "id": request_id,
                        "result": {"tools": list(self.tools.values())}
                    }
                elif method == 'tools/call':
                    # Execute a tool call and return the result.
                    tool_name = params.get('name')
                    args = params.get('arguments', {})
                    result = await self.handle_tool_call(tool_name, args)

                    if "error" in result:
                        response = {
                            "jsonrpc": "2.0", "id": request_id,
                            "error": {"code": -32000, "message": "Tool execution error", "data": result}
                        }
                    else:
                        response = {
                            "jsonrpc": "2.0", "id": request_id,
                            "result": {"content": [{"type": "text", "text": json.dumps(result, indent=2)}]}
                        }
                elif method == 'notifications/initialized':
                    # This is a notification, no response needed.
                    pass
                else:
                    # Handle unknown methods.
                    if request_id is not None:
                        response = {
                            "jsonrpc": "2.0", "id": request_id,
                            "error": {"code": -32601, "message": f"Method not found: {method}"}
                        }

                # Send the response to standard output if it's not a notification.
                if request_id is not None and response:
                    print(json.dumps(response), flush=True)

            except Exception as e:
                error_logger(f"Error processing request: {e}\n{traceback.format_exc()}")
                # Per JSON-RPC 2.0, the id should be null when it cannot be
                # recovered from the failed request.
                request_id = None
                if 'request' in locals() and isinstance(request, dict):
                    request_id = request.get('id')

                error_response = {
                    "jsonrpc": "2.0", "id": request_id,
                    "error": {"code": -32603, "message": f"Internal error: {str(e)}", "data": traceback.format_exc()}
                }
                print(json.dumps(error_response), flush=True)

    def shutdown(self):
        """Gracefully shuts down the server and its components."""
        debug_logger("Shutting down server...")
        self.code_watcher.stop()
        self.db_manager.close_driver()

```
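
Since `run()` reads newline-delimited JSON-RPC 2.0 requests from stdin and writes responses to stdout, any process that spawns the server can drive it directly. Below is a minimal client sketch; the `python -m codegraphcontext.server` entry point and the `path` argument for `add_code_to_graph` are illustrative assumptions, not the documented CLI:

```python
import json
import subprocess

# Spawn the server over stdio (hypothetical entry point; adjust as needed).
proc = subprocess.Popen(
    ["python", "-m", "codegraphcontext.server"],
    stdin=subprocess.PIPE,
    stdout=subprocess.PIPE,
    text=True,
)

def rpc(method, params=None, request_id=1):
    """Send one JSON-RPC request and read one newline-delimited response."""
    proc.stdin.write(json.dumps({
        "jsonrpc": "2.0", "id": request_id,
        "method": method, "params": params or {},
    }) + "\n")
    proc.stdin.flush()
    return json.loads(proc.stdout.readline())

print(rpc("initialize"))                # handshake; returns serverInfo
print(rpc("tools/list", request_id=2))  # enumerate the registered tools
print(rpc("tools/call", {               # kick off a background indexing job
    "name": "add_code_to_graph",
    "arguments": {"path": "/path/to/repo"},  # argument name assumed
}, request_id=3))
```

Note that notifications (requests without an `id`) receive no response, matching the `request_id is not None` guard in the loop above.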