This is page 22 of 37. Use http://codebase.md/googleapis/genai-toolbox?page={x} to view the full context.

# Directory Structure

```
├── .ci
│   ├── continuous.release.cloudbuild.yaml
│   ├── generate_release_table.sh
│   ├── integration.cloudbuild.yaml
│   ├── quickstart_test
│   │   ├── go.integration.cloudbuild.yaml
│   │   ├── js.integration.cloudbuild.yaml
│   │   ├── py.integration.cloudbuild.yaml
│   │   ├── run_go_tests.sh
│   │   ├── run_js_tests.sh
│   │   ├── run_py_tests.sh
│   │   └── setup_hotels_sample.sql
│   ├── test_with_coverage.sh
│   └── versioned.release.cloudbuild.yaml
├── .github
│   ├── auto-label.yaml
│   ├── blunderbuss.yml
│   ├── CODEOWNERS
│   ├── header-checker-lint.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.yml
│   │   ├── config.yml
│   │   ├── feature_request.yml
│   │   └── question.yml
│   ├── label-sync.yml
│   ├── labels.yaml
│   ├── PULL_REQUEST_TEMPLATE.md
│   ├── release-please.yml
│   ├── renovate.json5
│   ├── sync-repo-settings.yaml
│   └── workflows
│       ├── cloud_build_failure_reporter.yml
│       ├── deploy_dev_docs.yaml
│       ├── deploy_previous_version_docs.yaml
│       ├── deploy_versioned_docs.yaml
│       ├── docs_deploy.yaml
│       ├── docs_preview_clean.yaml
│       ├── docs_preview_deploy.yaml
│       ├── lint.yaml
│       ├── schedule_reporter.yml
│       ├── sync-labels.yaml
│       └── tests.yaml
├── .gitignore
├── .gitmodules
├── .golangci.yaml
├── .hugo
│   ├── archetypes
│   │   └── default.md
│   ├── assets
│   │   ├── icons
│   │   │   └── logo.svg
│   │   └── scss
│   │       ├── _styles_project.scss
│   │       └── _variables_project.scss
│   ├── go.mod
│   ├── go.sum
│   ├── hugo.toml
│   ├── layouts
│   │   ├── _default
│   │   │   └── home.releases.releases
│   │   ├── index.llms-full.txt
│   │   ├── index.llms.txt
│   │   ├── partials
│   │   │   ├── hooks
│   │   │   │   └── head-end.html
│   │   │   ├── navbar-version-selector.html
│   │   │   ├── page-meta-links.html
│   │   │   └── td
│   │   │       └── render-heading.html
│   │   ├── robot.txt
│   │   └── shortcodes
│   │       ├── include.html
│   │       ├── ipynb.html
│   │       └── regionInclude.html
│   ├── package-lock.json
│   ├── package.json
│   └── static
│       ├── favicons
│       │   ├── android-chrome-192x192.png
│       │   ├── android-chrome-512x512.png
│       │   ├── apple-touch-icon.png
│       │   ├── favicon-16x16.png
│       │   ├── favicon-32x32.png
│       │   └── favicon.ico
│       └── js
│           └── w3.js
├── CHANGELOG.md
├── cmd
│   ├── options_test.go
│   ├── options.go
│   ├── root_test.go
│   ├── root.go
│   └── version.txt
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── DEVELOPER.md
├── Dockerfile
├── docs
│   └── en
│       ├── _index.md
│       ├── about
│       │   ├── _index.md
│       │   └── faq.md
│       ├── concepts
│       │   ├── _index.md
│       │   └── telemetry
│       │       ├── index.md
│       │       ├── telemetry_flow.png
│       │       └── telemetry_traces.png
│       ├── getting-started
│       │   ├── _index.md
│       │   ├── colab_quickstart.ipynb
│       │   ├── configure.md
│       │   ├── introduction
│       │   │   ├── _index.md
│       │   │   └── architecture.png
│       │   ├── local_quickstart_go.md
│       │   ├── local_quickstart_js.md
│       │   ├── local_quickstart.md
│       │   ├── mcp_quickstart
│       │   │   ├── _index.md
│       │   │   ├── inspector_tools.png
│       │   │   └── inspector.png
│       │   └── quickstart
│       │       ├── go
│       │       │   ├── genAI
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── genkit
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── langchain
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── openAI
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   └── quickstart_test.go
│       │       ├── golden.txt
│       │       ├── js
│       │       │   ├── genAI
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── genkit
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── langchain
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── llamaindex
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   └── quickstart.test.js
│       │       ├── python
│       │       │   ├── __init__.py
│       │       │   ├── adk
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── core
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── langchain
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── llamaindex
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   └── quickstart_test.py
│       │       └── shared
│       │           ├── cloud_setup.md
│       │           ├── configure_toolbox.md
│       │           └── database_setup.md
│       ├── how-to
│       │   ├── _index.md
│       │   ├── connect_via_geminicli.md
│       │   ├── connect_via_mcp.md
│       │   ├── connect-ide
│       │   │   ├── _index.md
│       │   │   ├── alloydb_pg_admin_mcp.md
│       │   │   ├── alloydb_pg_mcp.md
│       │   │   ├── bigquery_mcp.md
│       │   │   ├── cloud_sql_mssql_admin_mcp.md
│       │   │   ├── cloud_sql_mssql_mcp.md
│       │   │   ├── cloud_sql_mysql_admin_mcp.md
│       │   │   ├── cloud_sql_mysql_mcp.md
│       │   │   ├── cloud_sql_pg_admin_mcp.md
│       │   │   ├── cloud_sql_pg_mcp.md
│       │   │   ├── firestore_mcp.md
│       │   │   ├── looker_mcp.md
│       │   │   ├── mssql_mcp.md
│       │   │   ├── mysql_mcp.md
│       │   │   ├── neo4j_mcp.md
│       │   │   ├── postgres_mcp.md
│       │   │   ├── spanner_mcp.md
│       │   │   └── sqlite_mcp.md
│       │   ├── deploy_docker.md
│       │   ├── deploy_gke.md
│       │   ├── deploy_toolbox.md
│       │   ├── export_telemetry.md
│       │   └── toolbox-ui
│       │       ├── edit-headers.gif
│       │       ├── edit-headers.png
│       │       ├── index.md
│       │       ├── optional-param-checked.png
│       │       ├── optional-param-unchecked.png
│       │       ├── run-tool.gif
│       │       ├── tools.png
│       │       └── toolsets.png
│       ├── reference
│       │   ├── _index.md
│       │   ├── cli.md
│       │   └── prebuilt-tools.md
│       ├── resources
│       │   ├── _index.md
│       │   ├── authServices
│       │   │   ├── _index.md
│       │   │   └── google.md
│       │   ├── sources
│       │   │   ├── _index.md
│       │   │   ├── alloydb-admin.md
│       │   │   ├── alloydb-pg.md
│       │   │   ├── bigquery.md
│       │   │   ├── bigtable.md
│       │   │   ├── cassandra.md
│       │   │   ├── clickhouse.md
│       │   │   ├── cloud-monitoring.md
│       │   │   ├── cloud-sql-admin.md
│       │   │   ├── cloud-sql-mssql.md
│       │   │   ├── cloud-sql-mysql.md
│       │   │   ├── cloud-sql-pg.md
│       │   │   ├── couchbase.md
│       │   │   ├── dataplex.md
│       │   │   ├── dgraph.md
│       │   │   ├── firebird.md
│       │   │   ├── firestore.md
│       │   │   ├── http.md
│       │   │   ├── looker.md
│       │   │   ├── mongodb.md
│       │   │   ├── mssql.md
│       │   │   ├── mysql.md
│       │   │   ├── neo4j.md
│       │   │   ├── oceanbase.md
│       │   │   ├── oracle.md
│       │   │   ├── postgres.md
│       │   │   ├── redis.md
│       │   │   ├── serverless-spark.md
│       │   │   ├── spanner.md
│       │   │   ├── sqlite.md
│       │   │   ├── tidb.md
│       │   │   ├── trino.md
│       │   │   ├── valkey.md
│       │   │   └── yugabytedb.md
│       │   └── tools
│       │       ├── _index.md
│       │       ├── alloydb
│       │       │   ├── _index.md
│       │       │   ├── alloydb-create-cluster.md
│       │       │   ├── alloydb-create-instance.md
│       │       │   ├── alloydb-create-user.md
│       │       │   ├── alloydb-get-cluster.md
│       │       │   ├── alloydb-get-instance.md
│       │       │   ├── alloydb-get-user.md
│       │       │   ├── alloydb-list-clusters.md
│       │       │   ├── alloydb-list-instances.md
│       │       │   ├── alloydb-list-users.md
│       │       │   └── alloydb-wait-for-operation.md
│       │       ├── alloydbainl
│       │       │   ├── _index.md
│       │       │   └── alloydb-ai-nl.md
│       │       ├── bigquery
│       │       │   ├── _index.md
│       │       │   ├── bigquery-analyze-contribution.md
│       │       │   ├── bigquery-conversational-analytics.md
│       │       │   ├── bigquery-execute-sql.md
│       │       │   ├── bigquery-forecast.md
│       │       │   ├── bigquery-get-dataset-info.md
│       │       │   ├── bigquery-get-table-info.md
│       │       │   ├── bigquery-list-dataset-ids.md
│       │       │   ├── bigquery-list-table-ids.md
│       │       │   ├── bigquery-search-catalog.md
│       │       │   └── bigquery-sql.md
│       │       ├── bigtable
│       │       │   ├── _index.md
│       │       │   └── bigtable-sql.md
│       │       ├── cassandra
│       │       │   ├── _index.md
│       │       │   └── cassandra-cql.md
│       │       ├── clickhouse
│       │       │   ├── _index.md
│       │       │   ├── clickhouse-execute-sql.md
│       │       │   ├── clickhouse-list-databases.md
│       │       │   ├── clickhouse-list-tables.md
│       │       │   └── clickhouse-sql.md
│       │       ├── cloudmonitoring
│       │       │   ├── _index.md
│       │       │   └── cloud-monitoring-query-prometheus.md
│       │       ├── cloudsql
│       │       │   ├── _index.md
│       │       │   ├── cloudsqlcreatedatabase.md
│       │       │   ├── cloudsqlcreateusers.md
│       │       │   ├── cloudsqlgetinstances.md
│       │       │   ├── cloudsqllistdatabases.md
│       │       │   ├── cloudsqllistinstances.md
│       │       │   ├── cloudsqlmssqlcreateinstance.md
│       │       │   ├── cloudsqlmysqlcreateinstance.md
│       │       │   ├── cloudsqlpgcreateinstances.md
│       │       │   └── cloudsqlwaitforoperation.md
│       │       ├── couchbase
│       │       │   ├── _index.md
│       │       │   └── couchbase-sql.md
│       │       ├── dataform
│       │       │   ├── _index.md
│       │       │   └── dataform-compile-local.md
│       │       ├── dataplex
│       │       │   ├── _index.md
│       │       │   ├── dataplex-lookup-entry.md
│       │       │   ├── dataplex-search-aspect-types.md
│       │       │   └── dataplex-search-entries.md
│       │       ├── dgraph
│       │       │   ├── _index.md
│       │       │   └── dgraph-dql.md
│       │       ├── firebird
│       │       │   ├── _index.md
│       │       │   ├── firebird-execute-sql.md
│       │       │   └── firebird-sql.md
│       │       ├── firestore
│       │       │   ├── _index.md
│       │       │   ├── firestore-add-documents.md
│       │       │   ├── firestore-delete-documents.md
│       │       │   ├── firestore-get-documents.md
│       │       │   ├── firestore-get-rules.md
│       │       │   ├── firestore-list-collections.md
│       │       │   ├── firestore-query-collection.md
│       │       │   ├── firestore-query.md
│       │       │   ├── firestore-update-document.md
│       │       │   └── firestore-validate-rules.md
│       │       ├── http
│       │       │   ├── _index.md
│       │       │   └── http.md
│       │       ├── looker
│       │       │   ├── _index.md
│       │       │   ├── looker-add-dashboard-element.md
│       │       │   ├── looker-conversational-analytics.md
│       │       │   ├── looker-create-project-file.md
│       │       │   ├── looker-delete-project-file.md
│       │       │   ├── looker-dev-mode.md
│       │       │   ├── looker-get-connection-databases.md
│       │       │   ├── looker-get-connection-schemas.md
│       │       │   ├── looker-get-connection-table-columns.md
│       │       │   ├── looker-get-connection-tables.md
│       │       │   ├── looker-get-connections.md
│       │       │   ├── looker-get-dashboards.md
│       │       │   ├── looker-get-dimensions.md
│       │       │   ├── looker-get-explores.md
│       │       │   ├── looker-get-filters.md
│       │       │   ├── looker-get-looks.md
│       │       │   ├── looker-get-measures.md
│       │       │   ├── looker-get-models.md
│       │       │   ├── looker-get-parameters.md
│       │       │   ├── looker-get-project-file.md
│       │       │   ├── looker-get-project-files.md
│       │       │   ├── looker-get-projects.md
│       │       │   ├── looker-health-analyze.md
│       │       │   ├── looker-health-pulse.md
│       │       │   ├── looker-health-vacuum.md
│       │       │   ├── looker-make-dashboard.md
│       │       │   ├── looker-make-look.md
│       │       │   ├── looker-query-sql.md
│       │       │   ├── looker-query-url.md
│       │       │   ├── looker-query.md
│       │       │   ├── looker-run-look.md
│       │       │   └── looker-update-project-file.md
│       │       ├── mongodb
│       │       │   ├── _index.md
│       │       │   ├── mongodb-aggregate.md
│       │       │   ├── mongodb-delete-many.md
│       │       │   ├── mongodb-delete-one.md
│       │       │   ├── mongodb-find-one.md
│       │       │   ├── mongodb-find.md
│       │       │   ├── mongodb-insert-many.md
│       │       │   ├── mongodb-insert-one.md
│       │       │   ├── mongodb-update-many.md
│       │       │   └── mongodb-update-one.md
│       │       ├── mssql
│       │       │   ├── _index.md
│       │       │   ├── mssql-execute-sql.md
│       │       │   ├── mssql-list-tables.md
│       │       │   └── mssql-sql.md
│       │       ├── mysql
│       │       │   ├── _index.md
│       │       │   ├── mysql-execute-sql.md
│       │       │   ├── mysql-list-active-queries.md
│       │       │   ├── mysql-list-table-fragmentation.md
│       │       │   ├── mysql-list-tables-missing-unique-indexes.md
│       │       │   ├── mysql-list-tables.md
│       │       │   └── mysql-sql.md
│       │       ├── neo4j
│       │       │   ├── _index.md
│       │       │   ├── neo4j-cypher.md
│       │       │   ├── neo4j-execute-cypher.md
│       │       │   └── neo4j-schema.md
│       │       ├── oceanbase
│       │       │   ├── _index.md
│       │       │   ├── oceanbase-execute-sql.md
│       │       │   └── oceanbase-sql.md
│       │       ├── oracle
│       │       │   ├── _index.md
│       │       │   ├── oracle-execute-sql.md
│       │       │   └── oracle-sql.md
│       │       ├── postgres
│       │       │   ├── _index.md
│       │       │   ├── postgres-execute-sql.md
│       │       │   ├── postgres-list-active-queries.md
│       │       │   ├── postgres-list-available-extensions.md
│       │       │   ├── postgres-list-installed-extensions.md
│       │       │   ├── postgres-list-tables.md
│       │       │   ├── postgres-list-views.md
│       │       │   └── postgres-sql.md
│       │       ├── redis
│       │       │   ├── _index.md
│       │       │   └── redis.md
│       │       ├── serverless-spark
│       │       │   ├── _index.md
│       │       │   ├── serverless-spark-get-batch.md
│       │       │   └── serverless-spark-list-batches.md
│       │       ├── spanner
│       │       │   ├── _index.md
│       │       │   ├── spanner-execute-sql.md
│       │       │   ├── spanner-list-tables.md
│       │       │   └── spanner-sql.md
│       │       ├── sqlite
│       │       │   ├── _index.md
│       │       │   ├── sqlite-execute-sql.md
│       │       │   └── sqlite-sql.md
│       │       ├── tidb
│       │       │   ├── _index.md
│       │       │   ├── tidb-execute-sql.md
│       │       │   └── tidb-sql.md
│       │       ├── trino
│       │       │   ├── _index.md
│       │       │   ├── trino-execute-sql.md
│       │       │   └── trino-sql.md
│       │       ├── utility
│       │       │   ├── _index.md
│       │       │   └── wait.md
│       │       ├── valkey
│       │       │   ├── _index.md
│       │       │   └── valkey.md
│       │       └── yuagbytedb
│       │           ├── _index.md
│       │           └── yugabytedb-sql.md
│       ├── samples
│       │   ├── _index.md
│       │   ├── alloydb
│       │   │   ├── _index.md
│       │   │   ├── ai-nl
│       │   │   │   ├── alloydb_ai_nl.ipynb
│       │   │   │   └── index.md
│       │   │   └── mcp_quickstart.md
│       │   ├── bigquery
│       │   │   ├── _index.md
│       │   │   ├── colab_quickstart_bigquery.ipynb
│       │   │   ├── local_quickstart.md
│       │   │   └── mcp_quickstart
│       │   │       ├── _index.md
│       │   │       ├── inspector_tools.png
│       │   │       └── inspector.png
│       │   └── looker
│       │       ├── _index.md
│       │       ├── looker_gemini_oauth
│       │       │   ├── _index.md
│       │       │   ├── authenticated.png
│       │       │   ├── authorize.png
│       │       │   └── registration.png
│       │       ├── looker_gemini.md
│       │       └── looker_mcp_inspector
│       │           ├── _index.md
│       │           ├── inspector_tools.png
│       │           └── inspector.png
│       └── sdks
│           ├── _index.md
│           ├── go-sdk.md
│           ├── js-sdk.md
│           └── python-sdk.md
├── gemini-extension.json
├── go.mod
├── go.sum
├── internal
│   ├── auth
│   │   ├── auth.go
│   │   └── google
│   │       └── google.go
│   ├── log
│   │   ├── handler.go
│   │   ├── log_test.go
│   │   ├── log.go
│   │   └── logger.go
│   ├── prebuiltconfigs
│   │   ├── prebuiltconfigs_test.go
│   │   ├── prebuiltconfigs.go
│   │   └── tools
│   │       ├── alloydb-postgres-admin.yaml
│   │       ├── alloydb-postgres-observability.yaml
│   │       ├── alloydb-postgres.yaml
│   │       ├── bigquery.yaml
│   │       ├── clickhouse.yaml
│   │       ├── cloud-sql-mssql-admin.yaml
│   │       ├── cloud-sql-mssql-observability.yaml
│   │       ├── cloud-sql-mssql.yaml
│   │       ├── cloud-sql-mysql-admin.yaml
│   │       ├── cloud-sql-mysql-observability.yaml
│   │       ├── cloud-sql-mysql.yaml
│   │       ├── cloud-sql-postgres-admin.yaml
│   │       ├── cloud-sql-postgres-observability.yaml
│   │       ├── cloud-sql-postgres.yaml
│   │       ├── dataplex.yaml
│   │       ├── firestore.yaml
│   │       ├── looker-conversational-analytics.yaml
│   │       ├── looker.yaml
│   │       ├── mssql.yaml
│   │       ├── mysql.yaml
│   │       ├── neo4j.yaml
│   │       ├── oceanbase.yaml
│   │       ├── postgres.yaml
│   │       ├── serverless-spark.yaml
│   │       ├── spanner-postgres.yaml
│   │       ├── spanner.yaml
│   │       └── sqlite.yaml
│   ├── server
│   │   ├── api_test.go
│   │   ├── api.go
│   │   ├── common_test.go
│   │   ├── config.go
│   │   ├── mcp
│   │   │   ├── jsonrpc
│   │   │   │   ├── jsonrpc_test.go
│   │   │   │   └── jsonrpc.go
│   │   │   ├── mcp.go
│   │   │   ├── util
│   │   │   │   └── lifecycle.go
│   │   │   ├── v20241105
│   │   │   │   ├── method.go
│   │   │   │   └── types.go
│   │   │   ├── v20250326
│   │   │   │   ├── method.go
│   │   │   │   └── types.go
│   │   │   └── v20250618
│   │   │       ├── method.go
│   │   │       └── types.go
│   │   ├── mcp_test.go
│   │   ├── mcp.go
│   │   ├── server_test.go
│   │   ├── server.go
│   │   ├── static
│   │   │   ├── assets
│   │   │   │   └── mcptoolboxlogo.png
│   │   │   ├── css
│   │   │   │   └── style.css
│   │   │   ├── index.html
│   │   │   ├── js
│   │   │   │   ├── auth.js
│   │   │   │   ├── loadTools.js
│   │   │   │   ├── mainContent.js
│   │   │   │   ├── navbar.js
│   │   │   │   ├── runTool.js
│   │   │   │   ├── toolDisplay.js
│   │   │   │   ├── tools.js
│   │   │   │   └── toolsets.js
│   │   │   ├── tools.html
│   │   │   └── toolsets.html
│   │   ├── web_test.go
│   │   └── web.go
│   ├── sources
│   │   ├── alloydbadmin
│   │   │   ├── alloydbadmin_test.go
│   │   │   └── alloydbadmin.go
│   │   ├── alloydbpg
│   │   │   ├── alloydb_pg_test.go
│   │   │   └── alloydb_pg.go
│   │   ├── bigquery
│   │   │   ├── bigquery_test.go
│   │   │   └── bigquery.go
│   │   ├── bigtable
│   │   │   ├── bigtable_test.go
│   │   │   └── bigtable.go
│   │   ├── cassandra
│   │   │   ├── cassandra_test.go
│   │   │   └── cassandra.go
│   │   ├── clickhouse
│   │   │   ├── clickhouse_test.go
│   │   │   └── clickhouse.go
│   │   ├── cloudmonitoring
│   │   │   ├── cloud_monitoring_test.go
│   │   │   └── cloud_monitoring.go
│   │   ├── cloudsqladmin
│   │   │   ├── cloud_sql_admin_test.go
│   │   │   └── cloud_sql_admin.go
│   │   ├── cloudsqlmssql
│   │   │   ├── cloud_sql_mssql_test.go
│   │   │   └── cloud_sql_mssql.go
│   │   ├── cloudsqlmysql
│   │   │   ├── cloud_sql_mysql_test.go
│   │   │   └── cloud_sql_mysql.go
│   │   ├── cloudsqlpg
│   │   │   ├── cloud_sql_pg_test.go
│   │   │   └── cloud_sql_pg.go
│   │   ├── couchbase
│   │   │   ├── couchbase_test.go
│   │   │   └── couchbase.go
│   │   ├── dataplex
│   │   │   ├── dataplex_test.go
│   │   │   └── dataplex.go
│   │   ├── dgraph
│   │   │   ├── dgraph_test.go
│   │   │   └── dgraph.go
│   │   ├── dialect.go
│   │   ├── firebird
│   │   │   ├── firebird_test.go
│   │   │   └── firebird.go
│   │   ├── firestore
│   │   │   ├── firestore_test.go
│   │   │   └── firestore.go
│   │   ├── http
│   │   │   ├── http_test.go
│   │   │   └── http.go
│   │   ├── ip_type.go
│   │   ├── looker
│   │   │   ├── looker_test.go
│   │   │   └── looker.go
│   │   ├── mongodb
│   │   │   ├── mongodb_test.go
│   │   │   └── mongodb.go
│   │   ├── mssql
│   │   │   ├── mssql_test.go
│   │   │   └── mssql.go
│   │   ├── mysql
│   │   │   ├── mysql_test.go
│   │   │   └── mysql.go
│   │   ├── neo4j
│   │   │   ├── neo4j_test.go
│   │   │   └── neo4j.go
│   │   ├── oceanbase
│   │   │   ├── oceanbase_test.go
│   │   │   └── oceanbase.go
│   │   ├── oracle
│   │   │   └── oracle.go
│   │   ├── postgres
│   │   │   ├── postgres_test.go
│   │   │   └── postgres.go
│   │   ├── redis
│   │   │   ├── redis_test.go
│   │   │   └── redis.go
│   │   ├── serverlessspark
│   │   │   ├── serverlessspark_test.go
│   │   │   └── serverlessspark.go
│   │   ├── sources.go
│   │   ├── spanner
│   │   │   ├── spanner_test.go
│   │   │   └── spanner.go
│   │   ├── sqlite
│   │   │   ├── sqlite_test.go
│   │   │   └── sqlite.go
│   │   ├── tidb
│   │   │   ├── tidb_test.go
│   │   │   └── tidb.go
│   │   ├── trino
│   │   │   ├── trino_test.go
│   │   │   └── trino.go
│   │   ├── util.go
│   │   ├── valkey
│   │   │   ├── valkey_test.go
│   │   │   └── valkey.go
│   │   └── yugabytedb
│   │       ├── yugabytedb_test.go
│   │       └── yugabytedb.go
│   ├── telemetry
│   │   ├── instrumentation.go
│   │   └── telemetry.go
│   ├── testutils
│   │   └── testutils.go
│   ├── tools
│   │   ├── alloydb
│   │   │   ├── alloydbcreatecluster
│   │   │   │   ├── alloydbcreatecluster_test.go
│   │   │   │   └── alloydbcreatecluster.go
│   │   │   ├── alloydbcreateinstance
│   │   │   │   ├── alloydbcreateinstance_test.go
│   │   │   │   └── alloydbcreateinstance.go
│   │   │   ├── alloydbcreateuser
│   │   │   │   ├── alloydbcreateuser_test.go
│   │   │   │   └── alloydbcreateuser.go
│   │   │   ├── alloydbgetcluster
│   │   │   │   ├── alloydbgetcluster_test.go
│   │   │   │   └── alloydbgetcluster.go
│   │   │   ├── alloydbgetinstance
│   │   │   │   ├── alloydbgetinstance_test.go
│   │   │   │   └── alloydbgetinstance.go
│   │   │   ├── alloydbgetuser
│   │   │   │   ├── alloydbgetuser_test.go
│   │   │   │   └── alloydbgetuser.go
│   │   │   ├── alloydblistclusters
│   │   │   │   ├── alloydblistclusters_test.go
│   │   │   │   └── alloydblistclusters.go
│   │   │   ├── alloydblistinstances
│   │   │   │   ├── alloydblistinstances_test.go
│   │   │   │   └── alloydblistinstances.go
│   │   │   ├── alloydblistusers
│   │   │   │   ├── alloydblistusers_test.go
│   │   │   │   └── alloydblistusers.go
│   │   │   └── alloydbwaitforoperation
│   │   │       ├── alloydbwaitforoperation_test.go
│   │   │       └── alloydbwaitforoperation.go
│   │   ├── alloydbainl
│   │   │   ├── alloydbainl_test.go
│   │   │   └── alloydbainl.go
│   │   ├── bigquery
│   │   │   ├── bigqueryanalyzecontribution
│   │   │   │   ├── bigqueryanalyzecontribution_test.go
│   │   │   │   └── bigqueryanalyzecontribution.go
│   │   │   ├── bigquerycommon
│   │   │   │   ├── table_name_parser_test.go
│   │   │   │   ├── table_name_parser.go
│   │   │   │   └── util.go
│   │   │   ├── bigqueryconversationalanalytics
│   │   │   │   ├── bigqueryconversationalanalytics_test.go
│   │   │   │   └── bigqueryconversationalanalytics.go
│   │   │   ├── bigqueryexecutesql
│   │   │   │   ├── bigqueryexecutesql_test.go
│   │   │   │   └── bigqueryexecutesql.go
│   │   │   ├── bigqueryforecast
│   │   │   │   ├── bigqueryforecast_test.go
│   │   │   │   └── bigqueryforecast.go
│   │   │   ├── bigquerygetdatasetinfo
│   │   │   │   ├── bigquerygetdatasetinfo_test.go
│   │   │   │   └── bigquerygetdatasetinfo.go
│   │   │   ├── bigquerygettableinfo
│   │   │   │   ├── bigquerygettableinfo_test.go
│   │   │   │   └── bigquerygettableinfo.go
│   │   │   ├── bigquerylistdatasetids
│   │   │   │   ├── bigquerylistdatasetids_test.go
│   │   │   │   └── bigquerylistdatasetids.go
│   │   │   ├── bigquerylisttableids
│   │   │   │   ├── bigquerylisttableids_test.go
│   │   │   │   └── bigquerylisttableids.go
│   │   │   ├── bigquerysearchcatalog
│   │   │   │   ├── bigquerysearchcatalog_test.go
│   │   │   │   └── bigquerysearchcatalog.go
│   │   │   └── bigquerysql
│   │   │       ├── bigquerysql_test.go
│   │   │       └── bigquerysql.go
│   │   ├── bigtable
│   │   │   ├── bigtable_test.go
│   │   │   └── bigtable.go
│   │   ├── cassandra
│   │   │   └── cassandracql
│   │   │       ├── cassandracql_test.go
│   │   │       └── cassandracql.go
│   │   ├── clickhouse
│   │   │   ├── clickhouseexecutesql
│   │   │   │   ├── clickhouseexecutesql_test.go
│   │   │   │   └── clickhouseexecutesql.go
│   │   │   ├── clickhouselistdatabases
│   │   │   │   ├── clickhouselistdatabases_test.go
│   │   │   │   └── clickhouselistdatabases.go
│   │   │   ├── clickhouselisttables
│   │   │   │   ├── clickhouselisttables_test.go
│   │   │   │   └── clickhouselisttables.go
│   │   │   └── clickhousesql
│   │   │       ├── clickhousesql_test.go
│   │   │       └── clickhousesql.go
│   │   ├── cloudmonitoring
│   │   │   ├── cloudmonitoring_test.go
│   │   │   └── cloudmonitoring.go
│   │   ├── cloudsql
│   │   │   ├── cloudsqlcreatedatabase
│   │   │   │   ├── cloudsqlcreatedatabase_test.go
│   │   │   │   └── cloudsqlcreatedatabase.go
│   │   │   ├── cloudsqlcreateusers
│   │   │   │   ├── cloudsqlcreateusers_test.go
│   │   │   │   └── cloudsqlcreateusers.go
│   │   │   ├── cloudsqlgetinstances
│   │   │   │   ├── cloudsqlgetinstances_test.go
│   │   │   │   └── cloudsqlgetinstances.go
│   │   │   ├── cloudsqllistdatabases
│   │   │   │   ├── cloudsqllistdatabases_test.go
│   │   │   │   └── cloudsqllistdatabases.go
│   │   │   ├── cloudsqllistinstances
│   │   │   │   ├── cloudsqllistinstances_test.go
│   │   │   │   └── cloudsqllistinstances.go
│   │   │   └── cloudsqlwaitforoperation
│   │   │       ├── cloudsqlwaitforoperation_test.go
│   │   │       └── cloudsqlwaitforoperation.go
│   │   ├── cloudsqlmssql
│   │   │   └── cloudsqlmssqlcreateinstance
│   │   │       ├── cloudsqlmssqlcreateinstance_test.go
│   │   │       └── cloudsqlmssqlcreateinstance.go
│   │   ├── cloudsqlmysql
│   │   │   └── cloudsqlmysqlcreateinstance
│   │   │       ├── cloudsqlmysqlcreateinstance_test.go
│   │   │       └── cloudsqlmysqlcreateinstance.go
│   │   ├── cloudsqlpg
│   │   │   └── cloudsqlpgcreateinstances
│   │   │       ├── cloudsqlpgcreateinstances_test.go
│   │   │       └── cloudsqlpgcreateinstances.go
│   │   ├── common_test.go
│   │   ├── common.go
│   │   ├── couchbase
│   │   │   ├── couchbase_test.go
│   │   │   └── couchbase.go
│   │   ├── dataform
│   │   │   └── dataformcompilelocal
│   │   │       ├── dataformcompilelocal_test.go
│   │   │       └── dataformcompilelocal.go
│   │   ├── dataplex
│   │   │   ├── dataplexlookupentry
│   │   │   │   ├── dataplexlookupentry_test.go
│   │   │   │   └── dataplexlookupentry.go
│   │   │   ├── dataplexsearchaspecttypes
│   │   │   │   ├── dataplexsearchaspecttypes_test.go
│   │   │   │   └── dataplexsearchaspecttypes.go
│   │   │   └── dataplexsearchentries
│   │   │       ├── dataplexsearchentries_test.go
│   │   │       └── dataplexsearchentries.go
│   │   ├── dgraph
│   │   │   ├── dgraph_test.go
│   │   │   └── dgraph.go
│   │   ├── firebird
│   │   │   ├── firebirdexecutesql
│   │   │   │   ├── firebirdexecutesql_test.go
│   │   │   │   └── firebirdexecutesql.go
│   │   │   └── firebirdsql
│   │   │       ├── firebirdsql_test.go
│   │   │       └── firebirdsql.go
│   │   ├── firestore
│   │   │   ├── firestoreadddocuments
│   │   │   │   ├── firestoreadddocuments_test.go
│   │   │   │   └── firestoreadddocuments.go
│   │   │   ├── firestoredeletedocuments
│   │   │   │   ├── firestoredeletedocuments_test.go
│   │   │   │   └── firestoredeletedocuments.go
│   │   │   ├── firestoregetdocuments
│   │   │   │   ├── firestoregetdocuments_test.go
│   │   │   │   └── firestoregetdocuments.go
│   │   │   ├── firestoregetrules
│   │   │   │   ├── firestoregetrules_test.go
│   │   │   │   └── firestoregetrules.go
│   │   │   ├── firestorelistcollections
│   │   │   │   ├── firestorelistcollections_test.go
│   │   │   │   └── firestorelistcollections.go
│   │   │   ├── firestorequery
│   │   │   │   ├── firestorequery_test.go
│   │   │   │   └── firestorequery.go
│   │   │   ├── firestorequerycollection
│   │   │   │   ├── firestorequerycollection_test.go
│   │   │   │   └── firestorequerycollection.go
│   │   │   ├── firestoreupdatedocument
│   │   │   │   ├── firestoreupdatedocument_test.go
│   │   │   │   └── firestoreupdatedocument.go
│   │   │   ├── firestorevalidaterules
│   │   │   │   ├── firestorevalidaterules_test.go
│   │   │   │   └── firestorevalidaterules.go
│   │   │   └── util
│   │   │       ├── converter_test.go
│   │   │       ├── converter.go
│   │   │       ├── validator_test.go
│   │   │       └── validator.go
│   │   ├── http
│   │   │   ├── http_test.go
│   │   │   └── http.go
│   │   ├── http_method.go
│   │   ├── looker
│   │   │   ├── lookeradddashboardelement
│   │   │   │   ├── lookeradddashboardelement_test.go
│   │   │   │   └── lookeradddashboardelement.go
│   │   │   ├── lookercommon
│   │   │   │   ├── lookercommon_test.go
│   │   │   │   └── lookercommon.go
│   │   │   ├── lookerconversationalanalytics
│   │   │   │   ├── lookerconversationalanalytics_test.go
│   │   │   │   └── lookerconversationalanalytics.go
│   │   │   ├── lookercreateprojectfile
│   │   │   │   ├── lookercreateprojectfile_test.go
│   │   │   │   └── lookercreateprojectfile.go
│   │   │   ├── lookerdeleteprojectfile
│   │   │   │   ├── lookerdeleteprojectfile_test.go
│   │   │   │   └── lookerdeleteprojectfile.go
│   │   │   ├── lookerdevmode
│   │   │   │   ├── lookerdevmode_test.go
│   │   │   │   └── lookerdevmode.go
│   │   │   ├── lookergetconnectiondatabases
│   │   │   │   ├── lookergetconnectiondatabases_test.go
│   │   │   │   └── lookergetconnectiondatabases.go
│   │   │   ├── lookergetconnections
│   │   │   │   ├── lookergetconnections_test.go
│   │   │   │   └── lookergetconnections.go
│   │   │   ├── lookergetconnectionschemas
│   │   │   │   ├── lookergetconnectionschemas_test.go
│   │   │   │   └── lookergetconnectionschemas.go
│   │   │   ├── lookergetconnectiontablecolumns
│   │   │   │   ├── lookergetconnectiontablecolumns_test.go
│   │   │   │   └── lookergetconnectiontablecolumns.go
│   │   │   ├── lookergetconnectiontables
│   │   │   │   ├── lookergetconnectiontables_test.go
│   │   │   │   └── lookergetconnectiontables.go
│   │   │   ├── lookergetdashboards
│   │   │   │   ├── lookergetdashboards_test.go
│   │   │   │   └── lookergetdashboards.go
│   │   │   ├── lookergetdimensions
│   │   │   │   ├── lookergetdimensions_test.go
│   │   │   │   └── lookergetdimensions.go
│   │   │   ├── lookergetexplores
│   │   │   │   ├── lookergetexplores_test.go
│   │   │   │   └── lookergetexplores.go
│   │   │   ├── lookergetfilters
│   │   │   │   ├── lookergetfilters_test.go
│   │   │   │   └── lookergetfilters.go
│   │   │   ├── lookergetlooks
│   │   │   │   ├── lookergetlooks_test.go
│   │   │   │   └── lookergetlooks.go
│   │   │   ├── lookergetmeasures
│   │   │   │   ├── lookergetmeasures_test.go
│   │   │   │   └── lookergetmeasures.go
│   │   │   ├── lookergetmodels
│   │   │   │   ├── lookergetmodels_test.go
│   │   │   │   └── lookergetmodels.go
│   │   │   ├── lookergetparameters
│   │   │   │   ├── lookergetparameters_test.go
│   │   │   │   └── lookergetparameters.go
│   │   │   ├── lookergetprojectfile
│   │   │   │   ├── lookergetprojectfile_test.go
│   │   │   │   └── lookergetprojectfile.go
│   │   │   ├── lookergetprojectfiles
│   │   │   │   ├── lookergetprojectfiles_test.go
│   │   │   │   └── lookergetprojectfiles.go
│   │   │   ├── lookergetprojects
│   │   │   │   ├── lookergetprojects_test.go
│   │   │   │   └── lookergetprojects.go
│   │   │   ├── lookerhealthanalyze
│   │   │   │   ├── lookerhealthanalyze_test.go
│   │   │   │   └── lookerhealthanalyze.go
│   │   │   ├── lookerhealthpulse
│   │   │   │   ├── lookerhealthpulse_test.go
│   │   │   │   └── lookerhealthpulse.go
│   │   │   ├── lookerhealthvacuum
│   │   │   │   ├── lookerhealthvacuum_test.go
│   │   │   │   └── lookerhealthvacuum.go
│   │   │   ├── lookermakedashboard
│   │   │   │   ├── lookermakedashboard_test.go
│   │   │   │   └── lookermakedashboard.go
│   │   │   ├── lookermakelook
│   │   │   │   ├── lookermakelook_test.go
│   │   │   │   └── lookermakelook.go
│   │   │   ├── lookerquery
│   │   │   │   ├── lookerquery_test.go
│   │   │   │   └── lookerquery.go
│   │   │   ├── lookerquerysql
│   │   │   │   ├── lookerquerysql_test.go
│   │   │   │   └── lookerquerysql.go
│   │   │   ├── lookerqueryurl
│   │   │   │   ├── lookerqueryurl_test.go
│   │   │   │   └── lookerqueryurl.go
│   │   │   ├── lookerrunlook
│   │   │   │   ├── lookerrunlook_test.go
│   │   │   │   └── lookerrunlook.go
│   │   │   └── lookerupdateprojectfile
│   │   │       ├── lookerupdateprojectfile_test.go
│   │   │       └── lookerupdateprojectfile.go
│   │   ├── mongodb
│   │   │   ├── mongodbaggregate
│   │   │   │   ├── mongodbaggregate_test.go
│   │   │   │   └── mongodbaggregate.go
│   │   │   ├── mongodbdeletemany
│   │   │   │   ├── mongodbdeletemany_test.go
│   │   │   │   └── mongodbdeletemany.go
│   │   │   ├── mongodbdeleteone
│   │   │   │   ├── mongodbdeleteone_test.go
│   │   │   │   └── mongodbdeleteone.go
│   │   │   ├── mongodbfind
│   │   │   │   ├── mongodbfind_test.go
│   │   │   │   └── mongodbfind.go
│   │   │   ├── mongodbfindone
│   │   │   │   ├── mongodbfindone_test.go
│   │   │   │   └── mongodbfindone.go
│   │   │   ├── mongodbinsertmany
│   │   │   │   ├── mongodbinsertmany_test.go
│   │   │   │   └── mongodbinsertmany.go
│   │   │   ├── mongodbinsertone
│   │   │   │   ├── mongodbinsertone_test.go
│   │   │   │   └── mongodbinsertone.go
│   │   │   ├── mongodbupdatemany
│   │   │   │   ├── mongodbupdatemany_test.go
│   │   │   │   └── mongodbupdatemany.go
│   │   │   └── mongodbupdateone
│   │   │       ├── mongodbupdateone_test.go
│   │   │       └── mongodbupdateone.go
│   │   ├── mssql
│   │   │   ├── mssqlexecutesql
│   │   │   │   ├── mssqlexecutesql_test.go
│   │   │   │   └── mssqlexecutesql.go
│   │   │   ├── mssqllisttables
│   │   │   │   ├── mssqllisttables_test.go
│   │   │   │   └── mssqllisttables.go
│   │   │   └── mssqlsql
│   │   │       ├── mssqlsql_test.go
│   │   │       └── mssqlsql.go
│   │   ├── mysql
│   │   │   ├── mysqlcommon
│   │   │   │   └── mysqlcommon.go
│   │   │   ├── mysqlexecutesql
│   │   │   │   ├── mysqlexecutesql_test.go
│   │   │   │   └── mysqlexecutesql.go
│   │   │   ├── mysqllistactivequeries
│   │   │   │   ├── mysqllistactivequeries_test.go
│   │   │   │   └── mysqllistactivequeries.go
│   │   │   ├── mysqllisttablefragmentation
│   │   │   │   ├── mysqllisttablefragmentation_test.go
│   │   │   │   └── mysqllisttablefragmentation.go
│   │   │   ├── mysqllisttables
│   │   │   │   ├── mysqllisttables_test.go
│   │   │   │   └── mysqllisttables.go
│   │   │   ├── mysqllisttablesmissinguniqueindexes
│   │   │   │   ├── mysqllisttablesmissinguniqueindexes_test.go
│   │   │   │   └── mysqllisttablesmissinguniqueindexes.go
│   │   │   └── mysqlsql
│   │   │       ├── mysqlsql_test.go
│   │   │       └── mysqlsql.go
│   │   ├── neo4j
│   │   │   ├── neo4jcypher
│   │   │   │   ├── neo4jcypher_test.go
│   │   │   │   └── neo4jcypher.go
│   │   │   ├── neo4jexecutecypher
│   │   │   │   ├── classifier
│   │   │   │   │   ├── classifier_test.go
│   │   │   │   │   └── classifier.go
│   │   │   │   ├── neo4jexecutecypher_test.go
│   │   │   │   └── neo4jexecutecypher.go
│   │   │   └── neo4jschema
│   │   │       ├── cache
│   │   │       │   ├── cache_test.go
│   │   │       │   └── cache.go
│   │   │       ├── helpers
│   │   │       │   ├── helpers_test.go
│   │   │       │   └── helpers.go
│   │   │       ├── neo4jschema_test.go
│   │   │       ├── neo4jschema.go
│   │   │       └── types
│   │   │           └── types.go
│   │   ├── oceanbase
│   │   │   ├── oceanbaseexecutesql
│   │   │   │   ├── oceanbaseexecutesql_test.go
│   │   │   │   └── oceanbaseexecutesql.go
│   │   │   └── oceanbasesql
│   │   │       ├── oceanbasesql_test.go
│   │   │       └── oceanbasesql.go
│   │   ├── oracle
│   │   │   ├── oracleexecutesql
│   │   │   │   └── oracleexecutesql.go
│   │   │   └── oraclesql
│   │   │       └── oraclesql.go
│   │   ├── parameters_test.go
│   │   ├── parameters.go
│   │   ├── postgres
│   │   │   ├── postgresexecutesql
│   │   │   │   ├── postgresexecutesql_test.go
│   │   │   │   └── postgresexecutesql.go
│   │   │   ├── postgreslistactivequeries
│   │   │   │   ├── postgreslistactivequeries_test.go
│   │   │   │   └── postgreslistactivequeries.go
│   │   │   ├── postgreslistavailableextensions
│   │   │   │   ├── postgreslistavailableextensions_test.go
│   │   │   │   └── postgreslistavailableextensions.go
│   │   │   ├── postgreslistinstalledextensions
│   │   │   │   ├── postgreslistinstalledextensions_test.go
│   │   │   │   └── postgreslistinstalledextensions.go
│   │   │   ├── postgreslisttables
│   │   │   │   ├── postgreslisttables_test.go
│   │   │   │   └── postgreslisttables.go
│   │   │   ├── postgreslistviews
│   │   │   │   ├── postgreslistviews_test.go
│   │   │   │   └── postgreslistviews.go
│   │   │   └── postgressql
│   │   │       ├── postgressql_test.go
│   │   │       └── postgressql.go
│   │   ├── redis
│   │   │   ├── redis_test.go
│   │   │   └── redis.go
│   │   ├── serverlessspark
│   │   │   ├── serverlesssparkgetbatch
│   │   │   │   ├── serverlesssparkgetbatch_test.go
│   │   │   │   └── serverlesssparkgetbatch.go
│   │   │   └── serverlesssparklistbatches
│   │   │       ├── serverlesssparklistbatches_test.go
│   │   │       └── serverlesssparklistbatches.go
│   │   ├── spanner
│   │   │   ├── spannerexecutesql
│   │   │   │   ├── spannerexecutesql_test.go
│   │   │   │   └── spannerexecutesql.go
│   │   │   ├── spannerlisttables
│   │   │   │   ├── spannerlisttables_test.go
│   │   │   │   └── spannerlisttables.go
│   │   │   └── spannersql
│   │   │       ├── spanner_test.go
│   │   │       └── spannersql.go
│   │   ├── sqlite
│   │   │   ├── sqliteexecutesql
│   │   │   │   ├── sqliteexecutesql_test.go
│   │   │   │   └── sqliteexecutesql.go
│   │   │   └── sqlitesql
│   │   │       ├── sqlitesql_test.go
│   │   │       └── sqlitesql.go
│   │   ├── tidb
│   │   │   ├── tidbexecutesql
│   │   │   │   ├── tidbexecutesql_test.go
│   │   │   │   └── tidbexecutesql.go
│   │   │   └── tidbsql
│   │   │       ├── tidbsql_test.go
│   │   │       └── tidbsql.go
│   │   ├── tools_test.go
│   │   ├── tools.go
│   │   ├── toolsets.go
│   │   ├── trino
│   │   │   ├── trinoexecutesql
│   │   │   │   ├── trinoexecutesql_test.go
│   │   │   │   └── trinoexecutesql.go
│   │   │   └── trinosql
│   │   │       ├── trinosql_test.go
│   │   │       └── trinosql.go
│   │   ├── utility
│   │   │   └── wait
│   │   │       ├── wait_test.go
│   │   │       └── wait.go
│   │   ├── valkey
│   │   │   ├── valkey_test.go
│   │   │   └── valkey.go
│   │   └── yugabytedbsql
│   │       ├── yugabytedbsql_test.go
│   │       └── yugabytedbsql.go
│   └── util
│       └── util.go
├── LICENSE
├── logo.png
├── main.go
├── MCP-TOOLBOX-EXTENSION.md
├── README.md
└── tests
    ├── alloydb
    │   ├── alloydb_integration_test.go
    │   └── alloydb_wait_for_operation_test.go
    ├── alloydbainl
    │   └── alloydb_ai_nl_integration_test.go
    ├── alloydbpg
    │   └── alloydb_pg_integration_test.go
    ├── auth.go
    ├── bigquery
    │   └── bigquery_integration_test.go
    ├── bigtable
    │   └── bigtable_integration_test.go
    ├── cassandra
    │   └── cassandra_integration_test.go
    ├── clickhouse
    │   └── clickhouse_integration_test.go
    ├── cloudmonitoring
    │   └── cloud_monitoring_integration_test.go
    ├── cloudsql
    │   ├── cloud_sql_create_database_test.go
    │   ├── cloud_sql_create_users_test.go
    │   ├── cloud_sql_get_instances_test.go
    │   ├── cloud_sql_list_databases_test.go
    │   ├── cloudsql_list_instances_test.go
    │   └── cloudsql_wait_for_operation_test.go
    ├── cloudsqlmssql
    │   ├── cloud_sql_mssql_create_instance_integration_test.go
    │   └── cloud_sql_mssql_integration_test.go
    ├── cloudsqlmysql
    │   ├── cloud_sql_mysql_create_instance_integration_test.go
    │   └── cloud_sql_mysql_integration_test.go
    ├── cloudsqlpg
    │   ├── cloud_sql_pg_create_instances_test.go
    │   └── cloud_sql_pg_integration_test.go
    ├── common.go
    ├── couchbase
    │   └── couchbase_integration_test.go
    ├── dataform
    │   └── dataform_integration_test.go
    ├── dataplex
    │   └── dataplex_integration_test.go
    ├── dgraph
    │   └── dgraph_integration_test.go
    ├── firebird
    │   └── firebird_integration_test.go
    ├── firestore
    │   └── firestore_integration_test.go
    ├── http
    │   └── http_integration_test.go
    ├── looker
    │   └── looker_integration_test.go
    ├── mongodb
    │   └── mongodb_integration_test.go
    ├── mssql
    │   └── mssql_integration_test.go
    ├── mysql
    │   └── mysql_integration_test.go
    ├── neo4j
    │   └── neo4j_integration_test.go
    ├── oceanbase
    │   └── oceanbase_integration_test.go
    ├── option.go
    ├── oracle
    │   └── oracle_integration_test.go
    ├── postgres
    │   └── postgres_integration_test.go
    ├── redis
    │   └── redis_test.go
    ├── server.go
    ├── serverlessspark
    │   └── serverless_spark_integration_test.go
    ├── source.go
    ├── spanner
    │   └── spanner_integration_test.go
    ├── sqlite
    │   └── sqlite_integration_test.go
    ├── tidb
    │   └── tidb_integration_test.go
    ├── tool.go
    ├── trino
    │   └── trino_integration_test.go
    ├── utility
    │   └── wait_integration_test.go
    ├── valkey
    │   └── valkey_test.go
    └── yugabytedb
        └── yugabytedb_integration_test.go
```

# Files

--------------------------------------------------------------------------------
/docs/en/concepts/telemetry/index.md:
--------------------------------------------------------------------------------

```markdown
---
title: "Telemetry"
type: docs
weight: 2
description: >
  An overview of telemetry and observability in Toolbox.
---

## About

Telemetry data such as logs, metrics, and traces helps developers understand
the internal state of the system. This page walks through the different types of
telemetry and observability available in Toolbox.

Toolbox exports logs via standard out/err, and exports traces and metrics
through [OpenTelemetry](https://opentelemetry.io/). Additional flags can be
passed to Toolbox to customize logging behavior or to export telemetry
through a specific [exporter](#exporter).

## Logging

The following flags can be used to customize Toolbox logging:

| **Flag**           | **Description**                                                                         |
|--------------------|-----------------------------------------------------------------------------------------|
| `--log-level`      | Preferred log level, allowed values: `debug`, `info`, `warn`, `error`. Default: `info`. |
| `--logging-format` | Preferred logging format, allowed values: `standard`, `json`. Default: `standard`.      |

**Example:**

```bash
./toolbox --tools-file "tools.yaml" --log-level warn --logging-format json
```

### Level

Toolbox supports the following log levels:

| **Log level** | **Description**                                                                                                                                                                      |
|---------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| Debug         | Debug logs typically contain information that is only useful during the debugging phase and may be of little value in production.                                                     |
| Info          | Info logs include information about successful operations within the application, such as a successful start, pause, or exit of the application.                                      |
| Warn          | Warning logs are slightly less severe than error conditions. They do not indicate that an error has occurred, but that an operation might fail in the future if action is not taken now. |
| Error         | Error logs are assigned to events that contain an application error message.                                                                                                          |

Toolbox only outputs logs that are at or above the configured severity level.
The table above lists the log levels in order of increasing severity.
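
For instance, a sketch of the filtering behavior, using only the flags
documented above:

```bash
# At the default level (info), debug entries are suppressed.
./toolbox --tools-file "tools.yaml"

# At warn, debug and info entries are suppressed; warn and error still appear.
./toolbox --tools-file "tools.yaml" --log-level warn
```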

### Format

Toolbox supports both standard and structured logging formats.

Standard logging outputs each log entry as a string:

```
2024-11-12T15:08:11.451377-08:00 INFO "Initialized 0 sources.\n"
```

Structured logging outputs each log entry as JSON:

```
{
  "timestamp":"2024-11-04T16:45:11.987299-08:00",
  "severity":"ERROR",
  "logging.googleapis.com/sourceLocation":{...},
  "message":"unable to parse tool file at \"tools.yaml\": \"cloud-sql-postgres1\" is not a valid kind of data source"
}
```

{{< notice tip >}}
`logging.googleapis.com/sourceLocation` shows the source code
location information associated with the log entry, if any.
{{< /notice >}}

## Telemetry

Toolbox supports exporting metrics and traces to any OpenTelemetry-compatible
exporter.

### Metrics

A metric is a measurement of a service captured at runtime. The collected data
can be used to provide important insights into the service. Toolbox provides the
following custom metrics:

| **Metric Name**                    | **Description**                                         |
|------------------------------------|---------------------------------------------------------|
| `toolbox.server.toolset.get.count` | Counts the number of toolset manifest requests served   |
| `toolbox.server.tool.get.count`    | Counts the number of tool manifest requests served      |
| `toolbox.server.tool.get.invoke`   | Counts the number of tool invocation requests served    |
| `toolbox.server.mcp.sse.count`     | Counts the number of MCP SSE connection requests served |
| `toolbox.server.mcp.post.count`    | Counts the number of MCP POST requests served           |

All custom metrics have the following attributes/labels:

| **Metric Attributes**      | **Description**                                           |
|----------------------------|-----------------------------------------------------------|
| `toolbox.name`             | Name of the toolset or tool, if applicable.               |
| `toolbox.operation.status` | Operation status code, for example: `success`, `failure`. |
| `toolbox.sse.sessionId`    | Session ID for the SSE connection, if applicable.          |
| `toolbox.method`           | Method of JSON-RPC request, if applicable.                |

### Traces

A trace is a tree of spans that shows the path that a request makes through an
application.

Spans generated by the Toolbox server are prefixed with `toolbox/server/`. For
example, when a user runs Toolbox, it generates spans like the following, with
`toolbox/server/init` as the root span:

![traces](./telemetry_traces.png)

### Resource Attributes

All metrics and traces generated within Toolbox are associated with a
unified [resource][resource]. The resource attributes included are:

| **Resource Name**                                                                         | **Description**                                                                                                                                               |
|-------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------|
| [TelemetrySDK](https://pkg.go.dev/go.opentelemetry.io/otel/sdk/resource#WithTelemetrySDK) | TelemetrySDK version info.                                                                                                                                    |
| [OS](https://pkg.go.dev/go.opentelemetry.io/otel/sdk/resource#WithOS)                     | OS attributes including OS description and OS type.                                                                                                           |
| [Container](https://pkg.go.dev/go.opentelemetry.io/otel/sdk/resource#WithContainer)       | Container attributes including container ID, if applicable.                                                                                                   |
| [Host](https://pkg.go.dev/go.opentelemetry.io/otel/sdk/resource#WithHost)                 | Host attributes including host name.                                                                                                                          |
| [SchemaURL](https://pkg.go.dev/go.opentelemetry.io/otel/sdk/resource#WithSchemaURL)       | Sets the schema URL for the configured resource.                                                                                                              |
| `service.name`                                                                            | OpenTelemetry service name. Defaults to `toolbox`. Users can set the service name via the `--telemetry-service-name` flag (see [Flags](#flags) below) to distinguish between different Toolbox services. |
| `service.version`                                                                         | The version of Toolbox used.                                                                                                                                  |

[resource]: https://opentelemetry.io/docs/languages/go/resources/

### Exporter

An exporter is responsible for processing and exporting telemetry data. Toolbox
generates telemetry data in the OpenTelemetry Protocol (OTLP), and users can
choose any exporter designed to support it. Toolbox provides two exporter
implementations to choose from: the Google Cloud Exporter, which sends data
directly to the backend, or the OTLP Exporter paired with a Collector, which
acts as a proxy that collects and exports data to the telemetry backend of the
user's choice.

![telemetry_flow](./telemetry_flow.png)

#### Google Cloud Exporter

The Google Cloud Exporter directly exports telemetry to Google Cloud Monitoring.
It utilizes the [GCP Metric Exporter][gcp-metric-exporter] and [GCP Trace
Exporter][gcp-trace-exporter].

[gcp-metric-exporter]:
    https://github.com/GoogleCloudPlatform/opentelemetry-operations-go/tree/main/exporter/metric
[gcp-trace-exporter]:
    https://github.com/GoogleCloudPlatform/opentelemetry-operations-go/tree/main/exporter/trace

{{< notice note >}}
If you're using Google Cloud Monitoring, the following APIs will need to be
enabled:

- [Cloud Logging API](https://cloud.google.com/logging/docs/api/enable-api)
- [Cloud Monitoring API](https://cloud.google.com/monitoring/api/enable-api)
- [Cloud Trace API](https://console.cloud.google.com/apis/enableflow?apiid=cloudtrace.googleapis.com)
{{< /notice >}}

#### OTLP Exporter

This implementation uses the default OTLP Exporter over HTTP for
[metrics][otlp-metric-exporter] and [traces][otlp-trace-exporter]. You can use
this exporter if you choose to export your telemetry data to a Collector.

[otlp-metric-exporter]: https://opentelemetry.io/docs/languages/go/exporters/#otlp-traces-over-http
[otlp-trace-exporter]: https://opentelemetry.io/docs/languages/go/exporters/#otlp-traces-over-http

### Collector

A collector acts as a proxy between the application and the telemetry backend.
It receives telemetry data, transforms it, and then exports it to backends that
can store it permanently. Toolbox provides an option to export telemetry data
to the user's choice of backend(s) compatible with the OpenTelemetry Protocol
(OTLP). If you would like to use a collector, refer to
[Export Telemetry using the OTel Collector](../../how-to/export_telemetry.md).
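
For illustration, a minimal sketch of a Collector configuration that receives
OTLP over HTTP (the endpoint the `--telemetry-otlp` flag would point at) and
simply prints what it receives. The `debug` exporter assumes a recent
OpenTelemetry Collector release; a real deployment would swap in its backend's
exporter.

```yaml
receivers:
  otlp:
    protocols:
      http:
        endpoint: 0.0.0.0:4318   # where Toolbox's --telemetry-otlp flag points

exporters:
  debug: {}                      # stand-in; replace with your backend's exporter

service:
  pipelines:
    traces:
      receivers: [otlp]
      exporters: [debug]
    metrics:
      receivers: [otlp]
      exporters: [debug]
```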

### Flags

The following flags are used to determine Toolbox's telemetry configuration:

| **Flag**                   | **Type** | **Description**                                                                                                  |
|----------------------------|----------|------------------------------------------------------------------------------------------------------------------|
| `--telemetry-gcp`          | bool     | Enable exporting directly to Google Cloud Monitoring. Default is `false`.                                      |
| `--telemetry-otlp`         | string   | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. "<http://127.0.0.1:4318>"). |
| `--telemetry-service-name` | string   | Sets the value of the `service.name` resource attribute. Default is `toolbox`.                                 |

In addition to the flags noted above, you can further configure OpenTelemetry
via [General SDK Configuration][sdk-configuration] environment variables.

[sdk-configuration]:
    https://opentelemetry.io/docs/languages/sdk-configuration/general/

**Examples:**

To enable the Google Cloud Exporter:

```bash
./toolbox --telemetry-gcp
```

To enable the OTLP Exporter, provide the Collector endpoint:

```bash
./toolbox --telemetry-otlp="http://127.0.0.1:4553"
```
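
To layer standard SDK environment variables on top of the flags (a sketch;
`OTEL_SERVICE_NAME` is part of the General SDK Configuration linked above, and
its precedence relative to `--telemetry-service-name` is not specified here):

```bash
# OTEL_SERVICE_NAME sets the service.name resource attribute via the SDK
export OTEL_SERVICE_NAME="toolbox-staging"
./toolbox --telemetry-otlp="http://127.0.0.1:4318"
```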

```

--------------------------------------------------------------------------------
/internal/tools/firestore/firestoreupdatedocument/firestoreupdatedocument_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package firestoreupdatedocument

import (
	"context"
	"strings"
	"testing"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/sources"
	firestoreds "github.com/googleapis/genai-toolbox/internal/sources/firestore"
	"github.com/googleapis/genai-toolbox/internal/tools"
)

func TestNewConfig(t *testing.T) {
	tests := []struct {
		name    string
		yaml    string
		want    Config
		wantErr bool
	}{
		{
			name: "valid config",
			yaml: `
name: test-update-document
kind: firestore-update-document
source: test-firestore
description: Update a document in Firestore
authRequired:
  - google-oauth
`,
			want: Config{
				Name:         "test-update-document",
				Kind:         "firestore-update-document",
				Source:       "test-firestore",
				Description:  "Update a document in Firestore",
				AuthRequired: []string{"google-oauth"},
			},
			wantErr: false,
		},
		{
			name: "minimal config",
			yaml: `
name: test-update-document
kind: firestore-update-document
source: test-firestore
description: Update a document
`,
			want: Config{
				Name:        "test-update-document",
				Kind:        "firestore-update-document",
				Source:      "test-firestore",
				Description: "Update a document",
			},
			wantErr: false,
		},
		{
			name: "invalid yaml",
			yaml: `
name: test-update-document
kind: [invalid
`,
			wantErr: true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			decoder := yaml.NewDecoder(strings.NewReader(tt.yaml))
			got, err := newConfig(context.Background(), "test-update-document", decoder)

			if tt.wantErr {
				if err == nil {
					t.Fatalf("expected error but got none")
				}
				return
			}

			if err != nil {
				t.Fatalf("unexpected error: %v", err)
			}

			if diff := cmp.Diff(tt.want, got); diff != "" {
				t.Fatalf("config mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

func TestConfig_ToolConfigKind(t *testing.T) {
	cfg := Config{}
	got := cfg.ToolConfigKind()
	want := "firestore-update-document"
	if got != want {
		t.Fatalf("ToolConfigKind() = %v, want %v", got, want)
	}
}

func TestConfig_Initialize(t *testing.T) {
	tests := []struct {
		name    string
		config  Config
		sources map[string]sources.Source
		wantErr bool
		errMsg  string
	}{
		{
			name: "valid initialization",
			config: Config{
				Name:        "test-update-document",
				Kind:        "firestore-update-document",
				Source:      "test-firestore",
				Description: "Update a document",
			},
			sources: map[string]sources.Source{
				"test-firestore": &firestoreds.Source{},
			},
			wantErr: false,
		},
		{
			name: "source not found",
			config: Config{
				Name:        "test-update-document",
				Kind:        "firestore-update-document",
				Source:      "missing-source",
				Description: "Update a document",
			},
			sources: map[string]sources.Source{},
			wantErr: true,
			errMsg:  "no source named \"missing-source\" configured",
		},
		{
			name: "incompatible source",
			config: Config{
				Name:        "test-update-document",
				Kind:        "firestore-update-document",
				Source:      "wrong-source",
				Description: "Update a document",
			},
			sources: map[string]sources.Source{
				"wrong-source": &mockIncompatibleSource{},
			},
			wantErr: true,
			errMsg:  "invalid source for \"firestore-update-document\" tool",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tool, err := tt.config.Initialize(tt.sources)

			if tt.wantErr {
				if err == nil {
					t.Fatalf("expected error but got none")
				}
				if tt.errMsg != "" && !strings.Contains(err.Error(), tt.errMsg) {
					t.Fatalf("error message %q does not contain %q", err.Error(), tt.errMsg)
				}
				return
			}

			if err != nil {
				t.Fatalf("unexpected error: %v", err)
			}

			if tool == nil {
				t.Fatalf("expected tool to be non-nil")
			}

			// Verify tool properties
			actualTool := tool.(Tool)
			if actualTool.Name != tt.config.Name {
				t.Fatalf("tool.Name = %v, want %v", actualTool.Name, tt.config.Name)
			}
			if actualTool.Kind != "firestore-update-document" {
				t.Fatalf("tool.Kind = %v, want %v", actualTool.Kind, "firestore-update-document")
			}
			if diff := cmp.Diff(tt.config.AuthRequired, actualTool.AuthRequired); diff != "" {
				t.Fatalf("AuthRequired mismatch (-want +got):\n%s", diff)
			}
			if actualTool.Parameters == nil {
				t.Fatalf("expected Parameters to be non-nil")
			}
			if len(actualTool.Parameters) != 4 {
				t.Fatalf("len(Parameters) = %v, want 4", len(actualTool.Parameters))
			}
		})
	}
}

func TestTool_ParseParams(t *testing.T) {
	tool := Tool{
		Parameters: tools.Parameters{
			tools.NewStringParameter("documentPath", "Document path"),
			tools.NewMapParameter("documentData", "Document data", ""),
			tools.NewArrayParameterWithRequired("updateMask", "Update mask", false, tools.NewStringParameter("field", "Field")),
			tools.NewBooleanParameterWithDefault("returnData", false, "Return data"),
		},
	}

	tests := []struct {
		name    string
		data    map[string]any
		claims  map[string]map[string]any
		wantErr bool
	}{
		{
			name: "valid params with all fields",
			data: map[string]any{
				"documentPath": "users/user1",
				"documentData": map[string]any{
					"name": map[string]any{"stringValue": "John"},
				},
				"updateMask": []any{"name"},
				"returnData": true,
			},
			wantErr: false,
		},
		{
			name: "valid params without optional fields",
			data: map[string]any{
				"documentPath": "users/user1",
				"documentData": map[string]any{
					"name": map[string]any{"stringValue": "John"},
				},
			},
			wantErr: false,
		},
		{
			name: "missing required documentPath",
			data: map[string]any{
				"documentData": map[string]any{
					"name": map[string]any{"stringValue": "John"},
				},
			},
			wantErr: true,
		},
		{
			name: "missing required documentData",
			data: map[string]any{
				"documentPath": "users/user1",
			},
			wantErr: true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			params, err := tool.ParseParams(tt.data, tt.claims)

			if tt.wantErr {
				if err == nil {
					t.Fatalf("expected error but got none")
				}
				return
			}

			if err != nil {
				t.Fatalf("unexpected error: %v", err)
			}

			if params == nil {
				t.Fatalf("expected params to be non-nil")
			}
		})
	}
}

func TestTool_Manifest(t *testing.T) {
	tool := Tool{
		manifest: tools.Manifest{
			Description: "Test description",
			Parameters: []tools.ParameterManifest{
				{
					Name:        "documentPath",
					Type:        "string",
					Description: "Document path",
					Required:    true,
				},
			},
			AuthRequired: []string{"google-oauth"},
		},
	}

	manifest := tool.Manifest()
	if manifest.Description != "Test description" {
		t.Fatalf("manifest.Description = %v, want %v", manifest.Description, "Test description")
	}
	if len(manifest.Parameters) != 1 {
		t.Fatalf("len(manifest.Parameters) = %v, want 1", len(manifest.Parameters))
	}
	if diff := cmp.Diff([]string{"google-oauth"}, manifest.AuthRequired); diff != "" {
		t.Fatalf("AuthRequired mismatch (-want +got):\n%s", diff)
	}
}

func TestTool_McpManifest(t *testing.T) {
	tool := Tool{
		mcpManifest: tools.McpManifest{
			Name:        "test-update-document",
			Description: "Test description",
			InputSchema: tools.McpToolsSchema{
				Type: "object",
				Properties: map[string]tools.ParameterMcpManifest{
					"documentPath": {
						Type:        "string",
						Description: "Document path",
					},
				},
				Required: []string{"documentPath"},
			},
		},
	}

	mcpManifest := tool.McpManifest()
	if mcpManifest.Name != "test-update-document" {
		t.Fatalf("mcpManifest.Name = %v, want %v", mcpManifest.Name, "test-update-document")
	}
	if mcpManifest.Description != "Test description" {
		t.Fatalf("mcpManifest.Description = %v, want %v", mcpManifest.Description, "Test description")
	}
	if mcpManifest.InputSchema.Type == "" {
		t.Fatalf("expected InputSchema to be non-empty")
	}
}

func TestTool_Authorized(t *testing.T) {
	tests := []struct {
		name                 string
		authRequired         []string
		verifiedAuthServices []string
		want                 bool
	}{
		{
			name:                 "no auth required",
			authRequired:         nil,
			verifiedAuthServices: nil,
			want:                 true,
		},
		{
			name:                 "auth required and provided",
			authRequired:         []string{"google-oauth"},
			verifiedAuthServices: []string{"google-oauth"},
			want:                 true,
		},
		{
			name:                 "auth required but not provided",
			authRequired:         []string{"google-oauth"},
			verifiedAuthServices: []string{"api-key"},
			want:                 false,
		},
		{
			name:                 "multiple auth required, one provided",
			authRequired:         []string{"google-oauth", "api-key"},
			verifiedAuthServices: []string{"google-oauth"},
			want:                 true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tool := Tool{
				AuthRequired: tt.authRequired,
			}
			got := tool.Authorized(tt.verifiedAuthServices)
			if got != tt.want {
				t.Fatalf("Authorized() = %v, want %v", got, tt.want)
			}
		})
	}
}

func TestGetFieldValue(t *testing.T) {
	tests := []struct {
		name   string
		data   map[string]any
		path   string
		want   any
		exists bool
	}{
		{
			name: "simple field",
			data: map[string]any{
				"name": "John",
			},
			path:   "name",
			want:   "John",
			exists: true,
		},
		{
			name: "nested field",
			data: map[string]any{
				"user": map[string]any{
					"name": "John",
				},
			},
			path:   "user.name",
			want:   "John",
			exists: true,
		},
		{
			name: "deeply nested field",
			data: map[string]any{
				"level1": map[string]any{
					"level2": map[string]any{
						"level3": "value",
					},
				},
			},
			path:   "level1.level2.level3",
			want:   "value",
			exists: true,
		},
		{
			name: "non-existent field",
			data: map[string]any{
				"name": "John",
			},
			path:   "age",
			want:   nil,
			exists: false,
		},
		{
			name: "non-existent nested field",
			data: map[string]any{
				"user": map[string]any{
					"name": "John",
				},
			},
			path:   "user.age",
			want:   nil,
			exists: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, exists := getFieldValue(tt.data, tt.path)
			if exists != tt.exists {
				t.Fatalf("exists = %v, want %v", exists, tt.exists)
			}
			if tt.exists {
				if diff := cmp.Diff(tt.want, got); diff != "" {
					t.Fatalf("value mismatch (-want +got):\n%s", diff)
				}
			}
		})
	}
}

// mockIncompatibleSource is a mock source that doesn't implement compatibleSource
type mockIncompatibleSource struct{}

func (m *mockIncompatibleSource) SourceKind() string {
	return "mock"
}

```

--------------------------------------------------------------------------------
/internal/tools/cloudsql/cloudsqlwaitforoperation/cloudsqlwaitforoperation.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cloudsqlwaitforoperation

import (
	"context"
	"encoding/json"
	"fmt"
	"regexp"
	"strings"
	"text/template"
	"time"

	yaml "github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/googleapis/genai-toolbox/internal/sources/cloudsqladmin"
	"github.com/googleapis/genai-toolbox/internal/tools"
)

const kind string = "cloud-sql-wait-for-operation"

var cloudSQLConnectionMessageTemplate = `Your Cloud SQL resource is ready.

To connect, please configure your environment. The method depends on how you are running the toolbox:

**If running locally via stdio:**
Update the MCP server configuration with the following environment variables:
` + "```json" + `
{
  "mcpServers": {
    "cloud-sql-{{.DBType}}": {
      "command": "./PATH/TO/toolbox",
      "args": ["--prebuilt","cloud-sql-{{.DBType}}","--stdio"],
      "env": {
          "CLOUD_SQL_{{.DBTypeUpper}}_PROJECT": "{{.Project}}",
          "CLOUD_SQL_{{.DBTypeUpper}}_REGION": "{{.Region}}",
          "CLOUD_SQL_{{.DBTypeUpper}}_INSTANCE": "{{.Instance}}",
          "CLOUD_SQL_{{.DBTypeUpper}}_DATABASE": "{{.Database}}",
          "CLOUD_SQL_{{.DBTypeUpper}}_USER": "<your-user>",
          "CLOUD_SQL_{{.DBTypeUpper}}_PASSWORD": "<your-password>"
      }
    }
  }
}
` + "```" + `

**If running remotely:**
For remote deployments, you will need to set the following environment variables in your deployment configuration:
` + "```" + `
CLOUD_SQL_{{.DBTypeUpper}}_PROJECT={{.Project}}
CLOUD_SQL_{{.DBTypeUpper}}_REGION={{.Region}}
CLOUD_SQL_{{.DBTypeUpper}}_INSTANCE={{.Instance}}
CLOUD_SQL_{{.DBTypeUpper}}_DATABASE={{.Database}}
CLOUD_SQL_{{.DBTypeUpper}}_USER=<your-user>
CLOUD_SQL_{{.DBTypeUpper}}_PASSWORD=<your-password>
` + "```" + `

Please refer to the official documentation for guidance on deploying the toolbox:
- Deploying the Toolbox: https://googleapis.github.io/genai-toolbox/how-to/deploy_toolbox/
- Deploying on GKE: https://googleapis.github.io/genai-toolbox/how-to/deploy_gke/
`

func init() {
	if !tools.Register(kind, newConfig) {
		panic(fmt.Sprintf("tool kind %q already registered", kind))
	}
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
	actual := Config{Name: name}
	if err := decoder.DecodeContext(ctx, &actual); err != nil {
		return nil, err
	}
	return actual, nil
}

// Config defines the configuration for the wait-for-operation tool.
type Config struct {
	Name         string   `yaml:"name" validate:"required"`
	Kind         string   `yaml:"kind" validate:"required"`
	Source       string   `yaml:"source" validate:"required"`
	Description  string   `yaml:"description"`
	AuthRequired []string `yaml:"authRequired"`
	BaseURL      string   `yaml:"baseURL"`

	// Polling configuration
	Delay      string  `yaml:"delay"`
	MaxDelay   string  `yaml:"maxDelay"`
	Multiplier float64 `yaml:"multiplier"`
	MaxRetries int     `yaml:"maxRetries"`
}
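
// A minimal tools.yaml sketch for this tool, for illustration only (the tool
// and source names below are placeholders; delay and maxDelay use
// time.ParseDuration syntax, matching the defaults applied in Initialize):
//
//	my-wait-tool:
//	  kind: cloud-sql-wait-for-operation
//	  source: my-cloud-sql-admin-source
//	  delay: 3s
//	  maxDelay: 4m
//	  multiplier: 2.0
//	  maxRetries: 10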

// validate interface
var _ tools.ToolConfig = Config{}

// ToolConfigKind returns the kind of the tool.
func (cfg Config) ToolConfigKind() string {
	return kind
}

// Initialize initializes the tool from the configuration.
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
	rawS, ok := srcs[cfg.Source]
	if !ok {
		return nil, fmt.Errorf("no source named %q configured", cfg.Source)
	}

	s, ok := rawS.(*cloudsqladmin.Source)
	if !ok {
		return nil, fmt.Errorf("invalid source for %q tool: source kind must be `cloud-sql-admin`", kind)
	}

	allParameters := tools.Parameters{
		tools.NewStringParameter("project", "The project ID"),
		tools.NewStringParameter("operation", "The operation ID"),
	}
	paramManifest := allParameters.Manifest()

	description := cfg.Description
	if description == "" {
		description = "This will poll on operations API until the operation is done. For checking operation status we need projectId and operationId. Once instance is created give follow up steps on how to use the variables to bring data plane MCP server up in local and remote setup."
	}
	mcpManifest := tools.GetMcpManifest(cfg.Name, description, cfg.AuthRequired, allParameters)

	var delay time.Duration
	if cfg.Delay == "" {
		delay = 3 * time.Second
	} else {
		var err error
		delay, err = time.ParseDuration(cfg.Delay)
		if err != nil {
			return nil, fmt.Errorf("invalid value for delay: %w", err)
		}
	}

	var maxDelay time.Duration
	if cfg.MaxDelay == "" {
		maxDelay = 4 * time.Minute
	} else {
		var err error
		maxDelay, err = time.ParseDuration(cfg.MaxDelay)
		if err != nil {
			return nil, fmt.Errorf("invalid value for maxDelay: %w", err)
		}
	}

	multiplier := cfg.Multiplier
	if multiplier == 0 {
		multiplier = 2.0
	}

	maxRetries := cfg.MaxRetries
	if maxRetries == 0 {
		maxRetries = 10
	}

	return Tool{
		Name:         cfg.Name,
		Kind:         kind,
		AuthRequired: cfg.AuthRequired,
		Source:       s,
		AllParams:    allParameters,
		manifest:     tools.Manifest{Description: description, Parameters: paramManifest, AuthRequired: cfg.AuthRequired},
		mcpManifest:  mcpManifest,
		Delay:        delay,
		MaxDelay:     maxDelay,
		Multiplier:   multiplier,
		MaxRetries:   maxRetries,
	}, nil
}

// Tool represents the wait-for-operation tool.
type Tool struct {
	Name         string   `yaml:"name"`
	Kind         string   `yaml:"kind"`
	Description  string   `yaml:"description"`
	AuthRequired []string `yaml:"authRequired"`

	Source    *cloudsqladmin.Source
	AllParams tools.Parameters `yaml:"allParams"`

	// Polling configuration
	Delay      time.Duration
	MaxDelay   time.Duration
	Multiplier float64
	MaxRetries int

	manifest    tools.Manifest
	mcpManifest tools.McpManifest
}

// Invoke executes the tool's logic.
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
	paramsMap := params.AsMap()

	project, ok := paramsMap["project"].(string)
	if !ok {
		return nil, fmt.Errorf("missing 'project' parameter")
	}
	operationID, ok := paramsMap["operation"].(string)
	if !ok {
		return nil, fmt.Errorf("missing 'operation' parameter")
	}

	service, err := t.Source.GetService(ctx, string(accessToken))
	if err != nil {
		return nil, err
	}

	ctx, cancel := context.WithTimeout(ctx, 30*time.Minute)
	defer cancel()

	delay := t.Delay
	maxDelay := t.MaxDelay
	multiplier := t.Multiplier
	maxRetries := t.MaxRetries
	retries := 0

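	// Poll with exponential backoff: sleep for delay between attempts, multiply
	// the delay by multiplier after each attempt (capped at maxDelay), and stop
	// after maxRetries attempts or when the 30-minute timeout above fires.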
	for retries < maxRetries {
		select {
		case <-ctx.Done():
			return nil, fmt.Errorf("timed out waiting for operation: %w", ctx.Err())
		default:
		}

		op, err := service.Operations.Get(project, operationID).Do()
		if err != nil {
			fmt.Printf("error getting operation: %s, retrying in %v\n", err, delay)
		} else {
			if op.Status == "DONE" {
				if op.Error != nil {
					var errorBytes []byte
					errorBytes, err = json.Marshal(op.Error)
					if err != nil {
						return nil, fmt.Errorf("operation finished with error but could not marshal error object: %w", err)
					}
					return nil, fmt.Errorf("operation finished with error: %s", string(errorBytes))
				}

				var opBytes []byte
				opBytes, err = op.MarshalJSON()
				if err != nil {
					return nil, fmt.Errorf("could not marshal operation: %w", err)
				}

				var data map[string]any
				if err := json.Unmarshal(opBytes, &data); err != nil {
					return nil, fmt.Errorf("could not unmarshal operation: %w", err)
				}

				if msg, ok := t.generateCloudSQLConnectionMessage(data); ok {
					return msg, nil
				}
				return string(opBytes), nil
			}
			fmt.Printf("Operation not complete, retrying in %v\n", delay)
		}

		time.Sleep(delay)
		delay = time.Duration(float64(delay) * multiplier)
		if delay > maxDelay {
			delay = maxDelay
		}
		retries++
	}
	return nil, fmt.Errorf("exceeded max retries waiting for operation")
}

// ParseParams parses the parameters for the tool.
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
	return tools.ParseParams(t.AllParams, data, claims)
}

// Manifest returns the tool's manifest.
func (t Tool) Manifest() tools.Manifest {
	return t.manifest
}

// McpManifest returns the tool's MCP manifest.
func (t Tool) McpManifest() tools.McpManifest {
	return t.mcpManifest
}

// Authorized checks if the tool is authorized.
func (t Tool) Authorized(verifiedAuthServices []string) bool {
	return true
}

func (t Tool) RequiresClientAuthorization() bool {
	return t.Source.UseClientAuthorization()
}

func (t Tool) generateCloudSQLConnectionMessage(opResponse map[string]any) (string, bool) {
	operationType, ok := opResponse["operationType"].(string)
	if !ok || operationType != "CREATE_DATABASE" {
		return "", false
	}

	targetLink, ok := opResponse["targetLink"].(string)
	if !ok {
		return "", false
	}

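	// targetLink is expected to look like
	// ".../projects/<project>/instances/<instance>/databases/<database>".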
	r := regexp.MustCompile(`/projects/([^/]+)/instances/([^/]+)/databases/([^/]+)`)
	matches := r.FindStringSubmatch(targetLink)
	if len(matches) < 4 {
		return "", false
	}
	project := matches[1]
	instance := matches[2]
	database := matches[3]

	instanceData, err := t.fetchInstanceData(context.Background(), project, instance)
	if err != nil {
		fmt.Printf("error fetching instance data: %v\n", err)
		return "", false
	}

	region, ok := instanceData["region"].(string)
	if !ok {
		return "", false
	}

	databaseVersion, ok := instanceData["databaseVersion"].(string)
	if !ok {
		return "", false
	}

	var dbType string
	if strings.Contains(databaseVersion, "POSTGRES") {
		dbType = "postgres"
	} else if strings.Contains(databaseVersion, "MYSQL") {
		dbType = "mysql"
	} else if strings.Contains(databaseVersion, "SQLSERVER") {
		dbType = "mssql"
	} else {
		return "", false
	}

	tmpl, err := template.New("cloud-sql-connection").Parse(cloudSQLConnectionMessageTemplate)
	if err != nil {
		return fmt.Sprintf("template parsing error: %v", err), false
	}

	data := struct {
		Project     string
		Region      string
		Instance    string
		DBType      string
		DBTypeUpper string
		Database    string
	}{
		Project:     project,
		Region:      region,
		Instance:    instance,
		DBType:      dbType,
		DBTypeUpper: strings.ToUpper(dbType),
		Database:    database,
	}

	var b strings.Builder
	if err := tmpl.Execute(&b, data); err != nil {
		return fmt.Sprintf("template execution error: %v", err), false
	}

	return b.String(), true
}

func (t Tool) fetchInstanceData(ctx context.Context, project, instance string) (map[string]any, error) {
	service, err := t.Source.GetService(ctx, "")
	if err != nil {
		return nil, err
	}

	resp, err := service.Instances.Get(project, instance).Do()
	if err != nil {
		return nil, fmt.Errorf("error getting instance: %w", err)
	}

	var data map[string]any
	var b []byte
	b, err = resp.MarshalJSON()
	if err != nil {
		return nil, fmt.Errorf("error marshalling response: %w", err)
	}
	if err := json.Unmarshal(b, &data); err != nil {
		return nil, fmt.Errorf("error unmarshalling response body: %w", err)
	}
	return data, nil
}

```

--------------------------------------------------------------------------------
/internal/tools/looker/lookercommon/lookercommon.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//	http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package lookercommon

import (
	"context"
	"crypto/tls"
	"fmt"
	"net/http"
	"net/url"
	"strings"

	"github.com/googleapis/genai-toolbox/internal/tools"
	"github.com/googleapis/genai-toolbox/internal/util"
	rtl "github.com/looker-open-source/sdk-codegen/go/rtl"
	v4 "github.com/looker-open-source/sdk-codegen/go/sdk/v4"
	"github.com/thlib/go-timezone-local/tzlocal"
)

// Make types for RoundTripper
type transportWithAuthHeader struct {
	Base      http.RoundTripper
	AuthToken tools.AccessToken
}

func (t *transportWithAuthHeader) RoundTrip(req *http.Request) (*http.Response, error) {
	req.Header.Set("x-looker-appid", "go-sdk")
	req.Header.Set("Authorization", string(t.AuthToken))
	return t.Base.RoundTrip(req)
}

func GetLookerSDK(useClientOAuth bool, config *rtl.ApiSettings, client *v4.LookerSDK, accessToken tools.AccessToken) (*v4.LookerSDK, error) {

	if useClientOAuth {
		if accessToken == "" {
			return nil, fmt.Errorf("no access token supplied with request")
		}
		// Configure base transport with TLS
		transport := &http.Transport{
			TLSClientConfig: &tls.Config{
				InsecureSkipVerify: !config.VerifySsl,
			},
		}

		// Build transport for end user token
		newTransport := &transportWithAuthHeader{
			Base:      transport,
			AuthToken: accessToken,
		}

		// return SDK with new Transport
		return v4.NewLookerSDK(&rtl.AuthSession{
			Config: *config,
			Client: http.Client{Transport: newTransport},
		}), nil
	}

	if client == nil {
		return nil, fmt.Errorf("client id or client secret not valid")
	}
	return client, nil
}

const (
	DimensionsFields = "fields(dimensions(name,type,label,label_short,description,synonyms,tags,hidden,suggestable,suggestions,suggest_dimension,suggest_explore))"
	FiltersFields    = "fields(filters(name,type,label,label_short,description,synonyms,tags,hidden,suggestable,suggestions,suggest_dimension,suggest_explore))"
	MeasuresFields   = "fields(measures(name,type,label,label_short,description,synonyms,tags,hidden,suggestable,suggestions,suggest_dimension,suggest_explore))"
	ParametersFields = "fields(parameters(name,type,label,label_short,description,synonyms,tags,hidden,suggestable,suggestions,suggest_dimension,suggest_explore))"
)

// ExtractLookerFieldProperties extracts common properties from Looker field objects.
func ExtractLookerFieldProperties(ctx context.Context, fields *[]v4.LookmlModelExploreField, showHiddenFields bool) ([]any, error) {
	data := make([]any, 0)

	// Handle nil fields pointer
	if fields == nil {
		return data, nil
	}

	logger, err := util.LoggerFromContext(ctx)
	if err != nil {
		// This should ideally not happen if the context is properly set up.
		// Return the empty slice with an error, or handle as appropriate for your error strategy.
		return data, fmt.Errorf("error getting logger from context in ExtractLookerFieldProperties: %v", err)
	}

	for _, v := range *fields {
		logger.DebugContext(ctx, "Got response element of %v\n", v)
		if v.Name != nil && strings.HasSuffix(*v.Name, "_raw") {
			continue
		}
		if !showHiddenFields && v.Hidden != nil && *v.Hidden {
			continue
		}
		vMap := make(map[string]any)
		if v.Name != nil {
			vMap["name"] = *v.Name
		}
		if v.Type != nil {
			vMap["type"] = *v.Type
		}
		if v.Label != nil {
			vMap["label"] = *v.Label
		}
		if v.LabelShort != nil {
			vMap["label_short"] = *v.LabelShort
		}
		if v.Description != nil {
			vMap["description"] = *v.Description
		}
		if v.Tags != nil && len(*v.Tags) > 0 {
			vMap["tags"] = *v.Tags
		}
		if v.Synonyms != nil && len(*v.Synonyms) > 0 {
			vMap["synonyms"] = *v.Synonyms
		}
		if v.Suggestable != nil && *v.Suggestable {
			if v.Suggestions != nil && len(*v.Suggestions) > 0 {
				vMap["suggestions"] = *v.Suggestions
			}
			if v.SuggestExplore != nil && v.SuggestDimension != nil {
				vMap["suggest_explore"] = *v.SuggestExplore
				vMap["suggest_dimension"] = *v.SuggestDimension
			}
		}
		logger.DebugContext(ctx, "Converted to %v\n", vMap)
		data = append(data, vMap)
	}

	return data, nil
}

// CheckLookerExploreFields checks if the Fields object in LookmlModelExplore is nil before accessing its sub-fields.
func CheckLookerExploreFields(resp *v4.LookmlModelExplore) error {
	if resp == nil || resp.Fields == nil {
		return fmt.Errorf("looker API response or its fields object is nil")
	}
	return nil
}

func GetFieldParameters() tools.Parameters {
	modelParameter := tools.NewStringParameter("model", "The model containing the explore.")
	exploreParameter := tools.NewStringParameter("explore", "The explore containing the fields.")
	return tools.Parameters{modelParameter, exploreParameter}
}

func GetQueryParameters() tools.Parameters {
	modelParameter := tools.NewStringParameter("model", "The model containing the explore.")
	exploreParameter := tools.NewStringParameter("explore", "The explore to be queried.")
	fieldsParameter := tools.NewArrayParameter("fields",
		"The fields to be retrieved.",
		tools.NewStringParameter("field", "A field to be returned in the query"),
	)
	filtersParameter := tools.NewMapParameterWithDefault("filters",
		map[string]any{},
		"The filters for the query",
		"",
	)
	pivotsParameter := tools.NewArrayParameterWithDefault("pivots",
		[]any{},
		"The query pivots (must be included in fields as well).",
		tools.NewStringParameter("pivot_field", "A field to be used as a pivot in the query"),
	)
	sortsParameter := tools.NewArrayParameterWithDefault("sorts",
		[]any{},
		"The sorts like \"field.id desc 0\".",
		tools.NewStringParameter("sort_field", "A field to be used as a sort in the query"),
	)
	limitParameter := tools.NewIntParameterWithDefault("limit", 500, "The row limit.")
	tzParameter := tools.NewStringParameterWithRequired("tz", "The query timezone.", false)

	return tools.Parameters{
		modelParameter,
		exploreParameter,
		fieldsParameter,
		filtersParameter,
		pivotsParameter,
		sortsParameter,
		limitParameter,
		tzParameter,
	}
}

func ProcessFieldArgs(ctx context.Context, params tools.ParamValues) (*string, *string, error) {
	mapParams := params.AsMap()
	model, ok := mapParams["model"].(string)
	if !ok {
		return nil, nil, fmt.Errorf("'model' must be a string, got %T", mapParams["model"])
	}
	explore, ok := mapParams["explore"].(string)
	if !ok {
		return nil, nil, fmt.Errorf("'explore' must be a string, got %T", mapParams["explore"])
	}
	return &model, &explore, nil
}

func ProcessQueryArgs(ctx context.Context, params tools.ParamValues) (*v4.WriteQuery, error) {
	logger, err := util.LoggerFromContext(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
	}

	logger.DebugContext(ctx, "params = ", params)
	paramsMap := params.AsMap()

	f, err := tools.ConvertAnySliceToTyped(paramsMap["fields"].([]any), "string")
	if err != nil {
		return nil, fmt.Errorf("can't convert fields to array of strings: %s", err)
	}
	fields := f.([]string)
	filters := paramsMap["filters"].(map[string]any)
	// Sometimes filters come as "'field.id'": "expression" so strip extra ''
	for k, v := range filters {
		if len(k) > 0 && k[0] == '\'' && k[len(k)-1] == '\'' {
			delete(filters, k)
			filters[k[1:len(k)-1]] = v
		}
	}
	p, err := tools.ConvertAnySliceToTyped(paramsMap["pivots"].([]any), "string")
	if err != nil {
		return nil, fmt.Errorf("can't convert pivots to array of strings: %s", err)
	}
	pivots := p.([]string)
	s, err := tools.ConvertAnySliceToTyped(paramsMap["sorts"].([]any), "string")
	if err != nil {
		return nil, fmt.Errorf("can't convert sorts to array of strings: %s", err)
	}
	sorts := s.([]string)
	limit := fmt.Sprintf("%v", paramsMap["limit"].(int))

	var tz string
	if paramsMap["tz"] != nil {
		tz = paramsMap["tz"].(string)
	} else {
		tzname, err := tzlocal.RuntimeTZ()
		if err != nil {
			logger.ErrorContext(ctx, fmt.Sprintf("Error getting local timezone: %s", err))
			tzname = "Etc/UTC"
		}
		tz = tzname
	}

	wq := v4.WriteQuery{
		Model:         paramsMap["model"].(string),
		View:          paramsMap["explore"].(string),
		Fields:        &fields,
		Pivots:        &pivots,
		Filters:       &filters,
		Sorts:         &sorts,
		QueryTimezone: &tz,
		Limit:         &limit,
	}
	return &wq, nil
}

type QueryApiClientContext struct {
	Name            string            `json:"name"`
	Attributes      map[string]string `json:"attributes,omitempty"`
	ExtraAttributes map[string]string `json:"extra_attributes,omitempty"`
}

type RenderOptions struct {
	Format string `json:"format"`
}

type RequestRunInlineQuery2 struct {
	Query             v4.WriteQuery         `json:"query"`
	RenderOpts        RenderOptions         `json:"render_options"`
	QueryApiClientCtx QueryApiClientContext `json:"query_api_client_context"`
}

func RunInlineQuery2(l *v4.LookerSDK, request RequestRunInlineQuery2, options *rtl.ApiSettings) (string, error) {
	var result string
	err := l.AuthSession.Do(&result, "POST", "/4.0", "/queries/run_inline", nil, request, options)
	return result, err
}

func RunInlineQuery(ctx context.Context, sdk *v4.LookerSDK, wq *v4.WriteQuery, format string, options *rtl.ApiSettings) (string, error) {
	logger, err := util.LoggerFromContext(ctx)
	if err != nil {
		return "", fmt.Errorf("unable to get logger from ctx: %s", err)
	}
	req := v4.RequestRunInlineQuery{
		Body:         *wq,
		ResultFormat: format,
	}
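	// Try the run_inline wrapper first (RunInlineQuery2), which lets us attach a
	// query API client context naming the caller; if that request fails, fall
	// back to the SDK's standard RunInlineQuery below.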
	req2 := RequestRunInlineQuery2{
		Query: *wq,
		RenderOpts: RenderOptions{
			Format: format,
		},
		QueryApiClientCtx: QueryApiClientContext{
			Name: "MCP Toolbox",
		},
	}
	resp, err := RunInlineQuery2(sdk, req2, options)
	if err != nil {
		logger.DebugContext(ctx, "error querying with new endpoint, trying again with original", err)
		resp, err = sdk.RunInlineQuery(req, options)
	}
	return resp, err
}

func GetProjectFileContent(l *v4.LookerSDK, projectId string, filePath string, options *rtl.ApiSettings) (string, error) {
	var result string
	path := fmt.Sprintf("/projects/%s/file/content", url.PathEscape(projectId))
	query := map[string]any{
		"file_path": filePath,
	}
	err := l.AuthSession.Do(&result, "GET", "/4.0", path, query, nil, options)
	return result, err
}

func DeleteProjectFile(l *v4.LookerSDK, projectId string, filePath string, options *rtl.ApiSettings) error {
	path := fmt.Sprintf("/projects/%s/files", url.PathEscape(projectId))
	query := map[string]any{
		"file_path": filePath,
	}
	err := l.AuthSession.Do(nil, "DELETE", "/4.0", path, query, nil, options)
	return err
}

type FileContent struct {
	Path    string `json:"path"`
	Content string `json:"content"`
}

func CreateProjectFile(l *v4.LookerSDK, projectId string, fileContent FileContent, options *rtl.ApiSettings) error {
	path := fmt.Sprintf("/projects/%s/files", url.PathEscape(projectId))
	err := l.AuthSession.Do(nil, "POST", "/4.0", path, nil, fileContent, options)
	return err
}

func UpdateProjectFile(l *v4.LookerSDK, projectId string, fileContent FileContent, options *rtl.ApiSettings) error {
	path := fmt.Sprintf("/projects/%s/files", url.PathEscape(projectId))
	err := l.AuthSession.Do(nil, "PUT", "/4.0", path, nil, fileContent, options)
	return err
}

```

--------------------------------------------------------------------------------
/docs/en/resources/sources/bigquery.md:
--------------------------------------------------------------------------------

```markdown
---
title: "BigQuery"
type: docs
weight: 1
description: >
  BigQuery is Google Cloud's fully managed, petabyte-scale, and cost-effective
  analytics data warehouse that lets you run analytics over vast amounts of 
  data in near real time. With BigQuery, there's no infrastructure to set 
  up or manage, letting you focus on finding meaningful insights using 
  GoogleSQL and taking advantage of flexible pricing models across on-demand 
  and flat-rate options.
---

# BigQuery Source

[BigQuery][bigquery-docs] is Google Cloud's fully managed, petabyte-scale,
and cost-effective analytics data warehouse that lets you run analytics
over vast amounts of data in near real time. With BigQuery, there's no
infrastructure to set up or manage, letting you focus on finding meaningful
insights using GoogleSQL and taking advantage of flexible pricing models
across on-demand and flat-rate options.

If you are new to BigQuery, you can try to
[load and query data with the bq tool][bigquery-quickstart-cli].

BigQuery uses [GoogleSQL][bigquery-googlesql] for querying data. GoogleSQL
is an ANSI-compliant structured query language (SQL) that is also implemented
for other Google Cloud services. The same best practices apply when creating
SQL queries to run against your BigQuery data, such as avoiding full table
scans or overly complex filters.

[bigquery-docs]: https://cloud.google.com/bigquery/docs
[bigquery-quickstart-cli]:
    https://cloud.google.com/bigquery/docs/quickstarts/quickstart-command-line
[bigquery-googlesql]:
    https://cloud.google.com/bigquery/docs/reference/standard-sql/

## Available Tools

- [`bigquery-analyze-contribution`](../tools/bigquery/bigquery-analyze-contribution.md)
  Performs contribution analysis, also called key driver analysis in BigQuery.

- [`bigquery-conversational-analytics`](../tools/bigquery/bigquery-conversational-analytics.md)
  Allows conversational interaction with a BigQuery source.

- [`bigquery-execute-sql`](../tools/bigquery/bigquery-execute-sql.md)  
  Execute structured queries using parameters.

- [`bigquery-forecast`](../tools/bigquery/bigquery-forecast.md)
  Forecasts time series data in BigQuery.

- [`bigquery-get-dataset-info`](../tools/bigquery/bigquery-get-dataset-info.md)  
  Retrieve metadata for a specific dataset.

- [`bigquery-get-table-info`](../tools/bigquery/bigquery-get-table-info.md)  
  Retrieve metadata for a specific table.

- [`bigquery-list-dataset-ids`](../tools/bigquery/bigquery-list-dataset-ids.md)  
  List available dataset IDs.

- [`bigquery-list-table-ids`](../tools/bigquery/bigquery-list-table-ids.md)  
  List tables in a given dataset.

- [`bigquery-sql`](../tools/bigquery/bigquery-sql.md)  
  Run SQL queries directly against BigQuery datasets.

- [`bigquery-search-catalog`](../tools/bigquery/bigquery-search-catalog.md)
  List all entries in Dataplex Catalog (e.g. tables, views, models) that match
  a given user query.

### Pre-built Configurations

- [BigQuery using
  MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/bigquery_mcp/)
  Connect your IDE to BigQuery using Toolbox.

## Requirements

### IAM Permissions

BigQuery uses [Identity and Access Management (IAM)][iam-overview] to control
user and group access to BigQuery resources like projects, datasets, and tables.

### Authentication via Application Default Credentials (ADC)

By **default**, Toolbox will use your [Application Default Credentials
(ADC)][adc] to authorize and authenticate when interacting with
[BigQuery][bigquery-docs].

When using this method, you need to ensure the IAM identity associated with your
ADC (such as a service account) has the correct permissions for the queries you
intend to run. Common roles include `roles/bigquery.user` (which includes
permissions to run jobs and read data) or `roles/bigquery.dataViewer`.
Follow this [guide][set-adc] to set up your ADC.

### Authentication via User's OAuth Access Token

If the `useClientOAuth` parameter is set to `true`, Toolbox will instead use the
OAuth access token for authentication. This token is parsed from the
`Authorization` header passed in with the tool invocation request. This method
allows Toolbox to make queries to [BigQuery][bigquery-docs] on behalf of the
client or the end-user.

When using this on-behalf-of authentication, you must ensure that the
identity used has been granted the correct IAM permissions.

[iam-overview]: <https://cloud.google.com/bigquery/docs/access-control>
[adc]: <https://cloud.google.com/docs/authentication#adc>
[set-adc]: <https://cloud.google.com/docs/authentication/provide-credentials-adc>

## Example

Initialize a BigQuery source that uses ADC:

```yaml
sources:
  my-bigquery-source:
    kind: "bigquery"
    project: "my-project-id"
    # location: "US" # Optional: Specifies the location for query jobs.
    # writeMode: "allowed" # One of: allowed, blocked, protected. Defaults to "allowed".
    # allowedDatasets: # Optional: Restricts tool access to a specific list of datasets.
    #   - "my_dataset_1"
    #   - "other_project.my_dataset_2"
    # impersonateServiceAccount: "[email protected]" # Optional: Service account to impersonate
```

Initialize a BigQuery source that uses the client's access token:

```yaml
sources:
  my-bigquery-client-auth-source:
    kind: "bigquery"
    project: "my-project-id"
    useClientOAuth: true
    # location: "US" # Optional: Specifies the location for query jobs.
    # writeMode: "allowed" # One of: allowed, blocked, protected. Defaults to "allowed".
    # allowedDatasets: # Optional: Restricts tool access to a specific list of datasets.
    #   - "my_dataset_1"
    #   - "other_project.my_dataset_2"
    # impersonateServiceAccount: "[email protected]" # Optional: Service account to impersonate
```

## Reference

| **field**                 | **type** | **required** | **description**                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                     |
|---------------------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind                      |  string  |     true     | Must be "bigquery".                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                 |
| project                   |  string  |     true     | Id of the Google Cloud project to use for billing and as the default project for BigQuery resources.                                                                                                                                                                                                                                                                                                                                                                                                                |
| location                  |  string  |    false     | Specifies the location (e.g., 'us', 'asia-northeast1') in which to run the query job. This location must match the location of any tables referenced in the query. Defaults to the table's location or 'US' if the location cannot be determined. [Learn More](https://cloud.google.com/bigquery/docs/locations)                                                                                                                                                                                                    |
| writeMode                 |  string  |    false     | Controls the write behavior for tools. `allowed` (default): All queries are permitted. `blocked`: Only `SELECT` statements are allowed for the `bigquery-execute-sql` tool. `protected`: Enables session-based execution where all tools associated with this source instance share the same [BigQuery session](https://cloud.google.com/bigquery/docs/sessions-intro). This allows for stateful operations using temporary tables (e.g., `CREATE TEMP TABLE`). For `bigquery-execute-sql`, `SELECT` statements can be used on all tables, but write operations are restricted to the session's temporary dataset. For tools like `bigquery-sql`, `bigquery-forecast`, and `bigquery-analyze-contribution`, the `writeMode` restrictions do not apply, but they will operate within the shared session. **Note:** The `protected` mode cannot be used with `useClientOAuth: true`. It is also not recommended for multi-user server environments, as all users would share the same session. A session is terminated automatically after 24 hours of inactivity or after 7 days, whichever comes first. A new session is created on the next request, and any temporary data from the previous session will be lost. |
| allowedDatasets           | []string |    false     | An optional list of dataset IDs that tools using this source are allowed to access. If provided, any tool operation attempting to access a dataset not in this list will be rejected. To enforce this, two types of operations are also disallowed: 1) Dataset-level operations (e.g., `CREATE SCHEMA`), and 2) operations where table access cannot be statically analyzed (e.g., `EXECUTE IMMEDIATE`, `CREATE PROCEDURE`). If a single dataset is provided, it will be treated as the default for prebuilt tools. |
| useClientOAuth            |   bool   |    false     | If true, forwards the client's OAuth access token from the "Authorization" header to downstream queries. **Note:** This cannot be used with `writeMode: protected`.                                                                                                                                                                                                                                                                                                                                                |
| impersonateServiceAccount |  string  |    false     | Service account email to impersonate when making BigQuery and Dataplex API calls. The authenticated principal must have the `roles/iam.serviceAccountTokenCreator` role on the target service account. [Learn More](https://cloud.google.com/iam/docs/service-account-impersonation)                                                                                                                                                                                                                                |
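
For example, to let a principal impersonate the target service account, grant
it the token creator role (the account names below are placeholders):

```bash
gcloud iam service-accounts add-iam-policy-binding \
  sa-name@my-project-id.iam.gserviceaccount.com \
  --member="user:you@example.com" \
  --role="roles/iam.serviceAccountTokenCreator"
```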

```

--------------------------------------------------------------------------------
/docs/en/resources/tools/firestore/firestore-update-document.md:
--------------------------------------------------------------------------------

```markdown
---
title: "firestore-update-document"
type: docs
weight: 1
description: >
  A "firestore-update-document" tool updates an existing document in Firestore.
aliases:
- /resources/tools/firestore-update-document
---
## Description

The `firestore-update-document` tool allows you to update existing documents in
Firestore. It supports all Firestore data types using Firestore's native JSON
format. The tool can perform both full document updates (replacing all fields)
or selective field updates using an update mask. When using an update mask,
fields referenced in the mask but not present in the document data will be
deleted from the document, following Firestore's native behavior.

## Parameters

| Parameter      | Type    | Required | Description                                                                                                                                                                                                                                            |
|----------------|---------|----------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `documentPath` | string  | Yes      | The path of the document which needs to be updated                                                                                                                                                                                                     |
| `documentData` | map     | Yes      | The data to update in the document. Must use [Firestore's native JSON format](https://cloud.google.com/firestore/docs/reference/rest/Shared.Types/ArrayValue#Value) with typed values                                                                  |
| `updateMask`   | array   | No       | The selective fields to update. If not provided, all fields in documentData will be updated. When provided, only the specified fields will be updated. Fields referenced in the mask but not present in documentData will be deleted from the document |
| `returnData`   | boolean | No       | If set to true, the output will include the data of the updated document. Defaults to false to help avoid overloading the context                                                                                                                      |

## Output

The tool returns a map containing:

| Field          | Type   | Description                                                                                 |
|----------------|--------|---------------------------------------------------------------------------------------------|
| `documentPath` | string | The full path of the updated document                                                       |
| `updateTime`   | string | The timestamp when the document was updated                                                 |
| `documentData` | map    | The current data of the document after the update (only included when `returnData` is true) |

## Data Type Format

The tool requires Firestore's native JSON format for document data. Each field
must be wrapped with its type indicator:

### Basic Types

- **String**: `{"stringValue": "your string"}`
- **Integer**: `{"integerValue": "123"}` or `{"integerValue": 123}`
- **Double**: `{"doubleValue": 123.45}`
- **Boolean**: `{"booleanValue": true}`
- **Null**: `{"nullValue": null}`
- **Bytes**: `{"bytesValue": "base64EncodedString"}`
- **Timestamp**: `{"timestampValue": "2025-01-07T10:00:00Z"}` (RFC3339 format)

### Complex Types

- **GeoPoint**: `{"geoPointValue": {"latitude": 34.052235, "longitude": -118.243683}}`
- **Array**: `{"arrayValue": {"values": [{"stringValue": "item1"}, {"integerValue": "2"}]}}`
- **Map**: `{"mapValue": {"fields": {"key1": {"stringValue": "value1"}, "key2": {"booleanValue": true}}}}`
- **Reference**: `{"referenceValue": "collection/document"}`

## Update Modes

### Full Document Update (Merge All)

When `updateMask` is not provided, the tool performs a merge operation that
updates all fields specified in `documentData` while preserving other existing
fields in the document.

### Selective Field Update

When `updateMask` is provided, only the fields listed in the mask are updated.
This allows for precise control over which fields are modified, added, or
deleted. To delete a field, include it in the `updateMask` but omit it from
`documentData`.

## Reference

| **field**   |     **type**   | **required** | **description**                                          |
|-------------|:--------------:|:------------:|----------------------------------------------------------|
| kind        |     string     |     true     | Must be "firestore-update-document".                     |
| source      |     string     |     true     | Name of the Firestore source to update documents in.     |
| description |     string     |     true     | Description of the tool that is passed to the LLM.       |

## Examples

### Basic Document Update (Full Merge)

```yaml
tools:
  update-user-doc:
    kind: firestore-update-document
    source: my-firestore
    description: Update a user document
```

Usage:

```json
{
  "documentPath": "users/user123",
  "documentData": {
    "name": {
      "stringValue": "Jane Doe"
    },
    "lastUpdated": {
      "timestampValue": "2025-01-15T10:30:00Z"
    },
    "status": {
      "stringValue": "active"
    },
    "score": {
      "integerValue": "150"
    }
  }
}
```

### Selective Field Update with Update Mask

```json
{
  "documentPath": "users/user123",
  "documentData": {
    "email": {
      "stringValue": "[email protected]"
    },
    "profile": {
      "mapValue": {
        "fields": {
          "bio": {
            "stringValue": "Updated bio text"
          },
          "avatar": {
            "stringValue": "https://example.com/new-avatar.jpg"
          }
        }
      }
    }
  },
  "updateMask": ["email", "profile.bio", "profile.avatar"]
}
```

### Update with Field Deletion

To delete fields, include them in the `updateMask` but omit them from `documentData`:

```json
{
  "documentPath": "users/user123",
  "documentData": {
    "name": {
      "stringValue": "John Smith"
    }
  },
  "updateMask": ["name", "temporaryField", "obsoleteData"],
  "returnData": true
}
```

In this example:

- `name` will be updated to "John Smith"
- `temporaryField` and `obsoleteData` will be deleted from the document (they are in the mask but not in the data)

### Complex Update with Nested Data

```json
{
  "documentPath": "companies/company456",
  "documentData": {
    "metadata": {
      "mapValue": {
        "fields": {
          "lastModified": {
            "timestampValue": "2025-01-15T14:30:00Z"
          },
          "modifiedBy": {
            "stringValue": "[email protected]"
          }
        }
      }
    },
    "locations": {
      "arrayValue": {
        "values": [
          {
            "mapValue": {
              "fields": {
                "city": {
                  "stringValue": "San Francisco"
                },
                "coordinates": {
                  "geoPointValue": {
                    "latitude": 37.7749,
                    "longitude": -122.4194
                  }
                }
              }
            }
          },
          {
            "mapValue": {
              "fields": {
                "city": {
                  "stringValue": "New York"
                },
                "coordinates": {
                  "geoPointValue": {
                    "latitude": 40.7128,
                    "longitude": -74.0060
                  }
                }
              }
            }
          }
        ]
      }
    },
    "revenue": {
      "doubleValue": 5678901.23
    }
  },
  "updateMask": ["metadata", "locations", "revenue"]
}
```

### Update with All Data Types

```json
{
  "documentPath": "test-documents/doc789",
  "documentData": {
    "stringField": {
      "stringValue": "Updated string"
    },
    "integerField": {
      "integerValue": "999"
    },
    "doubleField": {
      "doubleValue": 2.71828
    },
    "booleanField": {
      "booleanValue": false
    },
    "nullField": {
      "nullValue": null
    },
    "timestampField": {
      "timestampValue": "2025-01-15T16:45:00Z"
    },
    "geoPointField": {
      "geoPointValue": {
        "latitude": 51.5074,
        "longitude": -0.1278
      }
    },
    "bytesField": {
      "bytesValue": "VXBkYXRlZCBkYXRh"
    },
    "arrayField": {
      "arrayValue": {
        "values": [
          {
            "stringValue": "updated1"
          },
          {
            "integerValue": "200"
          },
          {
            "booleanValue": true
          }
        ]
      }
    },
    "mapField": {
      "mapValue": {
        "fields": {
          "nestedString": {
            "stringValue": "updated nested value"
          },
          "nestedNumber": {
            "doubleValue": 88.88
          }
        }
      }
    },
    "referenceField": {
      "referenceValue": "users/updatedUser"
    }
  },
  "returnData": true
}
```

## Authentication

The tool can be configured to require authentication:

```yaml
tools:
  secure-update-doc:
    kind: firestore-update-document
    source: prod-firestore
    description: Update documents with authentication required
    authRequired:
      - google-oauth
      - api-key
```

## Error Handling

Common errors include:

- Document not found (when using update with a non-existent document)
- Invalid document path
- Missing or invalid document data
- Permission denied (if Firestore security rules block the operation)
- Invalid data type conversions

## Best Practices

1. **Use update masks for precision**: When you only need to update specific fields, use the `updateMask` parameter to avoid unintended changes
2. **Always use typed values**: Every field must be wrapped with its appropriate type indicator (e.g., `{"stringValue": "text"}`)
3. **Integer values can be strings**: The tool accepts integer values as strings (e.g., `{"integerValue": "1500"}`)
4. **Use returnData sparingly**: Only set to true when you need to verify the exact data after the update
5. **Validate data before sending**: Ensure your data matches Firestore's native JSON format
6. **Handle timestamps properly**: Use RFC3339 format for timestamp strings
7. **Base64 encode binary data**: Binary data must be base64 encoded in the `bytesValue` field
8. **Consider security rules**: Ensure your Firestore security rules allow document updates
9. **Delete fields using update mask**: To delete fields, include them in the `updateMask` but omit them from `documentData`
10. **Test with non-production data first**: Always test your updates on non-critical documents first
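
For example, the following request (an illustrative sketch using a hypothetical `obsoleteField`) updates `status` and deletes `obsoleteField` by listing it in the `updateMask` while omitting it from `documentData`:

```json
{
  "documentPath": "users/user123",
  "documentData": {
    "status": {
      "stringValue": "active"
    }
  },
  "updateMask": ["status", "obsoleteField"]
}
```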

## Differences from Add Documents

- **Purpose**: Updates existing documents vs. creating new ones
- **Document must exist**: Standard updates require an existing document (although an update without an `updateMask` will create the document with the given ID if it does not exist)
- **Update mask support**: Allows selective field updates
- **Field deletion**: Supports removing specific fields by including them in the mask but not in the data
- **Returns updateTime**: Instead of createTime

## Related Tools

- [`firestore-add-documents`](firestore-add-documents.md) - Add new documents to Firestore
- [`firestore-get-documents`](firestore-get-documents.md) - Retrieve documents by their paths
- [`firestore-query-collection`](firestore-query-collection.md) - Query documents in a collection
- [`firestore-delete-documents`](firestore-delete-documents.md) - Delete documents from Firestore

```

--------------------------------------------------------------------------------
/internal/tools/bigquery/bigquerycommon/table_name_parser.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package bigquerycommon

import (
	"fmt"
	"strings"
	"unicode"
)

// parserState defines the state of the SQL parser's state machine.
type parserState int

const (
	stateNormal parserState = iota
	// String states
	stateInSingleQuoteString
	stateInDoubleQuoteString
	stateInTripleSingleQuoteString
	stateInTripleDoubleQuoteString
	stateInRawSingleQuoteString
	stateInRawDoubleQuoteString
	stateInRawTripleSingleQuoteString
	stateInRawTripleDoubleQuoteString
	// Comment states
	stateInSingleLineCommentDash
	stateInSingleLineCommentHash
	stateInMultiLineComment
)

// SQL statement verbs
const (
	verbCreate = "create"
	verbAlter  = "alter"
	verbDrop   = "drop"
	verbSelect = "select"
	verbInsert = "insert"
	verbUpdate = "update"
	verbDelete = "delete"
	verbMerge  = "merge"
)

var tableFollowsKeywords = map[string]bool{
	"from":   true,
	"join":   true,
	"update": true,
	"into":   true, // INSERT INTO, MERGE INTO
	"table":  true, // CREATE TABLE, ALTER TABLE
	"using":  true, // MERGE ... USING
	"insert": true, // INSERT my_table
	"merge":  true, // MERGE my_table
}

var tableContextExitKeywords = map[string]bool{
	"where":  true,
	"group":  true, // GROUP BY
	"having": true,
	"order":  true, // ORDER BY
	"limit":  true,
	"window": true,
	"on":     true, // JOIN ... ON
	"set":    true, // UPDATE ... SET
	"when":   true, // MERGE ... WHEN
}

// TableParser is the main entry point for parsing a SQL string to find all referenced table IDs.
// It handles multi-statement SQL, comments, and recursive parsing of EXECUTE IMMEDIATE statements.
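// For example (illustrative):
//
//	ids, _ := TableParser("SELECT * FROM dataset.table JOIN `proj.ds.t2` ON TRUE", "my-project")
//	// ids contains "my-project.dataset.table" and "proj.ds.t2" (order is not guaranteed).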
func TableParser(sql, defaultProjectID string) ([]string, error) {
	tableIDSet := make(map[string]struct{})
	visitedSQLs := make(map[string]struct{})
	if _, err := parseSQL(sql, defaultProjectID, tableIDSet, visitedSQLs, false); err != nil {
		return nil, err
	}

	tableIDs := make([]string, 0, len(tableIDSet))
	for id := range tableIDSet {
		tableIDs = append(tableIDs, id)
	}
	return tableIDs, nil
}

// parseSQL is the core recursive function that processes SQL strings.
// It uses a state machine to find table names and recursively parse EXECUTE IMMEDIATE.
func parseSQL(sql, defaultProjectID string, tableIDSet map[string]struct{}, visitedSQLs map[string]struct{}, inSubquery bool) (int, error) {
	// Prevent infinite recursion.
	if _, ok := visitedSQLs[sql]; ok {
		return len(sql), nil
	}
	visitedSQLs[sql] = struct{}{}

	state := stateNormal
	expectingTable := false
	var lastTableKeyword, lastToken, statementVerb string
	// Index by byte offset so that sql[i:] stays aligned with i; every
	// syntactically significant character here is ASCII.

	for i := 0; i < len(sql); {
		char := rune(sql[i])
		remaining := sql[i:]

		switch state {
		case stateNormal:
			if strings.HasPrefix(remaining, "--") {
				state = stateInSingleLineCommentDash
				i += 2
				continue
			}
			if strings.HasPrefix(remaining, "#") {
				state = stateInSingleLineCommentHash
				i++
				continue
			}
			if strings.HasPrefix(remaining, "/*") {
				state = stateInMultiLineComment
				i += 2
				continue
			}
			if char == '(' {
				if expectingTable {
					// The subquery starts after '('.
					consumed, err := parseSQL(remaining[1:], defaultProjectID, tableIDSet, visitedSQLs, true)
					if err != nil {
						return 0, err
					}
					// Advance i by the length of the subquery + the opening parenthesis.
					// The recursive call returns what it consumed, including the closing parenthesis.
					i += consumed + 1
					// For most keywords, we expect only one table. `from` can have multiple "tables" (subqueries).
					if lastTableKeyword != "from" {
						expectingTable = false
					}
					continue
				}
			}
			if char == ')' {
				if inSubquery {
					return i + 1, nil
				}
			}

			if char == ';' {
				statementVerb = ""
				lastToken = ""
				i++
				continue
			}

			// Raw strings must be checked before regular strings.
			if strings.HasPrefix(remaining, "r'''") || strings.HasPrefix(remaining, "R'''") {
				state = stateInRawTripleSingleQuoteString
				i += 4
				continue
			}
			if strings.HasPrefix(remaining, `r"""`) || strings.HasPrefix(remaining, `R"""`) {
				state = stateInRawTripleDoubleQuoteString
				i += 4
				continue
			}
			if strings.HasPrefix(remaining, "r'") || strings.HasPrefix(remaining, "R'") {
				state = stateInRawSingleQuoteString
				i += 2
				continue
			}
			if strings.HasPrefix(remaining, `r"`) || strings.HasPrefix(remaining, `R"`) {
				state = stateInRawDoubleQuoteString
				i += 2
				continue
			}
			if strings.HasPrefix(remaining, "'''") {
				state = stateInTripleSingleQuoteString
				i += 3
				continue
			}
			if strings.HasPrefix(remaining, `"""`) {
				state = stateInTripleDoubleQuoteString
				i += 3
				continue
			}
			if char == '\'' {
				state = stateInSingleQuoteString
				i++
				continue
			}
			if char == '"' {
				state = stateInDoubleQuoteString
				i++
				continue
			}

			if unicode.IsLetter(char) || char == '`' {
				parts, consumed, err := parseIdentifierSequence(remaining)
				if err != nil {
					return 0, err
				}
				if consumed == 0 {
					i++
					continue
				}

				if len(parts) == 1 {
					keyword := strings.ToLower(parts[0])
					switch keyword {
					case "call":
						return 0, fmt.Errorf("CALL is not allowed when dataset restrictions are in place, as the called procedure's contents cannot be safely analyzed")
					case "immediate":
						if lastToken == "execute" {
							return 0, fmt.Errorf("EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place, as its contents cannot be safely analyzed")
						}
					case "procedure", "function":
						if lastToken == "create" || lastToken == "create or replace" {
							return 0, fmt.Errorf("unanalyzable statements like '%s %s' are not allowed", strings.ToUpper(lastToken), strings.ToUpper(keyword))
						}
					case verbCreate, verbAlter, verbDrop, verbSelect, verbInsert, verbUpdate, verbDelete, verbMerge:
						if statementVerb == "" {
							statementVerb = keyword
						}
					}

					if statementVerb == verbCreate || statementVerb == verbAlter || statementVerb == verbDrop {
						if keyword == "schema" || keyword == "dataset" {
							return 0, fmt.Errorf("dataset-level operations like '%s %s' are not allowed when dataset restrictions are in place", strings.ToUpper(statementVerb), strings.ToUpper(keyword))
						}
					}

					if _, ok := tableFollowsKeywords[keyword]; ok {
						expectingTable = true
						lastTableKeyword = keyword
					} else if _, ok := tableContextExitKeywords[keyword]; ok {
						expectingTable = false
						lastTableKeyword = ""
					}
					if lastToken == "create" && keyword == "or" {
						lastToken = "create or"
					} else if lastToken == "create or" && keyword == "replace" {
						lastToken = "create or replace"
					} else {
						lastToken = keyword
					}
				} else if len(parts) >= 2 {
					// This is a multi-part identifier. If we were expecting a table, this is it.
					if expectingTable {
						tableID, err := formatTableID(parts, defaultProjectID)
						if err != nil {
							return 0, err
						}
						if tableID != "" {
							tableIDSet[tableID] = struct{}{}
						}
						// For most keywords, we expect only one table.
						if lastTableKeyword != "from" {
							expectingTable = false
						}
					}
					lastToken = ""
				}

				i += consumed
				continue
			}
			i++

		case stateInSingleQuoteString:
			if char == '\\' {
				i += 2 // Skip backslash and the escaped character.
				continue
			}
			if char == '\'' {
				state = stateNormal
			}
			i++
		case stateInDoubleQuoteString:
			if char == '\\' {
				i += 2 // Skip backslash and the escaped character.
				continue
			}
			if char == '"' {
				state = stateNormal
			}
			i++
		case stateInTripleSingleQuoteString:
			if strings.HasPrefix(remaining, "'''") {
				state = stateNormal
				i += 3
			} else {
				i++
			}
		case stateInTripleDoubleQuoteString:
			if strings.HasPrefix(remaining, `"""`) {
				state = stateNormal
				i += 3
			} else {
				i++
			}
		case stateInSingleLineCommentDash, stateInSingleLineCommentHash:
			if char == '\n' {
				state = stateNormal
			}
			i++
		case stateInMultiLineComment:
			if strings.HasPrefix(remaining, "*/") {
				state = stateNormal
				i += 2
			} else {
				i++
			}
		case stateInRawSingleQuoteString:
			if char == '\'' {
				state = stateNormal
			}
			i++
		case stateInRawDoubleQuoteString:
			if char == '"' {
				state = stateNormal
			}
			i++
		case stateInRawTripleSingleQuoteString:
			if strings.HasPrefix(remaining, "'''") {
				state = stateNormal
				i += 3
			} else {
				i++
			}
		case stateInRawTripleDoubleQuoteString:
			if strings.HasPrefix(remaining, `"""`) {
				state = stateNormal
				i += 3
			} else {
				i++
			}
		}
	}

	if inSubquery {
		return 0, fmt.Errorf("unclosed subquery parenthesis")
	}
	return len(sql), nil
}

// parseIdentifierSequence parses a sequence of dot-separated identifiers.
// It returns the parts of the identifier, the number of characters consumed, and an error.
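// For example (illustrative), given "dataset.table AS t" it returns
// parts ["dataset", "table"] and consumed 13, stopping before " AS t".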
func parseIdentifierSequence(s string) ([]string, int, error) {
	var parts []string
	var totalConsumed int

	for {
		remaining := s[totalConsumed:]
		trimmed := strings.TrimLeftFunc(remaining, unicode.IsSpace)
		totalConsumed += len(remaining) - len(trimmed)
		current := s[totalConsumed:]

		if len(current) == 0 {
			break
		}

		var part string
		var consumed int

		if current[0] == '`' {
			end := strings.Index(current[1:], "`")
			if end == -1 {
				return nil, 0, fmt.Errorf("unclosed backtick identifier")
			}
			part = current[1 : end+1]
			consumed = end + 2
		} else if len(current) > 0 && unicode.IsLetter(rune(current[0])) {
			end := strings.IndexFunc(current, func(r rune) bool {
				return !unicode.IsLetter(r) && !unicode.IsNumber(r) && r != '_' && r != '-'
			})
			if end == -1 {
				part = current
				consumed = len(current)
			} else {
				part = current[:end]
				consumed = end
			}
		} else {
			break
		}

		if current[0] == '`' && strings.Contains(part, ".") {
			// This handles cases like `project.dataset.table` but not `project.dataset`.table.
			// If the character after the quoted identifier is not a dot, we treat it as a full name.
			if len(current) <= consumed || current[consumed] != '.' {
				parts = append(parts, strings.Split(part, ".")...)
				totalConsumed += consumed
				break
			}
		}

		parts = append(parts, strings.Split(part, ".")...)
		totalConsumed += consumed

		if len(s) <= totalConsumed || s[totalConsumed] != '.' {
			break
		}
		totalConsumed++
	}
	return parts, totalConsumed, nil
}

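// formatTableID joins identifier parts into a fully qualified table ID,
// qualifying two-part (dataset.table) names with defaultProjectID. For
// example (illustrative), ["ds", "t"] with defaultProjectID "proj" yields
// "proj.ds.t".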
func formatTableID(parts []string, defaultProjectID string) (string, error) {
	if len(parts) < 2 || len(parts) > 3 {
		// Not a table identifier (could be a CTE, column, etc.).
		// Return the consumed length so the main loop can skip this identifier.
		return "", nil
	}

	var tableID string
	if len(parts) == 3 { // project.dataset.table
		tableID = strings.Join(parts, ".")
	} else { // dataset.table
		if defaultProjectID == "" {
			return "", fmt.Errorf("query contains table '%s' without project ID, and no default project ID is provided", strings.Join(parts, "."))
		}
		tableID = fmt.Sprintf("%s.%s", defaultProjectID, strings.Join(parts, "."))
	}

	return tableID, nil
}

```

--------------------------------------------------------------------------------
/internal/tools/bigquery/bigqueryforecast/bigqueryforecast.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package bigqueryforecast

import (
	"context"
	"fmt"
	"strings"

	bigqueryapi "cloud.google.com/go/bigquery"
	yaml "github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	bigqueryds "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
	"github.com/googleapis/genai-toolbox/internal/tools"
	bqutil "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerycommon"
	"github.com/googleapis/genai-toolbox/internal/util"
	bigqueryrestapi "google.golang.org/api/bigquery/v2"
	"google.golang.org/api/iterator"
)

const kind string = "bigquery-forecast"

func init() {
	if !tools.Register(kind, newConfig) {
		panic(fmt.Sprintf("tool kind %q already registered", kind))
	}
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
	actual := Config{Name: name}
	if err := decoder.DecodeContext(ctx, &actual); err != nil {
		return nil, err
	}
	return actual, nil
}

type compatibleSource interface {
	BigQueryClient() *bigqueryapi.Client
	BigQueryRestService() *bigqueryrestapi.Service
	BigQueryClientCreator() bigqueryds.BigqueryClientCreator
	UseClientAuthorization() bool
	IsDatasetAllowed(projectID, datasetID string) bool
	BigQueryAllowedDatasets() []string
	BigQuerySession() bigqueryds.BigQuerySessionProvider
}

// validate compatible sources are still compatible
var _ compatibleSource = &bigqueryds.Source{}

var compatibleSources = [...]string{bigqueryds.SourceKind}

type Config struct {
	Name         string   `yaml:"name" validate:"required"`
	Kind         string   `yaml:"kind" validate:"required"`
	Source       string   `yaml:"source" validate:"required"`
	Description  string   `yaml:"description" validate:"required"`
	AuthRequired []string `yaml:"authRequired"`
}

// validate interface
var _ tools.ToolConfig = Config{}

func (cfg Config) ToolConfigKind() string {
	return kind
}

func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
	// verify source exists
	rawS, ok := srcs[cfg.Source]
	if !ok {
		return nil, fmt.Errorf("no source named %q configured", cfg.Source)
	}

	// verify the source is compatible
	s, ok := rawS.(compatibleSource)
	if !ok {
		return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
	}

	allowedDatasets := s.BigQueryAllowedDatasets()
	historyDataDescription := "The table id or the query of the history time series data."
	if len(allowedDatasets) > 0 {
		datasetIDs := []string{}
		for _, ds := range allowedDatasets {
			datasetIDs = append(datasetIDs, fmt.Sprintf("`%s`", ds))
		}
		historyDataDescription += fmt.Sprintf(" The query or table must only access datasets from the following list: %s.", strings.Join(datasetIDs, ", "))
	}

	historyDataParameter := tools.NewStringParameter("history_data", historyDataDescription)
	timestampColumnNameParameter := tools.NewStringParameter("timestamp_col",
		"The name of the time series timestamp column.")
	dataColumnNameParameter := tools.NewStringParameter("data_col",
		"The name of the time series data column.")
	idColumnNameParameter := tools.NewArrayParameterWithDefault("id_cols", []any{},
		"An array of the time series id column names.",
		tools.NewStringParameter("id_col", "The name of time series id column."))
	horizonParameter := tools.NewIntParameterWithDefault("horizon", 10, "The number of forecasting steps.")
	parameters := tools.Parameters{historyDataParameter,
		timestampColumnNameParameter, dataColumnNameParameter, idColumnNameParameter, horizonParameter}

	mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, parameters)

	// finish tool setup
	t := Tool{
		Name:             cfg.Name,
		Kind:             kind,
		Parameters:       parameters,
		AuthRequired:     cfg.AuthRequired,
		UseClientOAuth:   s.UseClientAuthorization(),
		ClientCreator:    s.BigQueryClientCreator(),
		Client:           s.BigQueryClient(),
		RestService:      s.BigQueryRestService(),
		IsDatasetAllowed: s.IsDatasetAllowed,
		SessionProvider:  s.BigQuerySession(),
		AllowedDatasets:  allowedDatasets,
		manifest:         tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
		mcpManifest:      mcpManifest,
	}
	return t, nil
}

// validate interface
var _ tools.Tool = Tool{}

type Tool struct {
	Name           string           `yaml:"name"`
	Kind           string           `yaml:"kind"`
	AuthRequired   []string         `yaml:"authRequired"`
	UseClientOAuth bool             `yaml:"useClientOAuth"`
	Parameters     tools.Parameters `yaml:"parameters"`

	Client           *bigqueryapi.Client
	RestService      *bigqueryrestapi.Service
	ClientCreator    bigqueryds.BigqueryClientCreator
	IsDatasetAllowed func(projectID, datasetID string) bool
	AllowedDatasets  []string
	SessionProvider  bigqueryds.BigQuerySessionProvider
	manifest         tools.Manifest
	mcpManifest      tools.McpManifest
}

func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
	paramsMap := params.AsMap()
	historyData, ok := paramsMap["history_data"].(string)
	if !ok {
		return nil, fmt.Errorf("unable to cast history_data parameter %v", paramsMap["history_data"])
	}
	timestampCol, ok := paramsMap["timestamp_col"].(string)
	if !ok {
		return nil, fmt.Errorf("unable to cast timestamp_col parameter %v", paramsMap["timestamp_col"])
	}
	dataCol, ok := paramsMap["data_col"].(string)
	if !ok {
		return nil, fmt.Errorf("unable to cast data_col parameter %v", paramsMap["data_col"])
	}
	idColsRaw, ok := paramsMap["id_cols"].([]any)
	if !ok {
		return nil, fmt.Errorf("unable to cast id_cols parameter %v", paramsMap["id_cols"])
	}
	var idCols []string
	for _, v := range idColsRaw {
		s, ok := v.(string)
		if !ok {
			return nil, fmt.Errorf("id_cols contains non-string value: %v", v)
		}
		idCols = append(idCols, s)
	}
	horizon, ok := paramsMap["horizon"].(int)
	if !ok {
		if h, ok := paramsMap["horizon"].(float64); ok {
			horizon = int(h)
		} else {
			return nil, fmt.Errorf("unable to cast horizon parameter %v", paramsMap["horizon"])
		}
	}

	bqClient := t.Client
	restService := t.RestService
	var err error

	// Initialize new client if using user OAuth token
	if t.UseClientOAuth {
		tokenStr, err := accessToken.ParseBearerToken()
		if err != nil {
			return nil, fmt.Errorf("error parsing access token: %w", err)
		}
		bqClient, restService, err = t.ClientCreator(tokenStr, false)
		if err != nil {
			return nil, fmt.Errorf("error creating client from OAuth access token: %w", err)
		}
	}

	var historyDataSource string
	trimmedUpperHistoryData := strings.TrimSpace(strings.ToUpper(historyData))
	if strings.HasPrefix(trimmedUpperHistoryData, "SELECT") || strings.HasPrefix(trimmedUpperHistoryData, "WITH") {
		if len(t.AllowedDatasets) > 0 {
			var connProps []*bigqueryapi.ConnectionProperty
			session, err := t.SessionProvider(ctx)
			if err != nil {
				return nil, fmt.Errorf("failed to get BigQuery session: %w", err)
			}
			if session != nil {
				connProps = []*bigqueryapi.ConnectionProperty{
					{Key: "session_id", Value: session.ID},
				}
			}
			dryRunJob, err := bqutil.DryRunQuery(ctx, restService, t.Client.Project(), t.Client.Location, historyData, nil, connProps)
			if err != nil {
				return nil, fmt.Errorf("query validation failed: %w", err)
			}
			// Guard against a missing statistics payload before dereferencing it.
			if dryRunJob.Statistics == nil || dryRunJob.Statistics.Query == nil {
				return nil, fmt.Errorf("could not analyze query in history_data to validate against allowed datasets")
			}
			queryStats := dryRunJob.Statistics.Query
			statementType := queryStats.StatementType
			if statementType != "SELECT" {
				return nil, fmt.Errorf("the 'history_data' parameter only supports a table ID or a SELECT query. The provided query has statement type '%s'", statementType)
			}
			for _, tableRef := range queryStats.ReferencedTables {
				if !t.IsDatasetAllowed(tableRef.ProjectId, tableRef.DatasetId) {
					return nil, fmt.Errorf("query in history_data accesses dataset '%s.%s', which is not in the allowed list", tableRef.ProjectId, tableRef.DatasetId)
				}
			}
		}
		historyDataSource = fmt.Sprintf("(%s)", historyData)
	} else {
		if len(t.AllowedDatasets) > 0 {
			parts := strings.Split(historyData, ".")
			var projectID, datasetID string

			switch len(parts) {
			case 3: // project.dataset.table
				projectID = parts[0]
				datasetID = parts[1]
			case 2: // dataset.table
				projectID = t.Client.Project()
				datasetID = parts[0]
			default:
				return nil, fmt.Errorf("invalid table ID format for 'history_data': %q. Expected 'dataset.table' or 'project.dataset.table'", historyData)
			}

			if !t.IsDatasetAllowed(projectID, datasetID) {
				return nil, fmt.Errorf("access to dataset '%s.%s' (from table '%s') is not allowed", projectID, datasetID, historyData)
			}
		}
		historyDataSource = fmt.Sprintf("TABLE `%s`", historyData)
	}

	idColsArg := ""
	if len(idCols) > 0 {
		idColsFormatted := fmt.Sprintf("['%s']", strings.Join(idCols, "', '"))
		idColsArg = fmt.Sprintf(", id_cols => %s", idColsFormatted)
	}

	sql := fmt.Sprintf(`SELECT * 
		FROM AI.FORECAST(
			%s,
			data_col => '%s',
			timestamp_col => '%s',
			horizon => %d%s)`,
		historyDataSource, dataCol, timestampCol, horizon, idColsArg)
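	// For example, with history_data "ds.sales", data_col "revenue",
	// timestamp_col "day", horizon 30, and id_cols ["store_id"], this builds
	// (whitespace aside):
	//   SELECT * FROM AI.FORECAST(TABLE `ds.sales`, data_col => 'revenue',
	//     timestamp_col => 'day', horizon => 30, id_cols => ['store_id'])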

	// Prepare the query, reusing the BigQuery session if one exists.
	query := bqClient.Query(sql)
	query.Location = bqClient.Location
	session, err := t.SessionProvider(ctx)
	if err != nil {
		return nil, fmt.Errorf("failed to get BigQuery session: %w", err)
	}
	if session != nil {
		// Add session ID to the connection properties for subsequent calls.
		query.ConnectionProperties = []*bigqueryapi.ConnectionProperty{
			{Key: "session_id", Value: session.ID},
		}
	}

	// Log the query executed for debugging.
	logger, err := util.LoggerFromContext(ctx)
	if err != nil {
		return nil, fmt.Errorf("error getting logger: %s", err)
	}
	logger.DebugContext(ctx, "executing `%s` tool query: %s", kind, sql)

	// This block handles SELECT statements, which return a row set.
	// We iterate through the results, convert each row into a map of
	// column names to values, and return the collection of rows.
	var out []any
	job, err := query.Run(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to execute query: %w", err)
	}
	it, err := job.Read(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to read query results: %w", err)
	}
	for {
		var row map[string]bigqueryapi.Value
		err = it.Next(&row)
		if err == iterator.Done {
			break
		}
		if err != nil {
			return nil, fmt.Errorf("unable to iterate through query results: %w", err)
		}
		vMap := make(map[string]any)
		for key, value := range row {
			vMap[key] = value
		}
		out = append(out, vMap)
	}
	// If the query returned any rows, return them directly.
	if len(out) > 0 {
		return out, nil
	}

	// This handles the standard case for a SELECT query that successfully
	// executes but returns zero rows.
	return "The query returned 0 rows.", nil
}

func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
	return tools.ParseParams(t.Parameters, data, claims)
}

func (t Tool) Manifest() tools.Manifest {
	return t.manifest
}

func (t Tool) McpManifest() tools.McpManifest {
	return t.mcpManifest
}

func (t Tool) Authorized(verifiedAuthServices []string) bool {
	return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}

func (t Tool) RequiresClientAuthorization() bool {
	return t.UseClientOAuth
}

```

--------------------------------------------------------------------------------
/internal/server/server.go:
--------------------------------------------------------------------------------

```go
// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package server

import (
	"context"
	"fmt"
	"io"
	"net"
	"net/http"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/go-chi/chi/v5"
	"github.com/go-chi/chi/v5/middleware"
	"github.com/go-chi/httplog/v2"
	"github.com/googleapis/genai-toolbox/internal/auth"
	"github.com/googleapis/genai-toolbox/internal/log"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/googleapis/genai-toolbox/internal/telemetry"
	"github.com/googleapis/genai-toolbox/internal/tools"
	"github.com/googleapis/genai-toolbox/internal/util"
	"go.opentelemetry.io/otel/attribute"
	"go.opentelemetry.io/otel/trace"
)

// Server contains info for running an instance of Toolbox. Should be instantiated with NewServer().
type Server struct {
	version         string
	srv             *http.Server
	listener        net.Listener
	root            chi.Router
	logger          log.Logger
	instrumentation *telemetry.Instrumentation
	sseManager      *sseManager
	ResourceMgr     *ResourceManager
}

// ResourceManager contains available resources for the server. Should be initialized with NewResourceManager().
type ResourceManager struct {
	mu           sync.RWMutex
	sources      map[string]sources.Source
	authServices map[string]auth.AuthService
	tools        map[string]tools.Tool
	toolsets     map[string]tools.Toolset
}

func NewResourceManager(
	sourcesMap map[string]sources.Source,
	authServicesMap map[string]auth.AuthService,
	toolsMap map[string]tools.Tool, toolsetsMap map[string]tools.Toolset,
) *ResourceManager {
	resourceMgr := &ResourceManager{
		mu:           sync.RWMutex{},
		sources:      sourcesMap,
		authServices: authServicesMap,
		tools:        toolsMap,
		toolsets:     toolsetsMap,
	}

	return resourceMgr
}

func (r *ResourceManager) GetSource(sourceName string) (sources.Source, bool) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	source, ok := r.sources[sourceName]
	return source, ok
}

func (r *ResourceManager) GetAuthService(authServiceName string) (auth.AuthService, bool) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	authService, ok := r.authServices[authServiceName]
	return authService, ok
}

func (r *ResourceManager) GetTool(toolName string) (tools.Tool, bool) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	tool, ok := r.tools[toolName]
	return tool, ok
}

func (r *ResourceManager) GetToolset(toolsetName string) (tools.Toolset, bool) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	toolset, ok := r.toolsets[toolsetName]
	return toolset, ok
}

func (r *ResourceManager) SetResources(sourcesMap map[string]sources.Source, authServicesMap map[string]auth.AuthService, toolsMap map[string]tools.Tool, toolsetsMap map[string]tools.Toolset) {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.sources = sourcesMap
	r.authServices = authServicesMap
	r.tools = toolsMap
	r.toolsets = toolsetsMap
}

func (r *ResourceManager) GetAuthServiceMap() map[string]auth.AuthService {
	r.mu.RLock()
	defer r.mu.RUnlock()
	return r.authServices
}

func (r *ResourceManager) GetToolsMap() map[string]tools.Tool {
	r.mu.RLock()
	defer r.mu.RUnlock()
	return r.tools
}

func InitializeConfigs(ctx context.Context, cfg ServerConfig) (
	map[string]sources.Source,
	map[string]auth.AuthService,
	map[string]tools.Tool,
	map[string]tools.Toolset,
	error,
) {
	ctx = util.WithUserAgent(ctx, cfg.Version)
	instrumentation, err := util.InstrumentationFromContext(ctx)
	if err != nil {
		panic(err)
	}

	l, err := util.LoggerFromContext(ctx)
	if err != nil {
		panic(err)
	}

	// initialize and validate the sources from configs
	sourcesMap := make(map[string]sources.Source)
	for name, sc := range cfg.SourceConfigs {
		s, err := func() (sources.Source, error) {
			childCtx, span := instrumentation.Tracer.Start(
				ctx,
				"toolbox/server/source/init",
				trace.WithAttributes(attribute.String("source_kind", sc.SourceConfigKind())),
				trace.WithAttributes(attribute.String("source_name", name)),
			)
			defer span.End()
			s, err := sc.Initialize(childCtx, instrumentation.Tracer)
			if err != nil {
				return nil, fmt.Errorf("unable to initialize source %q: %w", name, err)
			}
			return s, nil
		}()
		if err != nil {
			return nil, nil, nil, nil, err
		}
		sourcesMap[name] = s
	}
	sourceNames := make([]string, 0, len(sourcesMap))
	for name := range sourcesMap {
		sourceNames = append(sourceNames, name)
	}
	l.InfoContext(ctx, fmt.Sprintf("Initialized %d sources: %s", len(sourcesMap), strings.Join(sourceNames, ", ")))

	// initialize and validate the auth services from configs
	authServicesMap := make(map[string]auth.AuthService)
	for name, sc := range cfg.AuthServiceConfigs {
		a, err := func() (auth.AuthService, error) {
			_, span := instrumentation.Tracer.Start(
				ctx,
				"toolbox/server/auth/init",
				trace.WithAttributes(attribute.String("auth_kind", sc.AuthServiceConfigKind())),
				trace.WithAttributes(attribute.String("auth_name", name)),
			)
			defer span.End()
			a, err := sc.Initialize()
			if err != nil {
				return nil, fmt.Errorf("unable to initialize auth service %q: %w", name, err)
			}
			return a, nil
		}()
		if err != nil {
			return nil, nil, nil, nil, err
		}
		authServicesMap[name] = a
	}
	authServiceNames := make([]string, 0, len(authServicesMap))
	for name := range authServicesMap {
		authServiceNames = append(authServiceNames, name)
	}
	l.InfoContext(ctx, fmt.Sprintf("Initialized %d authServices: %s", len(authServicesMap), strings.Join(authServiceNames, ", ")))

	// initialize and validate the tools from configs
	toolsMap := make(map[string]tools.Tool)
	for name, tc := range cfg.ToolConfigs {
		t, err := func() (tools.Tool, error) {
			_, span := instrumentation.Tracer.Start(
				ctx,
				"toolbox/server/tool/init",
				trace.WithAttributes(attribute.String("tool_kind", tc.ToolConfigKind())),
				trace.WithAttributes(attribute.String("tool_name", name)),
			)
			defer span.End()
			t, err := tc.Initialize(sourcesMap)
			if err != nil {
				return nil, fmt.Errorf("unable to initialize tool %q: %w", name, err)
			}
			return t, nil
		}()
		if err != nil {
			return nil, nil, nil, nil, err
		}
		toolsMap[name] = t
	}
	toolNames := make([]string, 0, len(toolsMap))
	for name := range toolsMap {
		toolNames = append(toolNames, name)
	}
	l.InfoContext(ctx, fmt.Sprintf("Initialized %d tools: %s", len(toolsMap), strings.Join(toolNames, ", ")))

	// create a default toolset that contains all tools
	allToolNames := make([]string, 0, len(toolsMap))
	for name := range toolsMap {
		allToolNames = append(allToolNames, name)
	}
	if cfg.ToolsetConfigs == nil {
		cfg.ToolsetConfigs = make(ToolsetConfigs)
	}
	cfg.ToolsetConfigs[""] = tools.ToolsetConfig{Name: "", ToolNames: allToolNames}

	// initialize and validate the toolsets from configs
	toolsetsMap := make(map[string]tools.Toolset)
	for name, tc := range cfg.ToolsetConfigs {
		t, err := func() (tools.Toolset, error) {
			_, span := instrumentation.Tracer.Start(
				ctx,
				"toolbox/server/toolset/init",
				trace.WithAttributes(attribute.String("toolset_name", name)),
			)
			defer span.End()
			t, err := tc.Initialize(cfg.Version, toolsMap)
			if err != nil {
				return tools.Toolset{}, fmt.Errorf("unable to initialize toolset %q: %w", name, err)
			}
			return t, err
		}()
		if err != nil {
			return nil, nil, nil, nil, err
		}
		toolsetsMap[name] = t
	}
	toolsetNames := make([]string, 0, len(toolsetsMap))
	for name := range toolsetsMap {
		if name == "" {
			toolsetNames = append(toolsetNames, "default")
		} else {
			toolsetNames = append(toolsetNames, name)
		}
	}
	l.InfoContext(ctx, fmt.Sprintf("Initialized %d toolsets: %s", len(toolsetsMap), strings.Join(toolsetNames, ", ")))

	return sourcesMap, authServicesMap, toolsMap, toolsetsMap, nil
}

// NewServer returns a Server object based on provided Config.
func NewServer(ctx context.Context, cfg ServerConfig) (*Server, error) {
	instrumentation, err := util.InstrumentationFromContext(ctx)
	if err != nil {
		return nil, err
	}

	ctx, span := instrumentation.Tracer.Start(ctx, "toolbox/server/init")
	defer span.End()

	l, err := util.LoggerFromContext(ctx)
	if err != nil {
		return nil, err
	}

	// set up http serving
	r := chi.NewRouter()
	r.Use(middleware.Recoverer)
	// logging
	logLevel, err := log.SeverityToLevel(cfg.LogLevel.String())
	if err != nil {
		return nil, fmt.Errorf("unable to initialize http log: %w", err)
	}
	var httpOpts httplog.Options
	switch cfg.LoggingFormat.String() {
	case "json":
		httpOpts = httplog.Options{
			JSON:             true,
			LogLevel:         logLevel,
			Concise:          true,
			RequestHeaders:   false,
			MessageFieldName: "message",
			SourceFieldName:  "logging.googleapis.com/sourceLocation",
			TimeFieldName:    "timestamp",
			LevelFieldName:   "severity",
		}
	case "standard":
		httpOpts = httplog.Options{
			LogLevel:         logLevel,
			Concise:          true,
			RequestHeaders:   false,
			MessageFieldName: "message",
		}
	default:
		return nil, fmt.Errorf("invalid Logging format: %q", cfg.LoggingFormat.String())
	}
	httpLogger := httplog.NewLogger("httplog", httpOpts)
	r.Use(httplog.RequestLogger(httpLogger))

	sourcesMap, authServicesMap, toolsMap, toolsetsMap, err := InitializeConfigs(ctx, cfg)
	if err != nil {
		return nil, fmt.Errorf("unable to initialize configs: %w", err)
	}

	addr := net.JoinHostPort(cfg.Address, strconv.Itoa(cfg.Port))
	srv := &http.Server{Addr: addr, Handler: r}

	sseManager := newSseManager(ctx)

	resourceManager := NewResourceManager(sourcesMap, authServicesMap, toolsMap, toolsetsMap)

	s := &Server{
		version:         cfg.Version,
		srv:             srv,
		root:            r,
		logger:          l,
		instrumentation: instrumentation,
		sseManager:      sseManager,
		ResourceMgr:     resourceManager,
	}
	// control plane
	apiR, err := apiRouter(s)
	if err != nil {
		return nil, err
	}
	r.Mount("/api", apiR)
	mcpR, err := mcpRouter(s)
	if err != nil {
		return nil, err
	}
	r.Mount("/mcp", mcpR)
	if cfg.UI {
		webR, err := webRouter()
		if err != nil {
			return nil, err
		}
		r.Mount("/ui", webR)
	}
	// default endpoint for validating server is running
	r.Get("/", func(w http.ResponseWriter, r *http.Request) {
		_, _ = w.Write([]byte("🧰 Hello, World! 🧰"))
	})

	return s, nil
}

// Listen starts a listener for the given Server instance.
func (s *Server) Listen(ctx context.Context) error {
	ctx, cancel := context.WithCancel(ctx)
	defer cancel()

	if s.listener != nil {
		return fmt.Errorf("server is already listening: %s", s.listener.Addr().String())
	}
	lc := net.ListenConfig{KeepAlive: 30 * time.Second}
	var err error
	if s.listener, err = lc.Listen(ctx, "tcp", s.srv.Addr); err != nil {
		return fmt.Errorf("failed to open listener for %q: %w", s.srv.Addr, err)
	}
	s.logger.DebugContext(ctx, fmt.Sprintf("server listening on %s", s.srv.Addr))
	return nil
}

// Serve starts an HTTP server for the given Server instance.
func (s *Server) Serve(ctx context.Context) error {
	s.logger.DebugContext(ctx, "Starting an HTTP server.")
	return s.srv.Serve(s.listener)
}

// ServeStdio starts a new stdio session for mcp.
func (s *Server) ServeStdio(ctx context.Context, stdin io.Reader, stdout io.Writer) error {
	stdioServer := NewStdioSession(s, stdin, stdout)
	return stdioServer.Start(ctx)
}

// Shutdown gracefully shuts down the server without interrupting any active
// connections. It uses http.Server.Shutdown() and has the same functionality.
func (s *Server) Shutdown(ctx context.Context) error {
	s.logger.DebugContext(ctx, "shutting down the server.")
	return s.srv.Shutdown(ctx)
}

```

--------------------------------------------------------------------------------
/.hugo/static/js/w3.js:
--------------------------------------------------------------------------------

```javascript
/* W3.JS 1.04 April 2019 by w3schools.com */
"use strict";
var w3 = {};
w3.hide = function (sel) {
  w3.hideElements(w3.getElements(sel));
};
w3.hideElements = function (elements) {
  var i, l = elements.length;
  for (i = 0; i < l; i++) {
    w3.hideElement(elements[i]);
  }
};
w3.hideElement = function (element) {
  w3.styleElement(element, "display", "none");
};
w3.show = function (sel, a) {
  var elements = w3.getElements(sel);
  if (a) {w3.hideElements(elements);}
  w3.showElements(elements);
};
w3.showElements = function (elements) {
  var i, l = elements.length;
  for (i = 0; i < l; i++) {
    w3.showElement(elements[i]);
  }
};
w3.showElement = function (element) {
  w3.styleElement(element, "display", "block");
};
w3.addStyle = function (sel, prop, val) {
  w3.styleElements(w3.getElements(sel), prop, val);
};
w3.styleElements = function (elements, prop, val) {
  var i, l = elements.length;
  for (i = 0; i < l; i++) {
    w3.styleElement(elements[i], prop, val);
  }
};
w3.styleElement = function (element, prop, val) {
  element.style.setProperty(prop, val);
};
w3.toggleShow = function (sel) {
  var i, x = w3.getElements(sel), l = x.length;
  for (i = 0; i < l; i++) {
    if (x[i].style.display == "none") {
      w3.styleElement(x[i], "display", "block");
    } else {
      w3.styleElement(x[i], "display", "none");
    }
  }
};
w3.addClass = function (sel, name) {
  w3.addClassElements(w3.getElements(sel), name);
};
w3.addClassElements = function (elements, name) {
  var i, l = elements.length;
  for (i = 0; i < l; i++) {
    w3.addClassElement(elements[i], name);
  }
};
w3.addClassElement = function (element, name) {
  var i, arr1, arr2;
  arr1 = element.className.split(" ");
  arr2 = name.split(" ");
  for (i = 0; i < arr2.length; i++) {
    if (arr1.indexOf(arr2[i]) == -1) {element.className += " " + arr2[i];}
  }
};
w3.removeClass = function (sel, name) {
  w3.removeClassElements(w3.getElements(sel), name);
};
w3.removeClassElements = function (elements, name) {
  var i, l = elements.length, arr1, arr2, j;
  for (i = 0; i < l; i++) {
    w3.removeClassElement(elements[i], name);
  }
};
w3.removeClassElement = function (element, name) {
  var i, arr1, arr2;
  arr1 = element.className.split(" ");
  arr2 = name.split(" ");
  for (i = 0; i < arr2.length; i++) {
    while (arr1.indexOf(arr2[i]) > -1) {
      arr1.splice(arr1.indexOf(arr2[i]), 1);
    }
  }
  element.className = arr1.join(" ");
};
w3.toggleClass = function (sel, c1, c2) {
  w3.toggleClassElements(w3.getElements(sel), c1, c2);
};
w3.toggleClassElements = function (elements, c1, c2) {
  var i, l = elements.length;
  for (i = 0; i < l; i++) {
    w3.toggleClassElement(elements[i], c1, c2);
  }
};
w3.toggleClassElement = function (element, c1, c2) {
  var t1, t2, t1Arr, t2Arr, j, arr, allPresent;
  t1 = (c1 || "");
  t2 = (c2 || "");
  t1Arr = t1.split(" ");
  t2Arr = t2.split(" ");
  arr = element.className.split(" ");
  if (t2Arr.length == 0) {
    allPresent = true;
    for (j = 0; j < t1Arr.length; j++) {
      if (arr.indexOf(t1Arr[j]) == -1) {allPresent = false;}
    }
    if (allPresent) {
      w3.removeClassElement(element, t1);
    } else {
      w3.addClassElement(element, t1);
    }
  } else {
    allPresent = true;
    for (j = 0; j < t1Arr.length; j++) {
      if (arr.indexOf(t1Arr[j]) == -1) {allPresent = false;}
    }
    if (allPresent) {
      w3.removeClassElement(element, t1);
      w3.addClassElement(element, t2);
    } else {
      w3.removeClassElement(element, t2);
      w3.addClassElement(element, t1);
    }
  }
};
w3.getElements = function (id) {
  if (typeof id == "object") {
    return [id];
  } else {
    return document.querySelectorAll(id);
  }
};
w3.filterHTML = function(id, sel, filter) {
  var a, b, c, i, ii, iii, hit;
  a = w3.getElements(id);
  for (i = 0; i < a.length; i++) {
    b = a[i].querySelectorAll(sel);
    for (ii = 0; ii < b.length; ii++) {
      hit = 0;
      if (b[ii].innerText.toUpperCase().indexOf(filter.toUpperCase()) > -1) {
        hit = 1;
      }
      c = b[ii].getElementsByTagName("*");
      for (iii = 0; iii < c.length; iii++) {
        if (c[iii].innerText.toUpperCase().indexOf(filter.toUpperCase()) > -1) {
          hit = 1;
        }
      }
      if (hit == 1) {
        b[ii].style.display = "";
      } else {
        b[ii].style.display = "none";
      }
    }
  }
};
w3.sortHTML = function(id, sel, sortvalue) {
  var a, b, i, ii, y, bytt, v1, v2, cc, j;
  a = w3.getElements(id);
  for (i = 0; i < a.length; i++) {
    for (j = 0; j < 2; j++) {
      cc = 0;
      y = 1;
      while (y == 1) {
        y = 0;
        b = a[i].querySelectorAll(sel);
        for (ii = 0; ii < (b.length - 1); ii++) {
          bytt = 0;
          if (sortvalue) {
            v1 = b[ii].querySelector(sortvalue).innerText;
            v2 = b[ii + 1].querySelector(sortvalue).innerText;
          } else {
            v1 = b[ii].innerText;
            v2 = b[ii + 1].innerText;
          }
          v1 = v1.toLowerCase();
          v2 = v2.toLowerCase();
          if ((j == 0 && (v1 > v2)) || (j == 1 && (v1 < v2))) {
            bytt = 1;
            break;
          }
        }
        if (bytt == 1) {
          b[ii].parentNode.insertBefore(b[ii + 1], b[ii]);
          y = 1;
          cc++;
        }
      }
      if (cc > 0) {break;}
    }
  }
};
w3.slideshow = function (sel, ms, func) {
  var i, ss, x = w3.getElements(sel), l = x.length;
  ss = {};
  ss.current = 1;
  ss.x = x;
  ss.ondisplaychange = func;
  if (!isNaN(ms) || ms == 0) {
    ss.milliseconds = ms;
  } else {
    ss.milliseconds = 1000;
  }
  ss.start = function() {
    ss.display(ss.current)
    if (ss.ondisplaychange) {ss.ondisplaychange();}
    if (ss.milliseconds > 0) {
      window.clearTimeout(ss.timeout);
      ss.timeout = window.setTimeout(ss.next, ss.milliseconds);
    }
  };
  ss.next = function() {
    ss.current += 1;
    if (ss.current > ss.x.length) {ss.current = 1;}
    ss.start();
  };
  ss.previous = function() {
    ss.current -= 1;
    if (ss.current < 1) {ss.current = ss.x.length;}
    ss.start();
  };
  ss.display = function (n) {
    w3.styleElements(ss.x, "display", "none");
    w3.styleElement(ss.x[n - 1], "display", "block");
  }
  ss.start();
  return ss;
};
w3.includeHTML = function(cb) {
  var z, i, elmnt, file, xhttp;
  z = document.getElementsByTagName("*");
  for (i = 0; i < z.length; i++) {
    elmnt = z[i];
    file = elmnt.getAttribute("w3-include-html");
    if (file) {
      xhttp = new XMLHttpRequest();
      xhttp.onreadystatechange = function() {
        if (this.readyState == 4) {
          if (this.status == 200) {elmnt.innerHTML = this.responseText;}
          if (this.status == 404) {
            if (elmnt.getAttribute("w3-include-html-default")) {
              elmnt.innerHTML = elmnt.getAttribute("w3-include-html-default");
            }
            else { elmnt.innerHTML = "Page not found."; }
          }
          elmnt.removeAttribute("w3-include-html");
          w3.includeHTML(cb);
        }
      }
      xhttp.open("GET", file, true);
      xhttp.send();
      return;
    }
  }
  if (cb) cb();
};
w3.getHttpData = function (file, func) {
  w3.http(file, function () {
    if (this.readyState == 4 && this.status == 200) {
      func(this.responseText);
    }
  });
};
w3.getHttpObject = function (file, func) {
  w3.http(file, function () {
    if (this.readyState == 4 && this.status == 200) {
      func(JSON.parse(this.responseText));
    }
  });
};
w3.displayHttp = function (id, file) {
  w3.http(file, function () {
    if (this.readyState == 4 && this.status == 200) {
      w3.displayObject(id, JSON.parse(this.responseText));
    }
  });
};
w3.http = function (target, readyfunc, xml, method) {
  var httpObj;
  if (!method) {method = "GET"; }
  if (window.XMLHttpRequest) {
    httpObj = new XMLHttpRequest();
  } else if (window.ActiveXObject) {
    httpObj = new ActiveXObject("Microsoft.XMLHTTP");
  }
  if (httpObj) {
    if (readyfunc) {httpObj.onreadystatechange = readyfunc;}
    httpObj.open(method, target, true);
    httpObj.send(xml);
  }
};
w3.getElementsByAttribute = function (x, att) {
  var arr = [], arrCount = -1, i, l, y = x.getElementsByTagName("*"), z = att.toUpperCase();
  l = y.length;
  for (i = -1; i < l; i += 1) {
    if (i == -1) {y[i] = x;}
    if (y[i].getAttribute(z) !== null) {arrCount += 1; arr[arrCount] = y[i];}
  }
  return arr;
};  
w3.dataObject = {},
w3.displayObject = function (id, data) {
  var htmlObj, htmlTemplate, html, arr = [], a, l, rowClone, x, j, i, ii, cc, repeat, repeatObj, repeatX = "";
  htmlObj = document.getElementById(id);
  htmlTemplate = init_template(id, htmlObj);
  html = htmlTemplate.cloneNode(true);
  arr = w3.getElementsByAttribute(html, "w3-repeat");
  l = arr.length;
  for (j = (l - 1); j >= 0; j -= 1) {
    cc = arr[j].getAttribute("w3-repeat").split(" ");
    if (cc.length == 1) {
      repeat = cc[0];
    } else {
      repeatX = cc[0];
      repeat = cc[2];
    }
    arr[j].removeAttribute("w3-repeat");
    repeatObj = data[repeat];
    if (repeatObj && typeof repeatObj == "object" && repeatObj.length != "undefined") {
      i = 0;
      for (x in repeatObj) {
        i += 1;
        rowClone = arr[j];
        rowClone = w3_replace_curly(rowClone, "element", repeatX, repeatObj[x]);
        a = rowClone.attributes;
        for (ii = 0; ii < a.length; ii += 1) {
          a[ii].value = w3_replace_curly(a[ii], "attribute", repeatX, repeatObj[x]).value;
        }
        (i === repeatObj.length) ? arr[j].parentNode.replaceChild(rowClone, arr[j]) : arr[j].parentNode.insertBefore(rowClone, arr[j]);
      }
    } else {
      console.log("w3-repeat must be an array. " + repeat + " is not an array.");
      continue;
    }
  }
  html = w3_replace_curly(html, "element");
  htmlObj.parentNode.replaceChild(html, htmlObj);
  function init_template(id, obj) {
    var template;
    template = obj.cloneNode(true);
    if (w3.dataObject.hasOwnProperty(id)) {return w3.dataObject[id];}
    w3.dataObject[id] = template;
    return template;
  }
  function w3_replace_curly(elmnt, typ, repeatX, x) {
    var value, rowClone, pos1, pos2, originalHTML, lookFor, lookForARR = [], i, cc, r;
    rowClone = elmnt.cloneNode(true);
    pos1 = 0;
    while (pos1 > -1) {
      originalHTML = (typ == "attribute") ? rowClone.value : rowClone.innerHTML;
      pos1 = originalHTML.indexOf("{{", pos1);
      if (pos1 === -1) {break;}
      pos2 = originalHTML.indexOf("}}", pos1 + 1);
      lookFor = originalHTML.substring(pos1 + 2, pos2);
      lookForARR = lookFor.split("||");
      value = undefined;
      for (i = 0; i < lookForARR.length; i += 1) {
        lookForARR[i] = lookForARR[i].replace(/^\s+|\s+$/gm, ''); //trim
        if (x) {value = x[lookForARR[i]];}
        if (value == undefined && data) {value = data[lookForARR[i]];}
        if (value == undefined) {
          cc = lookForARR[i].split(".");
          if (cc[0] == repeatX) {value = x[cc[1]]; }
        }
        if (value == undefined) {
          if (lookForARR[i] == repeatX) {value = x;}
        }
        if (value == undefined) {
          if (lookForARR[i].substr(0, 1) == '"') {
            value = lookForARR[i].replace(/"/g, "");
          } else if (lookForARR[i].substr(0,1) == "'") {
            value = lookForARR[i].replace(/'/g, "");
          }
        }
        if (value != undefined) {break;}
      }
      if (value != undefined) {
        r = "{{" + lookFor + "}}";
        if (typ == "attribute") {
          rowClone.value = rowClone.value.replace(r, value);
        } else {
          w3_replace_html(rowClone, r, value);
        }
      }
      pos1 = pos1 + 1;
    }
    return rowClone;
  }
  function w3_replace_html(a, r, result) {
    var b, l, i, a, x, j;
    if (a.hasAttributes()) {
      b = a.attributes;
      l = b.length;
      for (i = 0; i < l; i += 1) {
        if (b[i].value.indexOf(r) > -1) {b[i].value = b[i].value.replace(r, result);}
      }
    }
    x = a.getElementsByTagName("*");
    l = x.length;
    a.innerHTML = a.innerHTML.replace(r, result);
  }
};
```

--------------------------------------------------------------------------------
/internal/tools/neo4j/neo4jschema/helpers/helpers.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Package helpers provides utility functions for transforming and processing Neo4j
// schema data. It includes functions for converting raw query results from both
// APOC and native Cypher queries into a standardized, structured format.
package helpers

import (
	"fmt"
	"sort"

	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema/types"
	"github.com/neo4j/neo4j-go-driver/v5/neo4j"
)

// ConvertToStringSlice converts a slice of any type to a slice of strings.
// It uses fmt.Sprintf to perform the conversion for each element.
// Example:
//
//	input: []any{"user", 123, true}
//	output: []string{"user", "123", "true"}
func ConvertToStringSlice(slice []any) []string {
	result := make([]string, len(slice))
	for i, v := range slice {
		result[i] = fmt.Sprintf("%v", v)
	}
	return result
}

// GetStringValue safely converts any value to its string representation.
// If the input value is nil, it returns an empty string.
func GetStringValue(val any) string {
	if val == nil {
		return ""
	}
	return fmt.Sprintf("%v", val)
}

// MapToAPOCSchema converts a raw map from a Cypher query into a structured
// APOCSchemaResult. This is a workaround for database drivers that may return
// complex nested structures as `map[string]any` instead of unmarshalling
// directly into a struct. It achieves this by marshalling the map to YAML and
// then unmarshalling into the target struct.
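// For example (illustrative; the key names assume APOCEntity's yaml tags
// match apoc.meta.schema's output keys such as "type" and "count"):
//
//	result, _ := MapToAPOCSchema(map[string]any{
//		"User": map[string]any{"type": "node", "count": 10},
//	})
//	// result.Value["User"].Count == 10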
func MapToAPOCSchema(schemaMap map[string]any) (*types.APOCSchemaResult, error) {
	schemaBytes, err := yaml.Marshal(schemaMap)
	if err != nil {
		return nil, fmt.Errorf("failed to marshal schema map: %w", err)
	}
	var entities map[string]types.APOCEntity
	if err = yaml.Unmarshal(schemaBytes, &entities); err != nil {
		return nil, fmt.Errorf("failed to unmarshal schema map into entities: %w", err)
	}
	return &types.APOCSchemaResult{Value: entities}, nil
}

// ProcessAPOCSchema transforms the nested result from the `apoc.meta.schema()`
// procedure into flat lists of node labels and relationships, along with
// aggregated database statistics. It iterates through entities, processes nodes,
// and extracts outgoing relationship information nested within those nodes.
func ProcessAPOCSchema(apocSchema *types.APOCSchemaResult) ([]types.NodeLabel, []types.Relationship, *types.Statistics) {
	var nodeLabels []types.NodeLabel
	relMap := make(map[string]*types.Relationship)
	stats := &types.Statistics{
		NodesByLabel:        make(map[string]int64),
		RelationshipsByType: make(map[string]int64),
		PropertiesByLabel:   make(map[string]int64),
		PropertiesByRelType: make(map[string]int64),
	}

	for name, entity := range apocSchema.Value {
		// We only process top-level entities of type "node". Relationship info is
		// derived from the "relationships" field within each node entity.
		if entity.Type != "node" {
			continue
		}

		nodeLabel := types.NodeLabel{
			Name:       name,
			Count:      entity.Count,
			Properties: extractAPOCProperties(entity.Properties),
		}
		nodeLabels = append(nodeLabels, nodeLabel)

		// Aggregate statistics for the node.
		stats.NodesByLabel[name] = entity.Count
		stats.TotalNodes += entity.Count
		propCount := int64(len(nodeLabel.Properties))
		stats.PropertiesByLabel[name] = propCount
		stats.TotalProperties += propCount * entity.Count

		// Extract relationship information from the node.
		for relName, relInfo := range entity.Relationships {
			// Only process outgoing relationships to avoid double-counting.
			if relInfo.Direction != "out" {
				continue
			}

			rel, exists := relMap[relName]
			if !exists {
				rel = &types.Relationship{
					Type:       relName,
					Properties: extractAPOCProperties(relInfo.Properties),
				}
				if len(relInfo.Labels) > 0 {
					rel.EndNode = relInfo.Labels[0]
				}
				rel.StartNode = name
				relMap[relName] = rel
			}
			rel.Count += relInfo.Count
		}
	}

	// Consolidate the relationships from the map into a slice and update stats.
	relationships := make([]types.Relationship, 0, len(relMap))
	for _, rel := range relMap {
		relationships = append(relationships, *rel)
		stats.RelationshipsByType[rel.Type] = rel.Count
		stats.TotalRelationships += rel.Count
		propCount := int64(len(rel.Properties))
		stats.PropertiesByRelType[rel.Type] = propCount
		stats.TotalProperties += propCount * rel.Count
	}

	sortAndClean(nodeLabels, relationships, stats)

	// Set empty slices to nil for cleaner output.
	if len(nodeLabels) == 0 {
		nodeLabels = nil
	}
	if len(relationships) == 0 {
		relationships = nil
	}
	return nodeLabels, relationships, stats
}

// ProcessNonAPOCSchema serves as an alternative to ProcessAPOCSchema for environments
// where APOC procedures are not available. It converts schema data gathered from
// multiple separate, native Cypher queries (providing node counts, property maps, etc.)
// into the same standardized, structured format.
func ProcessNonAPOCSchema(
	nodeCounts map[string]int64,
	nodePropsMap map[string]map[string]map[string]bool,
	relCounts map[string]int64,
	relPropsMap map[string]map[string]map[string]bool,
	relConnectivity map[string]types.RelConnectivityInfo,
) ([]types.NodeLabel, []types.Relationship, *types.Statistics) {
	stats := &types.Statistics{
		NodesByLabel:        make(map[string]int64),
		RelationshipsByType: make(map[string]int64),
		PropertiesByLabel:   make(map[string]int64),
		PropertiesByRelType: make(map[string]int64),
	}

	// Process node information.
	nodeLabels := make([]types.NodeLabel, 0, len(nodeCounts))
	for label, count := range nodeCounts {
		properties := make([]types.PropertyInfo, 0)
		if props, ok := nodePropsMap[label]; ok {
			for key, typeSet := range props {
				typeList := make([]string, 0, len(typeSet))
				for tp := range typeSet {
					typeList = append(typeList, tp)
				}
				sort.Strings(typeList)
				properties = append(properties, types.PropertyInfo{Name: key, Types: typeList})
			}
		}
		sort.Slice(properties, func(i, j int) bool { return properties[i].Name < properties[j].Name })

		nodeLabels = append(nodeLabels, types.NodeLabel{Name: label, Count: count, Properties: properties})

		// Aggregate node statistics.
		stats.NodesByLabel[label] = count
		stats.TotalNodes += count
		propCount := int64(len(properties))
		stats.PropertiesByLabel[label] = propCount
		stats.TotalProperties += propCount * count
	}

	// Process relationship information.
	relationships := make([]types.Relationship, 0, len(relCounts))
	for relType, count := range relCounts {
		properties := make([]types.PropertyInfo, 0)
		if props, ok := relPropsMap[relType]; ok {
			for key, typeSet := range props {
				typeList := make([]string, 0, len(typeSet))
				for tp := range typeSet {
					typeList = append(typeList, tp)
				}
				sort.Strings(typeList)
				properties = append(properties, types.PropertyInfo{Name: key, Types: typeList})
			}
		}
		sort.Slice(properties, func(i, j int) bool { return properties[i].Name < properties[j].Name })

		conn := relConnectivity[relType]
		relationships = append(relationships, types.Relationship{
			Type:       relType,
			Count:      count,
			StartNode:  conn.StartNode,
			EndNode:    conn.EndNode,
			Properties: properties,
		})

		// Aggregate relationship statistics.
		stats.RelationshipsByType[relType] = count
		stats.TotalRelationships += count
		propCount := int64(len(properties))
		stats.PropertiesByRelType[relType] = propCount
		stats.TotalProperties += propCount * count
	}

	sortAndClean(nodeLabels, relationships, stats)

	// Set empty slices to nil for cleaner output (empty maps are nilled in sortAndClean).
	if len(nodeLabels) == 0 {
		nodeLabels = nil
	}
	if len(relationships) == 0 {
		relationships = nil
	}
	return nodeLabels, relationships, stats
}
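
// exampleProcessNonAPOCSchema is an illustrative sketch rather than part of
// the toolbox source: it shows, with hypothetical sample data, the input
// shapes this function expects from the native Cypher queries (label/type
// counts, label -> property -> observed type-name sets, and per-type
// connectivity info).
func exampleProcessNonAPOCSchema() ([]types.NodeLabel, []types.Relationship, *types.Statistics) {
	nodeCounts := map[string]int64{"Person": 2, "Movie": 1}
	nodePropsMap := map[string]map[string]map[string]bool{
		"Person": {"name": {"STRING": true}},
		"Movie":  {"title": {"STRING": true}},
	}
	relCounts := map[string]int64{"ACTED_IN": 2}
	relPropsMap := map[string]map[string]map[string]bool{
		"ACTED_IN": {"role": {"STRING": true}},
	}
	relConnectivity := map[string]types.RelConnectivityInfo{
		"ACTED_IN": {StartNode: "Person", EndNode: "Movie"},
	}
	return ProcessNonAPOCSchema(nodeCounts, nodePropsMap, relCounts, relPropsMap, relConnectivity)
}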

// extractAPOCProperties is a helper that converts a map of APOC property
// information into a slice of standardized PropertyInfo structs. The resulting
// slice is sorted by property name for consistent ordering.
func extractAPOCProperties(props map[string]types.APOCProperty) []types.PropertyInfo {
	properties := make([]types.PropertyInfo, 0, len(props))
	for name, info := range props {
		properties = append(properties, types.PropertyInfo{
			Name:      name,
			Types:     []string{info.Type},
			Indexed:   info.Indexed,
			Unique:    info.Unique,
			Mandatory: info.Existence,
		})
	}
	sort.Slice(properties, func(i, j int) bool {
		return properties[i].Name < properties[j].Name
	})
	return properties
}
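
// exampleExtractAPOCProperties is an illustrative sketch rather than part of
// the source: APOC's Existence flag maps onto Mandatory in the output, and
// the result is sorted by name, so "age" precedes "name" here.
func exampleExtractAPOCProperties() []types.PropertyInfo {
	props := map[string]types.APOCProperty{
		"name": {Type: "STRING", Indexed: true, Unique: true, Existence: true},
		"age":  {Type: "INTEGER"},
	}
	return extractAPOCProperties(props)
}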

// sortAndClean performs final processing on the schema data. It sorts node and
// relationship slices for consistent output, primarily by count (descending) and
// secondarily by name/type. It also sets any empty maps in the statistics
// struct to nil, which can simplify downstream serialization (e.g., omitting
// empty fields in JSON).
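// For example (illustrative): labels with counts {A: 5, B: 5, C: 9} come out
// ordered C, A, B.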
func sortAndClean(nodeLabels []types.NodeLabel, relationships []types.Relationship, stats *types.Statistics) {
	// Sort nodes by count (desc) then name (asc).
	sort.Slice(nodeLabels, func(i, j int) bool {
		if nodeLabels[i].Count != nodeLabels[j].Count {
			return nodeLabels[i].Count > nodeLabels[j].Count
		}
		return nodeLabels[i].Name < nodeLabels[j].Name
	})
	// Sort relationships by count (desc) then type (asc).
	sort.Slice(relationships, func(i, j int) bool {
		if relationships[i].Count != relationships[j].Count {
			return relationships[i].Count > relationships[j].Count
		}
		return relationships[i].Type < relationships[j].Type
	})
	// Nil out empty maps for cleaner output.
	if len(stats.NodesByLabel) == 0 {
		stats.NodesByLabel = nil
	}
	if len(stats.RelationshipsByType) == 0 {
		stats.RelationshipsByType = nil
	}
	if len(stats.PropertiesByLabel) == 0 {
		stats.PropertiesByLabel = nil
	}
	if len(stats.PropertiesByRelType) == 0 {
		stats.PropertiesByRelType = nil
	}
}

// ConvertValue recursively converts a Neo4j driver value into a JSON-compatible value.
func ConvertValue(value any) any {
	switch v := value.(type) {
	case nil, neo4j.InvalidValue:
		return nil
	case bool, string, int, int8, int16, int32, int64, float32, float64:
		return v
	case neo4j.Date, neo4j.LocalTime, neo4j.Time,
		neo4j.LocalDateTime, neo4j.Duration:
		if iv, ok := v.(types.ValueType); ok {
			return iv.String()
		}
	case neo4j.Node:
		return map[string]any{
			"elementId":  v.GetElementId(),
			"labels":     v.Labels,
			"properties": ConvertValue(v.GetProperties()),
		}
	case neo4j.Relationship:
		return map[string]any{
			"elementId":      v.GetElementId(),
			"type":           v.Type,
			"startElementId": v.StartElementId,
			"endElementId":   v.EndElementId,
			"properties":     ConvertValue(v.GetProperties()),
		}
	case neo4j.Entity:
		return map[string]any{
			"elementId":  v.GetElementId(),
			"properties": ConvertValue(v.GetProperties()),
		}
	case neo4j.Path:
		var nodes []any
		var relationships []any
		for _, r := range v.Relationships {
			relationships = append(relationships, ConvertValue(r))
		}
		for _, n := range v.Nodes {
			nodes = append(nodes, ConvertValue(n))
		}
		return map[string]any{
			"nodes":         nodes,
			"relationships": relationships,
		}
	case neo4j.Record:
		m := make(map[string]any)
		for i, key := range v.Keys {
			m[key] = ConvertValue(v.Values[i])
		}
		return m
	case neo4j.Point2D:
		return map[string]any{"x": v.X, "y": v.Y, "srid": v.SpatialRefId}
	case neo4j.Point3D:
		return map[string]any{"x": v.X, "y": v.Y, "z": v.Z, "srid": v.SpatialRefId}
	case []any:
		arr := make([]any, len(v))
		for i, elem := range v {
			arr[i] = ConvertValue(elem)
		}
		return arr
	case map[string]any:
		m := make(map[string]any)
		for key, val := range v {
			m[key] = ConvertValue(val)
		}
		return m
	}
	return fmt.Sprintf("%v", value)
}
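
// exampleConvertValue is an illustrative sketch rather than part of the
// source: spatial and temporal values become strings or small maps, so the
// result can be handed straight to encoding/json.
func exampleConvertValue() any {
	row := map[string]any{
		"location": neo4j.Point2D{X: 1.0, Y: 2.0, SpatialRefId: 4326},
		"tags":     []any{"graph", int64(42)},
	}
	// Yields {"location": {"x": 1, "y": 2, "srid": 4326}, "tags": ["graph", 42]}.
	return ConvertValue(row)
}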

```