# googleapis/genai-toolbox — repository structure snapshot (page 39 of 52)
This is page 39 of 52. Use http://codebase.md/googleapis/genai-toolbox?lines=true&page={x} to view the full context.

# Directory Structure

```
├── .ci
│   ├── continuous.release.cloudbuild.yaml
│   ├── generate_release_table.sh
│   ├── integration.cloudbuild.yaml
│   ├── quickstart_test
│   │   ├── go.integration.cloudbuild.yaml
│   │   ├── js.integration.cloudbuild.yaml
│   │   ├── py.integration.cloudbuild.yaml
│   │   ├── run_go_tests.sh
│   │   ├── run_js_tests.sh
│   │   ├── run_py_tests.sh
│   │   └── setup_hotels_sample.sql
│   ├── test_with_coverage.sh
│   └── versioned.release.cloudbuild.yaml
├── .github
│   ├── auto-label.yaml
│   ├── blunderbuss.yml
│   ├── CODEOWNERS
│   ├── header-checker-lint.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.yml
│   │   ├── config.yml
│   │   ├── feature_request.yml
│   │   └── question.yml
│   ├── label-sync.yml
│   ├── labels.yaml
│   ├── PULL_REQUEST_TEMPLATE.md
│   ├── release-please.yml
│   ├── renovate.json5
│   ├── sync-repo-settings.yaml
│   └── workflows
│       ├── cloud_build_failure_reporter.yml
│       ├── deploy_dev_docs.yaml
│       ├── deploy_previous_version_docs.yaml
│       ├── deploy_versioned_docs.yaml
│       ├── docs_deploy.yaml
│       ├── docs_preview_clean.yaml
│       ├── docs_preview_deploy.yaml
│       ├── lint.yaml
│       ├── schedule_reporter.yml
│       ├── sync-labels.yaml
│       └── tests.yaml
├── .gitignore
├── .gitmodules
├── .golangci.yaml
├── .hugo
│   ├── archetypes
│   │   └── default.md
│   ├── assets
│   │   ├── icons
│   │   │   └── logo.svg
│   │   └── scss
│   │       ├── _styles_project.scss
│   │       └── _variables_project.scss
│   ├── go.mod
│   ├── go.sum
│   ├── hugo.toml
│   ├── layouts
│   │   ├── _default
│   │   │   └── home.releases.releases
│   │   ├── index.llms-full.txt
│   │   ├── index.llms.txt
│   │   ├── partials
│   │   │   ├── hooks
│   │   │   │   └── head-end.html
│   │   │   ├── navbar-version-selector.html
│   │   │   ├── page-meta-links.html
│   │   │   └── td
│   │   │       └── render-heading.html
│   │   ├── robot.txt
│   │   └── shortcodes
│   │       ├── include.html
│   │       ├── ipynb.html
│   │       └── regionInclude.html
│   ├── package-lock.json
│   ├── package.json
│   └── static
│       ├── favicons
│       │   ├── android-chrome-192x192.png
│       │   ├── android-chrome-512x512.png
│       │   ├── apple-touch-icon.png
│       │   ├── favicon-16x16.png
│       │   ├── favicon-32x32.png
│       │   └── favicon.ico
│       └── js
│           └── w3.js
├── CHANGELOG.md
├── cmd
│   ├── options_test.go
│   ├── options.go
│   ├── root_test.go
│   ├── root.go
│   └── version.txt
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── DEVELOPER.md
├── Dockerfile
├── docs
│   └── en
│       ├── _index.md
│       ├── about
│       │   ├── _index.md
│       │   └── faq.md
│       ├── concepts
│       │   ├── _index.md
│       │   └── telemetry
│       │       ├── index.md
│       │       ├── telemetry_flow.png
│       │       └── telemetry_traces.png
│       ├── getting-started
│       │   ├── _index.md
│       │   ├── colab_quickstart.ipynb
│       │   ├── configure.md
│       │   ├── introduction
│       │   │   ├── _index.md
│       │   │   └── architecture.png
│       │   ├── local_quickstart_go.md
│       │   ├── local_quickstart_js.md
│       │   ├── local_quickstart.md
│       │   ├── mcp_quickstart
│       │   │   ├── _index.md
│       │   │   ├── inspector_tools.png
│       │   │   └── inspector.png
│       │   └── quickstart
│       │       ├── go
│       │       │   ├── genAI
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── genkit
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── langchain
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── openAI
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   └── quickstart_test.go
│       │       ├── golden.txt
│       │       ├── js
│       │       │   ├── genAI
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── genkit
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── langchain
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── llamaindex
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   └── quickstart.test.js
│       │       ├── python
│       │       │   ├── __init__.py
│       │       │   ├── adk
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── core
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── langchain
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── llamaindex
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   └── quickstart_test.py
│       │       └── shared
│       │           ├── cloud_setup.md
│       │           ├── configure_toolbox.md
│       │           └── database_setup.md
│       ├── how-to
│       │   ├── _index.md
│       │   ├── connect_via_geminicli.md
│       │   ├── connect_via_mcp.md
│       │   ├── connect-ide
│       │   │   ├── _index.md
│       │   │   ├── alloydb_pg_admin_mcp.md
│       │   │   ├── alloydb_pg_mcp.md
│       │   │   ├── bigquery_mcp.md
│       │   │   ├── cloud_sql_mssql_admin_mcp.md
│       │   │   ├── cloud_sql_mssql_mcp.md
│       │   │   ├── cloud_sql_mysql_admin_mcp.md
│       │   │   ├── cloud_sql_mysql_mcp.md
│       │   │   ├── cloud_sql_pg_admin_mcp.md
│       │   │   ├── cloud_sql_pg_mcp.md
│       │   │   ├── firestore_mcp.md
│       │   │   ├── looker_mcp.md
│       │   │   ├── mssql_mcp.md
│       │   │   ├── mysql_mcp.md
│       │   │   ├── neo4j_mcp.md
│       │   │   ├── postgres_mcp.md
│       │   │   ├── spanner_mcp.md
│       │   │   └── sqlite_mcp.md
│       │   ├── deploy_docker.md
│       │   ├── deploy_gke.md
│       │   ├── deploy_toolbox.md
│       │   ├── export_telemetry.md
│       │   └── toolbox-ui
│       │       ├── edit-headers.gif
│       │       ├── edit-headers.png
│       │       ├── index.md
│       │       ├── optional-param-checked.png
│       │       ├── optional-param-unchecked.png
│       │       ├── run-tool.gif
│       │       ├── tools.png
│       │       └── toolsets.png
│       ├── reference
│       │   ├── _index.md
│       │   ├── cli.md
│       │   └── prebuilt-tools.md
│       ├── resources
│       │   ├── _index.md
│       │   ├── authServices
│       │   │   ├── _index.md
│       │   │   └── google.md
│       │   ├── sources
│       │   │   ├── _index.md
│       │   │   ├── alloydb-admin.md
│       │   │   ├── alloydb-pg.md
│       │   │   ├── bigquery.md
│       │   │   ├── bigtable.md
│       │   │   ├── cassandra.md
│       │   │   ├── clickhouse.md
│       │   │   ├── cloud-monitoring.md
│       │   │   ├── cloud-sql-admin.md
│       │   │   ├── cloud-sql-mssql.md
│       │   │   ├── cloud-sql-mysql.md
│       │   │   ├── cloud-sql-pg.md
│       │   │   ├── couchbase.md
│       │   │   ├── dataplex.md
│       │   │   ├── dgraph.md
│       │   │   ├── elasticsearch.md
│       │   │   ├── firebird.md
│       │   │   ├── firestore.md
│       │   │   ├── http.md
│       │   │   ├── looker.md
│       │   │   ├── mindsdb.md
│       │   │   ├── mongodb.md
│       │   │   ├── mssql.md
│       │   │   ├── mysql.md
│       │   │   ├── neo4j.md
│       │   │   ├── oceanbase.md
│       │   │   ├── oracle.md
│       │   │   ├── postgres.md
│       │   │   ├── redis.md
│       │   │   ├── serverless-spark.md
│       │   │   ├── spanner.md
│       │   │   ├── sqlite.md
│       │   │   ├── tidb.md
│       │   │   ├── trino.md
│       │   │   ├── valkey.md
│       │   │   └── yugabytedb.md
│       │   └── tools
│       │       ├── _index.md
│       │       ├── alloydb
│       │       │   ├── _index.md
│       │       │   ├── alloydb-create-cluster.md
│       │       │   ├── alloydb-create-instance.md
│       │       │   ├── alloydb-create-user.md
│       │       │   ├── alloydb-get-cluster.md
│       │       │   ├── alloydb-get-instance.md
│       │       │   ├── alloydb-get-user.md
│       │       │   ├── alloydb-list-clusters.md
│       │       │   ├── alloydb-list-instances.md
│       │       │   ├── alloydb-list-users.md
│       │       │   └── alloydb-wait-for-operation.md
│       │       ├── alloydbainl
│       │       │   ├── _index.md
│       │       │   └── alloydb-ai-nl.md
│       │       ├── bigquery
│       │       │   ├── _index.md
│       │       │   ├── bigquery-analyze-contribution.md
│       │       │   ├── bigquery-conversational-analytics.md
│       │       │   ├── bigquery-execute-sql.md
│       │       │   ├── bigquery-forecast.md
│       │       │   ├── bigquery-get-dataset-info.md
│       │       │   ├── bigquery-get-table-info.md
│       │       │   ├── bigquery-list-dataset-ids.md
│       │       │   ├── bigquery-list-table-ids.md
│       │       │   ├── bigquery-search-catalog.md
│       │       │   └── bigquery-sql.md
│       │       ├── bigtable
│       │       │   ├── _index.md
│       │       │   └── bigtable-sql.md
│       │       ├── cassandra
│       │       │   ├── _index.md
│       │       │   └── cassandra-cql.md
│       │       ├── clickhouse
│       │       │   ├── _index.md
│       │       │   ├── clickhouse-execute-sql.md
│       │       │   ├── clickhouse-list-databases.md
│       │       │   ├── clickhouse-list-tables.md
│       │       │   └── clickhouse-sql.md
│       │       ├── cloudmonitoring
│       │       │   ├── _index.md
│       │       │   └── cloud-monitoring-query-prometheus.md
│       │       ├── cloudsql
│       │       │   ├── _index.md
│       │       │   ├── cloudsqlcreatedatabase.md
│       │       │   ├── cloudsqlcreateusers.md
│       │       │   ├── cloudsqlgetinstances.md
│       │       │   ├── cloudsqllistdatabases.md
│       │       │   ├── cloudsqllistinstances.md
│       │       │   ├── cloudsqlmssqlcreateinstance.md
│       │       │   ├── cloudsqlmysqlcreateinstance.md
│       │       │   ├── cloudsqlpgcreateinstances.md
│       │       │   └── cloudsqlwaitforoperation.md
│       │       ├── couchbase
│       │       │   ├── _index.md
│       │       │   └── couchbase-sql.md
│       │       ├── dataform
│       │       │   ├── _index.md
│       │       │   └── dataform-compile-local.md
│       │       ├── dataplex
│       │       │   ├── _index.md
│       │       │   ├── dataplex-lookup-entry.md
│       │       │   ├── dataplex-search-aspect-types.md
│       │       │   └── dataplex-search-entries.md
│       │       ├── dgraph
│       │       │   ├── _index.md
│       │       │   └── dgraph-dql.md
│       │       ├── elasticsearch
│       │       │   ├── _index.md
│       │       │   └── elasticsearch-esql.md
│       │       ├── firebird
│       │       │   ├── _index.md
│       │       │   ├── firebird-execute-sql.md
│       │       │   └── firebird-sql.md
│       │       ├── firestore
│       │       │   ├── _index.md
│       │       │   ├── firestore-add-documents.md
│       │       │   ├── firestore-delete-documents.md
│       │       │   ├── firestore-get-documents.md
│       │       │   ├── firestore-get-rules.md
│       │       │   ├── firestore-list-collections.md
│       │       │   ├── firestore-query-collection.md
│       │       │   ├── firestore-query.md
│       │       │   ├── firestore-update-document.md
│       │       │   └── firestore-validate-rules.md
│       │       ├── http
│       │       │   ├── _index.md
│       │       │   └── http.md
│       │       ├── looker
│       │       │   ├── _index.md
│       │       │   ├── looker-add-dashboard-element.md
│       │       │   ├── looker-conversational-analytics.md
│       │       │   ├── looker-create-project-file.md
│       │       │   ├── looker-delete-project-file.md
│       │       │   ├── looker-dev-mode.md
│       │       │   ├── looker-get-connection-databases.md
│       │       │   ├── looker-get-connection-schemas.md
│       │       │   ├── looker-get-connection-table-columns.md
│       │       │   ├── looker-get-connection-tables.md
│       │       │   ├── looker-get-connections.md
│       │       │   ├── looker-get-dashboards.md
│       │       │   ├── looker-get-dimensions.md
│       │       │   ├── looker-get-explores.md
│       │       │   ├── looker-get-filters.md
│       │       │   ├── looker-get-looks.md
│       │       │   ├── looker-get-measures.md
│       │       │   ├── looker-get-models.md
│       │       │   ├── looker-get-parameters.md
│       │       │   ├── looker-get-project-file.md
│       │       │   ├── looker-get-project-files.md
│       │       │   ├── looker-get-projects.md
│       │       │   ├── looker-health-analyze.md
│       │       │   ├── looker-health-pulse.md
│       │       │   ├── looker-health-vacuum.md
│       │       │   ├── looker-make-dashboard.md
│       │       │   ├── looker-make-look.md
│       │       │   ├── looker-query-sql.md
│       │       │   ├── looker-query-url.md
│       │       │   ├── looker-query.md
│       │       │   ├── looker-run-look.md
│       │       │   └── looker-update-project-file.md
│       │       ├── mindsdb
│       │       │   ├── _index.md
│       │       │   ├── mindsdb-execute-sql.md
│       │       │   └── mindsdb-sql.md
│       │       ├── mongodb
│       │       │   ├── _index.md
│       │       │   ├── mongodb-aggregate.md
│       │       │   ├── mongodb-delete-many.md
│       │       │   ├── mongodb-delete-one.md
│       │       │   ├── mongodb-find-one.md
│       │       │   ├── mongodb-find.md
│       │       │   ├── mongodb-insert-many.md
│       │       │   ├── mongodb-insert-one.md
│       │       │   ├── mongodb-update-many.md
│       │       │   └── mongodb-update-one.md
│       │       ├── mssql
│       │       │   ├── _index.md
│       │       │   ├── mssql-execute-sql.md
│       │       │   ├── mssql-list-tables.md
│       │       │   └── mssql-sql.md
│       │       ├── mysql
│       │       │   ├── _index.md
│       │       │   ├── mysql-execute-sql.md
│       │       │   ├── mysql-list-active-queries.md
│       │       │   ├── mysql-list-table-fragmentation.md
│       │       │   ├── mysql-list-tables-missing-unique-indexes.md
│       │       │   ├── mysql-list-tables.md
│       │       │   └── mysql-sql.md
│       │       ├── neo4j
│       │       │   ├── _index.md
│       │       │   ├── neo4j-cypher.md
│       │       │   ├── neo4j-execute-cypher.md
│       │       │   └── neo4j-schema.md
│       │       ├── oceanbase
│       │       │   ├── _index.md
│       │       │   ├── oceanbase-execute-sql.md
│       │       │   └── oceanbase-sql.md
│       │       ├── oracle
│       │       │   ├── _index.md
│       │       │   ├── oracle-execute-sql.md
│       │       │   └── oracle-sql.md
│       │       ├── postgres
│       │       │   ├── _index.md
│       │       │   ├── postgres-execute-sql.md
│       │       │   ├── postgres-list-active-queries.md
│       │       │   ├── postgres-list-available-extensions.md
│       │       │   ├── postgres-list-installed-extensions.md
│       │       │   ├── postgres-list-schemas.md
│       │       │   ├── postgres-list-tables.md
│       │       │   ├── postgres-list-views.md
│       │       │   └── postgres-sql.md
│       │       ├── redis
│       │       │   ├── _index.md
│       │       │   └── redis.md
│       │       ├── serverless-spark
│       │       │   ├── _index.md
│       │       │   ├── serverless-spark-cancel-batch.md
│       │       │   ├── serverless-spark-get-batch.md
│       │       │   └── serverless-spark-list-batches.md
│       │       ├── spanner
│       │       │   ├── _index.md
│       │       │   ├── spanner-execute-sql.md
│       │       │   ├── spanner-list-tables.md
│       │       │   └── spanner-sql.md
│       │       ├── sqlite
│       │       │   ├── _index.md
│       │       │   ├── sqlite-execute-sql.md
│       │       │   └── sqlite-sql.md
│       │       ├── tidb
│       │       │   ├── _index.md
│       │       │   ├── tidb-execute-sql.md
│       │       │   └── tidb-sql.md
│       │       ├── trino
│       │       │   ├── _index.md
│       │       │   ├── trino-execute-sql.md
│       │       │   └── trino-sql.md
│       │       ├── utility
│       │       │   ├── _index.md
│       │       │   └── wait.md
│       │       ├── valkey
│       │       │   ├── _index.md
│       │       │   └── valkey.md
│       │       └── yuagbytedb
│       │           ├── _index.md
│       │           └── yugabytedb-sql.md
│       ├── samples
│       │   ├── _index.md
│       │   ├── alloydb
│       │   │   ├── _index.md
│       │   │   ├── ai-nl
│       │   │   │   ├── alloydb_ai_nl.ipynb
│       │   │   │   └── index.md
│       │   │   └── mcp_quickstart.md
│       │   ├── bigquery
│       │   │   ├── _index.md
│       │   │   ├── colab_quickstart_bigquery.ipynb
│       │   │   ├── local_quickstart.md
│       │   │   └── mcp_quickstart
│       │   │       ├── _index.md
│       │   │       ├── inspector_tools.png
│       │   │       └── inspector.png
│       │   └── looker
│       │       ├── _index.md
│       │       ├── looker_gemini_oauth
│       │       │   ├── _index.md
│       │       │   ├── authenticated.png
│       │       │   ├── authorize.png
│       │       │   └── registration.png
│       │       ├── looker_gemini.md
│       │       └── looker_mcp_inspector
│       │           ├── _index.md
│       │           ├── inspector_tools.png
│       │           └── inspector.png
│       └── sdks
│           ├── _index.md
│           ├── go-sdk.md
│           ├── js-sdk.md
│           └── python-sdk.md
├── gemini-extension.json
├── go.mod
├── go.sum
├── internal
│   ├── auth
│   │   ├── auth.go
│   │   └── google
│   │       └── google.go
│   ├── log
│   │   ├── handler.go
│   │   ├── log_test.go
│   │   ├── log.go
│   │   └── logger.go
│   ├── prebuiltconfigs
│   │   ├── prebuiltconfigs_test.go
│   │   ├── prebuiltconfigs.go
│   │   └── tools
│   │       ├── alloydb-postgres-admin.yaml
│   │       ├── alloydb-postgres-observability.yaml
│   │       ├── alloydb-postgres.yaml
│   │       ├── bigquery.yaml
│   │       ├── clickhouse.yaml
│   │       ├── cloud-sql-mssql-admin.yaml
│   │       ├── cloud-sql-mssql-observability.yaml
│   │       ├── cloud-sql-mssql.yaml
│   │       ├── cloud-sql-mysql-admin.yaml
│   │       ├── cloud-sql-mysql-observability.yaml
│   │       ├── cloud-sql-mysql.yaml
│   │       ├── cloud-sql-postgres-admin.yaml
│   │       ├── cloud-sql-postgres-observability.yaml
│   │       ├── cloud-sql-postgres.yaml
│   │       ├── dataplex.yaml
│   │       ├── elasticsearch.yaml
│   │       ├── firestore.yaml
│   │       ├── looker-conversational-analytics.yaml
│   │       ├── looker.yaml
│   │       ├── mindsdb.yaml
│   │       ├── mssql.yaml
│   │       ├── mysql.yaml
│   │       ├── neo4j.yaml
│   │       ├── oceanbase.yaml
│   │       ├── postgres.yaml
│   │       ├── serverless-spark.yaml
│   │       ├── spanner-postgres.yaml
│   │       ├── spanner.yaml
│   │       └── sqlite.yaml
│   ├── server
│   │   ├── api_test.go
│   │   ├── api.go
│   │   ├── common_test.go
│   │   ├── config.go
│   │   ├── mcp
│   │   │   ├── jsonrpc
│   │   │   │   ├── jsonrpc_test.go
│   │   │   │   └── jsonrpc.go
│   │   │   ├── mcp.go
│   │   │   ├── util
│   │   │   │   └── lifecycle.go
│   │   │   ├── v20241105
│   │   │   │   ├── method.go
│   │   │   │   └── types.go
│   │   │   ├── v20250326
│   │   │   │   ├── method.go
│   │   │   │   └── types.go
│   │   │   └── v20250618
│   │   │       ├── method.go
│   │   │       └── types.go
│   │   ├── mcp_test.go
│   │   ├── mcp.go
│   │   ├── server_test.go
│   │   ├── server.go
│   │   ├── static
│   │   │   ├── assets
│   │   │   │   └── mcptoolboxlogo.png
│   │   │   ├── css
│   │   │   │   └── style.css
│   │   │   ├── index.html
│   │   │   ├── js
│   │   │   │   ├── auth.js
│   │   │   │   ├── loadTools.js
│   │   │   │   ├── mainContent.js
│   │   │   │   ├── navbar.js
│   │   │   │   ├── runTool.js
│   │   │   │   ├── toolDisplay.js
│   │   │   │   ├── tools.js
│   │   │   │   └── toolsets.js
│   │   │   ├── tools.html
│   │   │   └── toolsets.html
│   │   ├── web_test.go
│   │   └── web.go
│   ├── sources
│   │   ├── alloydbadmin
│   │   │   ├── alloydbadmin_test.go
│   │   │   └── alloydbadmin.go
│   │   ├── alloydbpg
│   │   │   ├── alloydb_pg_test.go
│   │   │   └── alloydb_pg.go
│   │   ├── bigquery
│   │   │   ├── bigquery_test.go
│   │   │   ├── bigquery.go
│   │   │   └── cache.go
│   │   ├── bigtable
│   │   │   ├── bigtable_test.go
│   │   │   └── bigtable.go
│   │   ├── cassandra
│   │   │   ├── cassandra_test.go
│   │   │   └── cassandra.go
│   │   ├── clickhouse
│   │   │   ├── clickhouse_test.go
│   │   │   └── clickhouse.go
│   │   ├── cloudmonitoring
│   │   │   ├── cloud_monitoring_test.go
│   │   │   └── cloud_monitoring.go
│   │   ├── cloudsqladmin
│   │   │   ├── cloud_sql_admin_test.go
│   │   │   └── cloud_sql_admin.go
│   │   ├── cloudsqlmssql
│   │   │   ├── cloud_sql_mssql_test.go
│   │   │   └── cloud_sql_mssql.go
│   │   ├── cloudsqlmysql
│   │   │   ├── cloud_sql_mysql_test.go
│   │   │   └── cloud_sql_mysql.go
│   │   ├── cloudsqlpg
│   │   │   ├── cloud_sql_pg_test.go
│   │   │   └── cloud_sql_pg.go
│   │   ├── couchbase
│   │   │   ├── couchbase_test.go
│   │   │   └── couchbase.go
│   │   ├── dataplex
│   │   │   ├── dataplex_test.go
│   │   │   └── dataplex.go
│   │   ├── dgraph
│   │   │   ├── dgraph_test.go
│   │   │   └── dgraph.go
│   │   ├── dialect.go
│   │   ├── elasticsearch
│   │   │   ├── elasticsearch_test.go
│   │   │   └── elasticsearch.go
│   │   ├── firebird
│   │   │   ├── firebird_test.go
│   │   │   └── firebird.go
│   │   ├── firestore
│   │   │   ├── firestore_test.go
│   │   │   └── firestore.go
│   │   ├── http
│   │   │   ├── http_test.go
│   │   │   └── http.go
│   │   ├── ip_type.go
│   │   ├── looker
│   │   │   ├── looker_test.go
│   │   │   └── looker.go
│   │   ├── mindsdb
│   │   │   ├── mindsdb_test.go
│   │   │   └── mindsdb.go
│   │   ├── mongodb
│   │   │   ├── mongodb_test.go
│   │   │   └── mongodb.go
│   │   ├── mssql
│   │   │   ├── mssql_test.go
│   │   │   └── mssql.go
│   │   ├── mysql
│   │   │   ├── mysql_test.go
│   │   │   └── mysql.go
│   │   ├── neo4j
│   │   │   ├── neo4j_test.go
│   │   │   └── neo4j.go
│   │   ├── oceanbase
│   │   │   ├── oceanbase_test.go
│   │   │   └── oceanbase.go
│   │   ├── oracle
│   │   │   └── oracle.go
│   │   ├── postgres
│   │   │   ├── postgres_test.go
│   │   │   └── postgres.go
│   │   ├── redis
│   │   │   ├── redis_test.go
│   │   │   └── redis.go
│   │   ├── serverlessspark
│   │   │   ├── serverlessspark_test.go
│   │   │   └── serverlessspark.go
│   │   ├── sources.go
│   │   ├── spanner
│   │   │   ├── spanner_test.go
│   │   │   └── spanner.go
│   │   ├── sqlite
│   │   │   ├── sqlite_test.go
│   │   │   └── sqlite.go
│   │   ├── tidb
│   │   │   ├── tidb_test.go
│   │   │   └── tidb.go
│   │   ├── trino
│   │   │   ├── trino_test.go
│   │   │   └── trino.go
│   │   ├── util.go
│   │   ├── valkey
│   │   │   ├── valkey_test.go
│   │   │   └── valkey.go
│   │   └── yugabytedb
│   │       ├── yugabytedb_test.go
│   │       └── yugabytedb.go
│   ├── telemetry
│   │   ├── instrumentation.go
│   │   └── telemetry.go
│   ├── testutils
│   │   └── testutils.go
│   ├── tools
│   │   ├── alloydb
│   │   │   ├── alloydbcreatecluster
│   │   │   │   ├── alloydbcreatecluster_test.go
│   │   │   │   └── alloydbcreatecluster.go
│   │   │   ├── alloydbcreateinstance
│   │   │   │   ├── alloydbcreateinstance_test.go
│   │   │   │   └── alloydbcreateinstance.go
│   │   │   ├── alloydbcreateuser
│   │   │   │   ├── alloydbcreateuser_test.go
│   │   │   │   └── alloydbcreateuser.go
│   │   │   ├── alloydbgetcluster
│   │   │   │   ├── alloydbgetcluster_test.go
│   │   │   │   └── alloydbgetcluster.go
│   │   │   ├── alloydbgetinstance
│   │   │   │   ├── alloydbgetinstance_test.go
│   │   │   │   └── alloydbgetinstance.go
│   │   │   ├── alloydbgetuser
│   │   │   │   ├── alloydbgetuser_test.go
│   │   │   │   └── alloydbgetuser.go
│   │   │   ├── alloydblistclusters
│   │   │   │   ├── alloydblistclusters_test.go
│   │   │   │   └── alloydblistclusters.go
│   │   │   ├── alloydblistinstances
│   │   │   │   ├── alloydblistinstances_test.go
│   │   │   │   └── alloydblistinstances.go
│   │   │   ├── alloydblistusers
│   │   │   │   ├── alloydblistusers_test.go
│   │   │   │   └── alloydblistusers.go
│   │   │   └── alloydbwaitforoperation
│   │   │       ├── alloydbwaitforoperation_test.go
│   │   │       └── alloydbwaitforoperation.go
│   │   ├── alloydbainl
│   │   │   ├── alloydbainl_test.go
│   │   │   └── alloydbainl.go
│   │   ├── bigquery
│   │   │   ├── bigqueryanalyzecontribution
│   │   │   │   ├── bigqueryanalyzecontribution_test.go
│   │   │   │   └── bigqueryanalyzecontribution.go
│   │   │   ├── bigquerycommon
│   │   │   │   ├── table_name_parser_test.go
│   │   │   │   ├── table_name_parser.go
│   │   │   │   └── util.go
│   │   │   ├── bigqueryconversationalanalytics
│   │   │   │   ├── bigqueryconversationalanalytics_test.go
│   │   │   │   └── bigqueryconversationalanalytics.go
│   │   │   ├── bigqueryexecutesql
│   │   │   │   ├── bigqueryexecutesql_test.go
│   │   │   │   └── bigqueryexecutesql.go
│   │   │   ├── bigqueryforecast
│   │   │   │   ├── bigqueryforecast_test.go
│   │   │   │   └── bigqueryforecast.go
│   │   │   ├── bigquerygetdatasetinfo
│   │   │   │   ├── bigquerygetdatasetinfo_test.go
│   │   │   │   └── bigquerygetdatasetinfo.go
│   │   │   ├── bigquerygettableinfo
│   │   │   │   ├── bigquerygettableinfo_test.go
│   │   │   │   └── bigquerygettableinfo.go
│   │   │   ├── bigquerylistdatasetids
│   │   │   │   ├── bigquerylistdatasetids_test.go
│   │   │   │   └── bigquerylistdatasetids.go
│   │   │   ├── bigquerylisttableids
│   │   │   │   ├── bigquerylisttableids_test.go
│   │   │   │   └── bigquerylisttableids.go
│   │   │   ├── bigquerysearchcatalog
│   │   │   │   ├── bigquerysearchcatalog_test.go
│   │   │   │   └── bigquerysearchcatalog.go
│   │   │   └── bigquerysql
│   │   │       ├── bigquerysql_test.go
│   │   │       └── bigquerysql.go
│   │   ├── bigtable
│   │   │   ├── bigtable_test.go
│   │   │   └── bigtable.go
│   │   ├── cassandra
│   │   │   └── cassandracql
│   │   │       ├── cassandracql_test.go
│   │   │       └── cassandracql.go
│   │   ├── clickhouse
│   │   │   ├── clickhouseexecutesql
│   │   │   │   ├── clickhouseexecutesql_test.go
│   │   │   │   └── clickhouseexecutesql.go
│   │   │   ├── clickhouselistdatabases
│   │   │   │   ├── clickhouselistdatabases_test.go
│   │   │   │   └── clickhouselistdatabases.go
│   │   │   ├── clickhouselisttables
│   │   │   │   ├── clickhouselisttables_test.go
│   │   │   │   └── clickhouselisttables.go
│   │   │   └── clickhousesql
│   │   │       ├── clickhousesql_test.go
│   │   │       └── clickhousesql.go
│   │   ├── cloudmonitoring
│   │   │   ├── cloudmonitoring_test.go
│   │   │   └── cloudmonitoring.go
│   │   ├── cloudsql
│   │   │   ├── cloudsqlcreatedatabase
│   │   │   │   ├── cloudsqlcreatedatabase_test.go
│   │   │   │   └── cloudsqlcreatedatabase.go
│   │   │   ├── cloudsqlcreateusers
│   │   │   │   ├── cloudsqlcreateusers_test.go
│   │   │   │   └── cloudsqlcreateusers.go
│   │   │   ├── cloudsqlgetinstances
│   │   │   │   ├── cloudsqlgetinstances_test.go
│   │   │   │   └── cloudsqlgetinstances.go
│   │   │   ├── cloudsqllistdatabases
│   │   │   │   ├── cloudsqllistdatabases_test.go
│   │   │   │   └── cloudsqllistdatabases.go
│   │   │   ├── cloudsqllistinstances
│   │   │   │   ├── cloudsqllistinstances_test.go
│   │   │   │   └── cloudsqllistinstances.go
│   │   │   └── cloudsqlwaitforoperation
│   │   │       ├── cloudsqlwaitforoperation_test.go
│   │   │       └── cloudsqlwaitforoperation.go
│   │   ├── cloudsqlmssql
│   │   │   └── cloudsqlmssqlcreateinstance
│   │   │       ├── cloudsqlmssqlcreateinstance_test.go
│   │   │       └── cloudsqlmssqlcreateinstance.go
│   │   ├── cloudsqlmysql
│   │   │   └── cloudsqlmysqlcreateinstance
│   │   │       ├── cloudsqlmysqlcreateinstance_test.go
│   │   │       └── cloudsqlmysqlcreateinstance.go
│   │   ├── cloudsqlpg
│   │   │   └── cloudsqlpgcreateinstances
│   │   │       ├── cloudsqlpgcreateinstances_test.go
│   │   │       └── cloudsqlpgcreateinstances.go
│   │   ├── common_test.go
│   │   ├── common.go
│   │   ├── couchbase
│   │   │   ├── couchbase_test.go
│   │   │   └── couchbase.go
│   │   ├── dataform
│   │   │   └── dataformcompilelocal
│   │   │       ├── dataformcompilelocal_test.go
│   │   │       └── dataformcompilelocal.go
│   │   ├── dataplex
│   │   │   ├── dataplexlookupentry
│   │   │   │   ├── dataplexlookupentry_test.go
│   │   │   │   └── dataplexlookupentry.go
│   │   │   ├── dataplexsearchaspecttypes
│   │   │   │   ├── dataplexsearchaspecttypes_test.go
│   │   │   │   └── dataplexsearchaspecttypes.go
│   │   │   └── dataplexsearchentries
│   │   │       ├── dataplexsearchentries_test.go
│   │   │       └── dataplexsearchentries.go
│   │   ├── dgraph
│   │   │   ├── dgraph_test.go
│   │   │   └── dgraph.go
│   │   ├── elasticsearch
│   │   │   └── elasticsearchesql
│   │   │       ├── elasticsearchesql_test.go
│   │   │       └── elasticsearchesql.go
│   │   ├── firebird
│   │   │   ├── firebirdexecutesql
│   │   │   │   ├── firebirdexecutesql_test.go
│   │   │   │   └── firebirdexecutesql.go
│   │   │   └── firebirdsql
│   │   │       ├── firebirdsql_test.go
│   │   │       └── firebirdsql.go
│   │   ├── firestore
│   │   │   ├── firestoreadddocuments
│   │   │   │   ├── firestoreadddocuments_test.go
│   │   │   │   └── firestoreadddocuments.go
│   │   │   ├── firestoredeletedocuments
│   │   │   │   ├── firestoredeletedocuments_test.go
│   │   │   │   └── firestoredeletedocuments.go
│   │   │   ├── firestoregetdocuments
│   │   │   │   ├── firestoregetdocuments_test.go
│   │   │   │   └── firestoregetdocuments.go
│   │   │   ├── firestoregetrules
│   │   │   │   ├── firestoregetrules_test.go
│   │   │   │   └── firestoregetrules.go
│   │   │   ├── firestorelistcollections
│   │   │   │   ├── firestorelistcollections_test.go
│   │   │   │   └── firestorelistcollections.go
│   │   │   ├── firestorequery
│   │   │   │   ├── firestorequery_test.go
│   │   │   │   └── firestorequery.go
│   │   │   ├── firestorequerycollection
│   │   │   │   ├── firestorequerycollection_test.go
│   │   │   │   └── firestorequerycollection.go
│   │   │   ├── firestoreupdatedocument
│   │   │   │   ├── firestoreupdatedocument_test.go
│   │   │   │   └── firestoreupdatedocument.go
│   │   │   ├── firestorevalidaterules
│   │   │   │   ├── firestorevalidaterules_test.go
│   │   │   │   └── firestorevalidaterules.go
│   │   │   └── util
│   │   │       ├── converter_test.go
│   │   │       ├── converter.go
│   │   │       ├── validator_test.go
│   │   │       └── validator.go
│   │   ├── http
│   │   │   ├── http_test.go
│   │   │   └── http.go
│   │   ├── http_method.go
│   │   ├── looker
│   │   │   ├── lookeradddashboardelement
│   │   │   │   ├── lookeradddashboardelement_test.go
│   │   │   │   └── lookeradddashboardelement.go
│   │   │   ├── lookercommon
│   │   │   │   ├── lookercommon_test.go
│   │   │   │   └── lookercommon.go
│   │   │   ├── lookerconversationalanalytics
│   │   │   │   ├── lookerconversationalanalytics_test.go
│   │   │   │   └── lookerconversationalanalytics.go
│   │   │   ├── lookercreateprojectfile
│   │   │   │   ├── lookercreateprojectfile_test.go
│   │   │   │   └── lookercreateprojectfile.go
│   │   │   ├── lookerdeleteprojectfile
│   │   │   │   ├── lookerdeleteprojectfile_test.go
│   │   │   │   └── lookerdeleteprojectfile.go
│   │   │   ├── lookerdevmode
│   │   │   │   ├── lookerdevmode_test.go
│   │   │   │   └── lookerdevmode.go
│   │   │   ├── lookergetconnectiondatabases
│   │   │   │   ├── lookergetconnectiondatabases_test.go
│   │   │   │   └── lookergetconnectiondatabases.go
│   │   │   ├── lookergetconnections
│   │   │   │   ├── lookergetconnections_test.go
│   │   │   │   └── lookergetconnections.go
│   │   │   ├── lookergetconnectionschemas
│   │   │   │   ├── lookergetconnectionschemas_test.go
│   │   │   │   └── lookergetconnectionschemas.go
│   │   │   ├── lookergetconnectiontablecolumns
│   │   │   │   ├── lookergetconnectiontablecolumns_test.go
│   │   │   │   └── lookergetconnectiontablecolumns.go
│   │   │   ├── lookergetconnectiontables
│   │   │   │   ├── lookergetconnectiontables_test.go
│   │   │   │   └── lookergetconnectiontables.go
│   │   │   ├── lookergetdashboards
│   │   │   │   ├── lookergetdashboards_test.go
│   │   │   │   └── lookergetdashboards.go
│   │   │   ├── lookergetdimensions
│   │   │   │   ├── lookergetdimensions_test.go
│   │   │   │   └── lookergetdimensions.go
│   │   │   ├── lookergetexplores
│   │   │   │   ├── lookergetexplores_test.go
│   │   │   │   └── lookergetexplores.go
│   │   │   ├── lookergetfilters
│   │   │   │   ├── lookergetfilters_test.go
│   │   │   │   └── lookergetfilters.go
│   │   │   ├── lookergetlooks
│   │   │   │   ├── lookergetlooks_test.go
│   │   │   │   └── lookergetlooks.go
│   │   │   ├── lookergetmeasures
│   │   │   │   ├── lookergetmeasures_test.go
│   │   │   │   └── lookergetmeasures.go
│   │   │   ├── lookergetmodels
│   │   │   │   ├── lookergetmodels_test.go
│   │   │   │   └── lookergetmodels.go
│   │   │   ├── lookergetparameters
│   │   │   │   ├── lookergetparameters_test.go
│   │   │   │   └── lookergetparameters.go
│   │   │   ├── lookergetprojectfile
│   │   │   │   ├── lookergetprojectfile_test.go
│   │   │   │   └── lookergetprojectfile.go
│   │   │   ├── lookergetprojectfiles
│   │   │   │   ├── lookergetprojectfiles_test.go
│   │   │   │   └── lookergetprojectfiles.go
│   │   │   ├── lookergetprojects
│   │   │   │   ├── lookergetprojects_test.go
│   │   │   │   └── lookergetprojects.go
│   │   │   ├── lookerhealthanalyze
│   │   │   │   ├── lookerhealthanalyze_test.go
│   │   │   │   └── lookerhealthanalyze.go
│   │   │   ├── lookerhealthpulse
│   │   │   │   ├── lookerhealthpulse_test.go
│   │   │   │   └── lookerhealthpulse.go
│   │   │   ├── lookerhealthvacuum
│   │   │   │   ├── lookerhealthvacuum_test.go
│   │   │   │   └── lookerhealthvacuum.go
│   │   │   ├── lookermakedashboard
│   │   │   │   ├── lookermakedashboard_test.go
│   │   │   │   └── lookermakedashboard.go
│   │   │   ├── lookermakelook
│   │   │   │   ├── lookermakelook_test.go
│   │   │   │   └── lookermakelook.go
│   │   │   ├── lookerquery
│   │   │   │   ├── lookerquery_test.go
│   │   │   │   └── lookerquery.go
│   │   │   ├── lookerquerysql
│   │   │   │   ├── lookerquerysql_test.go
│   │   │   │   └── lookerquerysql.go
│   │   │   ├── lookerqueryurl
│   │   │   │   ├── lookerqueryurl_test.go
│   │   │   │   └── lookerqueryurl.go
│   │   │   ├── lookerrunlook
│   │   │   │   ├── lookerrunlook_test.go
│   │   │   │   └── lookerrunlook.go
│   │   │   └── lookerupdateprojectfile
│   │   │       ├── lookerupdateprojectfile_test.go
│   │   │       └── lookerupdateprojectfile.go
│   │   ├── mindsdb
│   │   │   ├── mindsdbexecutesql
│   │   │   │   ├── mindsdbexecutesql_test.go
│   │   │   │   └── mindsdbexecutesql.go
│   │   │   └── mindsdbsql
│   │   │       ├── mindsdbsql_test.go
│   │   │       └── mindsdbsql.go
│   │   ├── mongodb
│   │   │   ├── mongodbaggregate
│   │   │   │   ├── mongodbaggregate_test.go
│   │   │   │   └── mongodbaggregate.go
│   │   │   ├── mongodbdeletemany
│   │   │   │   ├── mongodbdeletemany_test.go
│   │   │   │   └── mongodbdeletemany.go
│   │   │   ├── mongodbdeleteone
│   │   │   │   ├── mongodbdeleteone_test.go
│   │   │   │   └── mongodbdeleteone.go
│   │   │   ├── mongodbfind
│   │   │   │   ├── mongodbfind_test.go
│   │   │   │   └── mongodbfind.go
│   │   │   ├── mongodbfindone
│   │   │   │   ├── mongodbfindone_test.go
│   │   │   │   └── mongodbfindone.go
│   │   │   ├── mongodbinsertmany
│   │   │   │   ├── mongodbinsertmany_test.go
│   │   │   │   └── mongodbinsertmany.go
│   │   │   ├── mongodbinsertone
│   │   │   │   ├── mongodbinsertone_test.go
│   │   │   │   └── mongodbinsertone.go
│   │   │   ├── mongodbupdatemany
│   │   │   │   ├── mongodbupdatemany_test.go
│   │   │   │   └── mongodbupdatemany.go
│   │   │   └── mongodbupdateone
│   │   │       ├── mongodbupdateone_test.go
│   │   │       └── mongodbupdateone.go
│   │   ├── mssql
│   │   │   ├── mssqlexecutesql
│   │   │   │   ├── mssqlexecutesql_test.go
│   │   │   │   └── mssqlexecutesql.go
│   │   │   ├── mssqllisttables
│   │   │   │   ├── mssqllisttables_test.go
│   │   │   │   └── mssqllisttables.go
│   │   │   └── mssqlsql
│   │   │       ├── mssqlsql_test.go
│   │   │       └── mssqlsql.go
│   │   ├── mysql
│   │   │   ├── mysqlcommon
│   │   │   │   └── mysqlcommon.go
│   │   │   ├── mysqlexecutesql
│   │   │   │   ├── mysqlexecutesql_test.go
│   │   │   │   └── mysqlexecutesql.go
│   │   │   ├── mysqllistactivequeries
│   │   │   │   ├── mysqllistactivequeries_test.go
│   │   │   │   └── mysqllistactivequeries.go
│   │   │   ├── mysqllisttablefragmentation
│   │   │   │   ├── mysqllisttablefragmentation_test.go
│   │   │   │   └── mysqllisttablefragmentation.go
│   │   │   ├── mysqllisttables
│   │   │   │   ├── mysqllisttables_test.go
│   │   │   │   └── mysqllisttables.go
│   │   │   ├── mysqllisttablesmissinguniqueindexes
│   │   │   │   ├── mysqllisttablesmissinguniqueindexes_test.go
│   │   │   │   └── mysqllisttablesmissinguniqueindexes.go
│   │   │   └── mysqlsql
│   │   │       ├── mysqlsql_test.go
│   │   │       └── mysqlsql.go
│   │   ├── neo4j
│   │   │   ├── neo4jcypher
│   │   │   │   ├── neo4jcypher_test.go
│   │   │   │   └── neo4jcypher.go
│   │   │   ├── neo4jexecutecypher
│   │   │   │   ├── classifier
│   │   │   │   │   ├── classifier_test.go
│   │   │   │   │   └── classifier.go
│   │   │   │   ├── neo4jexecutecypher_test.go
│   │   │   │   └── neo4jexecutecypher.go
│   │   │   └── neo4jschema
│   │   │       ├── cache
│   │   │       │   ├── cache_test.go
│   │   │       │   └── cache.go
│   │   │       ├── helpers
│   │   │       │   ├── helpers_test.go
│   │   │       │   └── helpers.go
│   │   │       ├── neo4jschema_test.go
│   │   │       ├── neo4jschema.go
│   │   │       └── types
│   │   │           └── types.go
│   │   ├── oceanbase
│   │   │   ├── oceanbaseexecutesql
│   │   │   │   ├── oceanbaseexecutesql_test.go
│   │   │   │   └── oceanbaseexecutesql.go
│   │   │   └── oceanbasesql
│   │   │       ├── oceanbasesql_test.go
│   │   │       └── oceanbasesql.go
│   │   ├── oracle
│   │   │   ├── oracleexecutesql
│   │   │   │   └── oracleexecutesql.go
│   │   │   └── oraclesql
│   │   │       └── oraclesql.go
│   │   ├── parameters_test.go
│   │   ├── parameters.go
│   │   ├── postgres
│   │   │   ├── postgresexecutesql
│   │   │   │   ├── postgresexecutesql_test.go
│   │   │   │   └── postgresexecutesql.go
│   │   │   ├── postgreslistactivequeries
│   │   │   │   ├── postgreslistactivequeries_test.go
│   │   │   │   └── postgreslistactivequeries.go
│   │   │   ├── postgreslistavailableextensions
│   │   │   │   ├── postgreslistavailableextensions_test.go
│   │   │   │   └── postgreslistavailableextensions.go
│   │   │   ├── postgreslistinstalledextensions
│   │   │   │   ├── postgreslistinstalledextensions_test.go
│   │   │   │   └── postgreslistinstalledextensions.go
│   │   │   ├── postgreslistschemas
│   │   │   │   ├── postgreslistschemas_test.go
│   │   │   │   └── postgreslistschemas.go
│   │   │   ├── postgreslisttables
│   │   │   │   ├── postgreslisttables_test.go
│   │   │   │   └── postgreslisttables.go
│   │   │   ├── postgreslistviews
│   │   │   │   ├── postgreslistviews_test.go
│   │   │   │   └── postgreslistviews.go
│   │   │   └── postgressql
│   │   │       ├── postgressql_test.go
│   │   │       └── postgressql.go
│   │   ├── redis
│   │   │   ├── redis_test.go
│   │   │   └── redis.go
│   │   ├── serverlessspark
│   │   │   ├── serverlesssparkcancelbatch
│   │   │   │   ├── serverlesssparkcancelbatch_test.go
│   │   │   │   └── serverlesssparkcancelbatch.go
│   │   │   ├── serverlesssparkgetbatch
│   │   │   │   ├── serverlesssparkgetbatch_test.go
│   │   │   │   └── serverlesssparkgetbatch.go
│   │   │   └── serverlesssparklistbatches
│   │   │       ├── serverlesssparklistbatches_test.go
│   │   │       └── serverlesssparklistbatches.go
│   │   ├── spanner
│   │   │   ├── spannerexecutesql
│   │   │   │   ├── spannerexecutesql_test.go
│   │   │   │   └── spannerexecutesql.go
│   │   │   ├── spannerlisttables
│   │   │   │   ├── spannerlisttables_test.go
│   │   │   │   └── spannerlisttables.go
│   │   │   └── spannersql
│   │   │       ├── spanner_test.go
│   │   │       └── spannersql.go
│   │   ├── sqlite
│   │   │   ├── sqliteexecutesql
│   │   │   │   ├── sqliteexecutesql_test.go
│   │   │   │   └── sqliteexecutesql.go
│   │   │   └── sqlitesql
│   │   │       ├── sqlitesql_test.go
│   │   │       └── sqlitesql.go
│   │   ├── tidb
│   │   │   ├── tidbexecutesql
│   │   │   │   ├── tidbexecutesql_test.go
│   │   │   │   └── tidbexecutesql.go
│   │   │   └── tidbsql
│   │   │       ├── tidbsql_test.go
│   │   │       └── tidbsql.go
│   │   ├── tools_test.go
│   │   ├── tools.go
│   │   ├── toolsets.go
│   │   ├── trino
│   │   │   ├── trinoexecutesql
│   │   │   │   ├── trinoexecutesql_test.go
│   │   │   │   └── trinoexecutesql.go
│   │   │   └── trinosql
│   │   │       ├── trinosql_test.go
│   │   │       └── trinosql.go
│   │   ├── utility
│   │   │   └── wait
│   │   │       ├── wait_test.go
│   │   │       └── wait.go
│   │   ├── valkey
│   │   │   ├── valkey_test.go
│   │   │   └── valkey.go
│   │   └── yugabytedbsql
│   │       ├── yugabytedbsql_test.go
│   │       └── yugabytedbsql.go
│   └── util
│       ├── orderedmap
│       │   ├── orderedmap_test.go
│       │   └── orderedmap.go
│       └── util.go
├── LICENSE
├── logo.png
├── main.go
├── MCP-TOOLBOX-EXTENSION.md
├── README.md
└── tests
    ├── alloydb
    │   ├── alloydb_integration_test.go
    │   └── alloydb_wait_for_operation_test.go
    ├── alloydbainl
    │   └── alloydb_ai_nl_integration_test.go
    ├── alloydbpg
    │   └── alloydb_pg_integration_test.go
    ├── auth.go
    ├── bigquery
    │   └── bigquery_integration_test.go
    ├── bigtable
    │   └── bigtable_integration_test.go
    ├── cassandra
    │   └── cassandra_integration_test.go
    ├── clickhouse
    │   └── clickhouse_integration_test.go
    ├── cloudmonitoring
    │   └── cloud_monitoring_integration_test.go
    ├── cloudsql
    │   ├── cloud_sql_create_database_test.go
    │   ├── cloud_sql_create_users_test.go
    │   ├── cloud_sql_get_instances_test.go
    │   ├── cloud_sql_list_databases_test.go
    │   ├── cloudsql_list_instances_test.go
    │   └── cloudsql_wait_for_operation_test.go
    ├── cloudsqlmssql
    │   ├── cloud_sql_mssql_create_instance_integration_test.go
    │   └── cloud_sql_mssql_integration_test.go
    ├── cloudsqlmysql
    │   ├── cloud_sql_mysql_create_instance_integration_test.go
    │   └── cloud_sql_mysql_integration_test.go
    ├── cloudsqlpg
    │   ├── cloud_sql_pg_create_instances_test.go
    │   └── cloud_sql_pg_integration_test.go
    ├── common.go
    ├── couchbase
    │   └── couchbase_integration_test.go
    ├── dataform
    │   └── dataform_integration_test.go
    ├── dataplex
    │   └── dataplex_integration_test.go
    ├── dgraph
    │   └── dgraph_integration_test.go
    ├── elasticsearch
    │   └── elasticsearch_integration_test.go
    ├── firebird
    │   └── firebird_integration_test.go
    ├── firestore
    │   └── firestore_integration_test.go
    ├── http
    │   └── http_integration_test.go
    ├── looker
    │   └── looker_integration_test.go
    ├── mindsdb
    │   └── mindsdb_integration_test.go
    ├── mongodb
    │   └── mongodb_integration_test.go
    ├── mssql
    │   └── mssql_integration_test.go
    ├── mysql
    │   └── mysql_integration_test.go
    ├── neo4j
    │   └── neo4j_integration_test.go
    ├── oceanbase
    │   └── oceanbase_integration_test.go
    ├── option.go
    ├── oracle
    │   └── oracle_integration_test.go
    ├── postgres
    │   └── postgres_integration_test.go
    ├── redis
    │   └── redis_test.go
    ├── server.go
    ├── serverlessspark
    │   └── serverless_spark_integration_test.go
    ├── source.go
    ├── spanner
    │   └── spanner_integration_test.go
    ├── sqlite
    │   └── sqlite_integration_test.go
    ├── tidb
    │   └── tidb_integration_test.go
    ├── tool.go
    ├── trino
    │   └── trino_integration_test.go
    ├── utility
    │   └── wait_integration_test.go
    ├── valkey
    │   └── valkey_test.go
    └── yugabytedb
        └── yugabytedb_integration_test.go
```

# Files

--------------------------------------------------------------------------------
/internal/sources/bigquery/bigquery.go:
--------------------------------------------------------------------------------

```go
  1 | // Copyright 2025 Google LLC
  2 | //
  3 | // Licensed under the Apache License, Version 2.0 (the "License");
  4 | // you may not use this file except in compliance with the License.
  5 | // You may obtain a copy of the License at
  6 | //
  7 | //     http://www.apache.org/licenses/LICENSE-2.0
  8 | //
  9 | // Unless required by applicable law or agreed to in writing, software
 10 | // distributed under the License is distributed on an "AS IS" BASIS,
 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 12 | // See the License for the specific language governing permissions and
 13 | // limitations under the License.
 14 | 
 15 | package bigquery
 16 | 
 17 | import (
 18 | 	"context"
 19 | 	"fmt"
 20 | 	"net/http"
 21 | 	"strings"
 22 | 	"sync"
 23 | 	"time"
 24 | 
 25 | 	bigqueryapi "cloud.google.com/go/bigquery"
 26 | 	dataplexapi "cloud.google.com/go/dataplex/apiv1"
 27 | 	"github.com/goccy/go-yaml"
 28 | 	"github.com/googleapis/genai-toolbox/internal/sources"
 29 | 	"github.com/googleapis/genai-toolbox/internal/util"
 30 | 	"go.opentelemetry.io/otel/trace"
 31 | 	"golang.org/x/oauth2"
 32 | 	"golang.org/x/oauth2/google"
 33 | 	bigqueryrestapi "google.golang.org/api/bigquery/v2"
 34 | 	"google.golang.org/api/googleapi"
 35 | 	"google.golang.org/api/impersonate"
 36 | 	"google.golang.org/api/option"
 37 | )
 38 | 
// SourceKind is the registry identifier for BigQuery sources.
const SourceKind string = "bigquery"

const (
	// No write operations are allowed.
	WriteModeBlocked string = "blocked"
	// Only protected write operations are allowed in a BigQuery session.
	WriteModeProtected string = "protected"
	// All write operations are allowed.
	WriteModeAllowed string = "allowed"
)

// validate interface
var _ sources.SourceConfig = Config{}

// BigqueryClientCreator builds a BigQuery client — and, when wantRestService
// is true, the BigQuery REST service as well — from an end-user OAuth token.
type BigqueryClientCreator func(tokenString string, wantRestService bool) (*bigqueryapi.Client, *bigqueryrestapi.Service, error)

// BigQuerySessionProvider returns the current BigQuery session used by
// protected write mode, creating one if needed; in other write modes it
// returns (nil, nil).
type BigQuerySessionProvider func(ctx context.Context) (*Session, error)

// DataplexClientCreator builds a Dataplex catalog client from an end-user
// OAuth token.
type DataplexClientCreator func(tokenString string) (*dataplexapi.CatalogClient, error)
 58 | 
 59 | func init() {
 60 | 	if !sources.Register(SourceKind, newConfig) {
 61 | 		panic(fmt.Sprintf("source kind %q already registered", SourceKind))
 62 | 	}
 63 | }
 64 | 
 65 | func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources.SourceConfig, error) {
 66 | 	actual := Config{Name: name}
 67 | 	if err := decoder.DecodeContext(ctx, &actual); err != nil {
 68 | 		return nil, err
 69 | 	}
 70 | 	return actual, nil
 71 | }
 72 | 
// Config is the YAML-declared configuration for a BigQuery source.
type Config struct {
	// BigQuery configs
	Name                      string   `yaml:"name" validate:"required"` // source name referenced by tools
	Kind                      string   `yaml:"kind" validate:"required"` // must be "bigquery"
	Project                   string   `yaml:"project" validate:"required"` // GCP project ID; also the default project for bare allowedDatasets entries
	Location                  string   `yaml:"location"` // optional BigQuery location
	WriteMode                 string   `yaml:"writeMode"` // "allowed" (default), "blocked", or "protected"
	AllowedDatasets           []string `yaml:"allowedDatasets"` // optional allow-list; entries are "dataset" or "project.dataset"
	UseClientOAuth            bool     `yaml:"useClientOAuth"` // build clients per request from the caller's OAuth token
	ImpersonateServiceAccount string   `yaml:"impersonateServiceAccount"` // optional SA to impersonate; incompatible with useClientOAuth
}
 84 | 
 85 | func (r Config) SourceConfigKind() string {
 86 | 	// Returns BigQuery source kind
 87 | 	return SourceKind
 88 | }
 89 | func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
 90 | 	if r.WriteMode == "" {
 91 | 		r.WriteMode = WriteModeAllowed
 92 | 	}
 93 | 
 94 | 	if r.WriteMode == WriteModeProtected && r.UseClientOAuth {
 95 | 		// The protected mode only allows write operations to the session's temporary datasets.
 96 | 		// when using client OAuth, a new session is created every
 97 | 		// time a BigQuery tool is invoked. Therefore, no session data can
 98 | 		// be preserved as needed by the protected mode.
 99 | 		return nil, fmt.Errorf("writeMode 'protected' cannot be used with useClientOAuth 'true'")
100 | 	}
101 | 
102 | 	if r.UseClientOAuth && r.ImpersonateServiceAccount != "" {
103 | 		return nil, fmt.Errorf("useClientOAuth cannot be used with impersonateServiceAccount")
104 | 	}
105 | 
106 | 	var client *bigqueryapi.Client
107 | 	var restService *bigqueryrestapi.Service
108 | 	var tokenSource oauth2.TokenSource
109 | 	var clientCreator BigqueryClientCreator
110 | 	var err error
111 | 
112 | 	s := &Source{
113 | 		Name:                      r.Name,
114 | 		Kind:                      SourceKind,
115 | 		Project:                   r.Project,
116 | 		Location:                  r.Location,
117 | 		Client:                    client,
118 | 		RestService:               restService,
119 | 		TokenSource:               tokenSource,
120 | 		MaxQueryResultRows:        50,
121 | 		WriteMode:                 r.WriteMode,
122 | 		UseClientOAuth:            r.UseClientOAuth,
123 | 		ClientCreator:             clientCreator,
124 | 		ImpersonateServiceAccount: r.ImpersonateServiceAccount,
125 | 	}
126 | 
127 | 	if r.UseClientOAuth {
128 | 		// use client OAuth
129 | 		baseClientCreator, err := newBigQueryClientCreator(ctx, tracer, r.Project, r.Location, r.Name)
130 | 		if err != nil {
131 | 			return nil, fmt.Errorf("error constructing client creator: %w", err)
132 | 		}
133 | 		setupClientCaching(s, baseClientCreator)
134 | 
135 | 	} else {
136 | 		// Initializes a BigQuery Google SQL source
137 | 		client, restService, tokenSource, err = initBigQueryConnection(ctx, tracer, r.Name, r.Project, r.Location, r.ImpersonateServiceAccount)
138 | 		if err != nil {
139 | 			return nil, fmt.Errorf("error creating client from ADC: %w", err)
140 | 		}
141 | 		s.Client = client
142 | 		s.RestService = restService
143 | 		s.TokenSource = tokenSource
144 | 	}
145 | 
146 | 	allowedDatasets := make(map[string]struct{})
147 | 	// Get full id of allowed datasets and verify they exist.
148 | 	if len(r.AllowedDatasets) > 0 {
149 | 		for _, allowed := range r.AllowedDatasets {
150 | 			var projectID, datasetID, allowedFullID string
151 | 			if strings.Contains(allowed, ".") {
152 | 				parts := strings.Split(allowed, ".")
153 | 				if len(parts) != 2 {
154 | 					return nil, fmt.Errorf("invalid allowedDataset format: %q, expected 'project.dataset' or 'dataset'", allowed)
155 | 				}
156 | 				projectID = parts[0]
157 | 				datasetID = parts[1]
158 | 				allowedFullID = allowed
159 | 			} else {
160 | 				projectID = r.Project
161 | 				datasetID = allowed
162 | 				allowedFullID = fmt.Sprintf("%s.%s", projectID, datasetID)
163 | 			}
164 | 
165 | 			if s.Client != nil {
166 | 				dataset := s.Client.DatasetInProject(projectID, datasetID)
167 | 				_, err := dataset.Metadata(ctx)
168 | 				if err != nil {
169 | 					if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == http.StatusNotFound {
170 | 						return nil, fmt.Errorf("allowedDataset '%s' not found in project '%s'", datasetID, projectID)
171 | 					}
172 | 					return nil, fmt.Errorf("failed to verify allowedDataset '%s' in project '%s': %w", datasetID, projectID, err)
173 | 				}
174 | 			}
175 | 			allowedDatasets[allowedFullID] = struct{}{}
176 | 		}
177 | 	}
178 | 
179 | 	s.AllowedDatasets = allowedDatasets
180 | 	s.SessionProvider = s.newBigQuerySessionProvider()
181 | 
182 | 	if r.WriteMode != WriteModeAllowed && r.WriteMode != WriteModeBlocked && r.WriteMode != WriteModeProtected {
183 | 		return nil, fmt.Errorf("invalid writeMode %q: must be one of %q, %q, or %q", r.WriteMode, WriteModeAllowed, WriteModeProtected, WriteModeBlocked)
184 | 	}
185 | 	s.makeDataplexCatalogClient = s.lazyInitDataplexClient(ctx, tracer)
186 | 	return s, nil
187 | }
188 | 
189 | // setupClientCaching initializes caches and wraps the base client creator with caching logic.
190 | func setupClientCaching(s *Source, baseCreator BigqueryClientCreator) {
191 | 	// Define eviction handlers
192 | 	onBqEvict := func(key string, value interface{}) {
193 | 		if client, ok := value.(*bigqueryapi.Client); ok && client != nil {
194 | 			client.Close()
195 | 		}
196 | 	}
197 | 	onDataplexEvict := func(key string, value interface{}) {
198 | 		if client, ok := value.(*dataplexapi.CatalogClient); ok && client != nil {
199 | 			client.Close()
200 | 		}
201 | 	}
202 | 
203 | 	// Initialize caches
204 | 	s.bqClientCache = NewCache(onBqEvict)
205 | 	s.bqRestCache = NewCache(nil)
206 | 	s.dataplexCache = NewCache(onDataplexEvict)
207 | 
208 | 	// Create the caching wrapper for the client creator
209 | 	s.ClientCreator = func(tokenString string, wantRestService bool) (*bigqueryapi.Client, *bigqueryrestapi.Service, error) {
210 | 		// Check cache
211 | 		bqClientVal, bqFound := s.bqClientCache.Get(tokenString)
212 | 
213 | 		if wantRestService {
214 | 			restServiceVal, restFound := s.bqRestCache.Get(tokenString)
215 | 			if bqFound && restFound {
216 | 				// Cache hit for both
217 | 				return bqClientVal.(*bigqueryapi.Client), restServiceVal.(*bigqueryrestapi.Service), nil
218 | 			}
219 | 		} else {
220 | 			if bqFound {
221 | 				return bqClientVal.(*bigqueryapi.Client), nil, nil
222 | 			}
223 | 		}
224 | 
225 | 		// Cache miss - call the client creator
226 | 		client, restService, err := baseCreator(tokenString, wantRestService)
227 | 		if err != nil {
228 | 			return nil, nil, err
229 | 		}
230 | 
231 | 		// Set in cache
232 | 		s.bqClientCache.Set(tokenString, client)
233 | 		if wantRestService && restService != nil {
234 | 			s.bqRestCache.Set(tokenString, restService)
235 | 		}
236 | 
237 | 		return client, restService, nil
238 | 	}
239 | }
240 | 
// Compile-time check that *Source satisfies the sources.Source interface.
var _ sources.Source = &Source{}

// Source is an initialized BigQuery source. With UseClientOAuth it relies on
// ClientCreator plus the token-keyed caches; otherwise Client, RestService and
// TokenSource are populated once at Initialize time.
type Source struct {
	// BigQuery Google SQL struct with client
	Name                      string `yaml:"name"`
	Kind                      string `yaml:"kind"`
	Project                   string
	Location                  string
	Client                    *bigqueryapi.Client
	RestService               *bigqueryrestapi.Service
	TokenSource               oauth2.TokenSource
	// MaxQueryResultRows caps rows returned per query (set to 50 in Initialize).
	MaxQueryResultRows        int
	// ClientCreator builds per-token clients when UseClientOAuth is set.
	ClientCreator             BigqueryClientCreator
	// AllowedDatasets maps "project.dataset" IDs; empty means unrestricted.
	AllowedDatasets           map[string]struct{}
	UseClientOAuth            bool
	ImpersonateServiceAccount string
	WriteMode                 string
	// sessionMutex serializes session creation/validation in the provider.
	sessionMutex              sync.Mutex
	makeDataplexCatalogClient func() (*dataplexapi.CatalogClient, DataplexClientCreator, error)
	SessionProvider           BigQuerySessionProvider
	// Session is the cached protected-mode session; guarded by sessionMutex.
	Session                   *Session

	// Caches for OAuth clients
	bqClientCache *Cache
	bqRestCache   *Cache
	dataplexCache *Cache
}
268 | 
// Session records a BigQuery session created for protected write mode.
type Session struct {
	ID           string    // BigQuery session identifier
	ProjectID    string    // project of the session's temporary dataset
	DatasetID    string    // session's temporary (anonymous) dataset
	CreationTime time.Time // when the session was created; drives 7-day refresh
	LastUsed     time.Time // updated on each successful validation
}
276 | 
// SourceKind reports the kind identifier of this source ("bigquery").
func (s *Source) SourceKind() string {
	// Returns BigQuery Google SQL source kind
	return SourceKind
}

// BigQueryClient returns the shared BigQuery client (nil when useClientOAuth is set).
func (s *Source) BigQueryClient() *bigqueryapi.Client {
	return s.Client
}

// BigQueryRestService returns the shared BigQuery REST service (nil when useClientOAuth is set).
func (s *Source) BigQueryRestService() *bigqueryrestapi.Service {
	return s.RestService
}

// BigQueryWriteMode reports the configured write mode: "allowed", "blocked", or "protected".
func (s *Source) BigQueryWriteMode() string {
	return s.WriteMode
}

// BigQuerySession returns the session provider used by protected write mode.
func (s *Source) BigQuerySession() BigQuerySessionProvider {
	return s.SessionProvider
}
297 | 
298 | func (s *Source) newBigQuerySessionProvider() BigQuerySessionProvider {
299 | 	return func(ctx context.Context) (*Session, error) {
300 | 		if s.WriteMode != WriteModeProtected {
301 | 			return nil, nil
302 | 		}
303 | 
304 | 		s.sessionMutex.Lock()
305 | 		defer s.sessionMutex.Unlock()
306 | 
307 | 		logger, err := util.LoggerFromContext(ctx)
308 | 		if err != nil {
309 | 			return nil, fmt.Errorf("failed to get logger from context: %w", err)
310 | 		}
311 | 
312 | 		if s.Session != nil {
313 | 			// Absolute 7-day lifetime check.
314 | 			const sessionMaxLifetime = 7 * 24 * time.Hour
315 | 			// This assumes a single task will not exceed 30 minutes, preventing it from failing mid-execution.
316 | 			const refreshThreshold = 30 * time.Minute
317 | 			if time.Since(s.Session.CreationTime) > (sessionMaxLifetime - refreshThreshold) {
318 | 				logger.DebugContext(ctx, "Session is approaching its 7-day maximum lifetime. Creating a new one.")
319 | 			} else {
320 | 				job := &bigqueryrestapi.Job{
321 | 					Configuration: &bigqueryrestapi.JobConfiguration{
322 | 						DryRun: true,
323 | 						Query: &bigqueryrestapi.JobConfigurationQuery{
324 | 							Query:                "SELECT 1",
325 | 							UseLegacySql:         new(bool),
326 | 							ConnectionProperties: []*bigqueryrestapi.ConnectionProperty{{Key: "session_id", Value: s.Session.ID}},
327 | 						},
328 | 					},
329 | 				}
330 | 				_, err := s.RestService.Jobs.Insert(s.Project, job).Do()
331 | 				if err == nil {
332 | 					s.Session.LastUsed = time.Now()
333 | 					return s.Session, nil
334 | 				}
335 | 				logger.DebugContext(ctx, "Session validation failed (likely expired), creating a new one.", "error", err)
336 | 			}
337 | 		}
338 | 
339 | 		// Create a new session if one doesn't exist, it has passed its 7-day lifetime,
340 | 		// or it failed the validation dry run.
341 | 
342 | 		creationTime := time.Now()
343 | 		job := &bigqueryrestapi.Job{
344 | 			JobReference: &bigqueryrestapi.JobReference{
345 | 				ProjectId: s.Project,
346 | 				Location:  s.Location,
347 | 			},
348 | 			Configuration: &bigqueryrestapi.JobConfiguration{
349 | 				DryRun: true,
350 | 				Query: &bigqueryrestapi.JobConfigurationQuery{
351 | 					Query:         "SELECT 1",
352 | 					CreateSession: true,
353 | 				},
354 | 			},
355 | 		}
356 | 
357 | 		createdJob, err := s.RestService.Jobs.Insert(s.Project, job).Do()
358 | 		if err != nil {
359 | 			return nil, fmt.Errorf("failed to create new session: %w", err)
360 | 		}
361 | 
362 | 		var sessionID, sessionDatasetID, projectID string
363 | 		if createdJob.Status != nil && createdJob.Statistics.SessionInfo != nil {
364 | 			sessionID = createdJob.Statistics.SessionInfo.SessionId
365 | 		} else {
366 | 			return nil, fmt.Errorf("failed to get session ID from new session job")
367 | 		}
368 | 
369 | 		if createdJob.Configuration != nil && createdJob.Configuration.Query != nil && createdJob.Configuration.Query.DestinationTable != nil {
370 | 			sessionDatasetID = createdJob.Configuration.Query.DestinationTable.DatasetId
371 | 			projectID = createdJob.Configuration.Query.DestinationTable.ProjectId
372 | 		} else {
373 | 			return nil, fmt.Errorf("failed to get session dataset ID from new session job")
374 | 		}
375 | 
376 | 		s.Session = &Session{
377 | 			ID:           sessionID,
378 | 			ProjectID:    projectID,
379 | 			DatasetID:    sessionDatasetID,
380 | 			CreationTime: creationTime,
381 | 			LastUsed:     creationTime,
382 | 		}
383 | 		return s.Session, nil
384 | 	}
385 | }
386 | 
// UseClientAuthorization reports whether clients are built per request from
// the caller's OAuth token.
func (s *Source) UseClientAuthorization() bool {
	return s.UseClientOAuth
}

// BigQueryProject returns the configured GCP project ID.
func (s *Source) BigQueryProject() string {
	return s.Project
}

// BigQueryLocation returns the configured BigQuery location (may be empty).
func (s *Source) BigQueryLocation() string {
	return s.Location
}

// BigQueryTokenSource returns the shared token source (nil when useClientOAuth is set).
func (s *Source) BigQueryTokenSource() oauth2.TokenSource {
	return s.TokenSource
}
402 | 
403 | func (s *Source) BigQueryTokenSourceWithScope(ctx context.Context, scope string) (oauth2.TokenSource, error) {
404 | 	if s.ImpersonateServiceAccount != "" {
405 | 		// Create impersonated credentials token source with the requested scope
406 | 		ts, err := impersonate.CredentialsTokenSource(ctx, impersonate.CredentialsConfig{
407 | 			TargetPrincipal: s.ImpersonateServiceAccount,
408 | 			Scopes:          []string{scope},
409 | 		})
410 | 		if err != nil {
411 | 			return nil, fmt.Errorf("failed to create impersonated credentials for %q with scope %q: %w", s.ImpersonateServiceAccount, scope, err)
412 | 		}
413 | 		return ts, nil
414 | 	}
415 | 	return google.DefaultTokenSource(ctx, scope)
416 | }
417 | 
// GetMaxQueryResultRows returns the maximum number of rows returned per query.
func (s *Source) GetMaxQueryResultRows() int {
	return s.MaxQueryResultRows
}

// BigQueryClientCreator returns the per-token client creator (nil unless useClientOAuth is set).
func (s *Source) BigQueryClientCreator() BigqueryClientCreator {
	return s.ClientCreator
}
425 | 
426 | func (s *Source) BigQueryAllowedDatasets() []string {
427 | 	if len(s.AllowedDatasets) == 0 {
428 | 		return nil
429 | 	}
430 | 	datasets := make([]string, 0, len(s.AllowedDatasets))
431 | 	for d := range s.AllowedDatasets {
432 | 		datasets = append(datasets, d)
433 | 	}
434 | 	return datasets
435 | }
436 | 
437 | // IsDatasetAllowed checks if a given dataset is accessible based on the source's configuration.
438 | func (s *Source) IsDatasetAllowed(projectID, datasetID string) bool {
439 | 	// If the normalized map is empty, it means no restrictions were configured.
440 | 	if len(s.AllowedDatasets) == 0 {
441 | 		return true
442 | 	}
443 | 
444 | 	targetDataset := fmt.Sprintf("%s.%s", projectID, datasetID)
445 | 	_, ok := s.AllowedDatasets[targetDataset]
446 | 	return ok
447 | }
448 | 
// MakeDataplexCatalogClient returns the lazy, once-initialized Dataplex
// catalog client factory set up in Initialize.
func (s *Source) MakeDataplexCatalogClient() func() (*dataplexapi.CatalogClient, DataplexClientCreator, error) {
	return s.makeDataplexCatalogClient
}
452 | 
// lazyInitDataplexClient returns a factory that initializes the Dataplex
// catalog connection at most once (sync.Once) and then keeps returning the
// same client, creator, and error. When client OAuth is in use, the returned
// per-token creator is wrapped with s.dataplexCache so repeated tokens reuse
// clients.
func (s *Source) lazyInitDataplexClient(ctx context.Context, tracer trace.Tracer) func() (*dataplexapi.CatalogClient, DataplexClientCreator, error) {
	var once sync.Once
	var client *dataplexapi.CatalogClient
	var clientCreator DataplexClientCreator
	var err error

	return func() (*dataplexapi.CatalogClient, DataplexClientCreator, error) {
		once.Do(func() {
			// NOTE(review): ctx and tracer are captured from Initialize, so the
			// first call initializes with that (possibly long-lived) context.
			c, cc, e := initDataplexConnection(ctx, tracer, s.Name, s.Project, s.UseClientOAuth, s.ImpersonateServiceAccount)
			if e != nil {
				err = fmt.Errorf("failed to initialize dataplex client: %w", e)
				return
			}
			client = c

			// If using OAuth, wrap the provided client creator (cc) with caching logic
			if s.UseClientOAuth && cc != nil {
				clientCreator = func(tokenString string) (*dataplexapi.CatalogClient, error) {
					// Check cache
					if val, found := s.dataplexCache.Get(tokenString); found {
						return val.(*dataplexapi.CatalogClient), nil
					}

					// Cache miss - call client creator
					dpClient, err := cc(tokenString)
					if err != nil {
						return nil, err
					}

					// Set in cache
					s.dataplexCache.Set(tokenString, dpClient)
					return dpClient, nil
				}
			} else {
				// Not using OAuth or no creator was returned
				clientCreator = cc
			}
		})
		// A failed first initialization is sticky: every call returns the same error.
		return client, clientCreator, err
	}
}
494 | 
495 | func initBigQueryConnection(
496 | 	ctx context.Context,
497 | 	tracer trace.Tracer,
498 | 	name string,
499 | 	project string,
500 | 	location string,
501 | 	impersonateServiceAccount string,
502 | ) (*bigqueryapi.Client, *bigqueryrestapi.Service, oauth2.TokenSource, error) {
503 | 	ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
504 | 	defer span.End()
505 | 
506 | 	userAgent, err := util.UserAgentFromContext(ctx)
507 | 	if err != nil {
508 | 		return nil, nil, nil, err
509 | 	}
510 | 
511 | 	var tokenSource oauth2.TokenSource
512 | 	var opts []option.ClientOption
513 | 
514 | 	if impersonateServiceAccount != "" {
515 | 		// Create impersonated credentials token source with cloud-platform scope
516 | 		// This broader scope is needed for tools like conversational analytics
517 | 		cloudPlatformTokenSource, err := impersonate.CredentialsTokenSource(ctx, impersonate.CredentialsConfig{
518 | 			TargetPrincipal: impersonateServiceAccount,
519 | 			Scopes:          []string{"https://www.googleapis.com/auth/cloud-platform"},
520 | 		})
521 | 		if err != nil {
522 | 			return nil, nil, nil, fmt.Errorf("failed to create impersonated credentials for %q: %w", impersonateServiceAccount, err)
523 | 		}
524 | 		tokenSource = cloudPlatformTokenSource
525 | 		opts = []option.ClientOption{
526 | 			option.WithUserAgent(userAgent),
527 | 			option.WithTokenSource(cloudPlatformTokenSource),
528 | 		}
529 | 	} else {
530 | 		// Use default credentials
531 | 		cred, err := google.FindDefaultCredentials(ctx, bigqueryapi.Scope)
532 | 		if err != nil {
533 | 			return nil, nil, nil, fmt.Errorf("failed to find default Google Cloud credentials with scope %q: %w", bigqueryapi.Scope, err)
534 | 		}
535 | 		tokenSource = cred.TokenSource
536 | 		opts = []option.ClientOption{
537 | 			option.WithUserAgent(userAgent),
538 | 			option.WithCredentials(cred),
539 | 		}
540 | 	}
541 | 
542 | 	// Initialize the high-level BigQuery client
543 | 	client, err := bigqueryapi.NewClient(ctx, project, opts...)
544 | 	if err != nil {
545 | 		return nil, nil, nil, fmt.Errorf("failed to create BigQuery client for project %q: %w", project, err)
546 | 	}
547 | 	client.Location = location
548 | 
549 | 	// Initialize the low-level BigQuery REST service using the same credentials
550 | 	restService, err := bigqueryrestapi.NewService(ctx, opts...)
551 | 	if err != nil {
552 | 		return nil, nil, nil, fmt.Errorf("failed to create BigQuery v2 service: %w", err)
553 | 	}
554 | 
555 | 	return client, restService, tokenSource, nil
556 | }
557 | 
558 | // initBigQueryConnectionWithOAuthToken initialize a BigQuery client with an
559 | // OAuth access token.
560 | func initBigQueryConnectionWithOAuthToken(
561 | 	ctx context.Context,
562 | 	tracer trace.Tracer,
563 | 	project string,
564 | 	location string,
565 | 	name string,
566 | 	userAgent string,
567 | 	tokenString string,
568 | 	wantRestService bool,
569 | ) (*bigqueryapi.Client, *bigqueryrestapi.Service, error) {
570 | 	ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
571 | 	defer span.End()
572 | 	// Construct token source
573 | 	token := &oauth2.Token{
574 | 		AccessToken: string(tokenString),
575 | 	}
576 | 	ts := oauth2.StaticTokenSource(token)
577 | 
578 | 	// Initialize the BigQuery client with tokenSource
579 | 	client, err := bigqueryapi.NewClient(ctx, project, option.WithUserAgent(userAgent), option.WithTokenSource(ts))
580 | 	if err != nil {
581 | 		return nil, nil, fmt.Errorf("failed to create BigQuery client for project %q: %w", project, err)
582 | 	}
583 | 	client.Location = location
584 | 
585 | 	if wantRestService {
586 | 		// Initialize the low-level BigQuery REST service using the same credentials
587 | 		restService, err := bigqueryrestapi.NewService(ctx, option.WithUserAgent(userAgent), option.WithTokenSource(ts))
588 | 		if err != nil {
589 | 			return nil, nil, fmt.Errorf("failed to create BigQuery v2 service: %w", err)
590 | 		}
591 | 		return client, restService, nil
592 | 	}
593 | 
594 | 	return client, nil, nil
595 | }
596 | 
597 | // newBigQueryClientCreator sets the project parameters for the init helper
598 | // function. The returned function takes in an OAuth access token and uses it to
599 | // create a BQ client.
600 | func newBigQueryClientCreator(
601 | 	ctx context.Context,
602 | 	tracer trace.Tracer,
603 | 	project string,
604 | 	location string,
605 | 	name string,
606 | ) (func(string, bool) (*bigqueryapi.Client, *bigqueryrestapi.Service, error), error) {
607 | 	userAgent, err := util.UserAgentFromContext(ctx)
608 | 	if err != nil {
609 | 		return nil, err
610 | 	}
611 | 
612 | 	return func(tokenString string, wantRestService bool) (*bigqueryapi.Client, *bigqueryrestapi.Service, error) {
613 | 		return initBigQueryConnectionWithOAuthToken(ctx, tracer, project, location, name, userAgent, tokenString, wantRestService)
614 | 	}, nil
615 | }
616 | 
617 | func initDataplexConnection(
618 | 	ctx context.Context,
619 | 	tracer trace.Tracer,
620 | 	name string,
621 | 	project string,
622 | 	useClientOAuth bool,
623 | 	impersonateServiceAccount string,
624 | ) (*dataplexapi.CatalogClient, DataplexClientCreator, error) {
625 | 	var client *dataplexapi.CatalogClient
626 | 	var clientCreator DataplexClientCreator
627 | 	var err error
628 | 
629 | 	ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
630 | 	defer span.End()
631 | 
632 | 	userAgent, err := util.UserAgentFromContext(ctx)
633 | 	if err != nil {
634 | 		return nil, nil, err
635 | 	}
636 | 
637 | 	if useClientOAuth {
638 | 		clientCreator = newDataplexClientCreator(ctx, project, userAgent)
639 | 	} else {
640 | 		var opts []option.ClientOption
641 | 
642 | 		if impersonateServiceAccount != "" {
643 | 			// Create impersonated credentials token source
644 | 			ts, err := impersonate.CredentialsTokenSource(ctx, impersonate.CredentialsConfig{
645 | 				TargetPrincipal: impersonateServiceAccount,
646 | 				Scopes:          []string{"https://www.googleapis.com/auth/cloud-platform"},
647 | 			})
648 | 			if err != nil {
649 | 				return nil, nil, fmt.Errorf("failed to create impersonated credentials for %q: %w", impersonateServiceAccount, err)
650 | 			}
651 | 			opts = []option.ClientOption{
652 | 				option.WithUserAgent(userAgent),
653 | 				option.WithTokenSource(ts),
654 | 			}
655 | 		} else {
656 | 			// Use default credentials
657 | 			cred, err := google.FindDefaultCredentials(ctx)
658 | 			if err != nil {
659 | 				return nil, nil, fmt.Errorf("failed to find default Google Cloud credentials: %w", err)
660 | 			}
661 | 			opts = []option.ClientOption{
662 | 				option.WithUserAgent(userAgent),
663 | 				option.WithCredentials(cred),
664 | 			}
665 | 		}
666 | 
667 | 		client, err = dataplexapi.NewCatalogClient(ctx, opts...)
668 | 		if err != nil {
669 | 			return nil, nil, fmt.Errorf("failed to create Dataplex client for project %q: %w", project, err)
670 | 		}
671 | 	}
672 | 
673 | 	return client, clientCreator, nil
674 | }
675 | 
676 | func initDataplexConnectionWithOAuthToken(
677 | 	ctx context.Context,
678 | 	project string,
679 | 	userAgent string,
680 | 	tokenString string,
681 | ) (*dataplexapi.CatalogClient, error) {
682 | 	// Construct token source
683 | 	token := &oauth2.Token{
684 | 		AccessToken: string(tokenString),
685 | 	}
686 | 	ts := oauth2.StaticTokenSource(token)
687 | 
688 | 	client, err := dataplexapi.NewCatalogClient(ctx, option.WithUserAgent(userAgent), option.WithTokenSource(ts))
689 | 	if err != nil {
690 | 		return nil, fmt.Errorf("failed to create Dataplex client for project %q: %w", project, err)
691 | 	}
692 | 	return client, nil
693 | }
694 | 
// newDataplexClientCreator binds project and userAgent and returns a factory
// that builds a Dataplex catalog client from an OAuth access token.
func newDataplexClientCreator(
	ctx context.Context,
	project string,
	userAgent string,
) func(string) (*dataplexapi.CatalogClient, error) {
	return func(tokenString string) (*dataplexapi.CatalogClient, error) {
		return initDataplexConnectionWithOAuthToken(ctx, project, userAgent, tokenString)
	}
}
704 | 
```

--------------------------------------------------------------------------------
/internal/tools/neo4j/neo4jschema/neo4jschema.go:
--------------------------------------------------------------------------------

```go
  1 | // Copyright 2025 Google LLC
  2 | //
  3 | // Licensed under the Apache License, Version 2.0 (the "License");
  4 | // you may not use this file except in compliance with the License.
  5 | // You may obtain a copy of the License at
  6 | //
  7 | //     http://www.apache.org/licenses/LICENSE-2.0
  8 | //
  9 | // Unless required by applicable law or agreed to in writing, software
 10 | // distributed under the License is distributed on an "AS IS" BASIS,
 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 12 | // See the License for the specific language governing permissions and
 13 | // limitations under the License.
 14 | 
 15 | package neo4jschema
 16 | 
 17 | import (
 18 | 	"context"
 19 | 	"fmt"
 20 | 	"sync"
 21 | 	"time"
 22 | 
 23 | 	"github.com/goccy/go-yaml"
 24 | 	"github.com/googleapis/genai-toolbox/internal/sources"
 25 | 	neo4jsc "github.com/googleapis/genai-toolbox/internal/sources/neo4j"
 26 | 	"github.com/googleapis/genai-toolbox/internal/tools"
 27 | 	"github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema/cache"
 28 | 	"github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema/helpers"
 29 | 	"github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema/types"
 30 | 	"github.com/neo4j/neo4j-go-driver/v5/neo4j"
 31 | )
 32 | 
// kind defines the unique identifier for this tool.
const kind string = "neo4j-schema"

// init registers the tool with the application's tool registry when the package is initialized.
// A duplicate registration indicates a programming error, so it panics.
func init() {
	if !tools.Register(kind, newConfig) {
		panic(fmt.Sprintf("tool kind %q already registered", kind))
	}
}
 42 | 
 43 | // newConfig decodes a YAML configuration into a Config struct.
 44 | // This function is called by the tool registry to create a new configuration object.
 45 | func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
 46 | 	actual := Config{Name: name}
 47 | 	if err := decoder.DecodeContext(ctx, &actual); err != nil {
 48 | 		return nil, err
 49 | 	}
 50 | 	return actual, nil
 51 | }
 52 | 
// compatibleSource defines the interface a data source must implement to be used by this tool.
// It ensures that the source can provide a Neo4j driver and database name.
type compatibleSource interface {
	Neo4jDriver() neo4j.DriverWithContext
	Neo4jDatabase() string
}

// Statically verify that the neo4j source implements compatibleSource.
var _ compatibleSource = &neo4jsc.Source{}

// compatibleSources lists the kinds of sources that are compatible with this
// tool; used in the error message when an incompatible source is configured.
var compatibleSources = [...]string{neo4jsc.SourceKind}
 65 | 
// Config holds the configuration settings for the Neo4j schema tool.
// These settings are typically read from a YAML file.
type Config struct {
	Name               string   `yaml:"name" validate:"required"`
	Kind               string   `yaml:"kind" validate:"required"`
	Source             string   `yaml:"source" validate:"required"`
	Description        string   `yaml:"description" validate:"required"`
	AuthRequired       []string `yaml:"authRequired"`
	// CacheExpireMinutes is the cache expiration time in minutes; when nil,
	// Initialize substitutes cache.DefaultExpiration.
	CacheExpireMinutes *int     `yaml:"cacheExpireMinutes,omitempty"`
}

// Statically verify that Config implements the tools.ToolConfig interface.
var _ tools.ToolConfig = Config{}
 79 | 
// ToolConfigKind returns the kind identifier of this tool configuration
// ("neo4j-schema").
func (cfg Config) ToolConfigKind() string {
	return kind
}
 84 | 
 85 | // Initialize sets up the tool with its dependencies and returns a ready-to-use Tool instance.
 86 | func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
 87 | 	// Verify that the specified source exists.
 88 | 	rawS, ok := srcs[cfg.Source]
 89 | 	if !ok {
 90 | 		return nil, fmt.Errorf("no source named %q configured", cfg.Source)
 91 | 	}
 92 | 
 93 | 	// Verify the source is of a compatible kind.
 94 | 	s, ok := rawS.(compatibleSource)
 95 | 	if !ok {
 96 | 		return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
 97 | 	}
 98 | 
 99 | 	parameters := tools.Parameters{}
100 | 	mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, parameters)
101 | 
102 | 	// Set a default cache expiration if not provided in the configuration.
103 | 	if cfg.CacheExpireMinutes == nil {
104 | 		defaultExpiration := cache.DefaultExpiration // Default to 60 minutes
105 | 		cfg.CacheExpireMinutes = &defaultExpiration
106 | 	}
107 | 
108 | 	// Finish tool setup by creating the Tool instance.
109 | 	t := Tool{
110 | 		Name:               cfg.Name,
111 | 		Kind:               kind,
112 | 		AuthRequired:       cfg.AuthRequired,
113 | 		Driver:             s.Neo4jDriver(),
114 | 		Database:           s.Neo4jDatabase(),
115 | 		cache:              cache.NewCache(),
116 | 		cacheExpireMinutes: cfg.CacheExpireMinutes,
117 | 		manifest:           tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
118 | 		mcpManifest:        mcpManifest,
119 | 	}
120 | 	return t, nil
121 | }
122 | 
// Statically verify that Tool implements the tools.Tool interface.
var _ tools.Tool = Tool{}

// Tool represents the Neo4j schema extraction tool.
// It holds the Neo4j driver, database information, and a cache for the schema.
type Tool struct {
	Name               string   `yaml:"name"`
	Kind               string   `yaml:"kind"`
	AuthRequired       []string `yaml:"authRequired"`
	// Driver is the Neo4j driver used to open sessions against Database.
	Driver             neo4j.DriverWithContext
	Database           string
	// cache stores the extracted schema under the "schema" key.
	cache              *cache.Cache
	// cacheExpireMinutes is the cache TTL in minutes (set by Initialize).
	cacheExpireMinutes *int
	manifest           tools.Manifest
	mcpManifest        tools.McpManifest
}
139 | 
140 | // Invoke executes the tool's main logic: fetching the Neo4j schema.
141 | // It first checks the cache for a valid schema before extracting it from the database.
142 | func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
143 | 	// Check if a valid schema is already in the cache.
144 | 	if cachedSchema, ok := t.cache.Get("schema"); ok {
145 | 		if schema, ok := cachedSchema.(*types.SchemaInfo); ok {
146 | 			return schema, nil
147 | 		}
148 | 	}
149 | 
150 | 	// If not cached, extract the schema from the database.
151 | 	schema, err := t.extractSchema(ctx)
152 | 	if err != nil {
153 | 		return nil, fmt.Errorf("failed to extract database schema: %w", err)
154 | 	}
155 | 
156 | 	// Cache the newly extracted schema for future use.
157 | 	expiration := time.Duration(*t.cacheExpireMinutes) * time.Minute
158 | 	t.cache.Set("schema", schema, expiration)
159 | 
160 | 	return schema, nil
161 | }
162 | 
// ParseParams always returns an empty parameter set: this tool does not
// accept any input parameters.
func (t Tool) ParseParams(data map[string]any, claimsMap map[string]map[string]any) (tools.ParamValues, error) {
	return tools.ParamValues{}, nil
}
167 | 
// Manifest returns the tool's manifest, which describes its purpose and parameters.
func (t Tool) Manifest() tools.Manifest {
	return t.manifest
}
172 | 
// McpManifest returns the machine-consumable (MCP) manifest for the tool.
func (t Tool) McpManifest() tools.McpManifest {
	return t.mcpManifest
}
177 | 
// Authorized reports whether the tool may run given the set of auth services
// that have already been verified for the request.
func (t Tool) Authorized(verifiedAuthServices []string) bool {
	return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}
182 | 
// RequiresClientAuthorization reports whether the tool needs per-request
// client OAuth credentials; this tool never does.
func (t Tool) RequiresClientAuthorization() bool {
	return false
}
186 | 
187 | // checkAPOCProcedures verifies if essential APOC procedures are available in the database.
188 | // It returns true only if all required procedures are found.
189 | func (t Tool) checkAPOCProcedures(ctx context.Context) (bool, error) {
190 | 	proceduresToCheck := []string{"apoc.meta.schema", "apoc.meta.cypher.types"}
191 | 
192 | 	session := t.Driver.NewSession(ctx, neo4j.SessionConfig{DatabaseName: t.Database})
193 | 	defer session.Close(ctx)
194 | 
195 | 	// This query efficiently counts how many of the specified procedures exist.
196 | 	query := "SHOW PROCEDURES YIELD name WHERE name IN $procs RETURN count(name) AS procCount"
197 | 	params := map[string]any{"procs": proceduresToCheck}
198 | 
199 | 	result, err := session.Run(ctx, query, params)
200 | 	if err != nil {
201 | 		return false, fmt.Errorf("failed to execute procedure check query: %w", err)
202 | 	}
203 | 
204 | 	record, err := result.Single(ctx)
205 | 	if err != nil {
206 | 		return false, fmt.Errorf("failed to retrieve single result for procedure check: %w", err)
207 | 	}
208 | 
209 | 	rawCount, found := record.Get("procCount")
210 | 	if !found {
211 | 		return false, fmt.Errorf("field 'procCount' not found in result record")
212 | 	}
213 | 
214 | 	procCount, ok := rawCount.(int64)
215 | 	if !ok {
216 | 		return false, fmt.Errorf("expected 'procCount' to be of type int64, but got %T", rawCount)
217 | 	}
218 | 
219 | 	// Return true only if the number of found procedures matches the number we were looking for.
220 | 	return procCount == int64(len(proceduresToCheck)), nil
221 | }
222 | 
223 | // extractSchema orchestrates the concurrent extraction of different parts of the database schema.
224 | // It runs several extraction tasks in parallel for efficiency.
225 | func (t Tool) extractSchema(ctx context.Context) (*types.SchemaInfo, error) {
226 | 	schema := &types.SchemaInfo{}
227 | 	var mu sync.Mutex
228 | 
229 | 	// Define the different schema extraction tasks.
230 | 	tasks := []struct {
231 | 		name string
232 | 		fn   func() error
233 | 	}{
234 | 		{
235 | 			name: "database-info",
236 | 			fn: func() error {
237 | 				dbInfo, err := t.extractDatabaseInfo(ctx)
238 | 				if err != nil {
239 | 					return fmt.Errorf("failed to extract database info: %w", err)
240 | 				}
241 | 				mu.Lock()
242 | 				defer mu.Unlock()
243 | 				schema.DatabaseInfo = *dbInfo
244 | 				return nil
245 | 			},
246 | 		},
247 | 		{
248 | 			name: "schema-extraction",
249 | 			fn: func() error {
250 | 				// Check if APOC procedures are available.
251 | 				hasAPOC, err := t.checkAPOCProcedures(ctx)
252 | 				if err != nil {
253 | 					return fmt.Errorf("failed to check APOC procedures: %w", err)
254 | 				}
255 | 
256 | 				var nodeLabels []types.NodeLabel
257 | 				var relationships []types.Relationship
258 | 				var stats *types.Statistics
259 | 
260 | 				// Use APOC if available for a more detailed schema; otherwise, use native queries.
261 | 				if hasAPOC {
262 | 					nodeLabels, relationships, stats, err = t.GetAPOCSchema(ctx)
263 | 				} else {
264 | 					nodeLabels, relationships, stats, err = t.GetSchemaWithoutAPOC(ctx, 100)
265 | 				}
266 | 				if err != nil {
267 | 					return fmt.Errorf("failed to get schema: %w", err)
268 | 				}
269 | 
270 | 				mu.Lock()
271 | 				defer mu.Unlock()
272 | 				schema.NodeLabels = nodeLabels
273 | 				schema.Relationships = relationships
274 | 				schema.Statistics = *stats
275 | 				return nil
276 | 			},
277 | 		},
278 | 		{
279 | 			name: "constraints",
280 | 			fn: func() error {
281 | 				constraints, err := t.extractConstraints(ctx)
282 | 				if err != nil {
283 | 					return fmt.Errorf("failed to extract constraints: %w", err)
284 | 				}
285 | 				mu.Lock()
286 | 				defer mu.Unlock()
287 | 				schema.Constraints = constraints
288 | 				return nil
289 | 			},
290 | 		},
291 | 		{
292 | 			name: "indexes",
293 | 			fn: func() error {
294 | 				indexes, err := t.extractIndexes(ctx)
295 | 				if err != nil {
296 | 					return fmt.Errorf("failed to extract indexes: %w", err)
297 | 				}
298 | 				mu.Lock()
299 | 				defer mu.Unlock()
300 | 				schema.Indexes = indexes
301 | 				return nil
302 | 			},
303 | 		},
304 | 	}
305 | 
306 | 	var wg sync.WaitGroup
307 | 	errCh := make(chan error, len(tasks))
308 | 
309 | 	// Execute all tasks concurrently.
310 | 	for _, task := range tasks {
311 | 		wg.Add(1)
312 | 		go func(task struct {
313 | 			name string
314 | 			fn   func() error
315 | 		}) {
316 | 			defer wg.Done()
317 | 			if err := task.fn(); err != nil {
318 | 				errCh <- err
319 | 			}
320 | 		}(task)
321 | 	}
322 | 
323 | 	wg.Wait()
324 | 	close(errCh)
325 | 
326 | 	// Collect any errors that occurred during the concurrent tasks.
327 | 	for err := range errCh {
328 | 		if err != nil {
329 | 			schema.Errors = append(schema.Errors, err.Error())
330 | 		}
331 | 	}
332 | 	return schema, nil
333 | }
334 | 
// GetAPOCSchema extracts schema information using the APOC library, which provides detailed metadata.
// Two tasks run concurrently, each with its own session: one gathers node
// metadata via apoc.meta.schema, the other gathers relationship metadata.
// The first task failure cancels the shared context so the other stops early.
func (t Tool) GetAPOCSchema(ctx context.Context) ([]types.NodeLabel, []types.Relationship, *types.Statistics, error) {
	var nodeLabels []types.NodeLabel
	var relationships []types.Relationship
	stats := &types.Statistics{
		NodesByLabel:        make(map[string]int64),
		RelationshipsByType: make(map[string]int64),
		PropertiesByLabel:   make(map[string]int64),
		PropertiesByRelType: make(map[string]int64),
	}

	// mu guards nodeLabels, relationships, stats, and firstErr.
	var mu sync.Mutex
	var firstErr error
	ctx, cancel := context.WithCancel(ctx)
	defer cancel()

	// handleError records only the first failure.
	handleError := func(err error) {
		mu.Lock()
		defer mu.Unlock()
		if firstErr == nil {
			firstErr = err
			cancel() // Cancel other operations on the first error.
		}
	}

	tasks := []struct {
		name string
		fn   func(session neo4j.SessionWithContext) error
	}{
		{
			name: "apoc-schema",
			fn: func(session neo4j.SessionWithContext) error {
				result, err := session.Run(ctx, "CALL apoc.meta.schema({sample: 10}) YIELD value RETURN value", nil)
				if err != nil {
					return fmt.Errorf("failed to run APOC schema query: %w", err)
				}
				if !result.Next(ctx) {
					return fmt.Errorf("no results from APOC schema query")
				}
				schemaMap, ok := result.Record().Values[0].(map[string]any)
				if !ok {
					return fmt.Errorf("unexpected result format from APOC schema query: %T", result.Record().Values[0])
				}
				apocSchema, err := helpers.MapToAPOCSchema(schemaMap)
				if err != nil {
					return fmt.Errorf("failed to convert schema map to APOCSchemaResult: %w", err)
				}
				// Relationship info from apoc.meta.schema is discarded here;
				// the apoc-relationships task below collects it instead.
				nodes, _, apocStats := helpers.ProcessAPOCSchema(apocSchema)
				mu.Lock()
				defer mu.Unlock()
				nodeLabels = nodes
				stats.TotalNodes = apocStats.TotalNodes
				stats.TotalProperties += apocStats.TotalProperties
				stats.NodesByLabel = apocStats.NodesByLabel
				stats.PropertiesByLabel = apocStats.PropertiesByLabel
				return nil
			},
		},
		{
			name: "apoc-relationships",
			fn: func(session neo4j.SessionWithContext) error {
				query := `
					MATCH (startNode)-[rel]->(endNode)
					WITH
					  labels(startNode)[0] AS startNode,
					  type(rel) AS relType,
					  apoc.meta.cypher.types(rel) AS relProperties,
					  labels(endNode)[0] AS endNode,
					  count(*) AS count
					RETURN relType, startNode, endNode, relProperties, count`
				result, err := session.Run(ctx, query, nil)
				if err != nil {
					return fmt.Errorf("failed to extract relationships: %w", err)
				}
				for result.Next(ctx) {
					record := result.Record()
					// NOTE(review): unchecked type assertions — these panic
					// if the driver returns unexpected value types; confirm
					// this is acceptable or guard with comma-ok.
					relType, startNode, endNode := record.Values[0].(string), record.Values[1].(string), record.Values[2].(string)
					properties, count := record.Values[3].(map[string]any), record.Values[4].(int64)

					if relType == "" || count == 0 {
						continue
					}
					relationship := types.Relationship{Type: relType, StartNode: startNode, EndNode: endNode, Count: count, Properties: []types.PropertyInfo{}}
					for prop, propType := range properties {
						relationship.Properties = append(relationship.Properties, types.PropertyInfo{Name: prop, Types: []string{propType.(string)}})
					}
					mu.Lock()
					relationships = append(relationships, relationship)
					stats.RelationshipsByType[relType] += count
					stats.TotalRelationships += count
					propCount := int64(len(relationship.Properties))
					stats.TotalProperties += propCount
					stats.PropertiesByRelType[relType] += propCount
					mu.Unlock()
				}
				mu.Lock()
				defer mu.Unlock()
				// Normalize empty maps to nil so they are omitted downstream.
				if len(stats.RelationshipsByType) == 0 {
					stats.RelationshipsByType = nil
				}
				if len(stats.PropertiesByRelType) == 0 {
					stats.PropertiesByRelType = nil
				}
				return nil
			},
		},
	}

	var wg sync.WaitGroup
	wg.Add(len(tasks))
	for _, task := range tasks {
		go func(task struct {
			name string
			fn   func(session neo4j.SessionWithContext) error
		}) {
			defer wg.Done()
			// Each task gets its own session; sessions are not safe for
			// concurrent use.
			session := t.Driver.NewSession(ctx, neo4j.SessionConfig{DatabaseName: t.Database})
			defer session.Close(ctx)
			if err := task.fn(session); err != nil {
				handleError(fmt.Errorf("task %s failed: %w", task.name, err))
			}
		}(task)
	}
	wg.Wait()

	if firstErr != nil {
		return nil, nil, nil, firstErr
	}
	return nodeLabels, relationships, stats, nil
}
465 | 
// GetSchemaWithoutAPOC extracts schema information using native Cypher queries.
// This serves as a fallback for databases without APOC installed. Property
// types are inferred by sampling up to sampleSize entities per label or
// relationship type. Two tasks (node schema, relationship schema) run
// concurrently, each with its own session; the first failure cancels both.
func (t Tool) GetSchemaWithoutAPOC(ctx context.Context, sampleSize int) ([]types.NodeLabel, []types.Relationship, *types.Statistics, error) {
	// label -> property key -> set of observed type names
	nodePropsMap := make(map[string]map[string]map[string]bool)
	// relationship type -> property key -> set of observed type names
	relPropsMap := make(map[string]map[string]map[string]bool)
	nodeCounts := make(map[string]int64)
	relCounts := make(map[string]int64)
	relConnectivity := make(map[string]types.RelConnectivityInfo)

	// mu guards all of the maps above and firstErr.
	var mu sync.Mutex
	var firstErr error
	ctx, cancel := context.WithCancel(ctx)
	defer cancel()

	// handleError records only the first failure.
	handleError := func(err error) {
		mu.Lock()
		defer mu.Unlock()
		if firstErr == nil {
			firstErr = err
			cancel()
		}
	}

	tasks := []struct {
		name string
		fn   func(session neo4j.SessionWithContext) error
	}{
		{
			name: "node-schema",
			fn: func(session neo4j.SessionWithContext) error {
				countResult, err := session.Run(ctx, `MATCH (n) UNWIND labels(n) AS label RETURN label, count(*) AS count ORDER BY count DESC`, nil)
				if err != nil {
					return fmt.Errorf("node count query failed: %w", err)
				}
				var labelsList []string
				mu.Lock()
				for countResult.Next(ctx) {
					record := countResult.Record()
					label, count := record.Values[0].(string), record.Values[1].(int64)
					nodeCounts[label] = count
					labelsList = append(labelsList, label)
				}
				mu.Unlock()
				if err = countResult.Err(); err != nil {
					return fmt.Errorf("node count result error: %w", err)
				}

				// Sample properties per label. The label is interpolated into
				// the query text (labels cannot be parameterized in Cypher);
				// it originates from the database itself, not user input.
				for _, label := range labelsList {
					propQuery := fmt.Sprintf(`MATCH (n:%s) WITH n LIMIT $sampleSize UNWIND keys(n) AS key WITH key, n[key] AS value WHERE value IS NOT NULL RETURN key, COLLECT(DISTINCT valueType(value)) AS types`, label)
					propResult, err := session.Run(ctx, propQuery, map[string]any{"sampleSize": sampleSize})
					if err != nil {
						return fmt.Errorf("node properties query for label %s failed: %w", label, err)
					}
					mu.Lock()
					if nodePropsMap[label] == nil {
						nodePropsMap[label] = make(map[string]map[string]bool)
					}
					for propResult.Next(ctx) {
						record := propResult.Record()
						key, types := record.Values[0].(string), record.Values[1].([]any)
						if nodePropsMap[label][key] == nil {
							nodePropsMap[label][key] = make(map[string]bool)
						}
						for _, tp := range types {
							nodePropsMap[label][key][tp.(string)] = true
						}
					}
					mu.Unlock()
					if err = propResult.Err(); err != nil {
						return fmt.Errorf("node properties result error for label %s: %w", label, err)
					}
				}
				return nil
			},
		},
		{
			name: "relationship-schema",
			fn: func(session neo4j.SessionWithContext) error {
				relQuery := `
					MATCH (start)-[r]->(end)
					WITH type(r) AS relType, labels(start) AS startLabels, labels(end) AS endLabels, count(*) AS count
					RETURN relType, CASE WHEN size(startLabels) > 0 THEN startLabels[0] ELSE null END AS startLabel, CASE WHEN size(endLabels) > 0 THEN endLabels[0] ELSE null END AS endLabel, sum(count) AS totalCount
					ORDER BY totalCount DESC`
				relResult, err := session.Run(ctx, relQuery, nil)
				if err != nil {
					return fmt.Errorf("relationship count query failed: %w", err)
				}
				var relTypesList []string
				mu.Lock()
				for relResult.Next(ctx) {
					record := relResult.Record()
					relType := record.Values[0].(string)
					startLabel := ""
					if record.Values[1] != nil {
						startLabel = record.Values[1].(string)
					}
					endLabel := ""
					if record.Values[2] != nil {
						endLabel = record.Values[2].(string)
					}
					count := record.Values[3].(int64)
					relCounts[relType] = count
					relTypesList = append(relTypesList, relType)
					// Keep the most frequent start/end label pair per type.
					if existing, ok := relConnectivity[relType]; !ok || count > existing.Count {
						relConnectivity[relType] = types.RelConnectivityInfo{StartNode: startLabel, EndNode: endLabel, Count: count}
					}
				}
				mu.Unlock()
				if err = relResult.Err(); err != nil {
					return fmt.Errorf("relationship count result error: %w", err)
				}

				// Sample properties per relationship type (type name is
				// interpolated for the same reason as node labels above).
				for _, relType := range relTypesList {
					propQuery := fmt.Sprintf(`MATCH ()-[r:%s]->() WITH r LIMIT $sampleSize WHERE size(keys(r)) > 0 UNWIND keys(r) AS key WITH key, r[key] AS value WHERE value IS NOT NULL RETURN key, COLLECT(DISTINCT valueType(value)) AS types`, relType)
					propResult, err := session.Run(ctx, propQuery, map[string]any{"sampleSize": sampleSize})
					if err != nil {
						return fmt.Errorf("relationship properties query for type %s failed: %w", relType, err)
					}
					mu.Lock()
					if relPropsMap[relType] == nil {
						relPropsMap[relType] = make(map[string]map[string]bool)
					}
					for propResult.Next(ctx) {
						record := propResult.Record()
						key, propTypes := record.Values[0].(string), record.Values[1].([]any)
						if relPropsMap[relType][key] == nil {
							relPropsMap[relType][key] = make(map[string]bool)
						}
						for _, t := range propTypes {
							relPropsMap[relType][key][t.(string)] = true
						}
					}
					mu.Unlock()
					if err = propResult.Err(); err != nil {
						return fmt.Errorf("relationship properties result error for type %s: %w", relType, err)
					}
				}
				return nil
			},
		},
	}

	var wg sync.WaitGroup
	wg.Add(len(tasks))
	for _, task := range tasks {
		go func(task struct {
			name string
			fn   func(session neo4j.SessionWithContext) error
		}) {
			defer wg.Done()
			// Each task gets its own session; sessions are not safe for
			// concurrent use.
			session := t.Driver.NewSession(ctx, neo4j.SessionConfig{DatabaseName: t.Database})
			defer session.Close(ctx)
			if err := task.fn(session); err != nil {
				handleError(fmt.Errorf("task %s failed: %w", task.name, err))
			}
		}(task)
	}
	wg.Wait()

	if firstErr != nil {
		return nil, nil, nil, firstErr
	}

	// Fold the raw counts and property-type sets into the final schema shape.
	nodeLabels, relationships, stats := helpers.ProcessNonAPOCSchema(nodeCounts, nodePropsMap, relCounts, relPropsMap, relConnectivity)
	return nodeLabels, relationships, stats, nil
}
632 | 
633 | // extractDatabaseInfo retrieves general information about the Neo4j database instance.
634 | func (t Tool) extractDatabaseInfo(ctx context.Context) (*types.DatabaseInfo, error) {
635 | 	session := t.Driver.NewSession(ctx, neo4j.SessionConfig{DatabaseName: t.Database})
636 | 	defer session.Close(ctx)
637 | 
638 | 	result, err := session.Run(ctx, "CALL dbms.components() YIELD name, versions, edition", nil)
639 | 	if err != nil {
640 | 		return nil, err
641 | 	}
642 | 
643 | 	dbInfo := &types.DatabaseInfo{}
644 | 	if result.Next(ctx) {
645 | 		record := result.Record()
646 | 		dbInfo.Name = record.Values[0].(string)
647 | 		if versions, ok := record.Values[1].([]any); ok && len(versions) > 0 {
648 | 			dbInfo.Version = versions[0].(string)
649 | 		}
650 | 		dbInfo.Edition = record.Values[2].(string)
651 | 	}
652 | 	return dbInfo, result.Err()
653 | }
654 | 
655 | // extractConstraints fetches all schema constraints from the database.
656 | func (t Tool) extractConstraints(ctx context.Context) ([]types.Constraint, error) {
657 | 	session := t.Driver.NewSession(ctx, neo4j.SessionConfig{DatabaseName: t.Database})
658 | 	defer session.Close(ctx)
659 | 
660 | 	result, err := session.Run(ctx, "SHOW CONSTRAINTS", nil)
661 | 	if err != nil {
662 | 		return nil, err
663 | 	}
664 | 
665 | 	var constraints []types.Constraint
666 | 	for result.Next(ctx) {
667 | 		record := result.Record().AsMap()
668 | 		constraint := types.Constraint{
669 | 			Name:       helpers.GetStringValue(record["name"]),
670 | 			Type:       helpers.GetStringValue(record["type"]),
671 | 			EntityType: helpers.GetStringValue(record["entityType"]),
672 | 		}
673 | 		if labels, ok := record["labelsOrTypes"].([]any); ok && len(labels) > 0 {
674 | 			constraint.Label = labels[0].(string)
675 | 		}
676 | 		if props, ok := record["properties"].([]any); ok {
677 | 			constraint.Properties = helpers.ConvertToStringSlice(props)
678 | 		}
679 | 		constraints = append(constraints, constraint)
680 | 	}
681 | 	return constraints, result.Err()
682 | }
683 | 
684 | // extractIndexes fetches all schema indexes from the database.
685 | func (t Tool) extractIndexes(ctx context.Context) ([]types.Index, error) {
686 | 	session := t.Driver.NewSession(ctx, neo4j.SessionConfig{DatabaseName: t.Database})
687 | 	defer session.Close(ctx)
688 | 
689 | 	result, err := session.Run(ctx, "SHOW INDEXES", nil)
690 | 	if err != nil {
691 | 		return nil, err
692 | 	}
693 | 
694 | 	var indexes []types.Index
695 | 	for result.Next(ctx) {
696 | 		record := result.Record().AsMap()
697 | 		index := types.Index{
698 | 			Name:       helpers.GetStringValue(record["name"]),
699 | 			State:      helpers.GetStringValue(record["state"]),
700 | 			Type:       helpers.GetStringValue(record["type"]),
701 | 			EntityType: helpers.GetStringValue(record["entityType"]),
702 | 		}
703 | 		if labels, ok := record["labelsOrTypes"].([]any); ok && len(labels) > 0 {
704 | 			index.Label = labels[0].(string)
705 | 		}
706 | 		if props, ok := record["properties"].([]any); ok {
707 | 			index.Properties = helpers.ConvertToStringSlice(props)
708 | 		}
709 | 		indexes = append(indexes, index)
710 | 	}
711 | 	return indexes, result.Err()
712 | }
713 | 
```

--------------------------------------------------------------------------------
/tests/mongodb/mongodb_integration_test.go:
--------------------------------------------------------------------------------

```go
  1 | // Copyright 2025 Google LLC
  2 | //
  3 | // Licensed under the Apache License, Version 2.0 (the "License");
  4 | // you may not use this file except in compliance with the License.
  5 | // You may obtain a copy of the License at
  6 | //
  7 | //     http://www.apache.org/licenses/LICENSE-2.0
  8 | //
  9 | // Unless required by applicable law or agreed to in writing, software
 10 | // distributed under the License is distributed on an "AS IS" BASIS,
 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 12 | // See the License for the specific language governing permissions and
 13 | // limitations under the License.
 14 | 
 15 | package mongodb
 16 | 
 17 | import (
 18 | 	"bytes"
 19 | 	"context"
 20 | 	"encoding/json"
 21 | 	"fmt"
 22 | 	"io"
 23 | 	"net/http"
 24 | 	"os"
 25 | 	"regexp"
 26 | 	"testing"
 27 | 	"time"
 28 | 
 29 | 	"github.com/googleapis/genai-toolbox/internal/testutils"
 30 | 	"github.com/googleapis/genai-toolbox/tests"
 31 | 	"go.mongodb.org/mongo-driver/mongo"
 32 | 	"go.mongodb.org/mongo-driver/mongo/options"
 33 | )
 34 | 
 35 | var (
 36 | 	MongoDbSourceKind   = "mongodb"
 37 | 	MongoDbToolKind     = "mongodb-find"
 38 | 	MongoDbUri          = os.Getenv("MONGODB_URI")
 39 | 	MongoDbDatabase     = os.Getenv("MONGODB_DATABASE")
 40 | 	ServiceAccountEmail = os.Getenv("SERVICE_ACCOUNT_EMAIL")
 41 | )
 42 | 
 43 | func getMongoDBVars(t *testing.T) map[string]any {
 44 | 	switch "" {
 45 | 	case MongoDbUri:
 46 | 		t.Fatal("'MongoDbUri' not set")
 47 | 	case MongoDbDatabase:
 48 | 		t.Fatal("'MongoDbDatabase' not set")
 49 | 	}
 50 | 	return map[string]any{
 51 | 		"kind": MongoDbSourceKind,
 52 | 		"uri":  MongoDbUri,
 53 | 	}
 54 | }
 55 | 
 56 | func initMongoDbDatabase(ctx context.Context, uri, database string) (*mongo.Database, error) {
 57 | 	// Create a new mongodb Database
 58 | 	client, err := mongo.Connect(ctx, options.Client().ApplyURI(uri))
 59 | 	if err != nil {
 60 | 		return nil, fmt.Errorf("unable to connect to mongodb: %s", err)
 61 | 	}
 62 | 	err = client.Ping(ctx, nil)
 63 | 	if err != nil {
 64 | 		return nil, fmt.Errorf("unable to connect to mongodb: %s", err)
 65 | 	}
 66 | 	return client.Database(database), nil
 67 | }
 68 | 
 69 | func TestMongoDBToolEndpoints(t *testing.T) {
 70 | 	sourceConfig := getMongoDBVars(t)
 71 | 	ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
 72 | 	defer cancel()
 73 | 
 74 | 	var args []string
 75 | 
 76 | 	database, err := initMongoDbDatabase(ctx, MongoDbUri, MongoDbDatabase)
 77 | 	if err != nil {
 78 | 		t.Fatalf("unable to create MongoDB connection: %s", err)
 79 | 	}
 80 | 
 81 | 	// set up data for param tool
 82 | 	teardownDB := setupMongoDB(t, ctx, database)
 83 | 	defer teardownDB(t)
 84 | 
 85 | 	// Write config into a file and pass it to command
 86 | 	toolsFile := getMongoDBToolsConfig(sourceConfig, MongoDbToolKind)
 87 | 
 88 | 	cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
 89 | 	if err != nil {
 90 | 		t.Fatalf("command initialization returned an error: %s", err)
 91 | 	}
 92 | 	defer cleanup()
 93 | 
 94 | 	waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
 95 | 	defer cancel()
 96 | 	out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
 97 | 	if err != nil {
 98 | 		t.Logf("toolbox command logs: \n%s", out)
 99 | 		t.Fatalf("toolbox didn't start successfully: %s", err)
100 | 	}
101 | 
102 | 	// Get configs for tests
103 | 	select1Want := `[{"_id":3,"id":3,"name":"Sid"}]`
104 | 	myToolId3NameAliceWant := `[{"_id":5,"id":3,"name":"Alice"}]`
105 | 	myToolById4Want := `[{"_id":4,"id":4,"name":null}]`
106 | 	mcpMyFailToolWant := `invalid JSON input: missing colon after key `
107 | 	mcpMyToolId3NameAliceWant := `{"jsonrpc":"2.0","id":"my-simple-tool","result":{"content":[{"type":"text","text":"{\"_id\":5,\"id\":3,\"name\":\"Alice\"}"}]}}`
108 | 
109 | 	// Run tests
110 | 	tests.RunToolGetTest(t)
111 | 	tests.RunToolInvokeTest(t, select1Want,
112 | 		tests.WithMyToolId3NameAliceWant(myToolId3NameAliceWant),
113 | 		tests.WithMyArrayToolWant(myToolId3NameAliceWant),
114 | 		tests.WithMyToolById4Want(myToolById4Want),
115 | 	)
116 | 	tests.RunMCPToolCallMethod(t, mcpMyFailToolWant, select1Want,
117 | 		tests.WithMcpMyToolId3NameAliceWant(mcpMyToolId3NameAliceWant),
118 | 	)
119 | 
120 | 	delete1Want := "1"
121 | 	deleteManyWant := "2"
122 | 	runToolDeleteInvokeTest(t, delete1Want, deleteManyWant)
123 | 
124 | 	insert1Want := `["68666e1035bb36bf1b4d47fb"]`
125 | 	insertManyWant := `["68667a6436ec7d0363668db7","68667a6436ec7d0363668db8","68667a6436ec7d0363668db9"]`
126 | 	runToolInsertInvokeTest(t, insert1Want, insertManyWant)
127 | 
128 | 	update1Want := "1"
129 | 	updateManyWant := "[2,0,2]"
130 | 	runToolUpdateInvokeTest(t, update1Want, updateManyWant)
131 | 
132 | 	aggregate1Want := `[{"id":2}]`
133 | 	aggregateManyWant := `[{"id":500},{"id":501}]`
134 | 	runToolAggregateInvokeTest(t, aggregate1Want, aggregateManyWant)
135 | }
136 | 
137 | func runToolDeleteInvokeTest(t *testing.T, delete1Want, deleteManyWant string) {
138 | 	// Test tool invoke endpoint
139 | 	invokeTcs := []struct {
140 | 		name          string
141 | 		api           string
142 | 		requestHeader map[string]string
143 | 		requestBody   io.Reader
144 | 		want          string
145 | 		isErr         bool
146 | 	}{
147 | 		{
148 | 			name:          "invoke my-delete-one-tool",
149 | 			api:           "http://127.0.0.1:5000/api/tool/my-delete-one-tool/invoke",
150 | 			requestHeader: map[string]string{},
151 | 			requestBody:   bytes.NewBuffer([]byte(`{ "id" : 100 }`)),
152 | 			want:          delete1Want,
153 | 			isErr:         false,
154 | 		},
155 | 		{
156 | 			name:          "invoke my-delete-many-tool",
157 | 			api:           "http://127.0.0.1:5000/api/tool/my-delete-many-tool/invoke",
158 | 			requestHeader: map[string]string{},
159 | 			requestBody:   bytes.NewBuffer([]byte(`{ "id" : 101 }`)),
160 | 			want:          deleteManyWant,
161 | 			isErr:         false,
162 | 		},
163 | 	}
164 | 
165 | 	for _, tc := range invokeTcs {
166 | 
167 | 		t.Run(tc.name, func(t *testing.T) {
168 | 			// Send Tool invocation request
169 | 			req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
170 | 			if err != nil {
171 | 				t.Fatalf("unable to create request: %s", err)
172 | 			}
173 | 			req.Header.Add("Content-type", "application/json")
174 | 			for k, v := range tc.requestHeader {
175 | 				req.Header.Add(k, v)
176 | 			}
177 | 			resp, err := http.DefaultClient.Do(req)
178 | 			if err != nil {
179 | 				t.Fatalf("unable to send request: %s", err)
180 | 			}
181 | 			defer resp.Body.Close()
182 | 
183 | 			if resp.StatusCode != http.StatusOK {
184 | 				if tc.isErr {
185 | 					return
186 | 				}
187 | 				bodyBytes, _ := io.ReadAll(resp.Body)
188 | 				t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
189 | 			}
190 | 
191 | 			// Check response body
192 | 			var body map[string]interface{}
193 | 			err = json.NewDecoder(resp.Body).Decode(&body)
194 | 			if err != nil {
195 | 				t.Fatalf("error parsing response body")
196 | 			}
197 | 
198 | 			got, ok := body["result"].(string)
199 | 			if !ok {
200 | 				t.Fatalf("unable to find result in response body")
201 | 			}
202 | 
203 | 			if got != tc.want {
204 | 				t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
205 | 			}
206 | 		})
207 | 	}
208 | }
209 | 
210 | func runToolInsertInvokeTest(t *testing.T, insert1Want, insertManyWant string) {
211 | 	// Test tool invoke endpoint
212 | 	invokeTcs := []struct {
213 | 		name          string
214 | 		api           string
215 | 		requestHeader map[string]string
216 | 		requestBody   io.Reader
217 | 		want          string
218 | 		isErr         bool
219 | 	}{
220 | 		{
221 | 			name:          "invoke my-insert-one-tool",
222 | 			api:           "http://127.0.0.1:5000/api/tool/my-insert-one-tool/invoke",
223 | 			requestHeader: map[string]string{},
224 | 			requestBody:   bytes.NewBuffer([]byte(`{ "data" : "{ \"_id\": { \"$oid\": \"68666e1035bb36bf1b4d47fb\" },  \"id\" : 200 }" }"`)),
225 | 			want:          insert1Want,
226 | 			isErr:         false,
227 | 		},
228 | 		{
229 | 			name:          "invoke my-insert-many-tool",
230 | 			api:           "http://127.0.0.1:5000/api/tool/my-insert-many-tool/invoke",
231 | 			requestHeader: map[string]string{},
232 | 			requestBody:   bytes.NewBuffer([]byte(`{ "data" : "[{ \"_id\": { \"$oid\": \"68667a6436ec7d0363668db7\"} , \"id\" : 201 }, { \"_id\" : { \"$oid\": \"68667a6436ec7d0363668db8\"}, \"id\" : 202 }, { \"_id\": { \"$oid\": \"68667a6436ec7d0363668db9\"}, \"id\": 203 }]" }`)),
233 | 			want:          insertManyWant,
234 | 			isErr:         false,
235 | 		},
236 | 	}
237 | 
238 | 	for _, tc := range invokeTcs {
239 | 
240 | 		t.Run(tc.name, func(t *testing.T) {
241 | 			// Send Tool invocation request
242 | 			req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
243 | 			if err != nil {
244 | 				t.Fatalf("unable to create request: %s", err)
245 | 			}
246 | 			req.Header.Add("Content-type", "application/json")
247 | 			for k, v := range tc.requestHeader {
248 | 				req.Header.Add(k, v)
249 | 			}
250 | 			resp, err := http.DefaultClient.Do(req)
251 | 			if err != nil {
252 | 				t.Fatalf("unable to send request: %s", err)
253 | 			}
254 | 			defer resp.Body.Close()
255 | 
256 | 			if resp.StatusCode != http.StatusOK {
257 | 				if tc.isErr {
258 | 					return
259 | 				}
260 | 				bodyBytes, _ := io.ReadAll(resp.Body)
261 | 				t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
262 | 			}
263 | 
264 | 			// Check response body
265 | 			var body map[string]interface{}
266 | 			err = json.NewDecoder(resp.Body).Decode(&body)
267 | 			if err != nil {
268 | 				t.Fatalf("error parsing response body")
269 | 			}
270 | 
271 | 			got, ok := body["result"].(string)
272 | 			if !ok {
273 | 				t.Fatalf("unable to find result in response body")
274 | 			}
275 | 
276 | 			if got != tc.want {
277 | 				t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
278 | 			}
279 | 		})
280 | 	}
281 | }
282 | 
283 | func runToolUpdateInvokeTest(t *testing.T, update1Want, updateManyWant string) {
284 | 	// Test tool invoke endpoint
285 | 	invokeTcs := []struct {
286 | 		name          string
287 | 		api           string
288 | 		requestHeader map[string]string
289 | 		requestBody   io.Reader
290 | 		want          string
291 | 		isErr         bool
292 | 	}{
293 | 		{
294 | 			name:          "invoke my-update-one-tool",
295 | 			api:           "http://127.0.0.1:5000/api/tool/my-update-one-tool/invoke",
296 | 			requestHeader: map[string]string{},
297 | 			requestBody:   bytes.NewBuffer([]byte(`{ "id": 300, "name": "Bob" }`)),
298 | 			want:          update1Want,
299 | 			isErr:         false,
300 | 		},
301 | 		{
302 | 			name:          "invoke my-update-many-tool",
303 | 			api:           "http://127.0.0.1:5000/api/tool/my-update-many-tool/invoke",
304 | 			requestHeader: map[string]string{},
305 | 			requestBody:   bytes.NewBuffer([]byte(`{ "id": 400, "name" : "Alice" }`)),
306 | 			want:          updateManyWant,
307 | 			isErr:         false,
308 | 		},
309 | 	}
310 | 
311 | 	for _, tc := range invokeTcs {
312 | 
313 | 		t.Run(tc.name, func(t *testing.T) {
314 | 			// Send Tool invocation request
315 | 			req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
316 | 			if err != nil {
317 | 				t.Fatalf("unable to create request: %s", err)
318 | 			}
319 | 			req.Header.Add("Content-type", "application/json")
320 | 			for k, v := range tc.requestHeader {
321 | 				req.Header.Add(k, v)
322 | 			}
323 | 			resp, err := http.DefaultClient.Do(req)
324 | 			if err != nil {
325 | 				t.Fatalf("unable to send request: %s", err)
326 | 			}
327 | 			defer resp.Body.Close()
328 | 
329 | 			if resp.StatusCode != http.StatusOK {
330 | 				if tc.isErr {
331 | 					return
332 | 				}
333 | 				bodyBytes, _ := io.ReadAll(resp.Body)
334 | 				t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
335 | 			}
336 | 
337 | 			// Check response body
338 | 			var body map[string]interface{}
339 | 			err = json.NewDecoder(resp.Body).Decode(&body)
340 | 			if err != nil {
341 | 				t.Fatalf("error parsing response body")
342 | 			}
343 | 
344 | 			got, ok := body["result"].(string)
345 | 			if !ok {
346 | 				t.Fatalf("unable to find result in response body")
347 | 			}
348 | 
349 | 			if got != tc.want {
350 | 				t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
351 | 			}
352 | 		})
353 | 	}
354 | }
355 | func runToolAggregateInvokeTest(t *testing.T, aggregate1Want string, aggregateManyWant string) {
356 | 	// Test tool invoke endpoint
357 | 	invokeTcs := []struct {
358 | 		name          string
359 | 		api           string
360 | 		requestHeader map[string]string
361 | 		requestBody   io.Reader
362 | 		want          string
363 | 		isErr         bool
364 | 	}{
365 | 		{
366 | 			name:          "invoke my-aggregate-tool",
367 | 			api:           "http://127.0.0.1:5000/api/tool/my-aggregate-tool/invoke",
368 | 			requestHeader: map[string]string{},
369 | 			requestBody:   bytes.NewBuffer([]byte(`{ "name": "Jane" }`)),
370 | 			want:          aggregate1Want,
371 | 			isErr:         false,
372 | 		},
373 | 		{
374 | 			name:          "invoke my-aggregate-tool",
375 | 			api:           "http://127.0.0.1:5000/api/tool/my-aggregate-tool/invoke",
376 | 			requestHeader: map[string]string{},
377 | 			requestBody:   bytes.NewBuffer([]byte(`{ "name" : "ToBeAggregated" }`)),
378 | 			want:          aggregateManyWant,
379 | 			isErr:         false,
380 | 		},
381 | 		{
382 | 			name:          "invoke my-read-only-aggregate-tool",
383 | 			api:           "http://127.0.0.1:5000/api/tool/my-read-only-aggregate-tool/invoke",
384 | 			requestHeader: map[string]string{},
385 | 			requestBody:   bytes.NewBuffer([]byte(`{ "name" : "ToBeAggregated" }`)),
386 | 			want:          "",
387 | 			isErr:         true,
388 | 		},
389 | 		{
390 | 			name:          "invoke my-read-write-aggregate-tool",
391 | 			api:           "http://127.0.0.1:5000/api/tool/my-read-write-aggregate-tool/invoke",
392 | 			requestHeader: map[string]string{},
393 | 			requestBody:   bytes.NewBuffer([]byte(`{ "name" : "ToBeAggregated" }`)),
394 | 			want:          "[]",
395 | 			isErr:         false,
396 | 		},
397 | 	}
398 | 
399 | 	for _, tc := range invokeTcs {
400 | 
401 | 		t.Run(tc.name, func(t *testing.T) {
402 | 			// Send Tool invocation request
403 | 			req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
404 | 			if err != nil {
405 | 				t.Fatalf("unable to create request: %s", err)
406 | 			}
407 | 			req.Header.Add("Content-type", "application/json")
408 | 			for k, v := range tc.requestHeader {
409 | 				req.Header.Add(k, v)
410 | 			}
411 | 			resp, err := http.DefaultClient.Do(req)
412 | 			if err != nil {
413 | 				t.Fatalf("unable to send request: %s", err)
414 | 			}
415 | 			defer resp.Body.Close()
416 | 
417 | 			if resp.StatusCode != http.StatusOK {
418 | 				if tc.isErr {
419 | 					return
420 | 				}
421 | 				bodyBytes, _ := io.ReadAll(resp.Body)
422 | 				t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
423 | 			}
424 | 
425 | 			// Check response body
426 | 			var body map[string]interface{}
427 | 			err = json.NewDecoder(resp.Body).Decode(&body)
428 | 			if err != nil {
429 | 				t.Fatalf("error parsing response body")
430 | 			}
431 | 
432 | 			got, ok := body["result"].(string)
433 | 			if !ok {
434 | 				t.Fatalf("unable to find result in response body")
435 | 			}
436 | 
437 | 			if got != tc.want {
438 | 				t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
439 | 			}
440 | 		})
441 | 	}
442 | }
443 | 
444 | func setupMongoDB(t *testing.T, ctx context.Context, database *mongo.Database) func(*testing.T) {
445 | 	collectionName := "test_collection"
446 | 
447 | 	documents := []map[string]any{
448 | 		{"_id": 1, "id": 1, "name": "Alice", "email": ServiceAccountEmail},
449 | 		{"_id": 1, "id": 2, "name": "FakeAlice", "email": "[email protected]"},
450 | 		{"_id": 2, "id": 2, "name": "Jane"},
451 | 		{"_id": 3, "id": 3, "name": "Sid"},
452 | 		{"_id": 4, "id": 4, "name": nil},
453 | 		{"_id": 5, "id": 3, "name": "Alice", "email": "[email protected]"},
454 | 		{"_id": 6, "id": 100, "name": "ToBeDeleted", "email": "[email protected]"},
455 | 		{"_id": 7, "id": 101, "name": "ToBeDeleted", "email": "[email protected]"},
456 | 		{"_id": 8, "id": 101, "name": "ToBeDeleted", "email": "[email protected]"},
457 | 		{"_id": 9, "id": 300, "name": "ToBeUpdatedToBob", "email": "[email protected]"},
458 | 		{"_id": 10, "id": 400, "name": "ToBeUpdatedToAlice", "email": "[email protected]"},
459 | 		{"_id": 11, "id": 400, "name": "ToBeUpdatedToAlice", "email": "[email protected]"},
460 | 		{"_id": 12, "id": 500, "name": "ToBeAggregated", "email": "[email protected]"},
461 | 		{"_id": 13, "id": 501, "name": "ToBeAggregated", "email": "[email protected]"},
462 | 	}
463 | 	for _, doc := range documents {
464 | 		_, err := database.Collection(collectionName).InsertOne(ctx, doc)
465 | 		if err != nil {
466 | 			t.Fatalf("unable to insert test data: %s", err)
467 | 		}
468 | 	}
469 | 
470 | 	return func(t *testing.T) {
471 | 		// tear down test
472 | 		err := database.Collection(collectionName).Drop(ctx)
473 | 		if err != nil {
474 | 			t.Errorf("Teardown failed: %s", err)
475 | 		}
476 | 	}
477 | 
478 | }
479 | 
480 | func getMongoDBToolsConfig(sourceConfig map[string]any, toolKind string) map[string]any {
481 | 	toolsFile := map[string]any{
482 | 		"sources": map[string]any{
483 | 			"my-instance": sourceConfig,
484 | 		},
485 | 		"authServices": map[string]any{
486 | 			"my-google-auth": map[string]any{
487 | 				"kind":     "google",
488 | 				"clientId": tests.ClientId,
489 | 			},
490 | 		},
491 | 		"tools": map[string]any{
492 | 			"my-simple-tool": map[string]any{
493 | 				"kind":           "mongodb-find-one",
494 | 				"source":         "my-instance",
495 | 				"description":    "Simple tool to test end to end functionality.",
496 | 				"collection":     "test_collection",
497 | 				"filterPayload":  `{ "_id" : 3 }`,
498 | 				"filterParams":   []any{},
499 | 				"projectPayload": `{ "_id": 1, "id": 1, "name" : 1 }`,
500 | 				"database":       MongoDbDatabase,
501 | 				"limit":          1,
502 | 				"sort":           `{ "id": 1 }`,
503 | 			},
504 | 			"my-tool": map[string]any{
505 | 				"kind":          toolKind,
506 | 				"source":        "my-instance",
507 | 				"description":   "Tool to test invocation with params.",
508 | 				"authRequired":  []string{},
509 | 				"collection":    "test_collection",
510 | 				"filterPayload": `{ "id" : {{ .id }}, "name" : {{json .name }} }`,
511 | 				"filterParams": []map[string]any{
512 | 					{
513 | 						"name":        "id",
514 | 						"type":        "integer",
515 | 						"description": "user id",
516 | 					},
517 | 					{
518 | 						"name":        "name",
519 | 						"type":        "string",
520 | 						"description": "user name",
521 | 					},
522 | 				},
523 | 				"projectPayload": `{ "_id": 1, "id": 1, "name" : 1 }`,
524 | 				"database":       MongoDbDatabase,
525 | 			},
526 | 			"my-tool-by-id": map[string]any{
527 | 				"kind":          toolKind,
528 | 				"source":        "my-instance",
529 | 				"description":   "Tool to test invocation with params.",
530 | 				"authRequired":  []string{},
531 | 				"collection":    "test_collection",
532 | 				"filterPayload": `{ "id" : {{ .id }} }`,
533 | 				"filterParams": []map[string]any{
534 | 					{
535 | 						"name":        "id",
536 | 						"type":        "integer",
537 | 						"description": "user id",
538 | 					},
539 | 				},
540 | 				"projectPayload": `{ "_id": 1, "id": 1, "name" : 1 }`,
541 | 				"database":       MongoDbDatabase,
542 | 			},
543 | 			"my-tool-by-name": map[string]any{
544 | 				"kind":          toolKind,
545 | 				"source":        "my-instance",
546 | 				"description":   "Tool to test invocation with params.",
547 | 				"authRequired":  []string{},
548 | 				"collection":    "test_collection",
549 | 				"filterPayload": `{ "name" : {{ .name }} }`,
550 | 				"filterParams": []map[string]any{
551 | 					{
552 | 						"name":        "name",
553 | 						"type":        "string",
554 | 						"description": "user name",
555 | 						"required":    false,
556 | 					},
557 | 				},
558 | 				"projectPayload": `{ "_id": 1, "id": 1, "name" : 1 }`,
559 | 				"database":       MongoDbDatabase,
560 | 			},
561 | 			"my-array-tool": map[string]any{
562 | 				"kind":          toolKind,
563 | 				"source":        "my-instance",
564 | 				"description":   "Tool to test invocation with array.",
565 | 				"authRequired":  []string{},
566 | 				"collection":    "test_collection",
567 | 				"filterPayload": `{ "name": { "$in": {{json .nameArray}} }, "_id": 5 })`,
568 | 				"filterParams": []map[string]any{
569 | 					{
570 | 						"name":        "nameArray",
571 | 						"type":        "array",
572 | 						"description": "user names",
573 | 						"items": map[string]any{
574 | 							"name":        "username",
575 | 							"type":        "string",
576 | 							"description": "string item"},
577 | 					},
578 | 				},
579 | 				"projectPayload": `{ "_id": 1, "id": 1, "name" : 1 }`,
580 | 				"database":       MongoDbDatabase,
581 | 			},
582 | 			"my-auth-tool": map[string]any{
583 | 				"kind":          toolKind,
584 | 				"source":        "my-instance",
585 | 				"description":   "Tool to test authenticated parameters.",
586 | 				"authRequired":  []string{},
587 | 				"collection":    "test_collection",
588 | 				"filterPayload": `{ "email" : {{json .email }} }`,
589 | 				"filterParams": []map[string]any{
590 | 					{
591 | 						"name":        "email",
592 | 						"type":        "string",
593 | 						"description": "user email",
594 | 						"authServices": []map[string]string{
595 | 							{
596 | 								"name":  "my-google-auth",
597 | 								"field": "email",
598 | 							},
599 | 						},
600 | 					},
601 | 				},
602 | 				"projectPayload": `{ "_id": 0, "name" : 1 }`,
603 | 				"database":       MongoDbDatabase,
604 | 			},
605 | 			"my-auth-required-tool": map[string]any{
606 | 				"kind":        toolKind,
607 | 				"source":      "my-instance",
608 | 				"description": "Tool to test auth required invocation.",
609 | 				"authRequired": []string{
610 | 					"my-google-auth",
611 | 				},
612 | 				"collection":    "test_collection",
613 | 				"filterPayload": `{ "_id": 3, "id": 3 }`,
614 | 				"filterParams":  []any{},
615 | 				"database":      MongoDbDatabase,
616 | 			},
617 | 			"my-fail-tool": map[string]any{
618 | 				"kind":          toolKind,
619 | 				"source":        "my-instance",
620 | 				"description":   "Tool to test statement with incorrect syntax.",
621 | 				"authRequired":  []string{},
622 | 				"collection":    "test_collection",
623 | 				"filterPayload": `{ "id" ; 1 }"}`,
624 | 				"filterParams":  []any{},
625 | 				"database":      MongoDbDatabase,
626 | 			},
627 | 			"my-delete-one-tool": map[string]any{
628 | 				"kind":          "mongodb-delete-one",
629 | 				"source":        "my-instance",
630 | 				"description":   "Tool to test deleting an entry.",
631 | 				"authRequired":  []string{},
632 | 				"collection":    "test_collection",
633 | 				"filterPayload": `{ "id" : 100 }"}`,
634 | 				"filterParams":  []any{},
635 | 				"database":      MongoDbDatabase,
636 | 			},
637 | 			"my-delete-many-tool": map[string]any{
638 | 				"kind":          "mongodb-delete-many",
639 | 				"source":        "my-instance",
640 | 				"description":   "Tool to test deleting multiple entries.",
641 | 				"authRequired":  []string{},
642 | 				"collection":    "test_collection",
643 | 				"filterPayload": `{ "id" : 101 }"}`,
644 | 				"filterParams":  []any{},
645 | 				"database":      MongoDbDatabase,
646 | 			},
647 | 			"my-insert-one-tool": map[string]any{
648 | 				"kind":         "mongodb-insert-one",
649 | 				"source":       "my-instance",
650 | 				"description":  "Tool to test inserting an entry.",
651 | 				"authRequired": []string{},
652 | 				"collection":   "test_collection",
653 | 				"canonical":    true,
654 | 				"database":     MongoDbDatabase,
655 | 			},
656 | 			"my-insert-many-tool": map[string]any{
657 | 				"kind":         "mongodb-insert-many",
658 | 				"source":       "my-instance",
659 | 				"description":  "Tool to test inserting multiple entries.",
660 | 				"authRequired": []string{},
661 | 				"collection":   "test_collection",
662 | 				"canonical":    true,
663 | 				"database":     MongoDbDatabase,
664 | 			},
665 | 			"my-update-one-tool": map[string]any{
666 | 				"kind":          "mongodb-update-one",
667 | 				"source":        "my-instance",
668 | 				"description":   "Tool to test updating an entry.",
669 | 				"authRequired":  []string{},
670 | 				"collection":    "test_collection",
671 | 				"canonical":     true,
672 | 				"filterPayload": `{ "id" : {{ .id }} }`,
673 | 				"filterParams": []map[string]any{
674 | 					{
675 | 						"name":        "id",
676 | 						"type":        "integer",
677 | 						"description": "id",
678 | 					},
679 | 				},
680 | 				"updatePayload": `{ "$set" : { "name": {{json .name}} } }`,
681 | 				"updateParams": []map[string]any{
682 | 					{
683 | 						"name":        "name",
684 | 						"type":        "string",
685 | 						"description": "user name",
686 | 					},
687 | 				},
688 | 				"database": MongoDbDatabase,
689 | 			},
690 | 			"my-update-many-tool": map[string]any{
691 | 				"kind":          "mongodb-update-many",
692 | 				"source":        "my-instance",
693 | 				"description":   "Tool to test updating multiple entries.",
694 | 				"authRequired":  []string{},
695 | 				"collection":    "test_collection",
696 | 				"canonical":     true,
697 | 				"filterPayload": `{ "id" : {{ .id }} }`,
698 | 				"filterParams": []map[string]any{
699 | 					{
700 | 						"name":        "id",
701 | 						"type":        "integer",
702 | 						"description": "id",
703 | 					},
704 | 				},
705 | 				"updatePayload": `{ "$set" : { "name": {{json .name}} } }`,
706 | 				"updateParams": []map[string]any{
707 | 					{
708 | 						"name":        "name",
709 | 						"type":        "string",
710 | 						"description": "user name",
711 | 					},
712 | 				},
713 | 				"database": MongoDbDatabase,
714 | 			},
715 | 			"my-aggregate-tool": map[string]any{
716 | 				"kind":            "mongodb-aggregate",
717 | 				"source":          "my-instance",
718 | 				"description":     "Tool to test an aggregation.",
719 | 				"authRequired":    []string{},
720 | 				"collection":      "test_collection",
721 | 				"canonical":       true,
722 | 				"pipelinePayload": `[{ "$match" : { "name": {{json .name}} } }, { "$project" : { "id" : 1, "_id" : 0 }}]`,
723 | 				"pipelineParams": []map[string]any{
724 | 					{
725 | 						"name":        "name",
726 | 						"type":        "string",
727 | 						"description": "user name",
728 | 					},
729 | 				},
730 | 				"database": MongoDbDatabase,
731 | 			},
732 | 			"my-read-only-aggregate-tool": map[string]any{
733 | 				"kind":            "mongodb-aggregate",
734 | 				"source":          "my-instance",
735 | 				"description":     "Tool to test an aggregation.",
736 | 				"authRequired":    []string{},
737 | 				"collection":      "test_collection",
738 | 				"canonical":       true,
739 | 				"readOnly":        true,
740 | 				"pipelinePayload": `[{ "$match" : { "name": {{json .name}} } }, { "$out" : "target_collection" }]`,
741 | 				"pipelineParams": []map[string]any{
742 | 					{
743 | 						"name":        "name",
744 | 						"type":        "string",
745 | 						"description": "user name",
746 | 					},
747 | 				},
748 | 				"database": MongoDbDatabase,
749 | 			},
750 | 			"my-read-write-aggregate-tool": map[string]any{
751 | 				"kind":            "mongodb-aggregate",
752 | 				"source":          "my-instance",
753 | 				"description":     "Tool to test an aggregation.",
754 | 				"authRequired":    []string{},
755 | 				"collection":      "test_collection",
756 | 				"canonical":       true,
757 | 				"readOnly":        false,
758 | 				"pipelinePayload": `[{ "$match" : { "name": {{json .name}} } }, { "$out" : "target_collection" }]`,
759 | 				"pipelineParams": []map[string]any{
760 | 					{
761 | 						"name":        "name",
762 | 						"type":        "string",
763 | 						"description": "user name",
764 | 					},
765 | 				},
766 | 				"database": MongoDbDatabase,
767 | 			},
768 | 		},
769 | 	}
770 | 
771 | 	return toolsFile
772 | 
773 | }
774 | 
```

--------------------------------------------------------------------------------
/internal/tools/spanner/spannerlisttables/spannerlisttables.go:
--------------------------------------------------------------------------------

```go
  1 | // Copyright 2025 Google LLC
  2 | //
  3 | // Licensed under the Apache License, Version 2.0 (the "License");
  4 | // you may not use this file except in compliance with the License.
  5 | // You may obtain a copy of the License at
  6 | //
  7 | //     http://www.apache.org/licenses/LICENSE-2.0
  8 | //
  9 | // Unless required by applicable law or agreed to in writing, software
 10 | // distributed under the License is distributed on an "AS IS" BASIS,
 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 12 | // See the License for the specific language governing permissions and
 13 | // limitations under the License.
 14 | 
 15 | package spannerlisttables
 16 | 
 17 | import (
 18 | 	"context"
 19 | 	"fmt"
 20 | 	"strings"
 21 | 
 22 | 	"cloud.google.com/go/spanner"
 23 | 	yaml "github.com/goccy/go-yaml"
 24 | 	"github.com/googleapis/genai-toolbox/internal/sources"
 25 | 	spannerdb "github.com/googleapis/genai-toolbox/internal/sources/spanner"
 26 | 	"github.com/googleapis/genai-toolbox/internal/tools"
 27 | 	"google.golang.org/api/iterator"
 28 | )
 29 | 
// kind is the unique tool-kind identifier used for registration and YAML config.
const kind string = "spanner-list-tables"
 31 | 
 32 | func init() {
 33 | 	if !tools.Register(kind, newConfig) {
 34 | 		panic(fmt.Sprintf("tool kind %q already registered", kind))
 35 | 	}
 36 | }
 37 | 
 38 | func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
 39 | 	actual := Config{Name: name}
 40 | 	if err := decoder.DecodeContext(ctx, &actual); err != nil {
 41 | 		return nil, err
 42 | 	}
 43 | 	return actual, nil
 44 | }
 45 | 
// compatibleSource is the subset of source behavior this tool needs: a
// Spanner client to run queries with, and the database dialect used to pick
// the matching information-schema SQL statement.
type compatibleSource interface {
	SpannerClient() *spanner.Client
	DatabaseDialect() string
}

// validate compatible sources are still compatible
var _ compatibleSource = &spannerdb.Source{}

// compatibleSources lists the accepted source kinds; it is interpolated into
// the error message when an incompatible source is configured.
var compatibleSources = [...]string{spannerdb.SourceKind}
 55 | 
// Config is the YAML configuration for a spanner-list-tables tool entry.
type Config struct {
	Name         string   `yaml:"name" validate:"required"`   // tool name (taken from the config key)
	Kind         string   `yaml:"kind" validate:"required"`   // must be "spanner-list-tables"
	Source       string   `yaml:"source" validate:"required"` // name of a configured Spanner source
	Description  string   `yaml:"description"`                // optional; a default description is used when empty
	AuthRequired []string `yaml:"authRequired"`               // auth services required to invoke the tool
}

// validate interface
var _ tools.ToolConfig = Config{}
 66 | 
// ToolConfigKind returns the tool-kind string this config corresponds to.
func (cfg Config) ToolConfigKind() string {
	return kind
}
 70 | 
 71 | func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
 72 | 	// verify source exists
 73 | 	rawS, ok := srcs[cfg.Source]
 74 | 	if !ok {
 75 | 		return nil, fmt.Errorf("no source named %q configured", cfg.Source)
 76 | 	}
 77 | 
 78 | 	// verify the source is compatible
 79 | 	s, ok := rawS.(compatibleSource)
 80 | 	if !ok {
 81 | 		return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
 82 | 	}
 83 | 
 84 | 	// Define parameters for the tool
 85 | 	allParameters := tools.Parameters{
 86 | 		tools.NewStringParameterWithDefault(
 87 | 			"table_names",
 88 | 			"",
 89 | 			"Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed.",
 90 | 		),
 91 | 		tools.NewStringParameterWithDefault(
 92 | 			"output_format",
 93 | 			"detailed",
 94 | 			"Optional: Use 'simple' to return table names only or use 'detailed' to return the full information schema.",
 95 | 		),
 96 | 	}
 97 | 
 98 | 	description := cfg.Description
 99 | 	if description == "" {
100 | 		description = "Lists detailed schema information (object type, columns, constraints, indexes) as JSON for user-created tables. Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
101 | 	}
102 | 	mcpManifest := tools.GetMcpManifest(cfg.Name, description, cfg.AuthRequired, allParameters)
103 | 
104 | 	// finish tool setup
105 | 	t := Tool{
106 | 		Name:         cfg.Name,
107 | 		Kind:         kind,
108 | 		AllParams:    allParameters,
109 | 		AuthRequired: cfg.AuthRequired,
110 | 		Client:       s.SpannerClient(),
111 | 		dialect:      s.DatabaseDialect(),
112 | 		manifest:     tools.Manifest{Description: description, Parameters: allParameters.Manifest(), AuthRequired: cfg.AuthRequired},
113 | 		mcpManifest:  mcpManifest,
114 | 	}
115 | 	return t, nil
116 | }
117 | 
// validate interface
var _ tools.Tool = Tool{}

// Tool lists Spanner table schema information (columns, constraints, indexes)
// by querying INFORMATION_SCHEMA with a dialect-specific statement.
type Tool struct {
	Name         string           `yaml:"name"`
	Kind         string           `yaml:"kind"`
	AuthRequired []string         `yaml:"authRequired"`
	AllParams    tools.Parameters `yaml:"allParams"`
	Client       *spanner.Client // client used to run the read-only query
	dialect      string          // "postgresql" or "googlesql"; selects the SQL statement and parameter style
	manifest     tools.Manifest
	mcpManifest  tools.McpManifest
}
131 | 
132 | // processRows iterates over the spanner.RowIterator and converts each row to a map[string]any.
133 | func processRows(iter *spanner.RowIterator) ([]any, error) {
134 | 	var out []any
135 | 	defer iter.Stop()
136 | 
137 | 	for {
138 | 		row, err := iter.Next()
139 | 		if err == iterator.Done {
140 | 			break
141 | 		}
142 | 		if err != nil {
143 | 			return nil, fmt.Errorf("unable to parse row: %w", err)
144 | 		}
145 | 
146 | 		vMap := make(map[string]any)
147 | 		cols := row.ColumnNames()
148 | 		for i, c := range cols {
149 | 			vMap[c] = row.ColumnValue(i)
150 | 		}
151 | 		out = append(out, vMap)
152 | 	}
153 | 	return out, nil
154 | }
155 | 
156 | func (t Tool) getStatement() string {
157 | 	switch strings.ToLower(t.dialect) {
158 | 	case "postgresql":
159 | 		return postgresqlStatement
160 | 	case "googlesql":
161 | 		return googleSQLStatement
162 | 	default:
163 | 		// Default to GoogleSQL
164 | 		return googleSQLStatement
165 | 	}
166 | }
167 | 
168 | func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
169 | 	paramsMap := params.AsMap()
170 | 
171 | 	// Get the appropriate SQL statement based on dialect
172 | 	statement := t.getStatement()
173 | 
174 | 	// Prepare parameters based on dialect
175 | 	var stmtParams map[string]interface{}
176 | 
177 | 	tableNames, _ := paramsMap["table_names"].(string)
178 | 	outputFormat, _ := paramsMap["output_format"].(string)
179 | 	if outputFormat == "" {
180 | 		outputFormat = "detailed"
181 | 	}
182 | 
183 | 	switch strings.ToLower(t.dialect) {
184 | 	case "postgresql":
185 | 		// PostgreSQL uses positional parameters ($1, $2)
186 | 		stmtParams = map[string]interface{}{
187 | 			"p1": tableNames,
188 | 			"p2": outputFormat,
189 | 		}
190 | 
191 | 	case "googlesql":
192 | 		// GoogleSQL uses named parameters (@table_names, @output_format)
193 | 		stmtParams = map[string]interface{}{
194 | 			"table_names":   tableNames,
195 | 			"output_format": outputFormat,
196 | 		}
197 | 	default:
198 | 		return nil, fmt.Errorf("unsupported dialect: %s", t.dialect)
199 | 	}
200 | 
201 | 	stmt := spanner.Statement{
202 | 		SQL:    statement,
203 | 		Params: stmtParams,
204 | 	}
205 | 
206 | 	// Execute the query (read-only)
207 | 	iter := t.Client.Single().Query(ctx, stmt)
208 | 	results, err := processRows(iter)
209 | 	if err != nil {
210 | 		return nil, fmt.Errorf("unable to execute query: %w", err)
211 | 	}
212 | 
213 | 	return results, nil
214 | }
215 | 
// ParseParams validates incoming request data (and auth claims) against the
// tool's declared parameters.
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
	return tools.ParseParams(t.AllParams, data, claims)
}
219 | 
// Manifest returns the tool's manifest built during Initialize.
func (t Tool) Manifest() tools.Manifest {
	return t.manifest
}
223 | 
// McpManifest returns the tool's MCP manifest built during Initialize.
func (t Tool) McpManifest() tools.McpManifest {
	return t.mcpManifest
}
227 | 
// Authorized reports whether the verified auth services satisfy this tool's
// AuthRequired list.
func (t Tool) Authorized(verifiedAuthServices []string) bool {
	return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}
231 | 
// RequiresClientAuthorization reports that this tool never needs the client's
// own OAuth credentials; it uses the source's Spanner client instead.
func (t Tool) RequiresClientAuthorization() bool {
	return false
}
235 | 
// postgresqlStatement lists user tables in the 'public' schema for the
// PostgreSQL dialect, assembling per-table JSON via string concatenation.
// Bound positionally: $1 = comma-separated table-name filter, $2 = output format.
// NOTE(review): the `AND I.INDEX_TYPE IN ('LOCAL', 'GLOBAL')` line in
// indexes_info_cte is attached to the StoringIndexCols join condition rather
// than the WHERE clause — confirm that placement is intentional.
237 | const postgresqlStatement = `
238 | WITH table_info_cte AS (
239 |     SELECT
240 |       T.TABLE_SCHEMA,
241 |       T.TABLE_NAME,
242 |       T.TABLE_TYPE,
243 |       T.PARENT_TABLE_NAME,
244 |       T.ON_DELETE_ACTION
245 |     FROM INFORMATION_SCHEMA.TABLES AS T
246 |     WHERE
247 |       T.TABLE_SCHEMA = 'public'
248 |       AND T.TABLE_TYPE = 'BASE TABLE'
249 |       AND (
250 |       NULLIF(TRIM($1), '') IS NULL OR
251 |       T.TABLE_NAME IN (
252 |         SELECT table_name
253 |         FROM UNNEST(regexp_split_to_array($1, '\s*,\s*')) AS table_name)
254 |       )
255 |   ),
256 | 
257 |   columns_info_cte AS (
258 |     SELECT
259 |       C.TABLE_SCHEMA,
260 |       C.TABLE_NAME,
261 |       ARRAY_AGG(
262 |         CONCAT(
263 |           '{',
264 |           '"column_name":"', COALESCE(REPLACE(C.COLUMN_NAME, '"', '\"'), ''), '",',
265 |           '"data_type":"', COALESCE(REPLACE(C.SPANNER_TYPE, '"', '\"'), ''), '",',
266 |           '"ordinal_position":', C.ORDINAL_POSITION::TEXT, ',',
267 |           '"is_not_nullable":', CASE WHEN C.IS_NULLABLE = 'NO' THEN 'true' ELSE 'false' END, ',',
268 |           '"column_default":', CASE WHEN C.COLUMN_DEFAULT IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(C.COLUMN_DEFAULT::text, '"', '\"'), '"') END,
269 |           '}'
270 |         ) ORDER BY C.ORDINAL_POSITION
271 |       ) AS columns_json_array_elements
272 |     FROM INFORMATION_SCHEMA.COLUMNS AS C
273 |     WHERE C.TABLE_SCHEMA = 'public'
274 |       AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE C.TABLE_SCHEMA = TI.TABLE_SCHEMA AND C.TABLE_NAME = TI.TABLE_NAME)
275 |     GROUP BY C.TABLE_SCHEMA, C.TABLE_NAME
276 |   ),
277 | 
278 |   constraint_columns_agg_cte AS (
279 |     SELECT
280 |       CONSTRAINT_CATALOG,
281 |       CONSTRAINT_SCHEMA,
282 |       CONSTRAINT_NAME,
283 |       ARRAY_AGG(REPLACE(COLUMN_NAME, '"', '\"') ORDER BY ORDINAL_POSITION) AS column_names_json_list
284 |     FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
285 |     WHERE CONSTRAINT_SCHEMA = 'public'
286 |     GROUP BY CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME
287 |   ),
288 | 
289 |   constraints_info_cte AS (
290 |     SELECT
291 |       TC.TABLE_SCHEMA,
292 |       TC.TABLE_NAME,
293 |       ARRAY_AGG(
294 |         CONCAT(
295 |           '{',
296 |           '"constraint_name":"', COALESCE(REPLACE(TC.CONSTRAINT_NAME, '"', '\"'), ''), '",',
297 |           '"constraint_type":"', COALESCE(REPLACE(TC.CONSTRAINT_TYPE, '"', '\"'), ''), '",',
298 |           '"constraint_definition":',
299 |             CASE TC.CONSTRAINT_TYPE
300 |               WHEN 'CHECK' THEN CASE WHEN CC.CHECK_CLAUSE IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(CC.CHECK_CLAUSE, '"', '\"'), '"') END
301 |               WHEN 'PRIMARY KEY' THEN CONCAT('"', 'PRIMARY KEY (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
302 |               WHEN 'UNIQUE' THEN CONCAT('"', 'UNIQUE (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
303 |               WHEN 'FOREIGN KEY' THEN CONCAT('"', 'FOREIGN KEY (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ') REFERENCES ',
304 |                                       COALESCE(REPLACE(RefKeyTable.TABLE_NAME, '"', '\"'), ''),
305 |                                       ' (', array_to_string(COALESCE(RefKeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
306 |               ELSE 'null'
307 |             END, ',',
308 |           '"constraint_columns":["', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ','), '"],',
309 |           '"foreign_key_referenced_table":', CASE WHEN RefKeyTable.TABLE_NAME IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(RefKeyTable.TABLE_NAME, '"', '\"'), '"') END, ',',
310 |           '"foreign_key_referenced_columns":["', array_to_string(COALESCE(RefKeyCols.column_names_json_list, ARRAY[]::text[]), ','), '"]',
311 |           '}'
312 |         ) ORDER BY TC.CONSTRAINT_NAME
313 |       ) AS constraints_json_array_elements
314 |     FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC
315 |     LEFT JOIN INFORMATION_SCHEMA.CHECK_CONSTRAINTS AS CC
316 |       ON TC.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
317 |     LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS RC
318 |       ON TC.CONSTRAINT_CATALOG = RC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = RC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = RC.CONSTRAINT_NAME
319 |     LEFT JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS RefConstraint
320 |       ON RC.UNIQUE_CONSTRAINT_CATALOG = RefConstraint.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefConstraint.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefConstraint.CONSTRAINT_NAME
321 |     LEFT JOIN INFORMATION_SCHEMA.TABLES AS RefKeyTable
322 |       ON RefConstraint.TABLE_CATALOG = RefKeyTable.TABLE_CATALOG AND RefConstraint.TABLE_SCHEMA = RefKeyTable.TABLE_SCHEMA AND RefConstraint.TABLE_NAME = RefKeyTable.TABLE_NAME
323 |     LEFT JOIN constraint_columns_agg_cte AS KeyCols
324 |       ON TC.CONSTRAINT_CATALOG = KeyCols.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = KeyCols.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = KeyCols.CONSTRAINT_NAME
325 |     LEFT JOIN constraint_columns_agg_cte AS RefKeyCols
326 |       ON RC.UNIQUE_CONSTRAINT_CATALOG = RefKeyCols.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefKeyCols.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefKeyCols.CONSTRAINT_NAME AND TC.CONSTRAINT_TYPE = 'FOREIGN KEY'
327 |     WHERE TC.TABLE_SCHEMA = 'public'
328 |       AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE TC.TABLE_SCHEMA = TI.TABLE_SCHEMA AND TC.TABLE_NAME = TI.TABLE_NAME)
329 |     GROUP BY TC.TABLE_SCHEMA, TC.TABLE_NAME
330 |   ),
331 | 
332 |   index_key_columns_agg_cte AS (
333 |     SELECT
334 |       TABLE_CATALOG,
335 |       TABLE_SCHEMA,
336 |       TABLE_NAME,
337 |       INDEX_NAME,
338 |       ARRAY_AGG(
339 |         CONCAT(
340 |           '{"column_name":"', COALESCE(REPLACE(COLUMN_NAME, '"', '\"'), ''), '",',
341 |           '"ordering":"', COALESCE(REPLACE(COLUMN_ORDERING, '"', '\"'), ''), '"}'
342 |         ) ORDER BY ORDINAL_POSITION
343 |       ) AS key_column_json_details
344 |     FROM INFORMATION_SCHEMA.INDEX_COLUMNS
345 |     WHERE ORDINAL_POSITION IS NOT NULL
346 |       AND TABLE_SCHEMA = 'public'
347 |     GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
348 |   ),
349 | 
350 |   index_storing_columns_agg_cte AS (
351 |     SELECT
352 |       TABLE_CATALOG,
353 |       TABLE_SCHEMA,
354 |       TABLE_NAME,
355 |       INDEX_NAME,
356 |       ARRAY_AGG(CONCAT('"', REPLACE(COLUMN_NAME, '"', '\"'), '"') ORDER BY COLUMN_NAME) AS storing_column_json_names
357 |     FROM INFORMATION_SCHEMA.INDEX_COLUMNS
358 |     WHERE ORDINAL_POSITION IS NULL
359 |       AND TABLE_SCHEMA = 'public'
360 |     GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
361 |   ),
362 | 
363 |   indexes_info_cte AS (
364 |     SELECT
365 |       I.TABLE_SCHEMA,
366 |       I.TABLE_NAME,
367 |       ARRAY_AGG(
368 |         CONCAT(
369 |           '{',
370 |           '"index_name":"', COALESCE(REPLACE(I.INDEX_NAME, '"', '\"'), ''), '",',
371 |           '"index_type":"', COALESCE(REPLACE(I.INDEX_TYPE, '"', '\"'), ''), '",',
372 |           '"is_unique":', CASE WHEN I.IS_UNIQUE = 'YES' THEN 'true' ELSE 'false' END, ',',
373 |           '"is_null_filtered":', CASE WHEN I.IS_NULL_FILTERED = 'YES' THEN 'true' ELSE 'false' END, ',',
374 |           '"interleaved_in_table":', CASE WHEN I.PARENT_TABLE_NAME IS NULL OR I.PARENT_TABLE_NAME = '' THEN 'null' ELSE CONCAT('"', REPLACE(I.PARENT_TABLE_NAME, '"', '\"'), '"') END, ',',
375 |           '"index_key_columns":[', COALESCE(array_to_string(KeyIndexCols.key_column_json_details, ','), ''), '],',
376 |           '"storing_columns":[', COALESCE(array_to_string(StoringIndexCols.storing_column_json_names, ','), ''), ']',
377 |           '}'
378 |         ) ORDER BY I.INDEX_NAME
379 |       ) AS indexes_json_array_elements
380 |     FROM INFORMATION_SCHEMA.INDEXES AS I
381 |     LEFT JOIN index_key_columns_agg_cte AS KeyIndexCols
382 |       ON I.TABLE_CATALOG = KeyIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = KeyIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = KeyIndexCols.TABLE_NAME AND I.INDEX_NAME = KeyIndexCols.INDEX_NAME
383 |     LEFT JOIN index_storing_columns_agg_cte AS StoringIndexCols
384 |       ON I.TABLE_CATALOG = StoringIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = StoringIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = StoringIndexCols.TABLE_NAME AND I.INDEX_NAME = StoringIndexCols.INDEX_NAME
385 |     AND I.INDEX_TYPE IN ('LOCAL', 'GLOBAL')
386 |     WHERE I.TABLE_SCHEMA = 'public'
387 |       AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE I.TABLE_SCHEMA = TI.TABLE_SCHEMA AND I.TABLE_NAME = TI.TABLE_NAME)
388 |     GROUP BY I.TABLE_SCHEMA, I.TABLE_NAME
389 |   )
390 | 
391 | SELECT
392 |   TI.TABLE_SCHEMA AS schema_name,
393 |   TI.TABLE_NAME AS object_name,
394 |   CASE
395 |     WHEN $2 = 'simple' THEN
396 |       -- IF format is 'simple', return basic JSON
397 |           CONCAT('{"name":"', COALESCE(REPLACE(TI.TABLE_NAME, '"', '\"'), ''), '"}')
398 |     ELSE
399 |       CONCAT(
400 |         '{',
401 |         '"schema_name":"', COALESCE(REPLACE(TI.TABLE_SCHEMA, '"', '\"'), ''), '",',
402 |         '"object_name":"', COALESCE(REPLACE(TI.TABLE_NAME, '"', '\"'), ''), '",',
403 |         '"object_type":"', COALESCE(REPLACE(TI.TABLE_TYPE, '"', '\"'), ''), '",',
404 |         '"columns":[', COALESCE(array_to_string(CI.columns_json_array_elements, ','), ''), '],',
405 |         '"constraints":[', COALESCE(array_to_string(CONSI.constraints_json_array_elements, ','), ''), '],',
406 |         '"indexes":[', COALESCE(array_to_string(II.indexes_json_array_elements, ','), ''), ']',
407 |         '}'
408 |       )
409 |   END AS object_details
410 | FROM table_info_cte AS TI
411 | LEFT JOIN columns_info_cte AS CI
412 |   ON TI.TABLE_SCHEMA = CI.TABLE_SCHEMA AND TI.TABLE_NAME = CI.TABLE_NAME
413 | LEFT JOIN constraints_info_cte AS CONSI
414 |   ON TI.TABLE_SCHEMA = CONSI.TABLE_SCHEMA AND TI.TABLE_NAME = CONSI.TABLE_NAME
415 | LEFT JOIN indexes_info_cte AS II
416 |   ON TI.TABLE_SCHEMA = II.TABLE_SCHEMA AND TI.TABLE_NAME = II.TABLE_NAME
417 | ORDER BY TI.TABLE_SCHEMA, TI.TABLE_NAME`
418 | 
// googleSQLStatement lists base tables in the default ('') schema for the
// GoogleSQL dialect, assembling the same per-table JSON as the PostgreSQL
// statement but using named parameters (@table_names, @output_format).
420 | const googleSQLStatement = `
421 | WITH FilterTableNames AS (
422 |   SELECT DISTINCT TRIM(name) AS TABLE_NAME
423 |   FROM UNNEST(IF(@table_names = '' OR @table_names IS NULL, ['%'], SPLIT(@table_names, ','))) AS name
424 | ),
425 | 
426 | -- 1. Table Information
427 | table_info_cte AS (
428 |   SELECT
429 |     T.TABLE_SCHEMA,
430 |     T.TABLE_NAME,
431 |     T.TABLE_TYPE,
432 |     T.PARENT_TABLE_NAME, -- For interleaved tables
433 |     T.ON_DELETE_ACTION -- For interleaved tables
434 |   FROM INFORMATION_SCHEMA.TABLES AS T
435 |   WHERE
436 |     T.TABLE_SCHEMA = ''
437 |     AND T.TABLE_TYPE = 'BASE TABLE'
438 |     AND (EXISTS (SELECT 1 FROM FilterTableNames WHERE FilterTableNames.TABLE_NAME = '%') OR T.TABLE_NAME IN (SELECT TABLE_NAME FROM FilterTableNames))
439 | ),
440 | 
441 | -- 2. Column Information (with JSON string for each column)
442 | columns_info_cte AS (
443 |   SELECT
444 |     C.TABLE_SCHEMA,
445 |     C.TABLE_NAME,
446 |     ARRAY_AGG(
447 |       CONCAT(
448 |         '{',
449 |         '"column_name":"', IFNULL(C.COLUMN_NAME, ''), '",',
450 |         '"data_type":"', IFNULL(C.SPANNER_TYPE, ''), '",',
451 |         '"ordinal_position":', CAST(C.ORDINAL_POSITION AS STRING), ',',
452 |         '"is_not_nullable":', IF(C.IS_NULLABLE = 'NO', 'true', 'false'), ',',
453 |         '"column_default":', IF(C.COLUMN_DEFAULT IS NULL, 'null', CONCAT('"', C.COLUMN_DEFAULT, '"')),
454 |         '}'
455 |       ) ORDER BY C.ORDINAL_POSITION
456 |     ) AS columns_json_array_elements
457 |   FROM INFORMATION_SCHEMA.COLUMNS AS C
458 |   WHERE EXISTS (SELECT 1 FROM table_info_cte TI WHERE C.TABLE_SCHEMA = TI.TABLE_SCHEMA AND C.TABLE_NAME = TI.TABLE_NAME)
459 |   GROUP BY C.TABLE_SCHEMA, C.TABLE_NAME
460 | ),
461 | 
462 | -- Helper CTE for aggregating constraint columns
463 | constraint_columns_agg_cte AS (
464 |   SELECT
465 |     CONSTRAINT_CATALOG,
466 |     CONSTRAINT_SCHEMA,
467 |     CONSTRAINT_NAME,
468 |     ARRAY_AGG(REPLACE(COLUMN_NAME, '"', '\"') ORDER BY ORDINAL_POSITION) AS column_names_json_list
469 |   FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
470 |   GROUP BY CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME
471 | ),
472 | 
473 | -- 3. Constraint Information (with JSON string for each constraint)
474 | constraints_info_cte AS (
475 |   SELECT
476 |     TC.TABLE_SCHEMA,
477 |     TC.TABLE_NAME,
478 |     ARRAY_AGG(
479 |       CONCAT(
480 |         '{',
481 |         '"constraint_name":"', IFNULL(TC.CONSTRAINT_NAME, ''), '",',
482 |         '"constraint_type":"', IFNULL(TC.CONSTRAINT_TYPE, ''), '",',
483 |         '"constraint_definition":',
484 |           CASE TC.CONSTRAINT_TYPE
485 |             WHEN 'CHECK' THEN IF(CC.CHECK_CLAUSE IS NULL, 'null', CONCAT('"', CC.CHECK_CLAUSE, '"'))
486 |             WHEN 'PRIMARY KEY' THEN CONCAT('"', 'PRIMARY KEY (', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ', '), ')', '"')
487 |             WHEN 'UNIQUE' THEN CONCAT('"', 'UNIQUE (', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ', '), ')', '"')
488 |             WHEN 'FOREIGN KEY' THEN CONCAT('"', 'FOREIGN KEY (', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ', '), ') REFERENCES ',
489 |                                     IFNULL(RefKeyTable.TABLE_NAME, ''),
490 |                                     ' (', ARRAY_TO_STRING(COALESCE(RefKeyCols.column_names_json_list, []), ', '), ')', '"')
491 |             ELSE 'null'
492 |           END, ',',
493 |         '"constraint_columns":["', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ','), '"],',
494 |         '"foreign_key_referenced_table":', IF(RefKeyTable.TABLE_NAME IS NULL, 'null', CONCAT('"', RefKeyTable.TABLE_NAME, '"')), ',',
495 |         '"foreign_key_referenced_columns":["', ARRAY_TO_STRING(COALESCE(RefKeyCols.column_names_json_list, []), ','), '"]',
496 |         '}'
497 |       ) ORDER BY TC.CONSTRAINT_NAME
498 |     ) AS constraints_json_array_elements
499 |   FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC
500 |   LEFT JOIN INFORMATION_SCHEMA.CHECK_CONSTRAINTS AS CC
501 |     ON TC.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
502 |   LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS RC
503 |     ON TC.CONSTRAINT_CATALOG = RC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = RC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = RC.CONSTRAINT_NAME
504 |   LEFT JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS RefConstraint
505 |     ON RC.UNIQUE_CONSTRAINT_CATALOG = RefConstraint.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefConstraint.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefConstraint.CONSTRAINT_NAME
506 |   LEFT JOIN INFORMATION_SCHEMA.TABLES AS RefKeyTable
507 |     ON RefConstraint.TABLE_CATALOG = RefKeyTable.TABLE_CATALOG AND RefConstraint.TABLE_SCHEMA = RefKeyTable.TABLE_SCHEMA AND RefConstraint.TABLE_NAME = RefKeyTable.TABLE_NAME
508 |   LEFT JOIN constraint_columns_agg_cte AS KeyCols
509 |     ON TC.CONSTRAINT_CATALOG = KeyCols.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = KeyCols.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = KeyCols.CONSTRAINT_NAME
510 |   LEFT JOIN constraint_columns_agg_cte AS RefKeyCols
511 |     ON RC.UNIQUE_CONSTRAINT_CATALOG = RefKeyCols.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefKeyCols.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefKeyCols.CONSTRAINT_NAME AND TC.CONSTRAINT_TYPE = 'FOREIGN KEY'
512 |   WHERE EXISTS (SELECT 1 FROM table_info_cte TI WHERE TC.TABLE_SCHEMA = TI.TABLE_SCHEMA AND TC.TABLE_NAME = TI.TABLE_NAME)
513 |   GROUP BY TC.TABLE_SCHEMA, TC.TABLE_NAME
514 | ),
515 | 
516 | -- Helper CTE for aggregating index key columns (as JSON strings)
517 | index_key_columns_agg_cte AS (
518 |   SELECT
519 |     TABLE_CATALOG,
520 |     TABLE_SCHEMA,
521 |     TABLE_NAME,
522 |     INDEX_NAME,
523 |     ARRAY_AGG(
524 |       CONCAT(
525 |         '{"column_name":"', IFNULL(COLUMN_NAME, ''), '",',
526 |         '"ordering":"', IFNULL(COLUMN_ORDERING, ''), '"}'
527 |       ) ORDER BY ORDINAL_POSITION
528 |     ) AS key_column_json_details
529 |   FROM INFORMATION_SCHEMA.INDEX_COLUMNS
530 |   WHERE ORDINAL_POSITION IS NOT NULL -- Key columns
531 |   GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
532 | ),
533 | 
534 | -- Helper CTE for aggregating index storing columns (as JSON strings)
535 | index_storing_columns_agg_cte AS (
536 |   SELECT
537 |     TABLE_CATALOG,
538 |     TABLE_SCHEMA,
539 |     TABLE_NAME,
540 |     INDEX_NAME,
541 |     ARRAY_AGG(CONCAT('"', COLUMN_NAME, '"') ORDER BY COLUMN_NAME) AS storing_column_json_names
542 |   FROM INFORMATION_SCHEMA.INDEX_COLUMNS
543 |   WHERE ORDINAL_POSITION IS NULL -- Storing columns
544 |   GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
545 | ),
546 | 
547 | -- 4. Index Information (with JSON string for each index)
548 | indexes_info_cte AS (
549 |   SELECT
550 |     I.TABLE_SCHEMA,
551 |     I.TABLE_NAME,
552 |     ARRAY_AGG(
553 |       CONCAT(
554 |         '{',
555 |         '"index_name":"', IFNULL(I.INDEX_NAME, ''), '",',
556 |         '"index_type":"', IFNULL(I.INDEX_TYPE, ''), '",',
557 |         '"is_unique":', IF(I.IS_UNIQUE, 'true', 'false'), ',',
558 |         '"is_null_filtered":', IF(I.IS_NULL_FILTERED, 'true', 'false'), ',',
559 |         '"interleaved_in_table":', IF(I.PARENT_TABLE_NAME IS NULL, 'null', CONCAT('"', I.PARENT_TABLE_NAME, '"')), ',',
560 |         '"index_key_columns":[', ARRAY_TO_STRING(COALESCE(KeyIndexCols.key_column_json_details, []), ','), '],',
561 |         '"storing_columns":[', ARRAY_TO_STRING(COALESCE(StoringIndexCols.storing_column_json_names, []), ','), ']',
562 |         '}'
563 |       ) ORDER BY I.INDEX_NAME
564 |     ) AS indexes_json_array_elements
565 |   FROM INFORMATION_SCHEMA.INDEXES AS I
566 |   LEFT JOIN index_key_columns_agg_cte AS KeyIndexCols
567 |     ON I.TABLE_CATALOG = KeyIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = KeyIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = KeyIndexCols.TABLE_NAME AND I.INDEX_NAME = KeyIndexCols.INDEX_NAME
568 |   LEFT JOIN index_storing_columns_agg_cte AS StoringIndexCols
569 |     ON I.TABLE_CATALOG = StoringIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = StoringIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = StoringIndexCols.TABLE_NAME AND I.INDEX_NAME = StoringIndexCols.INDEX_NAME AND I.INDEX_TYPE = 'INDEX'
570 |   WHERE EXISTS (SELECT 1 FROM table_info_cte TI WHERE I.TABLE_SCHEMA = TI.TABLE_SCHEMA AND I.TABLE_NAME = TI.TABLE_NAME)
571 |   GROUP BY I.TABLE_SCHEMA, I.TABLE_NAME
572 | )
573 | 
574 | -- Final SELECT to build the JSON output
575 | SELECT
576 |   TI.TABLE_SCHEMA AS schema_name,
577 |   TI.TABLE_NAME AS object_name,
578 |   CASE
579 |     WHEN @output_format = 'simple' THEN
580 |       -- IF format is 'simple', return basic JSON
581 |           CONCAT('{"name":"', IFNULL(REPLACE(TI.TABLE_NAME, '"', '\"'), ''), '"}')
582 |     ELSE
583 |       CONCAT(
584 |         '{',
585 |         '"schema_name":"', IFNULL(TI.TABLE_SCHEMA, ''), '",',
586 |         '"object_name":"', IFNULL(TI.TABLE_NAME, ''), '",',
587 |         '"object_type":"', IFNULL(TI.TABLE_TYPE, ''), '",',
588 |         '"columns":[', ARRAY_TO_STRING(COALESCE(CI.columns_json_array_elements, []), ','), '],',
589 |         '"constraints":[', ARRAY_TO_STRING(COALESCE(CONSI.constraints_json_array_elements, []), ','), '],',
590 |         '"indexes":[', ARRAY_TO_STRING(COALESCE(II.indexes_json_array_elements, []), ','), ']',
591 |         '}'
592 |       )
593 |   END AS object_details
594 | FROM table_info_cte AS TI
595 | LEFT JOIN columns_info_cte AS CI
596 |   ON TI.TABLE_SCHEMA = CI.TABLE_SCHEMA AND TI.TABLE_NAME = CI.TABLE_NAME
597 | LEFT JOIN constraints_info_cte AS CONSI
598 |   ON TI.TABLE_SCHEMA = CONSI.TABLE_SCHEMA AND TI.TABLE_NAME = CONSI.TABLE_NAME
599 | LEFT JOIN indexes_info_cte AS II
600 |   ON TI.TABLE_SCHEMA = II.TABLE_SCHEMA AND TI.TABLE_NAME = II.TABLE_NAME
601 | ORDER BY TI.TABLE_SCHEMA, TI.TABLE_NAME`
602 | 
```
Page 39/52FirstPrevNextLast