This is page 27 of 35. Use http://codebase.md/googleapis/genai-toolbox?lines=false&page={x} to view the full context.
# Directory Structure
```
├── .ci
│ ├── continuous.release.cloudbuild.yaml
│ ├── generate_release_table.sh
│ ├── integration.cloudbuild.yaml
│ ├── quickstart_test
│ │ ├── go.integration.cloudbuild.yaml
│ │ ├── js.integration.cloudbuild.yaml
│ │ ├── py.integration.cloudbuild.yaml
│ │ ├── run_go_tests.sh
│ │ ├── run_js_tests.sh
│ │ ├── run_py_tests.sh
│ │ └── setup_hotels_sample.sql
│ ├── test_with_coverage.sh
│ └── versioned.release.cloudbuild.yaml
├── .github
│ ├── auto-label.yaml
│ ├── blunderbuss.yml
│ ├── CODEOWNERS
│ ├── header-checker-lint.yml
│ ├── ISSUE_TEMPLATE
│ │ ├── bug_report.yml
│ │ ├── config.yml
│ │ ├── feature_request.yml
│ │ └── question.yml
│ ├── label-sync.yml
│ ├── labels.yaml
│ ├── PULL_REQUEST_TEMPLATE.md
│ ├── release-please.yml
│ ├── renovate.json5
│ ├── sync-repo-settings.yaml
│ └── workflows
│ ├── cloud_build_failure_reporter.yml
│ ├── deploy_dev_docs.yaml
│ ├── deploy_previous_version_docs.yaml
│ ├── deploy_versioned_docs.yaml
│ ├── docs_deploy.yaml
│ ├── docs_preview_clean.yaml
│ ├── docs_preview_deploy.yaml
│ ├── lint.yaml
│ ├── schedule_reporter.yml
│ ├── sync-labels.yaml
│ └── tests.yaml
├── .gitignore
├── .gitmodules
├── .golangci.yaml
├── .hugo
│ ├── archetypes
│ │ └── default.md
│ ├── assets
│ │ ├── icons
│ │ │ └── logo.svg
│ │ └── scss
│ │ ├── _styles_project.scss
│ │ └── _variables_project.scss
│ ├── go.mod
│ ├── go.sum
│ ├── hugo.toml
│ ├── layouts
│ │ ├── _default
│ │ │ └── home.releases.releases
│ │ ├── index.llms-full.txt
│ │ ├── index.llms.txt
│ │ ├── partials
│ │ │ ├── hooks
│ │ │ │ └── head-end.html
│ │ │ ├── navbar-version-selector.html
│ │ │ ├── page-meta-links.html
│ │ │ └── td
│ │ │ └── render-heading.html
│ │ ├── robot.txt
│ │ └── shortcodes
│ │ ├── include.html
│ │ ├── ipynb.html
│ │ └── regionInclude.html
│ ├── package-lock.json
│ ├── package.json
│ └── static
│ ├── favicons
│ │ ├── android-chrome-192x192.png
│ │ ├── android-chrome-512x512.png
│ │ ├── apple-touch-icon.png
│ │ ├── favicon-16x16.png
│ │ ├── favicon-32x32.png
│ │ └── favicon.ico
│ └── js
│ └── w3.js
├── CHANGELOG.md
├── cmd
│ ├── options_test.go
│ ├── options.go
│ ├── root_test.go
│ ├── root.go
│ └── version.txt
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── DEVELOPER.md
├── Dockerfile
├── docs
│ └── en
│ ├── _index.md
│ ├── about
│ │ ├── _index.md
│ │ └── faq.md
│ ├── concepts
│ │ ├── _index.md
│ │ └── telemetry
│ │ ├── index.md
│ │ ├── telemetry_flow.png
│ │ └── telemetry_traces.png
│ ├── getting-started
│ │ ├── _index.md
│ │ ├── colab_quickstart.ipynb
│ │ ├── configure.md
│ │ ├── introduction
│ │ │ ├── _index.md
│ │ │ └── architecture.png
│ │ ├── local_quickstart_go.md
│ │ ├── local_quickstart_js.md
│ │ ├── local_quickstart.md
│ │ ├── mcp_quickstart
│ │ │ ├── _index.md
│ │ │ ├── inspector_tools.png
│ │ │ └── inspector.png
│ │ └── quickstart
│ │ ├── go
│ │ │ ├── genAI
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── genkit
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── langchain
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── openAI
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ └── quickstart_test.go
│ │ ├── golden.txt
│ │ ├── js
│ │ │ ├── genAI
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── genkit
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── langchain
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── llamaindex
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ └── quickstart.test.js
│ │ ├── python
│ │ │ ├── __init__.py
│ │ │ ├── adk
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── core
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── langchain
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── llamaindex
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ └── quickstart_test.py
│ │ └── shared
│ │ ├── cloud_setup.md
│ │ ├── configure_toolbox.md
│ │ └── database_setup.md
│ ├── how-to
│ │ ├── _index.md
│ │ ├── connect_via_geminicli.md
│ │ ├── connect_via_mcp.md
│ │ ├── connect-ide
│ │ │ ├── _index.md
│ │ │ ├── alloydb_pg_admin_mcp.md
│ │ │ ├── alloydb_pg_mcp.md
│ │ │ ├── bigquery_mcp.md
│ │ │ ├── cloud_sql_mssql_admin_mcp.md
│ │ │ ├── cloud_sql_mssql_mcp.md
│ │ │ ├── cloud_sql_mysql_admin_mcp.md
│ │ │ ├── cloud_sql_mysql_mcp.md
│ │ │ ├── cloud_sql_pg_admin_mcp.md
│ │ │ ├── cloud_sql_pg_mcp.md
│ │ │ ├── firestore_mcp.md
│ │ │ ├── looker_mcp.md
│ │ │ ├── mssql_mcp.md
│ │ │ ├── mysql_mcp.md
│ │ │ ├── neo4j_mcp.md
│ │ │ ├── postgres_mcp.md
│ │ │ ├── spanner_mcp.md
│ │ │ └── sqlite_mcp.md
│ │ ├── deploy_docker.md
│ │ ├── deploy_gke.md
│ │ ├── deploy_toolbox.md
│ │ ├── export_telemetry.md
│ │ └── toolbox-ui
│ │ ├── edit-headers.gif
│ │ ├── edit-headers.png
│ │ ├── index.md
│ │ ├── optional-param-checked.png
│ │ ├── optional-param-unchecked.png
│ │ ├── run-tool.gif
│ │ ├── tools.png
│ │ └── toolsets.png
│ ├── reference
│ │ ├── _index.md
│ │ ├── cli.md
│ │ └── prebuilt-tools.md
│ ├── resources
│ │ ├── _index.md
│ │ ├── authServices
│ │ │ ├── _index.md
│ │ │ └── google.md
│ │ ├── sources
│ │ │ ├── _index.md
│ │ │ ├── alloydb-admin.md
│ │ │ ├── alloydb-pg.md
│ │ │ ├── bigquery.md
│ │ │ ├── bigtable.md
│ │ │ ├── cassandra.md
│ │ │ ├── clickhouse.md
│ │ │ ├── cloud-monitoring.md
│ │ │ ├── cloud-sql-admin.md
│ │ │ ├── cloud-sql-mssql.md
│ │ │ ├── cloud-sql-mysql.md
│ │ │ ├── cloud-sql-pg.md
│ │ │ ├── couchbase.md
│ │ │ ├── dataplex.md
│ │ │ ├── dgraph.md
│ │ │ ├── firebird.md
│ │ │ ├── firestore.md
│ │ │ ├── http.md
│ │ │ ├── looker.md
│ │ │ ├── mongodb.md
│ │ │ ├── mssql.md
│ │ │ ├── mysql.md
│ │ │ ├── neo4j.md
│ │ │ ├── oceanbase.md
│ │ │ ├── oracle.md
│ │ │ ├── postgres.md
│ │ │ ├── redis.md
│ │ │ ├── serverless-spark.md
│ │ │ ├── spanner.md
│ │ │ ├── sqlite.md
│ │ │ ├── tidb.md
│ │ │ ├── trino.md
│ │ │ ├── valkey.md
│ │ │ └── yugabytedb.md
│ │ └── tools
│ │ ├── _index.md
│ │ ├── alloydb
│ │ │ ├── _index.md
│ │ │ ├── alloydb-create-cluster.md
│ │ │ ├── alloydb-create-instance.md
│ │ │ ├── alloydb-create-user.md
│ │ │ ├── alloydb-get-cluster.md
│ │ │ ├── alloydb-get-instance.md
│ │ │ ├── alloydb-get-user.md
│ │ │ ├── alloydb-list-clusters.md
│ │ │ ├── alloydb-list-instances.md
│ │ │ ├── alloydb-list-users.md
│ │ │ └── alloydb-wait-for-operation.md
│ │ ├── alloydbainl
│ │ │ ├── _index.md
│ │ │ └── alloydb-ai-nl.md
│ │ ├── bigquery
│ │ │ ├── _index.md
│ │ │ ├── bigquery-analyze-contribution.md
│ │ │ ├── bigquery-conversational-analytics.md
│ │ │ ├── bigquery-execute-sql.md
│ │ │ ├── bigquery-forecast.md
│ │ │ ├── bigquery-get-dataset-info.md
│ │ │ ├── bigquery-get-table-info.md
│ │ │ ├── bigquery-list-dataset-ids.md
│ │ │ ├── bigquery-list-table-ids.md
│ │ │ ├── bigquery-search-catalog.md
│ │ │ └── bigquery-sql.md
│ │ ├── bigtable
│ │ │ ├── _index.md
│ │ │ └── bigtable-sql.md
│ │ ├── cassandra
│ │ │ ├── _index.md
│ │ │ └── cassandra-cql.md
│ │ ├── clickhouse
│ │ │ ├── _index.md
│ │ │ ├── clickhouse-execute-sql.md
│ │ │ ├── clickhouse-list-databases.md
│ │ │ ├── clickhouse-list-tables.md
│ │ │ └── clickhouse-sql.md
│ │ ├── cloudmonitoring
│ │ │ ├── _index.md
│ │ │ └── cloud-monitoring-query-prometheus.md
│ │ ├── cloudsql
│ │ │ ├── _index.md
│ │ │ ├── cloudsqlcreatedatabase.md
│ │ │ ├── cloudsqlcreateusers.md
│ │ │ ├── cloudsqlgetinstances.md
│ │ │ ├── cloudsqllistdatabases.md
│ │ │ ├── cloudsqllistinstances.md
│ │ │ ├── cloudsqlmssqlcreateinstance.md
│ │ │ ├── cloudsqlmysqlcreateinstance.md
│ │ │ ├── cloudsqlpgcreateinstances.md
│ │ │ └── cloudsqlwaitforoperation.md
│ │ ├── couchbase
│ │ │ ├── _index.md
│ │ │ └── couchbase-sql.md
│ │ ├── dataform
│ │ │ ├── _index.md
│ │ │ └── dataform-compile-local.md
│ │ ├── dataplex
│ │ │ ├── _index.md
│ │ │ ├── dataplex-lookup-entry.md
│ │ │ ├── dataplex-search-aspect-types.md
│ │ │ └── dataplex-search-entries.md
│ │ ├── dgraph
│ │ │ ├── _index.md
│ │ │ └── dgraph-dql.md
│ │ ├── firebird
│ │ │ ├── _index.md
│ │ │ ├── firebird-execute-sql.md
│ │ │ └── firebird-sql.md
│ │ ├── firestore
│ │ │ ├── _index.md
│ │ │ ├── firestore-add-documents.md
│ │ │ ├── firestore-delete-documents.md
│ │ │ ├── firestore-get-documents.md
│ │ │ ├── firestore-get-rules.md
│ │ │ ├── firestore-list-collections.md
│ │ │ ├── firestore-query-collection.md
│ │ │ ├── firestore-query.md
│ │ │ ├── firestore-update-document.md
│ │ │ └── firestore-validate-rules.md
│ │ ├── http
│ │ │ ├── _index.md
│ │ │ └── http.md
│ │ ├── looker
│ │ │ ├── _index.md
│ │ │ ├── looker-add-dashboard-element.md
│ │ │ ├── looker-conversational-analytics.md
│ │ │ ├── looker-create-project-file.md
│ │ │ ├── looker-delete-project-file.md
│ │ │ ├── looker-dev-mode.md
│ │ │ ├── looker-get-dashboards.md
│ │ │ ├── looker-get-dimensions.md
│ │ │ ├── looker-get-explores.md
│ │ │ ├── looker-get-filters.md
│ │ │ ├── looker-get-looks.md
│ │ │ ├── looker-get-measures.md
│ │ │ ├── looker-get-models.md
│ │ │ ├── looker-get-parameters.md
│ │ │ ├── looker-get-project-file.md
│ │ │ ├── looker-get-project-files.md
│ │ │ ├── looker-get-projects.md
│ │ │ ├── looker-health-analyze.md
│ │ │ ├── looker-health-pulse.md
│ │ │ ├── looker-health-vacuum.md
│ │ │ ├── looker-make-dashboard.md
│ │ │ ├── looker-make-look.md
│ │ │ ├── looker-query-sql.md
│ │ │ ├── looker-query-url.md
│ │ │ ├── looker-query.md
│ │ │ ├── looker-run-look.md
│ │ │ └── looker-update-project-file.md
│ │ ├── mongodb
│ │ │ ├── _index.md
│ │ │ ├── mongodb-aggregate.md
│ │ │ ├── mongodb-delete-many.md
│ │ │ ├── mongodb-delete-one.md
│ │ │ ├── mongodb-find-one.md
│ │ │ ├── mongodb-find.md
│ │ │ ├── mongodb-insert-many.md
│ │ │ ├── mongodb-insert-one.md
│ │ │ ├── mongodb-update-many.md
│ │ │ └── mongodb-update-one.md
│ │ ├── mssql
│ │ │ ├── _index.md
│ │ │ ├── mssql-execute-sql.md
│ │ │ ├── mssql-list-tables.md
│ │ │ └── mssql-sql.md
│ │ ├── mysql
│ │ │ ├── _index.md
│ │ │ ├── mysql-execute-sql.md
│ │ │ ├── mysql-list-active-queries.md
│ │ │ ├── mysql-list-table-fragmentation.md
│ │ │ ├── mysql-list-tables-missing-unique-indexes.md
│ │ │ ├── mysql-list-tables.md
│ │ │ └── mysql-sql.md
│ │ ├── neo4j
│ │ │ ├── _index.md
│ │ │ ├── neo4j-cypher.md
│ │ │ ├── neo4j-execute-cypher.md
│ │ │ └── neo4j-schema.md
│ │ ├── oceanbase
│ │ │ ├── _index.md
│ │ │ ├── oceanbase-execute-sql.md
│ │ │ └── oceanbase-sql.md
│ │ ├── oracle
│ │ │ ├── _index.md
│ │ │ ├── oracle-execute-sql.md
│ │ │ └── oracle-sql.md
│ │ ├── postgres
│ │ │ ├── _index.md
│ │ │ ├── postgres-execute-sql.md
│ │ │ ├── postgres-list-active-queries.md
│ │ │ ├── postgres-list-available-extensions.md
│ │ │ ├── postgres-list-installed-extensions.md
│ │ │ ├── postgres-list-tables.md
│ │ │ └── postgres-sql.md
│ │ ├── redis
│ │ │ ├── _index.md
│ │ │ └── redis.md
│ │ ├── serverless-spark
│ │ │ ├── _index.md
│ │ │ └── serverless-spark-list-batches.md
│ │ ├── spanner
│ │ │ ├── _index.md
│ │ │ ├── spanner-execute-sql.md
│ │ │ ├── spanner-list-tables.md
│ │ │ └── spanner-sql.md
│ │ ├── sqlite
│ │ │ ├── _index.md
│ │ │ ├── sqlite-execute-sql.md
│ │ │ └── sqlite-sql.md
│ │ ├── tidb
│ │ │ ├── _index.md
│ │ │ ├── tidb-execute-sql.md
│ │ │ └── tidb-sql.md
│ │ ├── trino
│ │ │ ├── _index.md
│ │ │ ├── trino-execute-sql.md
│ │ │ └── trino-sql.md
│ │ ├── utility
│ │ │ ├── _index.md
│ │ │ └── wait.md
│ │ ├── valkey
│ │ │ ├── _index.md
│ │ │ └── valkey.md
│ │ └── yuagbytedb
│ │ ├── _index.md
│ │ └── yugabytedb-sql.md
│ ├── samples
│ │ ├── _index.md
│ │ ├── alloydb
│ │ │ ├── _index.md
│ │ │ ├── ai-nl
│ │ │ │ ├── alloydb_ai_nl.ipynb
│ │ │ │ └── index.md
│ │ │ └── mcp_quickstart.md
│ │ ├── bigquery
│ │ │ ├── _index.md
│ │ │ ├── colab_quickstart_bigquery.ipynb
│ │ │ ├── local_quickstart.md
│ │ │ └── mcp_quickstart
│ │ │ ├── _index.md
│ │ │ ├── inspector_tools.png
│ │ │ └── inspector.png
│ │ └── looker
│ │ ├── _index.md
│ │ ├── looker_gemini_oauth
│ │ │ ├── _index.md
│ │ │ ├── authenticated.png
│ │ │ ├── authorize.png
│ │ │ └── registration.png
│ │ ├── looker_gemini.md
│ │ └── looker_mcp_inspector
│ │ ├── _index.md
│ │ ├── inspector_tools.png
│ │ └── inspector.png
│ └── sdks
│ ├── _index.md
│ ├── go-sdk.md
│ ├── js-sdk.md
│ └── python-sdk.md
├── gemini-extension.json
├── go.mod
├── go.sum
├── internal
│ ├── auth
│ │ ├── auth.go
│ │ └── google
│ │ └── google.go
│ ├── log
│ │ ├── handler.go
│ │ ├── log_test.go
│ │ ├── log.go
│ │ └── logger.go
│ ├── prebuiltconfigs
│ │ ├── prebuiltconfigs_test.go
│ │ ├── prebuiltconfigs.go
│ │ └── tools
│ │ ├── alloydb-postgres-admin.yaml
│ │ ├── alloydb-postgres-observability.yaml
│ │ ├── alloydb-postgres.yaml
│ │ ├── bigquery.yaml
│ │ ├── clickhouse.yaml
│ │ ├── cloud-sql-mssql-admin.yaml
│ │ ├── cloud-sql-mssql-observability.yaml
│ │ ├── cloud-sql-mssql.yaml
│ │ ├── cloud-sql-mysql-admin.yaml
│ │ ├── cloud-sql-mysql-observability.yaml
│ │ ├── cloud-sql-mysql.yaml
│ │ ├── cloud-sql-postgres-admin.yaml
│ │ ├── cloud-sql-postgres-observability.yaml
│ │ ├── cloud-sql-postgres.yaml
│ │ ├── dataplex.yaml
│ │ ├── firestore.yaml
│ │ ├── looker-conversational-analytics.yaml
│ │ ├── looker.yaml
│ │ ├── mssql.yaml
│ │ ├── mysql.yaml
│ │ ├── neo4j.yaml
│ │ ├── oceanbase.yaml
│ │ ├── postgres.yaml
│ │ ├── serverless-spark.yaml
│ │ ├── spanner-postgres.yaml
│ │ ├── spanner.yaml
│ │ └── sqlite.yaml
│ ├── server
│ │ ├── api_test.go
│ │ ├── api.go
│ │ ├── common_test.go
│ │ ├── config.go
│ │ ├── mcp
│ │ │ ├── jsonrpc
│ │ │ │ ├── jsonrpc_test.go
│ │ │ │ └── jsonrpc.go
│ │ │ ├── mcp.go
│ │ │ ├── util
│ │ │ │ └── lifecycle.go
│ │ │ ├── v20241105
│ │ │ │ ├── method.go
│ │ │ │ └── types.go
│ │ │ ├── v20250326
│ │ │ │ ├── method.go
│ │ │ │ └── types.go
│ │ │ └── v20250618
│ │ │ ├── method.go
│ │ │ └── types.go
│ │ ├── mcp_test.go
│ │ ├── mcp.go
│ │ ├── server_test.go
│ │ ├── server.go
│ │ ├── static
│ │ │ ├── assets
│ │ │ │ └── mcptoolboxlogo.png
│ │ │ ├── css
│ │ │ │ └── style.css
│ │ │ ├── index.html
│ │ │ ├── js
│ │ │ │ ├── auth.js
│ │ │ │ ├── loadTools.js
│ │ │ │ ├── mainContent.js
│ │ │ │ ├── navbar.js
│ │ │ │ ├── runTool.js
│ │ │ │ ├── toolDisplay.js
│ │ │ │ ├── tools.js
│ │ │ │ └── toolsets.js
│ │ │ ├── tools.html
│ │ │ └── toolsets.html
│ │ ├── web_test.go
│ │ └── web.go
│ ├── sources
│ │ ├── alloydbadmin
│ │ │ ├── alloydbadmin_test.go
│ │ │ └── alloydbadmin.go
│ │ ├── alloydbpg
│ │ │ ├── alloydb_pg_test.go
│ │ │ └── alloydb_pg.go
│ │ ├── bigquery
│ │ │ ├── bigquery_test.go
│ │ │ └── bigquery.go
│ │ ├── bigtable
│ │ │ ├── bigtable_test.go
│ │ │ └── bigtable.go
│ │ ├── cassandra
│ │ │ ├── cassandra_test.go
│ │ │ └── cassandra.go
│ │ ├── clickhouse
│ │ │ ├── clickhouse_test.go
│ │ │ └── clickhouse.go
│ │ ├── cloudmonitoring
│ │ │ ├── cloud_monitoring_test.go
│ │ │ └── cloud_monitoring.go
│ │ ├── cloudsqladmin
│ │ │ ├── cloud_sql_admin_test.go
│ │ │ └── cloud_sql_admin.go
│ │ ├── cloudsqlmssql
│ │ │ ├── cloud_sql_mssql_test.go
│ │ │ └── cloud_sql_mssql.go
│ │ ├── cloudsqlmysql
│ │ │ ├── cloud_sql_mysql_test.go
│ │ │ └── cloud_sql_mysql.go
│ │ ├── cloudsqlpg
│ │ │ ├── cloud_sql_pg_test.go
│ │ │ └── cloud_sql_pg.go
│ │ ├── couchbase
│ │ │ ├── couchbase_test.go
│ │ │ └── couchbase.go
│ │ ├── dataplex
│ │ │ ├── dataplex_test.go
│ │ │ └── dataplex.go
│ │ ├── dgraph
│ │ │ ├── dgraph_test.go
│ │ │ └── dgraph.go
│ │ ├── dialect.go
│ │ ├── firebird
│ │ │ ├── firebird_test.go
│ │ │ └── firebird.go
│ │ ├── firestore
│ │ │ ├── firestore_test.go
│ │ │ └── firestore.go
│ │ ├── http
│ │ │ ├── http_test.go
│ │ │ └── http.go
│ │ ├── ip_type.go
│ │ ├── looker
│ │ │ ├── looker_test.go
│ │ │ └── looker.go
│ │ ├── mongodb
│ │ │ ├── mongodb_test.go
│ │ │ └── mongodb.go
│ │ ├── mssql
│ │ │ ├── mssql_test.go
│ │ │ └── mssql.go
│ │ ├── mysql
│ │ │ ├── mysql_test.go
│ │ │ └── mysql.go
│ │ ├── neo4j
│ │ │ ├── neo4j_test.go
│ │ │ └── neo4j.go
│ │ ├── oceanbase
│ │ │ ├── oceanbase_test.go
│ │ │ └── oceanbase.go
│ │ ├── oracle
│ │ │ └── oracle.go
│ │ ├── postgres
│ │ │ ├── postgres_test.go
│ │ │ └── postgres.go
│ │ ├── redis
│ │ │ ├── redis_test.go
│ │ │ └── redis.go
│ │ ├── serverlessspark
│ │ │ ├── serverlessspark_test.go
│ │ │ └── serverlessspark.go
│ │ ├── sources.go
│ │ ├── spanner
│ │ │ ├── spanner_test.go
│ │ │ └── spanner.go
│ │ ├── sqlite
│ │ │ ├── sqlite_test.go
│ │ │ └── sqlite.go
│ │ ├── tidb
│ │ │ ├── tidb_test.go
│ │ │ └── tidb.go
│ │ ├── trino
│ │ │ ├── trino_test.go
│ │ │ └── trino.go
│ │ ├── util.go
│ │ ├── valkey
│ │ │ ├── valkey_test.go
│ │ │ └── valkey.go
│ │ └── yugabytedb
│ │ ├── yugabytedb_test.go
│ │ └── yugabytedb.go
│ ├── telemetry
│ │ ├── instrumentation.go
│ │ └── telemetry.go
│ ├── testutils
│ │ └── testutils.go
│ ├── tools
│ │ ├── alloydb
│ │ │ ├── alloydbcreatecluster
│ │ │ │ ├── alloydbcreatecluster_test.go
│ │ │ │ └── alloydbcreatecluster.go
│ │ │ ├── alloydbcreateinstance
│ │ │ │ ├── alloydbcreateinstance_test.go
│ │ │ │ └── alloydbcreateinstance.go
│ │ │ ├── alloydbcreateuser
│ │ │ │ ├── alloydbcreateuser_test.go
│ │ │ │ └── alloydbcreateuser.go
│ │ │ ├── alloydbgetcluster
│ │ │ │ ├── alloydbgetcluster_test.go
│ │ │ │ └── alloydbgetcluster.go
│ │ │ ├── alloydbgetinstance
│ │ │ │ ├── alloydbgetinstance_test.go
│ │ │ │ └── alloydbgetinstance.go
│ │ │ ├── alloydbgetuser
│ │ │ │ ├── alloydbgetuser_test.go
│ │ │ │ └── alloydbgetuser.go
│ │ │ ├── alloydblistclusters
│ │ │ │ ├── alloydblistclusters_test.go
│ │ │ │ └── alloydblistclusters.go
│ │ │ ├── alloydblistinstances
│ │ │ │ ├── alloydblistinstances_test.go
│ │ │ │ └── alloydblistinstances.go
│ │ │ ├── alloydblistusers
│ │ │ │ ├── alloydblistusers_test.go
│ │ │ │ └── alloydblistusers.go
│ │ │ └── alloydbwaitforoperation
│ │ │ ├── alloydbwaitforoperation_test.go
│ │ │ └── alloydbwaitforoperation.go
│ │ ├── alloydbainl
│ │ │ ├── alloydbainl_test.go
│ │ │ └── alloydbainl.go
│ │ ├── bigquery
│ │ │ ├── bigqueryanalyzecontribution
│ │ │ │ ├── bigqueryanalyzecontribution_test.go
│ │ │ │ └── bigqueryanalyzecontribution.go
│ │ │ ├── bigquerycommon
│ │ │ │ ├── table_name_parser_test.go
│ │ │ │ ├── table_name_parser.go
│ │ │ │ └── util.go
│ │ │ ├── bigqueryconversationalanalytics
│ │ │ │ ├── bigqueryconversationalanalytics_test.go
│ │ │ │ └── bigqueryconversationalanalytics.go
│ │ │ ├── bigqueryexecutesql
│ │ │ │ ├── bigqueryexecutesql_test.go
│ │ │ │ └── bigqueryexecutesql.go
│ │ │ ├── bigqueryforecast
│ │ │ │ ├── bigqueryforecast_test.go
│ │ │ │ └── bigqueryforecast.go
│ │ │ ├── bigquerygetdatasetinfo
│ │ │ │ ├── bigquerygetdatasetinfo_test.go
│ │ │ │ └── bigquerygetdatasetinfo.go
│ │ │ ├── bigquerygettableinfo
│ │ │ │ ├── bigquerygettableinfo_test.go
│ │ │ │ └── bigquerygettableinfo.go
│ │ │ ├── bigquerylistdatasetids
│ │ │ │ ├── bigquerylistdatasetids_test.go
│ │ │ │ └── bigquerylistdatasetids.go
│ │ │ ├── bigquerylisttableids
│ │ │ │ ├── bigquerylisttableids_test.go
│ │ │ │ └── bigquerylisttableids.go
│ │ │ ├── bigquerysearchcatalog
│ │ │ │ ├── bigquerysearchcatalog_test.go
│ │ │ │ └── bigquerysearchcatalog.go
│ │ │ └── bigquerysql
│ │ │ ├── bigquerysql_test.go
│ │ │ └── bigquerysql.go
│ │ ├── bigtable
│ │ │ ├── bigtable_test.go
│ │ │ └── bigtable.go
│ │ ├── cassandra
│ │ │ └── cassandracql
│ │ │ ├── cassandracql_test.go
│ │ │ └── cassandracql.go
│ │ ├── clickhouse
│ │ │ ├── clickhouseexecutesql
│ │ │ │ ├── clickhouseexecutesql_test.go
│ │ │ │ └── clickhouseexecutesql.go
│ │ │ ├── clickhouselistdatabases
│ │ │ │ ├── clickhouselistdatabases_test.go
│ │ │ │ └── clickhouselistdatabases.go
│ │ │ ├── clickhouselisttables
│ │ │ │ ├── clickhouselisttables_test.go
│ │ │ │ └── clickhouselisttables.go
│ │ │ └── clickhousesql
│ │ │ ├── clickhousesql_test.go
│ │ │ └── clickhousesql.go
│ │ ├── cloudmonitoring
│ │ │ ├── cloudmonitoring_test.go
│ │ │ └── cloudmonitoring.go
│ │ ├── cloudsql
│ │ │ ├── cloudsqlcreatedatabase
│ │ │ │ ├── cloudsqlcreatedatabase_test.go
│ │ │ │ └── cloudsqlcreatedatabase.go
│ │ │ ├── cloudsqlcreateusers
│ │ │ │ ├── cloudsqlcreateusers_test.go
│ │ │ │ └── cloudsqlcreateusers.go
│ │ │ ├── cloudsqlgetinstances
│ │ │ │ ├── cloudsqlgetinstances_test.go
│ │ │ │ └── cloudsqlgetinstances.go
│ │ │ ├── cloudsqllistdatabases
│ │ │ │ ├── cloudsqllistdatabases_test.go
│ │ │ │ └── cloudsqllistdatabases.go
│ │ │ ├── cloudsqllistinstances
│ │ │ │ ├── cloudsqllistinstances_test.go
│ │ │ │ └── cloudsqllistinstances.go
│ │ │ └── cloudsqlwaitforoperation
│ │ │ ├── cloudsqlwaitforoperation_test.go
│ │ │ └── cloudsqlwaitforoperation.go
│ │ ├── cloudsqlmssql
│ │ │ └── cloudsqlmssqlcreateinstance
│ │ │ ├── cloudsqlmssqlcreateinstance_test.go
│ │ │ └── cloudsqlmssqlcreateinstance.go
│ │ ├── cloudsqlmysql
│ │ │ └── cloudsqlmysqlcreateinstance
│ │ │ ├── cloudsqlmysqlcreateinstance_test.go
│ │ │ └── cloudsqlmysqlcreateinstance.go
│ │ ├── cloudsqlpg
│ │ │ └── cloudsqlpgcreateinstances
│ │ │ ├── cloudsqlpgcreateinstances_test.go
│ │ │ └── cloudsqlpgcreateinstances.go
│ │ ├── common_test.go
│ │ ├── common.go
│ │ ├── couchbase
│ │ │ ├── couchbase_test.go
│ │ │ └── couchbase.go
│ │ ├── dataform
│ │ │ └── dataformcompilelocal
│ │ │ ├── dataformcompilelocal_test.go
│ │ │ └── dataformcompilelocal.go
│ │ ├── dataplex
│ │ │ ├── dataplexlookupentry
│ │ │ │ ├── dataplexlookupentry_test.go
│ │ │ │ └── dataplexlookupentry.go
│ │ │ ├── dataplexsearchaspecttypes
│ │ │ │ ├── dataplexsearchaspecttypes_test.go
│ │ │ │ └── dataplexsearchaspecttypes.go
│ │ │ └── dataplexsearchentries
│ │ │ ├── dataplexsearchentries_test.go
│ │ │ └── dataplexsearchentries.go
│ │ ├── dgraph
│ │ │ ├── dgraph_test.go
│ │ │ └── dgraph.go
│ │ ├── firebird
│ │ │ ├── firebirdexecutesql
│ │ │ │ ├── firebirdexecutesql_test.go
│ │ │ │ └── firebirdexecutesql.go
│ │ │ └── firebirdsql
│ │ │ ├── firebirdsql_test.go
│ │ │ └── firebirdsql.go
│ │ ├── firestore
│ │ │ ├── firestoreadddocuments
│ │ │ │ ├── firestoreadddocuments_test.go
│ │ │ │ └── firestoreadddocuments.go
│ │ │ ├── firestoredeletedocuments
│ │ │ │ ├── firestoredeletedocuments_test.go
│ │ │ │ └── firestoredeletedocuments.go
│ │ │ ├── firestoregetdocuments
│ │ │ │ ├── firestoregetdocuments_test.go
│ │ │ │ └── firestoregetdocuments.go
│ │ │ ├── firestoregetrules
│ │ │ │ ├── firestoregetrules_test.go
│ │ │ │ └── firestoregetrules.go
│ │ │ ├── firestorelistcollections
│ │ │ │ ├── firestorelistcollections_test.go
│ │ │ │ └── firestorelistcollections.go
│ │ │ ├── firestorequery
│ │ │ │ ├── firestorequery_test.go
│ │ │ │ └── firestorequery.go
│ │ │ ├── firestorequerycollection
│ │ │ │ ├── firestorequerycollection_test.go
│ │ │ │ └── firestorequerycollection.go
│ │ │ ├── firestoreupdatedocument
│ │ │ │ ├── firestoreupdatedocument_test.go
│ │ │ │ └── firestoreupdatedocument.go
│ │ │ ├── firestorevalidaterules
│ │ │ │ ├── firestorevalidaterules_test.go
│ │ │ │ └── firestorevalidaterules.go
│ │ │ └── util
│ │ │ ├── converter_test.go
│ │ │ ├── converter.go
│ │ │ ├── validator_test.go
│ │ │ └── validator.go
│ │ ├── http
│ │ │ ├── http_test.go
│ │ │ └── http.go
│ │ ├── http_method.go
│ │ ├── looker
│ │ │ ├── lookeradddashboardelement
│ │ │ │ ├── lookeradddashboardelement_test.go
│ │ │ │ └── lookeradddashboardelement.go
│ │ │ ├── lookercommon
│ │ │ │ ├── lookercommon_test.go
│ │ │ │ └── lookercommon.go
│ │ │ ├── lookerconversationalanalytics
│ │ │ │ ├── lookerconversationalanalytics_test.go
│ │ │ │ └── lookerconversationalanalytics.go
│ │ │ ├── lookercreateprojectfile
│ │ │ │ ├── lookercreateprojectfile_test.go
│ │ │ │ └── lookercreateprojectfile.go
│ │ │ ├── lookerdeleteprojectfile
│ │ │ │ ├── lookerdeleteprojectfile_test.go
│ │ │ │ └── lookerdeleteprojectfile.go
│ │ │ ├── lookerdevmode
│ │ │ │ ├── lookerdevmode_test.go
│ │ │ │ └── lookerdevmode.go
│ │ │ ├── lookergetdashboards
│ │ │ │ ├── lookergetdashboards_test.go
│ │ │ │ └── lookergetdashboards.go
│ │ │ ├── lookergetdimensions
│ │ │ │ ├── lookergetdimensions_test.go
│ │ │ │ └── lookergetdimensions.go
│ │ │ ├── lookergetexplores
│ │ │ │ ├── lookergetexplores_test.go
│ │ │ │ └── lookergetexplores.go
│ │ │ ├── lookergetfilters
│ │ │ │ ├── lookergetfilters_test.go
│ │ │ │ └── lookergetfilters.go
│ │ │ ├── lookergetlooks
│ │ │ │ ├── lookergetlooks_test.go
│ │ │ │ └── lookergetlooks.go
│ │ │ ├── lookergetmeasures
│ │ │ │ ├── lookergetmeasures_test.go
│ │ │ │ └── lookergetmeasures.go
│ │ │ ├── lookergetmodels
│ │ │ │ ├── lookergetmodels_test.go
│ │ │ │ └── lookergetmodels.go
│ │ │ ├── lookergetparameters
│ │ │ │ ├── lookergetparameters_test.go
│ │ │ │ └── lookergetparameters.go
│ │ │ ├── lookergetprojectfile
│ │ │ │ ├── lookergetprojectfile_test.go
│ │ │ │ └── lookergetprojectfile.go
│ │ │ ├── lookergetprojectfiles
│ │ │ │ ├── lookergetprojectfiles_test.go
│ │ │ │ └── lookergetprojectfiles.go
│ │ │ ├── lookergetprojects
│ │ │ │ ├── lookergetprojects_test.go
│ │ │ │ └── lookergetprojects.go
│ │ │ ├── lookerhealthanalyze
│ │ │ │ ├── lookerhealthanalyze_test.go
│ │ │ │ └── lookerhealthanalyze.go
│ │ │ ├── lookerhealthpulse
│ │ │ │ ├── lookerhealthpulse_test.go
│ │ │ │ └── lookerhealthpulse.go
│ │ │ ├── lookerhealthvacuum
│ │ │ │ ├── lookerhealthvacuum_test.go
│ │ │ │ └── lookerhealthvacuum.go
│ │ │ ├── lookermakedashboard
│ │ │ │ ├── lookermakedashboard_test.go
│ │ │ │ └── lookermakedashboard.go
│ │ │ ├── lookermakelook
│ │ │ │ ├── lookermakelook_test.go
│ │ │ │ └── lookermakelook.go
│ │ │ ├── lookerquery
│ │ │ │ ├── lookerquery_test.go
│ │ │ │ └── lookerquery.go
│ │ │ ├── lookerquerysql
│ │ │ │ ├── lookerquerysql_test.go
│ │ │ │ └── lookerquerysql.go
│ │ │ ├── lookerqueryurl
│ │ │ │ ├── lookerqueryurl_test.go
│ │ │ │ └── lookerqueryurl.go
│ │ │ ├── lookerrunlook
│ │ │ │ ├── lookerrunlook_test.go
│ │ │ │ └── lookerrunlook.go
│ │ │ └── lookerupdateprojectfile
│ │ │ ├── lookerupdateprojectfile_test.go
│ │ │ └── lookerupdateprojectfile.go
│ │ ├── mongodb
│ │ │ ├── mongodbaggregate
│ │ │ │ ├── mongodbaggregate_test.go
│ │ │ │ └── mongodbaggregate.go
│ │ │ ├── mongodbdeletemany
│ │ │ │ ├── mongodbdeletemany_test.go
│ │ │ │ └── mongodbdeletemany.go
│ │ │ ├── mongodbdeleteone
│ │ │ │ ├── mongodbdeleteone_test.go
│ │ │ │ └── mongodbdeleteone.go
│ │ │ ├── mongodbfind
│ │ │ │ ├── mongodbfind_test.go
│ │ │ │ └── mongodbfind.go
│ │ │ ├── mongodbfindone
│ │ │ │ ├── mongodbfindone_test.go
│ │ │ │ └── mongodbfindone.go
│ │ │ ├── mongodbinsertmany
│ │ │ │ ├── mongodbinsertmany_test.go
│ │ │ │ └── mongodbinsertmany.go
│ │ │ ├── mongodbinsertone
│ │ │ │ ├── mongodbinsertone_test.go
│ │ │ │ └── mongodbinsertone.go
│ │ │ ├── mongodbupdatemany
│ │ │ │ ├── mongodbupdatemany_test.go
│ │ │ │ └── mongodbupdatemany.go
│ │ │ └── mongodbupdateone
│ │ │ ├── mongodbupdateone_test.go
│ │ │ └── mongodbupdateone.go
│ │ ├── mssql
│ │ │ ├── mssqlexecutesql
│ │ │ │ ├── mssqlexecutesql_test.go
│ │ │ │ └── mssqlexecutesql.go
│ │ │ ├── mssqllisttables
│ │ │ │ ├── mssqllisttables_test.go
│ │ │ │ └── mssqllisttables.go
│ │ │ └── mssqlsql
│ │ │ ├── mssqlsql_test.go
│ │ │ └── mssqlsql.go
│ │ ├── mysql
│ │ │ ├── mysqlcommon
│ │ │ │ └── mysqlcommon.go
│ │ │ ├── mysqlexecutesql
│ │ │ │ ├── mysqlexecutesql_test.go
│ │ │ │ └── mysqlexecutesql.go
│ │ │ ├── mysqllistactivequeries
│ │ │ │ ├── mysqllistactivequeries_test.go
│ │ │ │ └── mysqllistactivequeries.go
│ │ │ ├── mysqllisttablefragmentation
│ │ │ │ ├── mysqllisttablefragmentation_test.go
│ │ │ │ └── mysqllisttablefragmentation.go
│ │ │ ├── mysqllisttables
│ │ │ │ ├── mysqllisttables_test.go
│ │ │ │ └── mysqllisttables.go
│ │ │ ├── mysqllisttablesmissinguniqueindexes
│ │ │ │ ├── mysqllisttablesmissinguniqueindexes_test.go
│ │ │ │ └── mysqllisttablesmissinguniqueindexes.go
│ │ │ └── mysqlsql
│ │ │ ├── mysqlsql_test.go
│ │ │ └── mysqlsql.go
│ │ ├── neo4j
│ │ │ ├── neo4jcypher
│ │ │ │ ├── neo4jcypher_test.go
│ │ │ │ └── neo4jcypher.go
│ │ │ ├── neo4jexecutecypher
│ │ │ │ ├── classifier
│ │ │ │ │ ├── classifier_test.go
│ │ │ │ │ └── classifier.go
│ │ │ │ ├── neo4jexecutecypher_test.go
│ │ │ │ └── neo4jexecutecypher.go
│ │ │ └── neo4jschema
│ │ │ ├── cache
│ │ │ │ ├── cache_test.go
│ │ │ │ └── cache.go
│ │ │ ├── helpers
│ │ │ │ ├── helpers_test.go
│ │ │ │ └── helpers.go
│ │ │ ├── neo4jschema_test.go
│ │ │ ├── neo4jschema.go
│ │ │ └── types
│ │ │ └── types.go
│ │ ├── oceanbase
│ │ │ ├── oceanbaseexecutesql
│ │ │ │ ├── oceanbaseexecutesql_test.go
│ │ │ │ └── oceanbaseexecutesql.go
│ │ │ └── oceanbasesql
│ │ │ ├── oceanbasesql_test.go
│ │ │ └── oceanbasesql.go
│ │ ├── oracle
│ │ │ ├── oracleexecutesql
│ │ │ │ └── oracleexecutesql.go
│ │ │ └── oraclesql
│ │ │ └── oraclesql.go
│ │ ├── parameters_test.go
│ │ ├── parameters.go
│ │ ├── postgres
│ │ │ ├── postgresexecutesql
│ │ │ │ ├── postgresexecutesql_test.go
│ │ │ │ └── postgresexecutesql.go
│ │ │ ├── postgreslistactivequeries
│ │ │ │ ├── postgreslistactivequeries_test.go
│ │ │ │ └── postgreslistactivequeries.go
│ │ │ ├── postgreslistavailableextensions
│ │ │ │ ├── postgreslistavailableextensions_test.go
│ │ │ │ └── postgreslistavailableextensions.go
│ │ │ ├── postgreslistinstalledextensions
│ │ │ │ ├── postgreslistinstalledextensions_test.go
│ │ │ │ └── postgreslistinstalledextensions.go
│ │ │ ├── postgreslisttables
│ │ │ │ ├── postgreslisttables_test.go
│ │ │ │ └── postgreslisttables.go
│ │ │ └── postgressql
│ │ │ ├── postgressql_test.go
│ │ │ └── postgressql.go
│ │ ├── redis
│ │ │ ├── redis_test.go
│ │ │ └── redis.go
│ │ ├── serverlessspark
│ │ │ └── serverlesssparklistbatches
│ │ │ ├── serverlesssparklistbatches_test.go
│ │ │ └── serverlesssparklistbatches.go
│ │ ├── spanner
│ │ │ ├── spannerexecutesql
│ │ │ │ ├── spannerexecutesql_test.go
│ │ │ │ └── spannerexecutesql.go
│ │ │ ├── spannerlisttables
│ │ │ │ ├── spannerlisttables_test.go
│ │ │ │ └── spannerlisttables.go
│ │ │ └── spannersql
│ │ │ ├── spanner_test.go
│ │ │ └── spannersql.go
│ │ ├── sqlite
│ │ │ ├── sqliteexecutesql
│ │ │ │ ├── sqliteexecutesql_test.go
│ │ │ │ └── sqliteexecutesql.go
│ │ │ └── sqlitesql
│ │ │ ├── sqlitesql_test.go
│ │ │ └── sqlitesql.go
│ │ ├── tidb
│ │ │ ├── tidbexecutesql
│ │ │ │ ├── tidbexecutesql_test.go
│ │ │ │ └── tidbexecutesql.go
│ │ │ └── tidbsql
│ │ │ ├── tidbsql_test.go
│ │ │ └── tidbsql.go
│ │ ├── tools_test.go
│ │ ├── tools.go
│ │ ├── toolsets.go
│ │ ├── trino
│ │ │ ├── trinoexecutesql
│ │ │ │ ├── trinoexecutesql_test.go
│ │ │ │ └── trinoexecutesql.go
│ │ │ └── trinosql
│ │ │ ├── trinosql_test.go
│ │ │ └── trinosql.go
│ │ ├── utility
│ │ │ └── wait
│ │ │ ├── wait_test.go
│ │ │ └── wait.go
│ │ ├── valkey
│ │ │ ├── valkey_test.go
│ │ │ └── valkey.go
│ │ └── yugabytedbsql
│ │ ├── yugabytedbsql_test.go
│ │ └── yugabytedbsql.go
│ └── util
│ └── util.go
├── LICENSE
├── logo.png
├── main.go
├── MCP-TOOLBOX-EXTENSION.md
├── README.md
└── tests
├── alloydb
│ ├── alloydb_integration_test.go
│ └── alloydb_wait_for_operation_test.go
├── alloydbainl
│ └── alloydb_ai_nl_integration_test.go
├── alloydbpg
│ └── alloydb_pg_integration_test.go
├── auth.go
├── bigquery
│ └── bigquery_integration_test.go
├── bigtable
│ └── bigtable_integration_test.go
├── cassandra
│ └── cassandra_integration_test.go
├── clickhouse
│ └── clickhouse_integration_test.go
├── cloudmonitoring
│ └── cloud_monitoring_integration_test.go
├── cloudsql
│ ├── cloud_sql_create_database_test.go
│ ├── cloud_sql_create_users_test.go
│ ├── cloud_sql_get_instances_test.go
│ ├── cloud_sql_list_databases_test.go
│ ├── cloudsql_list_instances_test.go
│ └── cloudsql_wait_for_operation_test.go
├── cloudsqlmssql
│ ├── cloud_sql_mssql_create_instance_integration_test.go
│ └── cloud_sql_mssql_integration_test.go
├── cloudsqlmysql
│ ├── cloud_sql_mysql_create_instance_integration_test.go
│ └── cloud_sql_mysql_integration_test.go
├── cloudsqlpg
│ ├── cloud_sql_pg_create_instances_test.go
│ └── cloud_sql_pg_integration_test.go
├── common.go
├── couchbase
│ └── couchbase_integration_test.go
├── dataform
│ └── dataform_integration_test.go
├── dataplex
│ └── dataplex_integration_test.go
├── dgraph
│ └── dgraph_integration_test.go
├── firebird
│ └── firebird_integration_test.go
├── firestore
│ └── firestore_integration_test.go
├── http
│ └── http_integration_test.go
├── looker
│ └── looker_integration_test.go
├── mongodb
│ └── mongodb_integration_test.go
├── mssql
│ └── mssql_integration_test.go
├── mysql
│ └── mysql_integration_test.go
├── neo4j
│ └── neo4j_integration_test.go
├── oceanbase
│ └── oceanbase_integration_test.go
├── option.go
├── oracle
│ └── oracle_integration_test.go
├── postgres
│ └── postgres_integration_test.go
├── redis
│ └── redis_test.go
├── server.go
├── serverlessspark
│ └── serverless_spark_integration_test.go
├── source.go
├── spanner
│ └── spanner_integration_test.go
├── sqlite
│ └── sqlite_integration_test.go
├── tidb
│ └── tidb_integration_test.go
├── tool.go
├── trino
│ └── trino_integration_test.go
├── utility
│ └── wait_integration_test.go
├── valkey
│ └── valkey_test.go
└── yugabytedb
└── yugabytedb_integration_test.go
```
# Files
--------------------------------------------------------------------------------
/internal/tools/spanner/spannerlisttables/spannerlisttables.go:
--------------------------------------------------------------------------------
```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package spannerlisttables
import (
"context"
"fmt"
"strings"
"cloud.google.com/go/spanner"
yaml "github.com/goccy/go-yaml"
"github.com/googleapis/genai-toolbox/internal/sources"
spannerdb "github.com/googleapis/genai-toolbox/internal/sources/spanner"
"github.com/googleapis/genai-toolbox/internal/tools"
"google.golang.org/api/iterator"
)
const kind string = "spanner-list-tables"
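// init registers this tool kind in the global tool registry so it can be
// instantiated from YAML configuration at server startup.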
func init() {
if !tools.Register(kind, newConfig) {
panic(fmt.Sprintf("tool kind %q already registered", kind))
}
}
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
actual := Config{Name: name}
if err := decoder.DecodeContext(ctx, &actual); err != nil {
return nil, err
}
return actual, nil
}
type compatibleSource interface {
SpannerClient() *spanner.Client
DatabaseDialect() string
}
// validate compatible sources are still compatible
var _ compatibleSource = &spannerdb.Source{}
var compatibleSources = [...]string{spannerdb.SourceKind}
type Config struct {
Name string `yaml:"name" validate:"required"`
Kind string `yaml:"kind" validate:"required"`
Source string `yaml:"source" validate:"required"`
Description string `yaml:"description"`
AuthRequired []string `yaml:"authRequired"`
}
// validate interface
var _ tools.ToolConfig = Config{}
func (cfg Config) ToolConfigKind() string {
return kind
}
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
// verify source exists
rawS, ok := srcs[cfg.Source]
if !ok {
return nil, fmt.Errorf("no source named %q configured", cfg.Source)
}
// verify the source is compatible
s, ok := rawS.(compatibleSource)
if !ok {
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
}
// Define parameters for the tool
allParameters := tools.Parameters{
tools.NewStringParameterWithDefault(
"table_names",
"",
"Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed.",
),
tools.NewStringParameterWithDefault(
"output_format",
"detailed",
"Optional: Use 'simple' to return table names only or use 'detailed' to return the full information schema.",
),
}
description := cfg.Description
if description == "" {
description = "Lists detailed schema information (object type, columns, constraints, indexes) as JSON for user-created tables. Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
}
mcpManifest := tools.GetMcpManifest(cfg.Name, description, cfg.AuthRequired, allParameters)
// finish tool setup
t := Tool{
Name: cfg.Name,
Kind: kind,
AllParams: allParameters,
AuthRequired: cfg.AuthRequired,
Client: s.SpannerClient(),
dialect: s.DatabaseDialect(),
manifest: tools.Manifest{Description: description, Parameters: allParameters.Manifest(), AuthRequired: cfg.AuthRequired},
mcpManifest: mcpManifest,
}
return t, nil
}
// validate interface
var _ tools.Tool = Tool{}
type Tool struct {
Name string `yaml:"name"`
Kind string `yaml:"kind"`
AuthRequired []string `yaml:"authRequired"`
AllParams tools.Parameters `yaml:"allParams"`
Client *spanner.Client
dialect string
manifest tools.Manifest
mcpManifest tools.McpManifest
}
// processRows iterates over the spanner.RowIterator and converts each row to a map[string]any.
func processRows(iter *spanner.RowIterator) ([]any, error) {
var out []any
defer iter.Stop()
for {
row, err := iter.Next()
if err == iterator.Done {
break
}
if err != nil {
return nil, fmt.Errorf("unable to parse row: %w", err)
}
vMap := make(map[string]any)
cols := row.ColumnNames()
for i, c := range cols {
vMap[c] = row.ColumnValue(i)
}
out = append(out, vMap)
}
return out, nil
}
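// getStatement returns the SQL statement matching the source's database
// dialect, defaulting to GoogleSQL for unrecognized values.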
func (t Tool) getStatement() string {
switch strings.ToLower(t.dialect) {
case "postgresql":
return postgresqlStatement
case "googlesql":
return googleSQLStatement
default:
// Default to GoogleSQL
return googleSQLStatement
}
}
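// Invoke executes the dialect-appropriate list-tables query against Spanner,
// binding the caller-supplied table_names and output_format parameters.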
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
paramsMap := params.AsMap()
// Get the appropriate SQL statement based on dialect
statement := t.getStatement()
// Prepare parameters based on dialect
var stmtParams map[string]any
tableNames, _ := paramsMap["table_names"].(string)
outputFormat, _ := paramsMap["output_format"].(string)
if outputFormat == "" {
outputFormat = "detailed"
}
switch strings.ToLower(t.dialect) {
case "postgresql":
// PostgreSQL uses positional parameters ($1, $2)
stmtParams = map[string]any{
"p1": tableNames,
"p2": outputFormat,
}
case "googlesql":
// GoogleSQL uses named parameters (@table_names, @output_format)
stmtParams = map[string]any{
"table_names": tableNames,
"output_format": outputFormat,
}
default:
return nil, fmt.Errorf("unsupported dialect: %s", t.dialect)
}
stmt := spanner.Statement{
SQL: statement,
Params: stmtParams,
}
// Execute the query (read-only)
iter := t.Client.Single().Query(ctx, stmt)
results, err := processRows(iter)
if err != nil {
return nil, fmt.Errorf("unable to execute query: %w", err)
}
return results, nil
}
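// ParseParams validates the raw request arguments (and any auth claims)
// against the tool's declared parameters.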
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
return tools.ParseParams(t.AllParams, data, claims)
}
func (t Tool) Manifest() tools.Manifest {
return t.manifest
}
func (t Tool) McpManifest() tools.McpManifest {
return t.mcpManifest
}
func (t Tool) Authorized(verifiedAuthServices []string) bool {
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}
func (t Tool) RequiresClientAuthorization() bool {
return false
}
// PostgreSQL statement for listing tables
const postgresqlStatement = `
WITH table_info_cte AS (
SELECT
T.TABLE_SCHEMA,
T.TABLE_NAME,
T.TABLE_TYPE,
T.PARENT_TABLE_NAME,
T.ON_DELETE_ACTION
FROM INFORMATION_SCHEMA.TABLES AS T
WHERE
T.TABLE_SCHEMA = 'public'
AND T.TABLE_TYPE = 'BASE TABLE'
AND (
NULLIF(TRIM($1), '') IS NULL OR
T.TABLE_NAME IN (
SELECT table_name
FROM UNNEST(regexp_split_to_array($1, '\s*,\s*')) AS table_name)
)
),
columns_info_cte AS (
SELECT
C.TABLE_SCHEMA,
C.TABLE_NAME,
ARRAY_AGG(
CONCAT(
'{',
'"column_name":"', COALESCE(REPLACE(C.COLUMN_NAME, '"', '\"'), ''), '",',
'"data_type":"', COALESCE(REPLACE(C.SPANNER_TYPE, '"', '\"'), ''), '",',
'"ordinal_position":', C.ORDINAL_POSITION::TEXT, ',',
'"is_not_nullable":', CASE WHEN C.IS_NULLABLE = 'NO' THEN 'true' ELSE 'false' END, ',',
'"column_default":', CASE WHEN C.COLUMN_DEFAULT IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(C.COLUMN_DEFAULT::text, '"', '\"'), '"') END,
'}'
) ORDER BY C.ORDINAL_POSITION
) AS columns_json_array_elements
FROM INFORMATION_SCHEMA.COLUMNS AS C
WHERE C.TABLE_SCHEMA = 'public'
AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE C.TABLE_SCHEMA = TI.TABLE_SCHEMA AND C.TABLE_NAME = TI.TABLE_NAME)
GROUP BY C.TABLE_SCHEMA, C.TABLE_NAME
),
constraint_columns_agg_cte AS (
SELECT
CONSTRAINT_CATALOG,
CONSTRAINT_SCHEMA,
CONSTRAINT_NAME,
ARRAY_AGG(REPLACE(COLUMN_NAME, '"', '\"') ORDER BY ORDINAL_POSITION) AS column_names_json_list
FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
WHERE CONSTRAINT_SCHEMA = 'public'
GROUP BY CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME
),
constraints_info_cte AS (
SELECT
TC.TABLE_SCHEMA,
TC.TABLE_NAME,
ARRAY_AGG(
CONCAT(
'{',
'"constraint_name":"', COALESCE(REPLACE(TC.CONSTRAINT_NAME, '"', '\"'), ''), '",',
'"constraint_type":"', COALESCE(REPLACE(TC.CONSTRAINT_TYPE, '"', '\"'), ''), '",',
'"constraint_definition":',
CASE TC.CONSTRAINT_TYPE
WHEN 'CHECK' THEN CASE WHEN CC.CHECK_CLAUSE IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(CC.CHECK_CLAUSE, '"', '\"'), '"') END
WHEN 'PRIMARY KEY' THEN CONCAT('"', 'PRIMARY KEY (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
WHEN 'UNIQUE' THEN CONCAT('"', 'UNIQUE (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
WHEN 'FOREIGN KEY' THEN CONCAT('"', 'FOREIGN KEY (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ') REFERENCES ',
COALESCE(REPLACE(RefKeyTable.TABLE_NAME, '"', '\"'), ''),
' (', array_to_string(COALESCE(RefKeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
ELSE 'null'
END, ',',
'"constraint_columns":["', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ','), '"],',
'"foreign_key_referenced_table":', CASE WHEN RefKeyTable.TABLE_NAME IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(RefKeyTable.TABLE_NAME, '"', '\"'), '"') END, ',',
'"foreign_key_referenced_columns":["', array_to_string(COALESCE(RefKeyCols.column_names_json_list, ARRAY[]::text[]), ','), '"]',
'}'
) ORDER BY TC.CONSTRAINT_NAME
) AS constraints_json_array_elements
FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC
LEFT JOIN INFORMATION_SCHEMA.CHECK_CONSTRAINTS AS CC
ON TC.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS RC
ON TC.CONSTRAINT_CATALOG = RC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = RC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = RC.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS RefConstraint
ON RC.UNIQUE_CONSTRAINT_CATALOG = RefConstraint.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefConstraint.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefConstraint.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.TABLES AS RefKeyTable
ON RefConstraint.TABLE_CATALOG = RefKeyTable.TABLE_CATALOG AND RefConstraint.TABLE_SCHEMA = RefKeyTable.TABLE_SCHEMA AND RefConstraint.TABLE_NAME = RefKeyTable.TABLE_NAME
LEFT JOIN constraint_columns_agg_cte AS KeyCols
ON TC.CONSTRAINT_CATALOG = KeyCols.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = KeyCols.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = KeyCols.CONSTRAINT_NAME
LEFT JOIN constraint_columns_agg_cte AS RefKeyCols
ON RC.UNIQUE_CONSTRAINT_CATALOG = RefKeyCols.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefKeyCols.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefKeyCols.CONSTRAINT_NAME AND TC.CONSTRAINT_TYPE = 'FOREIGN KEY'
WHERE TC.TABLE_SCHEMA = 'public'
AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE TC.TABLE_SCHEMA = TI.TABLE_SCHEMA AND TC.TABLE_NAME = TI.TABLE_NAME)
GROUP BY TC.TABLE_SCHEMA, TC.TABLE_NAME
),
index_key_columns_agg_cte AS (
SELECT
TABLE_CATALOG,
TABLE_SCHEMA,
TABLE_NAME,
INDEX_NAME,
ARRAY_AGG(
CONCAT(
'{"column_name":"', COALESCE(REPLACE(COLUMN_NAME, '"', '\"'), ''), '",',
'"ordering":"', COALESCE(REPLACE(COLUMN_ORDERING, '"', '\"'), ''), '"}'
) ORDER BY ORDINAL_POSITION
) AS key_column_json_details
FROM INFORMATION_SCHEMA.INDEX_COLUMNS
WHERE ORDINAL_POSITION IS NOT NULL
AND TABLE_SCHEMA = 'public'
GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
),
index_storing_columns_agg_cte AS (
SELECT
TABLE_CATALOG,
TABLE_SCHEMA,
TABLE_NAME,
INDEX_NAME,
ARRAY_AGG(CONCAT('"', REPLACE(COLUMN_NAME, '"', '\"'), '"') ORDER BY COLUMN_NAME) AS storing_column_json_names
FROM INFORMATION_SCHEMA.INDEX_COLUMNS
WHERE ORDINAL_POSITION IS NULL
AND TABLE_SCHEMA = 'public'
GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
),
indexes_info_cte AS (
SELECT
I.TABLE_SCHEMA,
I.TABLE_NAME,
ARRAY_AGG(
CONCAT(
'{',
'"index_name":"', COALESCE(REPLACE(I.INDEX_NAME, '"', '\"'), ''), '",',
'"index_type":"', COALESCE(REPLACE(I.INDEX_TYPE, '"', '\"'), ''), '",',
'"is_unique":', CASE WHEN I.IS_UNIQUE = 'YES' THEN 'true' ELSE 'false' END, ',',
'"is_null_filtered":', CASE WHEN I.IS_NULL_FILTERED = 'YES' THEN 'true' ELSE 'false' END, ',',
'"interleaved_in_table":', CASE WHEN I.PARENT_TABLE_NAME IS NULL OR I.PARENT_TABLE_NAME = '' THEN 'null' ELSE CONCAT('"', REPLACE(I.PARENT_TABLE_NAME, '"', '\"'), '"') END, ',',
'"index_key_columns":[', COALESCE(array_to_string(KeyIndexCols.key_column_json_details, ','), ''), '],',
'"storing_columns":[', COALESCE(array_to_string(StoringIndexCols.storing_column_json_names, ','), ''), ']',
'}'
) ORDER BY I.INDEX_NAME
) AS indexes_json_array_elements
FROM INFORMATION_SCHEMA.INDEXES AS I
LEFT JOIN index_key_columns_agg_cte AS KeyIndexCols
ON I.TABLE_CATALOG = KeyIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = KeyIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = KeyIndexCols.TABLE_NAME AND I.INDEX_NAME = KeyIndexCols.INDEX_NAME
LEFT JOIN index_storing_columns_agg_cte AS StoringIndexCols
ON I.TABLE_CATALOG = StoringIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = StoringIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = StoringIndexCols.TABLE_NAME AND I.INDEX_NAME = StoringIndexCols.INDEX_NAME
AND I.INDEX_TYPE IN ('LOCAL', 'GLOBAL')
WHERE I.TABLE_SCHEMA = 'public'
AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE I.TABLE_SCHEMA = TI.TABLE_SCHEMA AND I.TABLE_NAME = TI.TABLE_NAME)
GROUP BY I.TABLE_SCHEMA, I.TABLE_NAME
)
SELECT
TI.TABLE_SCHEMA AS schema_name,
TI.TABLE_NAME AS object_name,
CASE
WHEN $2 = 'simple' THEN
-- If the format is 'simple', return basic JSON
CONCAT('{"name":"', COALESCE(REPLACE(TI.TABLE_NAME, '"', '\"'), ''), '"}')
ELSE
CONCAT(
'{',
'"schema_name":"', COALESCE(REPLACE(TI.TABLE_SCHEMA, '"', '\"'), ''), '",',
'"object_name":"', COALESCE(REPLACE(TI.TABLE_NAME, '"', '\"'), ''), '",',
'"object_type":"', COALESCE(REPLACE(TI.TABLE_TYPE, '"', '\"'), ''), '",',
'"columns":[', COALESCE(array_to_string(CI.columns_json_array_elements, ','), ''), '],',
'"constraints":[', COALESCE(array_to_string(CONSI.constraints_json_array_elements, ','), ''), '],',
'"indexes":[', COALESCE(array_to_string(II.indexes_json_array_elements, ','), ''), ']',
'}'
)
END AS object_details
FROM table_info_cte AS TI
LEFT JOIN columns_info_cte AS CI
ON TI.TABLE_SCHEMA = CI.TABLE_SCHEMA AND TI.TABLE_NAME = CI.TABLE_NAME
LEFT JOIN constraints_info_cte AS CONSI
ON TI.TABLE_SCHEMA = CONSI.TABLE_SCHEMA AND TI.TABLE_NAME = CONSI.TABLE_NAME
LEFT JOIN indexes_info_cte AS II
ON TI.TABLE_SCHEMA = II.TABLE_SCHEMA AND TI.TABLE_NAME = II.TABLE_NAME
ORDER BY TI.TABLE_SCHEMA, TI.TABLE_NAME`
// GoogleSQL statement for listing tables
const googleSQLStatement = `
WITH FilterTableNames AS (
SELECT DISTINCT TRIM(name) AS TABLE_NAME
FROM UNNEST(IF(@table_names = '' OR @table_names IS NULL, ['%'], SPLIT(@table_names, ','))) AS name
),
-- 1. Table Information
table_info_cte AS (
SELECT
T.TABLE_SCHEMA,
T.TABLE_NAME,
T.TABLE_TYPE,
T.PARENT_TABLE_NAME, -- For interleaved tables
T.ON_DELETE_ACTION -- For interleaved tables
FROM INFORMATION_SCHEMA.TABLES AS T
WHERE
T.TABLE_SCHEMA = ''
AND T.TABLE_TYPE = 'BASE TABLE'
AND (EXISTS (SELECT 1 FROM FilterTableNames WHERE FilterTableNames.TABLE_NAME = '%') OR T.TABLE_NAME IN (SELECT TABLE_NAME FROM FilterTableNames))
),
-- 2. Column Information (with JSON string for each column)
columns_info_cte AS (
SELECT
C.TABLE_SCHEMA,
C.TABLE_NAME,
ARRAY_AGG(
CONCAT(
'{',
'"column_name":"', IFNULL(C.COLUMN_NAME, ''), '",',
'"data_type":"', IFNULL(C.SPANNER_TYPE, ''), '",',
'"ordinal_position":', CAST(C.ORDINAL_POSITION AS STRING), ',',
'"is_not_nullable":', IF(C.IS_NULLABLE = 'NO', 'true', 'false'), ',',
'"column_default":', IF(C.COLUMN_DEFAULT IS NULL, 'null', CONCAT('"', C.COLUMN_DEFAULT, '"')),
'}'
) ORDER BY C.ORDINAL_POSITION
) AS columns_json_array_elements
FROM INFORMATION_SCHEMA.COLUMNS AS C
WHERE EXISTS (SELECT 1 FROM table_info_cte TI WHERE C.TABLE_SCHEMA = TI.TABLE_SCHEMA AND C.TABLE_NAME = TI.TABLE_NAME)
GROUP BY C.TABLE_SCHEMA, C.TABLE_NAME
),
-- Helper CTE for aggregating constraint columns
constraint_columns_agg_cte AS (
SELECT
CONSTRAINT_CATALOG,
CONSTRAINT_SCHEMA,
CONSTRAINT_NAME,
ARRAY_AGG(REPLACE(COLUMN_NAME, '"', '\"') ORDER BY ORDINAL_POSITION) AS column_names_json_list
FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
GROUP BY CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME
),
-- 3. Constraint Information (with JSON string for each constraint)
constraints_info_cte AS (
SELECT
TC.TABLE_SCHEMA,
TC.TABLE_NAME,
ARRAY_AGG(
CONCAT(
'{',
'"constraint_name":"', IFNULL(TC.CONSTRAINT_NAME, ''), '",',
'"constraint_type":"', IFNULL(TC.CONSTRAINT_TYPE, ''), '",',
'"constraint_definition":',
CASE TC.CONSTRAINT_TYPE
WHEN 'CHECK' THEN IF(CC.CHECK_CLAUSE IS NULL, 'null', CONCAT('"', CC.CHECK_CLAUSE, '"'))
WHEN 'PRIMARY KEY' THEN CONCAT('"', 'PRIMARY KEY (', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ', '), ')', '"')
WHEN 'UNIQUE' THEN CONCAT('"', 'UNIQUE (', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ', '), ')', '"')
WHEN 'FOREIGN KEY' THEN CONCAT('"', 'FOREIGN KEY (', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ', '), ') REFERENCES ',
IFNULL(RefKeyTable.TABLE_NAME, ''),
' (', ARRAY_TO_STRING(COALESCE(RefKeyCols.column_names_json_list, []), ', '), ')', '"')
ELSE 'null'
END, ',',
'"constraint_columns":["', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ','), '"],',
'"foreign_key_referenced_table":', IF(RefKeyTable.TABLE_NAME IS NULL, 'null', CONCAT('"', RefKeyTable.TABLE_NAME, '"')), ',',
'"foreign_key_referenced_columns":["', ARRAY_TO_STRING(COALESCE(RefKeyCols.column_names_json_list, []), ','), '"]',
'}'
) ORDER BY TC.CONSTRAINT_NAME
) AS constraints_json_array_elements
FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC
LEFT JOIN INFORMATION_SCHEMA.CHECK_CONSTRAINTS AS CC
ON TC.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS RC
ON TC.CONSTRAINT_CATALOG = RC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = RC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = RC.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS RefConstraint
ON RC.UNIQUE_CONSTRAINT_CATALOG = RefConstraint.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefConstraint.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefConstraint.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.TABLES AS RefKeyTable
ON RefConstraint.TABLE_CATALOG = RefKeyTable.TABLE_CATALOG AND RefConstraint.TABLE_SCHEMA = RefKeyTable.TABLE_SCHEMA AND RefConstraint.TABLE_NAME = RefKeyTable.TABLE_NAME
LEFT JOIN constraint_columns_agg_cte AS KeyCols
ON TC.CONSTRAINT_CATALOG = KeyCols.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = KeyCols.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = KeyCols.CONSTRAINT_NAME
LEFT JOIN constraint_columns_agg_cte AS RefKeyCols
ON RC.UNIQUE_CONSTRAINT_CATALOG = RefKeyCols.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefKeyCols.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefKeyCols.CONSTRAINT_NAME AND TC.CONSTRAINT_TYPE = 'FOREIGN KEY'
WHERE EXISTS (SELECT 1 FROM table_info_cte TI WHERE TC.TABLE_SCHEMA = TI.TABLE_SCHEMA AND TC.TABLE_NAME = TI.TABLE_NAME)
GROUP BY TC.TABLE_SCHEMA, TC.TABLE_NAME
),
-- Helper CTE for aggregating index key columns (as JSON strings)
index_key_columns_agg_cte AS (
SELECT
TABLE_CATALOG,
TABLE_SCHEMA,
TABLE_NAME,
INDEX_NAME,
ARRAY_AGG(
CONCAT(
'{"column_name":"', IFNULL(COLUMN_NAME, ''), '",',
'"ordering":"', IFNULL(COLUMN_ORDERING, ''), '"}'
) ORDER BY ORDINAL_POSITION
) AS key_column_json_details
FROM INFORMATION_SCHEMA.INDEX_COLUMNS
WHERE ORDINAL_POSITION IS NOT NULL -- Key columns
GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
),
-- Helper CTE for aggregating index storing columns (as JSON strings)
index_storing_columns_agg_cte AS (
SELECT
TABLE_CATALOG,
TABLE_SCHEMA,
TABLE_NAME,
INDEX_NAME,
ARRAY_AGG(CONCAT('"', COLUMN_NAME, '"') ORDER BY COLUMN_NAME) AS storing_column_json_names
FROM INFORMATION_SCHEMA.INDEX_COLUMNS
WHERE ORDINAL_POSITION IS NULL -- Storing columns
GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
),
-- 4. Index Information (with JSON string for each index)
indexes_info_cte AS (
SELECT
I.TABLE_SCHEMA,
I.TABLE_NAME,
ARRAY_AGG(
CONCAT(
'{',
'"index_name":"', IFNULL(I.INDEX_NAME, ''), '",',
'"index_type":"', IFNULL(I.INDEX_TYPE, ''), '",',
'"is_unique":', IF(I.IS_UNIQUE, 'true', 'false'), ',',
'"is_null_filtered":', IF(I.IS_NULL_FILTERED, 'true', 'false'), ',',
'"interleaved_in_table":', IF(I.PARENT_TABLE_NAME IS NULL, 'null', CONCAT('"', I.PARENT_TABLE_NAME, '"')), ',',
'"index_key_columns":[', ARRAY_TO_STRING(COALESCE(KeyIndexCols.key_column_json_details, []), ','), '],',
'"storing_columns":[', ARRAY_TO_STRING(COALESCE(StoringIndexCols.storing_column_json_names, []), ','), ']',
'}'
) ORDER BY I.INDEX_NAME
) AS indexes_json_array_elements
FROM INFORMATION_SCHEMA.INDEXES AS I
LEFT JOIN index_key_columns_agg_cte AS KeyIndexCols
ON I.TABLE_CATALOG = KeyIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = KeyIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = KeyIndexCols.TABLE_NAME AND I.INDEX_NAME = KeyIndexCols.INDEX_NAME
LEFT JOIN index_storing_columns_agg_cte AS StoringIndexCols
ON I.TABLE_CATALOG = StoringIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = StoringIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = StoringIndexCols.TABLE_NAME AND I.INDEX_NAME = StoringIndexCols.INDEX_NAME AND I.INDEX_TYPE = 'INDEX'
WHERE EXISTS (SELECT 1 FROM table_info_cte TI WHERE I.TABLE_SCHEMA = TI.TABLE_SCHEMA AND I.TABLE_NAME = TI.TABLE_NAME)
GROUP BY I.TABLE_SCHEMA, I.TABLE_NAME
)
-- Final SELECT to build the JSON output
SELECT
TI.TABLE_SCHEMA AS schema_name,
TI.TABLE_NAME AS object_name,
CASE
WHEN @output_format = 'simple' THEN
-- If the format is 'simple', return basic JSON
CONCAT('{"name":"', IFNULL(REPLACE(TI.TABLE_NAME, '"', '\"'), ''), '"}')
ELSE
CONCAT(
'{',
'"schema_name":"', IFNULL(TI.TABLE_SCHEMA, ''), '",',
'"object_name":"', IFNULL(TI.TABLE_NAME, ''), '",',
'"object_type":"', IFNULL(TI.TABLE_TYPE, ''), '",',
'"columns":[', ARRAY_TO_STRING(COALESCE(CI.columns_json_array_elements, []), ','), '],',
'"constraints":[', ARRAY_TO_STRING(COALESCE(CONSI.constraints_json_array_elements, []), ','), '],',
'"indexes":[', ARRAY_TO_STRING(COALESCE(II.indexes_json_array_elements, []), ','), ']',
'}'
)
END AS object_details
FROM table_info_cte AS TI
LEFT JOIN columns_info_cte AS CI
ON TI.TABLE_SCHEMA = CI.TABLE_SCHEMA AND TI.TABLE_NAME = CI.TABLE_NAME
LEFT JOIN constraints_info_cte AS CONSI
ON TI.TABLE_SCHEMA = CONSI.TABLE_SCHEMA AND TI.TABLE_NAME = CONSI.TABLE_NAME
LEFT JOIN indexes_info_cte AS II
ON TI.TABLE_SCHEMA = II.TABLE_SCHEMA AND TI.TABLE_NAME = II.TABLE_NAME
ORDER BY TI.TABLE_SCHEMA, TI.TABLE_NAME`
```
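For orientation, here is a minimal configuration sketch for this tool kind, assuming the `spanner` source described in `docs/en/resources/sources/spanner.md`. The source name and the project/instance/database values below are illustrative placeholders; only the `kind` values and the tool fields (`source`, `description`) come from the code above.
```yaml
# Hypothetical tools.yaml snippet; names and connection values are placeholders.
sources:
  my-spanner-source:
    kind: spanner
    project: my-project
    instance: my-instance
    database: my-database
tools:
  list_tables:
    kind: spanner-list-tables
    source: my-spanner-source
    description: Lists schema details for user-created tables as JSON.
```
At invocation time the tool accepts the optional `table_names` (comma-separated filter) and `output_format` (`simple` or `detailed`) arguments declared in `Initialize`; with `simple`, each row's `object_details` collapses to `{"name": "..."}`, as the final `CASE` expression in both SQL statements shows.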
--------------------------------------------------------------------------------
/.ci/integration.cloudbuild.yaml:
--------------------------------------------------------------------------------
```yaml
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
steps:
- id: "install-dependencies"
name: golang:1
waitFor: ["-"]
env:
- "GOPATH=/gopath"
volumes:
- name: "go"
path: "/gopath"
script: |
go get -d ./...
- id: "compile-test-binary"
name: golang:1
waitFor: ["install-dependencies"]
env:
- "GOPATH=/gopath"
volumes:
- name: "go"
path: "/gopath"
script: |
go test -c -race -cover \
-coverpkg=./internal/sources/...,./internal/tools/... ./tests/...
chmod +x .ci/test_with_coverage.sh
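# Each per-source step below follows the same pattern: reuse the shared Go
# module volume, inject connection details through env and Secret Manager
# (secretEnv), and call .ci/test_with_coverage.sh with a display name, the
# integration-test package, and the tool packages to collect coverage for.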
- id: "cloud-sql-pg"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "CLOUD_SQL_POSTGRES_PROJECT=$PROJECT_ID"
- "CLOUD_SQL_POSTGRES_INSTANCE=$_CLOUD_SQL_POSTGRES_INSTANCE"
- "CLOUD_SQL_POSTGRES_DATABASE=$_DATABASE_NAME"
- "CLOUD_SQL_POSTGRES_REGION=$_REGION"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv:
["CLOUD_SQL_POSTGRES_USER", "CLOUD_SQL_POSTGRES_PASS", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Cloud SQL Postgres" \
cloudsqlpg \
postgressql \
postgresexecutesql
- id: "alloydb"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "ALLOYDB_PROJECT=$PROJECT_ID"
- "ALLOYDB_CLUSTER=$_ALLOYDB_POSTGRES_CLUSTER"
- "ALLOYDB_INSTANCE=$_ALLOYDB_POSTGRES_INSTANCE"
- "ALLOYDB_REGION=$_REGION"
secretEnv: ["ALLOYDB_POSTGRES_USER"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"AlloyDB" \
alloydb \
alloydb
- id: "alloydb-pg"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "ALLOYDB_POSTGRES_PROJECT=$PROJECT_ID"
- "ALLOYDB_POSTGRES_CLUSTER=$_ALLOYDB_POSTGRES_CLUSTER"
- "ALLOYDB_POSTGRES_INSTANCE=$_ALLOYDB_POSTGRES_INSTANCE"
- "ALLOYDB_POSTGRES_DATABASE=$_DATABASE_NAME"
- "ALLOYDB_POSTGRES_REGION=$_REGION"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["ALLOYDB_POSTGRES_USER", "ALLOYDB_POSTGRES_PASS", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"AlloyDB Postgres" \
alloydbpg \
postgressql \
postgresexecutesql
- id: "alloydb-ai-nl"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "ALLOYDB_AI_NL_PROJECT=$PROJECT_ID"
- "ALLOYDB_AI_NL_CLUSTER=$_ALLOYDB_AI_NL_CLUSTER"
- "ALLOYDB_AI_NL_INSTANCE=$_ALLOYDB_AI_NL_INSTANCE"
- "ALLOYDB_AI_NL_DATABASE=$_DATABASE_NAME"
- "ALLOYDB_AI_NL_REGION=$_REGION"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["ALLOYDB_AI_NL_USER", "ALLOYDB_AI_NL_PASS", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"AlloyDB AI NL" \
alloydbainl \
alloydbainl
- id: "bigtable"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "BIGTABLE_PROJECT=$PROJECT_ID"
- "BIGTABLE_INSTANCE=$_BIGTABLE_INSTANCE"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Bigtable" \
bigtable \
bigtable
- id: "bigquery"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "BIGQUERY_PROJECT=$PROJECT_ID"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"BigQuery" \
bigquery \
bigquery
- id: "dataplex"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "DATAPLEX_PROJECT=$PROJECT_ID"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Dataplex" \
dataplex \
dataplex
- id: "dataform"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
secretEnv: ["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
apt-get update && apt-get install -y npm && \
npm install -g @dataform/cli && \
.ci/test_with_coverage.sh \
"Dataform" \
dataform \
dataform
- id: "postgres"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "POSTGRES_DATABASE=$_DATABASE_NAME"
- "POSTGRES_HOST=$_POSTGRES_HOST"
- "POSTGRES_PORT=$_POSTGRES_PORT"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["POSTGRES_USER", "POSTGRES_PASS", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Postgres" \
postgres \
postgressql \
postgresexecutesql
- id: "spanner"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "SPANNER_PROJECT=$PROJECT_ID"
- "SPANNER_DATABASE=$_DATABASE_NAME"
- "SPANNER_INSTANCE=$_SPANNER_INSTANCE"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Spanner" \
spanner \
spanner
- id: "neo4j"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "NEO4J_DATABASE=$_NEO4J_DATABASE"
- "NEO4J_URI=$_NEO4J_URI"
secretEnv: ["NEO4J_USER", "NEO4J_PASS"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Neo4j" \
neo4j \
neo4j
- id: "cloud-sql-mssql"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "CLOUD_SQL_MSSQL_PROJECT=$PROJECT_ID"
- "CLOUD_SQL_MSSQL_INSTANCE=$_CLOUD_SQL_MSSQL_INSTANCE"
- "CLOUD_SQL_MSSQL_IP=$_CLOUD_SQL_MSSQL_IP"
- "CLOUD_SQL_MSSQL_DATABASE=$_DATABASE_NAME"
- "CLOUD_SQL_MSSQL_REGION=$_REGION"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLOUD_SQL_MSSQL_USER", "CLOUD_SQL_MSSQL_PASS", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Cloud SQL MSSQL" \
cloudsqlmssql \
mssql
- id: "cloud-sql-mysql"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "CLOUD_SQL_MYSQL_PROJECT=$PROJECT_ID"
- "CLOUD_SQL_MYSQL_INSTANCE=$_CLOUD_SQL_MYSQL_INSTANCE"
- "CLOUD_SQL_MYSQL_DATABASE=$_DATABASE_NAME"
- "CLOUD_SQL_MYSQL_REGION=$_REGION"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLOUD_SQL_MYSQL_USER", "CLOUD_SQL_MYSQL_PASS", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Cloud SQL MySQL" \
cloudsqlmysql \
mysql || echo "Integration tests failed." # ignore test failures
- id: "mysql"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "MYSQL_DATABASE=$_DATABASE_NAME"
- "MYSQL_HOST=$_MYSQL_HOST"
- "MYSQL_PORT=$_MYSQL_PORT"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["MYSQL_USER", "MYSQL_PASS", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"MySQL" \
mysql \
mysql || echo "Integration tests failed." # ignore test failures
- id: "mssql"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "MSSQL_DATABASE=$_DATABASE_NAME"
- "MSSQL_HOST=$_MSSQL_HOST"
- "MSSQL_PORT=$_MSSQL_PORT"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["MSSQL_USER", "MSSQL_PASS", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"MSSQL" \
mssql \
mssql
# - id: "dgraph"
# name: golang:1
# waitFor: ["compile-test-binary"]
# entrypoint: /bin/bash
# env:
# - "GOPATH=/gopath"
# - "DGRAPH_URL=$_DGRAPHURL"
# volumes:
# - name: "go"
# path: "/gopath"
# args:
# - -c
# - |
# .ci/test_with_coverage.sh \
# "Dgraph" \
# dgraph \
# dgraph
- id: "http"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
secretEnv: ["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"HTTP" \
http \
http
- id: "sqlite"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
volumes:
- name: "go"
path: "/gopath"
secretEnv: ["CLIENT_ID"]
args:
- -c
- |
.ci/test_with_coverage.sh \
"SQLite" \
sqlite \
sqlite
- id: "couchbase"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "COUCHBASE_SCOPE=$_COUCHBASE_SCOPE"
- "COUCHBASE_BUCKET=$_COUCHBASE_BUCKET"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv:
["COUCHBASE_CONNECTION", "COUCHBASE_USER", "COUCHBASE_PASS", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Couchbase" \
couchbase \
couchbase
- id: "redis"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["REDIS_ADDRESS", "REDIS_PASS", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Redis" \
redis \
redis
- id: "valkey"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "VALKEY_DATABASE=$_VALKEY_DATABASE"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["VALKEY_ADDRESS", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Valkey" \
valkey \
valkey
- id: "oceanbase"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "OCEANBASE_PORT=$_OCEANBASE_PORT"
- "OCEANBASE_DATABASE=$_OCEANBASE_DATABASE"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv:
["CLIENT_ID", "OCEANBASE_HOST", "OCEANBASE_USER", "OCEANBASE_PASSWORD"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"OceanBase" \
oceanbase \
oceanbase
- id: "firestore"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "FIRESTORE_PROJECT=$PROJECT_ID"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Firestore" \
firestore \
firestore
- id: "looker"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "FIRESTORE_PROJECT=$PROJECT_ID"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
- "LOOKER_VERIFY_SSL=$_LOOKER_VERIFY_SSL"
- "LOOKER_PROJECT=$_LOOKER_PROJECT"
- "LOOKER_LOCATION=$_LOOKER_LOCATION"
secretEnv:
[
"CLIENT_ID",
"LOOKER_BASE_URL",
"LOOKER_CLIENT_ID",
"LOOKER_CLIENT_SECRET",
]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Looker" \
looker \
looker
- id: "cloud-sql"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
secretEnv: ["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Cloud SQL Wait for Operation" \
cloudsql \
cloudsql
- id: "tidb"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "TIDB_DATABASE=$_DATABASE_NAME"
- "TIDB_HOST=$_TIDB_HOST"
- "TIDB_PORT=$_TIDB_PORT"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID", "TIDB_USER", "TIDB_PASS"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"TiDB" \
tidb \
tidbsql tidbexecutesql
- id: "firebird"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "FIREBIRD_DATABASE=$_FIREBIRD_DATABASE_NAME"
- "FIREBIRD_HOST=$_FIREBIRD_HOST"
- "FIREBIRD_PORT=$_FIREBIRD_PORT"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID", "FIREBIRD_USER", "FIREBIRD_PASS"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Firebird" \
firebird \
firebirdsql firebirdexecutesql
- id: "clickhouse"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "CLICKHOUSE_DATABASE=$_CLICKHOUSE_DATABASE"
- "CLICKHOUSE_PORT=$_CLICKHOUSE_PORT"
- "CLICKHOUSE_PROTOCOL=$_CLICKHOUSE_PROTOCOL"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLICKHOUSE_HOST", "CLICKHOUSE_USER", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"ClickHouse" \
clickhouse \
clickhouse
- id: "trino"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "TRINO_HOST=$_TRINO_HOST"
- "TRINO_PORT=$_TRINO_PORT"
- "TRINO_CATALOG=$_TRINO_CATALOG"
- "TRINO_SCHEMA=$_TRINO_SCHEMA"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID", "TRINO_USER"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Trino" \
trino \
trinosql trinoexecutesql
- id: "yugabytedb"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "YUGABYTEDB_DATABASE=$_YUGABYTEDB_DATABASE"
- "YUGABYTEDB_PORT=$_YUGABYTEDB_PORT"
- "YUGABYTEDB_LOADBALANCE=$_YUGABYTEDB_LOADBALANCE"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv:
["YUGABYTEDB_USER", "YUGABYTEDB_PASS", "YUGABYTEDB_HOST", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
./yugabytedb.test -test.v
- id: "cassandra"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID", "CASSANDRA_USER", "CASSANDRA_PASS", "CASSANDRA_HOST"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Cassandra" \
cassandra \
cassandra
- id: "oracle"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
- "ORACLE_SERVER_NAME=$_ORACLE_SERVER_NAME"
secretEnv: ["CLIENT_ID", "ORACLE_USER", "ORACLE_PASS", "ORACLE_HOST"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Oracle" \
oracle \
oracle
- id: "serverless-spark"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "SERVERLESS_SPARK_PROJECT=$PROJECT_ID"
- "SERVERLESS_SPARK_LOCATION=$_REGION"
secretEnv: ["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Serverless Spark" \
serverlessspark
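# Maps Secret Manager secret versions onto the env var names referenced by the
# secretEnv entries in the steps above.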
availableSecrets:
secretManager:
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_user/versions/latest
env: CLOUD_SQL_POSTGRES_USER
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_pass/versions/latest
env: CLOUD_SQL_POSTGRES_PASS
- versionName: projects/$PROJECT_ID/secrets/alloydb_pg_user/versions/latest
env: ALLOYDB_POSTGRES_USER
- versionName: projects/$PROJECT_ID/secrets/alloydb_pg_pass/versions/latest
env: ALLOYDB_POSTGRES_PASS
- versionName: projects/$PROJECT_ID/secrets/alloydb_ai_nl_user/versions/latest
env: ALLOYDB_AI_NL_USER
- versionName: projects/$PROJECT_ID/secrets/alloydb_ai_nl_pass/versions/latest
env: ALLOYDB_AI_NL_PASS
- versionName: projects/$PROJECT_ID/secrets/postgres_user/versions/latest
env: POSTGRES_USER
- versionName: projects/$PROJECT_ID/secrets/postgres_pass/versions/latest
env: POSTGRES_PASS
- versionName: projects/$PROJECT_ID/secrets/client_id/versions/latest
env: CLIENT_ID
- versionName: projects/$PROJECT_ID/secrets/neo4j_user/versions/latest
env: NEO4J_USER
- versionName: projects/$PROJECT_ID/secrets/neo4j_pass/versions/latest
env: NEO4J_PASS
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_mssql_user/versions/latest
env: CLOUD_SQL_MSSQL_USER
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_mssql_pass/versions/latest
env: CLOUD_SQL_MSSQL_PASS
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_mysql_user/versions/latest
env: CLOUD_SQL_MYSQL_USER
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_mysql_pass/versions/latest
env: CLOUD_SQL_MYSQL_PASS
- versionName: projects/$PROJECT_ID/secrets/mysql_user/versions/latest
env: MYSQL_USER
- versionName: projects/$PROJECT_ID/secrets/mysql_pass/versions/latest
env: MYSQL_PASS
- versionName: projects/$PROJECT_ID/secrets/mssql_user/versions/latest
env: MSSQL_USER
- versionName: projects/$PROJECT_ID/secrets/mssql_pass/versions/latest
env: MSSQL_PASS
- versionName: projects/$PROJECT_ID/secrets/couchbase_connection/versions/latest
env: COUCHBASE_CONNECTION
- versionName: projects/$PROJECT_ID/secrets/couchbase_user/versions/latest
env: COUCHBASE_USER
- versionName: projects/$PROJECT_ID/secrets/couchbase_pass/versions/latest
env: COUCHBASE_PASS
- versionName: projects/$PROJECT_ID/secrets/memorystore_redis_address/versions/latest
env: REDIS_ADDRESS
- versionName: projects/$PROJECT_ID/secrets/memorystore_redis_pass/versions/latest
env: REDIS_PASS
- versionName: projects/$PROJECT_ID/secrets/memorystore_valkey_address/versions/latest
env: VALKEY_ADDRESS
- versionName: projects/$PROJECT_ID/secrets/looker_base_url/versions/latest
env: LOOKER_BASE_URL
- versionName: projects/$PROJECT_ID/secrets/looker_client_id/versions/latest
env: LOOKER_CLIENT_ID
- versionName: projects/$PROJECT_ID/secrets/looker_client_secret/versions/latest
env: LOOKER_CLIENT_SECRET
- versionName: projects/$PROJECT_ID/secrets/tidb_user/versions/latest
env: TIDB_USER
- versionName: projects/$PROJECT_ID/secrets/tidb_pass/versions/latest
env: TIDB_PASS
- versionName: projects/$PROJECT_ID/secrets/clickhouse_host/versions/latest
env: CLICKHOUSE_HOST
- versionName: projects/$PROJECT_ID/secrets/clickhouse_user/versions/latest
env: CLICKHOUSE_USER
- versionName: projects/$PROJECT_ID/secrets/firebird_user/versions/latest
env: FIREBIRD_USER
- versionName: projects/$PROJECT_ID/secrets/firebird_pass/versions/latest
env: FIREBIRD_PASS
- versionName: projects/$PROJECT_ID/secrets/trino_user/versions/latest
env: TRINO_USER
- versionName: projects/$PROJECT_ID/secrets/oceanbase_host/versions/latest
env: OCEANBASE_HOST
- versionName: projects/$PROJECT_ID/secrets/oceanbase_user/versions/latest
env: OCEANBASE_USER
- versionName: projects/$PROJECT_ID/secrets/oceanbase_pass/versions/latest
env: OCEANBASE_PASSWORD
- versionName: projects/$PROJECT_ID/secrets/yugabytedb_host/versions/latest
env: YUGABYTEDB_HOST
- versionName: projects/$PROJECT_ID/secrets/yugabytedb_user/versions/latest
env: YUGABYTEDB_USER
- versionName: projects/$PROJECT_ID/secrets/yugabytedb_pass/versions/latest
env: YUGABYTEDB_PASS
- versionName: projects/$PROJECT_ID/secrets/cassandra_user/versions/latest
env: CASSANDRA_USER
- versionName: projects/$PROJECT_ID/secrets/cassandra_pass/versions/latest
env: CASSANDRA_PASS
- versionName: projects/$PROJECT_ID/secrets/cassandra_host/versions/latest
env: CASSANDRA_HOST
- versionName: projects/$PROJECT_ID/secrets/oracle_user/versions/latest
env: ORACLE_USER
- versionName: projects/$PROJECT_ID/secrets/oracle_pass/versions/latest
env: ORACLE_PASS
- versionName: projects/$PROJECT_ID/secrets/oracle_host/versions/latest
env: ORACLE_HOST
options:
logging: CLOUD_LOGGING_ONLY
automapSubstitutions: true
substitutionOption: "ALLOW_LOOSE"
dynamicSubstitutions: true
pool:
name: projects/$PROJECT_ID/locations/us-central1/workerPools/integration-testing # Necessary for VPC network connection
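# Default values for the $_ substitutions referenced in the steps above;
# override them at build time if needed.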
substitutions:
_DATABASE_NAME: test_database
_FIREBIRD_DATABASE_NAME: /firebird/test_database.fdb
_REGION: "us-central1"
_CLOUD_SQL_POSTGRES_INSTANCE: "cloud-sql-pg-testing"
_ALLOYDB_POSTGRES_CLUSTER: "alloydb-pg-testing"
_ALLOYDB_POSTGRES_INSTANCE: "alloydb-pg-testing-instance"
_ALLOYDB_AI_NL_CLUSTER: "alloydb-ai-nl-testing"
_ALLOYDB_AI_NL_INSTANCE: "alloydb-ai-nl-testing-instance"
_BIGTABLE_INSTANCE: "bigtable-testing-instance"
_POSTGRES_HOST: 127.0.0.1
_POSTGRES_PORT: "5432"
_SPANNER_INSTANCE: "spanner-testing"
_NEO4J_DATABASE: "neo4j"
_CLOUD_SQL_MSSQL_INSTANCE: "cloud-sql-mssql-testing"
_CLOUD_SQL_MYSQL_INSTANCE: "cloud-sql-mysql-testing"
_MYSQL_HOST: 127.0.0.1
_MYSQL_PORT: "3306"
_MSSQL_HOST: 127.0.0.1
_MSSQL_PORT: "1433"
_DGRAPHURL: "https://play.dgraph.io"
_COUCHBASE_BUCKET: "couchbase-bucket"
_COUCHBASE_SCOPE: "couchbase-scope"
_LOOKER_LOCATION: "us"
_LOOKER_PROJECT: "149671255749"
_LOOKER_VERIFY_SSL: "true"
_TIDB_HOST: 127.0.0.1
_TIDB_PORT: "4000"
_CLICKHOUSE_DATABASE: "default"
_CLICKHOUSE_PORT: "8123"
_CLICKHOUSE_PROTOCOL: "http"
_FIREBIRD_HOST: 127.0.0.1
_FIREBIRD_PORT: "3050"
_TRINO_HOST: 127.0.0.1
_TRINO_PORT: "8080"
_TRINO_CATALOG: "memory"
_TRINO_SCHEMA: "default"
_OCEANBASE_PORT: "2883"
_OCEANBASE_DATABASE: "oceanbase"
_YUGABYTEDB_DATABASE: "yugabyte"
_YUGABYTEDB_PORT: "5433"
_YUGABYTEDB_LOADBALANCE: "false"
_ORACLE_SERVER_NAME: "FREEPDB1"
```
--------------------------------------------------------------------------------
/docs/en/reference/prebuilt-tools.md:
--------------------------------------------------------------------------------
```markdown
---
title: "Prebuilt Tools"
type: docs
weight: 1
description: >
This page lists all the prebuilt tools available.
---
Prebuilt tools are reusable, pre-packaged toolsets that are designed to extend
the capabilities of agents. These tools are built to be generic and adaptable,
allowing developers to interact with and take action on databases.
See the [Connect from your IDE](../how-to/connect-ide/_index.md) guide for
details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
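Each section below lists a toolset's `--prebuilt` value, the environment
variables it reads, and the permissions it needs. As a minimal, hedged sketch
(every value below is a placeholder, and the binary path is assumed to be the
current directory), launching Toolbox with the `postgres` toolset documented
later on this page might look like:
```python
# A minimal sketch, not the canonical workflow: launch Toolbox with the
# `postgres` prebuilt toolset. All connection values are placeholders.
import os
import subprocess

env = dict(
    os.environ,
    POSTGRES_HOST="127.0.0.1",
    POSTGRES_PORT="5432",
    POSTGRES_DATABASE="mydb",       # placeholder database name
    POSTGRES_USER="myuser",         # placeholder credentials
    POSTGRES_PASSWORD="mypassword",
)
# Assumes the toolbox binary sits in the current directory.
subprocess.run(["./toolbox", "--prebuilt", "postgres"], env=env, check=True)
```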
## AlloyDB Postgres
* `--prebuilt` value: `alloydb-postgres`
* **Environment Variables:**
* `ALLOYDB_POSTGRES_PROJECT`: The GCP project ID.
* `ALLOYDB_POSTGRES_REGION`: The region of your AlloyDB instance.
* `ALLOYDB_POSTGRES_CLUSTER`: The ID of your AlloyDB cluster.
* `ALLOYDB_POSTGRES_INSTANCE`: The ID of your AlloyDB instance.
* `ALLOYDB_POSTGRES_DATABASE`: The name of the database to connect to.
* `ALLOYDB_POSTGRES_USER`: (Optional) The database username. Defaults to
IAM authentication if unspecified.
* `ALLOYDB_POSTGRES_PASSWORD`: (Optional) The password for the database
user. Defaults to IAM authentication if unspecified.
* `ALLOYDB_POSTGRES_IP_TYPE`: (Optional) The IP type i.e. "Public" or
"Private" (Default: Public).
* **Permissions:**
* **AlloyDB Client** (`roles/alloydb.client`) to connect to the instance.
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to
execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
* `list_autovacuum_configurations`: Lists autovacuum configurations in the
database.
* `list_memory_configurations`: Lists memory-related configurations in the
database.
* `list_top_bloated_tables`: Lists top bloated tables in the database.
* `list_replication_slots`: Lists replication slots in the database.
* `list_invalid_indexes`: Lists invalid indexes in the database.
* `get_query_plan`: Generates the execution plan of a statement.
## AlloyDB Postgres Admin
* `--prebuilt` value: `alloydb-postgres-admin`
* **Permissions:**
* **AlloyDB Viewer** (`roles/alloydb.viewer`) is required for `list` and
`get` tools.
* **AlloyDB Admin** (`roles/alloydb.admin`) is required for `create` tools.
* **Tools:**
* `create_cluster`: Creates a new AlloyDB cluster.
* `list_clusters`: Lists all AlloyDB clusters in a project.
* `get_cluster`: Gets information about a specified AlloyDB cluster.
* `create_instance`: Creates a new AlloyDB instance within a cluster.
* `list_instances`: Lists all instances within an AlloyDB cluster.
* `get_instance`: Gets information about a specified AlloyDB instance.
* `create_user`: Creates a new database user in an AlloyDB cluster.
* `list_users`: Lists all database users within an AlloyDB cluster.
* `get_user`: Gets information about a specified database user in an
AlloyDB cluster.
* `wait_for_operation`: Polls the operations API to track the status of
long-running operations.
## AlloyDB Postgres Observability
* `--prebuilt` value: `alloydb-postgres-observability`
* **Permissions:**
* **Monitoring Viewer** (`roles/monitoring.viewer`) is required on the
project to view monitoring data.
* **Tools:**
* `get_system_metrics`: Fetches system level cloud monitoring data
(timeseries metrics) for an AlloyDB instance using a PromQL query.
* `get_query_metrics`: Fetches query level cloud monitoring data
(timeseries metrics) for queries running in an AlloyDB instance using a
PromQL query.
## BigQuery
* `--prebuilt` value: `bigquery`
* **Environment Variables:**
* `BIGQUERY_PROJECT`: The GCP project ID.
* `BIGQUERY_LOCATION`: (Optional) The dataset location.
* `BIGQUERY_USE_CLIENT_OAUTH`: (Optional) If `true`, forwards the client's
OAuth access token for authentication. Defaults to `false`.
* **Permissions:**
* **BigQuery User** (`roles/bigquery.user`) to execute queries and view
metadata.
* **BigQuery Metadata Viewer** (`roles/bigquery.metadataViewer`) to view
all datasets.
* **BigQuery Data Editor** (`roles/bigquery.dataEditor`) to create or
modify datasets and tables.
* **Gemini for Google Cloud** (`roles/cloudaicompanion.user`) to use the
conversational analytics API.
* **Tools:**
* `analyze_contribution`: Use this tool to perform contribution analysis,
also called key driver analysis.
* `ask_data_insights`: Use this tool to perform data analysis, get
insights, or answer complex questions about the contents of specific
BigQuery tables. For more information on required roles, API setup, and
IAM configuration, see the setup and authentication section of the
[Conversational Analytics API
documentation](https://cloud.google.com/gemini/docs/conversational-analytics-api/overview).
* `execute_sql`: Executes a SQL statement.
* `forecast`: Use this tool to forecast time series data.
* `get_dataset_info`: Gets dataset metadata.
* `get_table_info`: Gets table metadata.
* `list_dataset_ids`: Lists datasets.
* `list_table_ids`: Lists tables.
* `search_catalog`: Search for entries based on the provided query.
## Cloud SQL for MySQL
* `--prebuilt` value: `cloud-sql-mysql`
* **Environment Variables:**
* `CLOUD_SQL_MYSQL_PROJECT`: The GCP project ID.
* `CLOUD_SQL_MYSQL_REGION`: The region of your Cloud SQL instance.
* `CLOUD_SQL_MYSQL_INSTANCE`: The ID of your Cloud SQL instance.
* `CLOUD_SQL_MYSQL_DATABASE`: The name of the database to connect to.
* `CLOUD_SQL_MYSQL_USER`: The database username.
* `CLOUD_SQL_MYSQL_PASSWORD`: The password for the database user.
* `CLOUD_SQL_MYSQL_IP_TYPE`: (Optional) The IP type i.e. "Public" or
"Private" (Default: Public).
* **Permissions:**
* **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the
instance.
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to
execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
* `get_query_plan`: Provides information about how MySQL executes a SQL
statement.
* `list_active_queries`: Lists ongoing queries.
* `list_tables_missing_unique_indexes`: Looks for tables that do not have
a primary or unique key constraint.
* `list_table_fragmentation`: Displays table fragmentation in MySQL.
## Cloud SQL for MySQL Observability
* `--prebuilt` value: `cloud-sql-mysql-observability`
* **Permissions:**
* **Monitoring Viewer** (`roles/monitoring.viewer`) is required on the
project to view monitoring data.
* **Tools:**
* `get_system_metrics`: Fetches system level cloud monitoring data
(timeseries metrics) for a MySQL instance using a PromQL query.
* `get_query_metrics`: Fetches query level cloud monitoring data
(timeseries metrics) for queries running in a MySQL instance using a
PromQL query.
## Cloud SQL for MySQL Admin
* `--prebuilt` value: `cloud-sql-mysql-admin`
* **Permissions:**
* **Cloud SQL Viewer** (`roles/cloudsql.viewer`): Provides read-only
access to resources.
* `get_instance`
* `list_instances`
* `list_databases`
* `wait_for_operation`
* **Cloud SQL Editor** (`roles/cloudsql.editor`): Provides permissions to
manage existing resources.
* All `viewer` tools
* `create_database`
* **Cloud SQL Admin** (`roles/cloudsql.admin`): Provides full control over
all resources.
* All `editor` and `viewer` tools
* `create_instance`
* `create_user`
* **Tools:**
* `create_instance`: Creates a new Cloud SQL for MySQL instance.
* `get_instance`: Gets information about a Cloud SQL instance.
* `list_instances`: Lists Cloud SQL instances in a project.
* `create_database`: Creates a new database in a Cloud SQL instance.
* `list_databases`: Lists all databases for a Cloud SQL instance.
* `create_user`: Creates a new user in a Cloud SQL instance.
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
## Cloud SQL for PostgreSQL
* `--prebuilt` value: `cloud-sql-postgres`
* **Environment Variables:**
* `CLOUD_SQL_POSTGRES_PROJECT`: The GCP project ID.
* `CLOUD_SQL_POSTGRES_REGION`: The region of your Cloud SQL instance.
* `CLOUD_SQL_POSTGRES_INSTANCE`: The ID of your Cloud SQL instance.
* `CLOUD_SQL_POSTGRES_DATABASE`: The name of the database to connect to.
* `CLOUD_SQL_POSTGRES_USER`: (Optional) The database username. Defaults to
IAM authentication if unspecified.
* `CLOUD_SQL_POSTGRES_PASSWORD`: (Optional) The password for the database
user. Defaults to IAM authentication if unspecified.
* `CLOUD_SQL_POSTGRES_IP_TYPE`: (Optional) The IP type i.e. "Public" or
"Private" (Default: Public).
* **Permissions:**
* **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the
instance.
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to
execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
* `list_autovacuum_configurations`: Lists autovacuum configurations in the
database.
* `list_memory_configurations`: Lists memory-related configurations in the
database.
* `list_top_bloated_tables`: Lists top bloated tables in the database.
* `list_replication_slots`: Lists replication slots in the database.
* `list_invalid_indexes`: Lists invalid indexes in the database.
* `get_query_plan`: Generates the execution plan of a statement.
## Cloud SQL for PostgreSQL Observability
* `--prebuilt` value: `cloud-sql-postgres-observability`
* **Permissions:**
* **Monitoring Viewer** (`roles/monitoring.viewer`) is required on the
project to view monitoring data.
* **Tools:**
* `get_system_metrics`: Fetches system level cloud monitoring data
(timeseries metrics) for a Postgres instance using a PromQL query.
* `get_query_metrics`: Fetches query level cloud monitoring data
(timeseries metrics) for queries running in a Postgres instance using a
PromQL query.
## Cloud SQL for PostgreSQL Admin
* `--prebuilt` value: `cloud-sql-postgres-admin`
* **Permissions:**
* **Cloud SQL Viewer** (`roles/cloudsql.viewer`): Provides read-only
access to resources.
* `get_instance`
* `list_instances`
* `list_databases`
* `wait_for_operation`
* **Cloud SQL Editor** (`roles/cloudsql.editor`): Provides permissions to
manage existing resources.
* All `viewer` tools
* `create_database`
* **Cloud SQL Admin** (`roles/cloudsql.admin`): Provides full control over
all resources.
* All `editor` and `viewer` tools
* `create_instance`
* `create_user`
* **Tools:**
* `create_instance`: Creates a new Cloud SQL for PostgreSQL instance.
* `get_instance`: Gets information about a Cloud SQL instance.
* `list_instances`: Lists Cloud SQL instances in a project.
* `create_database`: Creates a new database in a Cloud SQL instance.
* `list_databases`: Lists all databases for a Cloud SQL instance.
* `create_user`: Creates a new user in a Cloud SQL instance.
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
## Cloud SQL for SQL Server
* `--prebuilt` value: `cloud-sql-mssql`
* **Environment Variables:**
* `CLOUD_SQL_MSSQL_PROJECT`: The GCP project ID.
* `CLOUD_SQL_MSSQL_REGION`: The region of your Cloud SQL instance.
* `CLOUD_SQL_MSSQL_INSTANCE`: The ID of your Cloud SQL instance.
* `CLOUD_SQL_MSSQL_DATABASE`: The name of the database to connect to.
* `CLOUD_SQL_MSSQL_IP_ADDRESS`: The IP address of the Cloud SQL instance.
* `CLOUD_SQL_MSSQL_USER`: The database username.
* `CLOUD_SQL_MSSQL_PASSWORD`: The password for the database user.
* `CLOUD_SQL_MSSQL_IP_TYPE`: (Optional) The IP type i.e. "Public" or
"Private" (Default: Public).
* **Permissions:**
* **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the
instance.
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to
execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
## Cloud SQL for SQL Server Observability
* `--prebuilt` value: `cloud-sql-mssql-observability`
* **Permissions:**
* **Monitoring Viewer** (`roles/monitoring.viewer`) is required on the
project to view monitoring data.
* **Tools:**
* `get_system_metrics`: Fetches system level cloud monitoring data
(timeseries metrics) for a SQL Server instance using a PromQL query.
## Cloud SQL for SQL Server Admin
* `--prebuilt` value: `cloud-sql-mssql-admin`
* **Permissions:**
* **Cloud SQL Viewer** (`roles/cloudsql.viewer`): Provides read-only
access to resources.
* `get_instance`
* `list_instances`
* `list_databases`
* `wait_for_operation`
* **Cloud SQL Editor** (`roles/cloudsql.editor`): Provides permissions to
manage existing resources.
* All `viewer` tools
* `create_database`
* **Cloud SQL Admin** (`roles/cloudsql.admin`): Provides full control over
all resources.
* All `editor` and `viewer` tools
* `create_instance`
* `create_user`
* **Tools:**
* `create_instance`: Creates a new Cloud SQL for SQL Server instance.
* `get_instance`: Gets information about a Cloud SQL instance.
* `list_instances`: Lists Cloud SQL instances in a project.
* `create_database`: Creates a new database in a Cloud SQL instance.
* `list_databases`: Lists all databases for a Cloud SQL instance.
* `create_user`: Creates a new user in a Cloud SQL instance.
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
## Dataplex
* `--prebuilt` value: `dataplex`
* **Environment Variables:**
* `DATAPLEX_PROJECT`: The GCP project ID.
* **Permissions:**
* **Dataplex Viewer** (`roles/dataplex.viewer`) to search and look up
entries.
* **Dataplex Editor** (`roles/dataplex.editor`) to modify entries.
* **Tools:**
* `dataplex_search_entries`: Searches for entries in Dataplex Catalog.
* `dataplex_lookup_entry`: Retrieves a specific entry from Dataplex
Catalog.
* `dataplex_search_aspect_types`: Finds aspect types relevant to the
query.
## Firestore
* `--prebuilt` value: `firestore`
* **Environment Variables:**
* `FIRESTORE_PROJECT`: The GCP project ID.
* `FIRESTORE_DATABASE`: (Optional) The Firestore database ID. Defaults to
"(default)".
* **Permissions:**
* **Cloud Datastore User** (`roles/datastore.user`) to get documents, list
collections, and query collections.
* **Firebase Rules Viewer** (`roles/firebaserules.viewer`) to get and
validate Firestore rules.
* **Tools:**
* `get_documents`: Gets multiple documents from Firestore by their paths.
* `add_documents`: Adds a new document to a Firestore collection.
* `update_document`: Updates an existing document in Firestore.
* `list_collections`: Lists Firestore collections for a given parent path.
* `delete_documents`: Deletes multiple documents from Firestore.
* `query_collection`: Retrieves one or more Firestore documents from a
collection.
* `get_rules`: Retrieves the active Firestore security rules.
* `validate_rules`: Checks the provided Firestore Rules source for syntax
and validation errors.
## Looker
* `--prebuilt` value: `looker`
* **Environment Variables:**
* `LOOKER_BASE_URL`: The URL of your Looker instance.
* `LOOKER_CLIENT_ID`: The client ID for the Looker API.
* `LOOKER_CLIENT_SECRET`: The client secret for the Looker API.
* `LOOKER_VERIFY_SSL`: Whether to verify SSL certificates.
* `LOOKER_USE_CLIENT_OAUTH`: Whether to use OAuth for authentication.
* `LOOKER_SHOW_HIDDEN_MODELS`: Whether to show hidden models.
* `LOOKER_SHOW_HIDDEN_EXPLORES`: Whether to show hidden explores.
* `LOOKER_SHOW_HIDDEN_FIELDS`: Whether to show hidden fields.
* **Permissions:**
* A Looker account with permissions to access the desired models,
explores, and data is required.
* **Tools:**
* `get_models`: Retrieves the list of LookML models.
* `get_explores`: Retrieves the list of explores in a model.
* `get_dimensions`: Retrieves the list of dimensions in an explore.
* `get_measures`: Retrieves the list of measures in an explore.
* `get_filters`: Retrieves the list of filters in an explore.
* `get_parameters`: Retrieves the list of parameters in an explore.
* `query`: Runs a query against the LookML model.
* `query_sql`: Generates the SQL for a query.
* `query_url`: Generates a URL for a query in Looker.
* `get_looks`: Searches for saved looks.
* `run_look`: Runs the query associated with a look.
* `make_look`: Creates a new look.
* `get_dashboards`: Searches for saved dashboards.
* `make_dashboard`: Creates a new dashboard.
* `add_dashboard_element`: Adds a tile to a dashboard.
* `health_pulse`: Tests the health of a Looker instance.
* `health_analyze`: Analyzes the LookML usage of a Looker instance.
* `health_vacuum`: Suggests LookML elements that can be removed.
## Looker Conversational Analytics
* `--prebuilt` value: `looker-conversational-analytics`
* **Environment Variables:**
* `LOOKER_BASE_URL`: The URL of your Looker instance.
* `LOOKER_CLIENT_ID`: The client ID for the Looker API.
* `LOOKER_CLIENT_SECRET`: The client secret for the Looker API.
* `LOOKER_VERIFY_SSL`: Whether to verify SSL certificates.
* `LOOKER_USE_CLIENT_OAUTH`: Whether to use OAuth for authentication.
* `LOOKER_PROJECT`: The GCP Project to use for Conversational Analytics.
* `LOOKER_LOCATION`: The GCP Location to use for Conversational Analytics.
* **Permissions:**
* A Looker account with permissions to access the desired models,
explores, and data is required.
* **Looker Instance User** (`roles/looker.instanceUser`): IAM role to
access Looker.
* **Gemini for Google Cloud User** (`roles/cloudaicompanion.user`): IAM
role to access Conversational Analytics.
* **Gemini Data Analytics Stateless Chat User (Beta)**
(`roles/geminidataanalytics.dataAgentStatelessUser`): IAM role to
access Conversational Analytics.
* **Tools:**
* `ask_data_insights`: Ask a question of the data.
* `get_models`: Retrieves the list of LookML models.
* `get_explores`: Retrieves the list of explores in a model.
## Microsoft SQL Server
* `--prebuilt` value: `mssql`
* **Environment Variables:**
* `MSSQL_HOST`: The hostname or IP address of the SQL Server instance.
* `MSSQL_PORT`: The port number for the SQL Server instance.
* `MSSQL_DATABASE`: The name of the database to connect to.
* `MSSQL_USER`: The database username.
* `MSSQL_PASSWORD`: The password for the database user.
* **Permissions:**
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to
execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
## MySQL
* `--prebuilt` value: `mysql`
* **Environment Variables:**
* `MYSQL_HOST`: The hostname or IP address of the MySQL server.
* `MYSQL_PORT`: The port number for the MySQL server.
* `MYSQL_DATABASE`: The name of the database to connect to.
* `MYSQL_USER`: The database username.
* `MYSQL_PASSWORD`: The password for the database user.
* **Permissions:**
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to
execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
* `get_query_plan`: Provides information about how MySQL executes a SQL
statement.
* `list_active_queries`: Lists ongoing queries.
* `list_tables_missing_unique_indexes`: Looks for tables that do not have
a primary or unique key constraint.
* `list_table_fragmentation`: Displays table fragmentation in MySQL.
## OceanBase
* `--prebuilt` value: `oceanbase`
* **Environment Variables:**
* `OCEANBASE_HOST`: The hostname or IP address of the OceanBase server.
* `OCEANBASE_PORT`: The port number for the OceanBase server.
* `OCEANBASE_DATABASE`: The name of the database to connect to.
* `OCEANBASE_USER`: The database username.
* `OCEANBASE_PASSWORD`: The password for the database user.
* **Permissions:**
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to
execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
## PostgreSQL
* `--prebuilt` value: `postgres`
* **Environment Variables:**
* `POSTGRES_HOST`: The hostname or IP address of the PostgreSQL server.
* `POSTGRES_PORT`: The port number for the PostgreSQL server.
* `POSTGRES_DATABASE`: The name of the database to connect to.
* `POSTGRES_USER`: The database username.
* `POSTGRES_PASSWORD`: The password for the database user.
* `POSTGRES_QUERY_PARAMS`: (Optional) Raw query parameters to append to the
database connection string.
* **Permissions:**
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to
execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
* `list_autovacuum_configurations`: Lists autovacuum configurations in the
database.
* `list_memory_configurations`: Lists memory-related configurations in the
database.
* `list_top_bloated_tables`: Lists top bloated tables in the database.
* `list_replication_slots`: Lists replication slots in the database.
* `list_invalid_indexes`: Lists invalid indexes in the database.
* `get_query_plan`: Generates the execution plan of a statement.
## Google Cloud Serverless for Apache Spark
* `--prebuilt` value: `serverless-spark`
* **Environment Variables:**
* `SERVERLESS_SPARK_PROJECT`: The GCP project ID.
* `SERVERLESS_SPARK_LOCATION`: The GCP Location.
* **Permissions:**
* **Dataproc Serverless Viewer** (`roles/dataproc.serverlessViewer`) to
view serverless batches.
* **Dataproc Serverless Editor** (`roles/dataproc.serverlessEditor`) to
create and manage serverless batches.
* **Tools:**
* `list_batches`: Lists Spark batches.
## Spanner (GoogleSQL dialect)
* `--prebuilt` value: `spanner`
* **Environment Variables:**
* `SPANNER_PROJECT`: The GCP project ID.
* `SPANNER_INSTANCE`: The Spanner instance ID.
* `SPANNER_DATABASE`: The Spanner database ID.
* **Permissions:**
* **Cloud Spanner Database Reader** (`roles/spanner.databaseReader`) to
execute DQL queries and list tables.
* **Cloud Spanner Database User** (`roles/spanner.databaseUser`) to
execute DML queries.
* **Tools:**
* `execute_sql`: Executes a DML SQL query.
* `execute_sql_dql`: Executes a DQL SQL query.
* `list_tables`: Lists tables in the database.
## Spanner (PostgreSQL dialect)
* `--prebuilt` value: `spanner-postgres`
* **Environment Variables:**
* `SPANNER_PROJECT`: The GCP project ID.
* `SPANNER_INSTANCE`: The Spanner instance ID.
* `SPANNER_DATABASE`: The Spanner database ID.
* **Permissions:**
* **Cloud Spanner Database Reader** (`roles/spanner.databaseReader`) to
execute DQL queries and list tables.
* **Cloud Spanner Database User** (`roles/spanner.databaseUser`) to
execute DML queries.
* **Tools:**
* `execute_sql`: Executes a DML SQL query using the PostgreSQL interface
for Spanner.
* `execute_sql_dql`: Executes a DQL SQL query using the PostgreSQL
interface for Spanner.
* `list_tables`: Lists tables in the database.
## SQLite
* `--prebuilt` value: `sqlite`
* **Environment Variables:**
* `SQLITE_DATABASE`: The path to the SQLite database file (e.g.,
`./sample.db`).
* **Permissions:**
* File system read/write permissions for the specified database file.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
## Neo4j
* `--prebuilt` value: `neo4j`
* **Environment Variables:**
* `NEO4J_URI`: The URI of the Neo4j instance (e.g.,
`bolt://localhost:7687`).
* `NEO4J_DATABASE`: The name of the Neo4j database to connect to.
* `NEO4J_USERNAME`: The username for the Neo4j instance.
* `NEO4J_PASSWORD`: The password for the Neo4j instance.
* **Permissions:**
* **Database-level permissions** are required to execute Cypher queries.
* **Tools:**
* `execute_cypher`: Executes a Cypher query.
* `get_schema`: Retrieves the schema of the Neo4j database.
```
--------------------------------------------------------------------------------
/docs/en/samples/bigquery/local_quickstart.md:
--------------------------------------------------------------------------------
```markdown
---
title: "Quickstart (Local with BigQuery)"
type: docs
weight: 1
description: >
How to get started running Toolbox locally with Python, BigQuery, and
LangGraph, LlamaIndex, or ADK.
---
[Open in Colab](https://colab.research.google.com/github/googleapis/genai-toolbox/blob/main/docs/en/samples/bigquery/colab_quickstart_bigquery.ipynb)
## Before you begin
This guide assumes you have already done the following:
1. Installed [Python 3.9+][install-python] (including [pip][install-pip] and
your preferred virtual environment tool for managing dependencies e.g.
[venv][install-venv]).
1. Installed and configured the [Google Cloud SDK (gcloud CLI)][install-gcloud].
1. Authenticated with Google Cloud for Application Default Credentials (ADC):
```bash
gcloud auth login --update-adc
```
1. Set your default Google Cloud project (replace `YOUR_PROJECT_ID` with your
actual project ID):
```bash
gcloud config set project YOUR_PROJECT_ID
export GOOGLE_CLOUD_PROJECT=YOUR_PROJECT_ID
```
Toolbox and the client libraries will use this project for BigQuery, unless
overridden in configurations.
1. [Enabled the BigQuery API][enable-bq-api] in your Google Cloud project.
1. Installed the BigQuery client library for Python:
```bash
pip install google-cloud-bigquery
```
1. Completed setup for usage with an LLM, such as
{{< tabpane text=true persist=header >}}
{{% tab header="Core" lang="en" %}}
- [langchain-vertexai](https://python.langchain.com/docs/integrations/llms/google_vertex_ai_palm/#setup)
package.
- [langchain-google-genai](https://python.langchain.com/docs/integrations/chat/google_generative_ai/#setup)
package.
- [langchain-anthropic](https://python.langchain.com/docs/integrations/chat/anthropic/#setup)
package.
{{% /tab %}}
{{% tab header="LangChain" lang="en" %}}
- [langchain-vertexai](https://python.langchain.com/docs/integrations/llms/google_vertex_ai_palm/#setup)
package.
- [langchain-google-genai](https://python.langchain.com/docs/integrations/chat/google_generative_ai/#setup)
package.
- [langchain-anthropic](https://python.langchain.com/docs/integrations/chat/anthropic/#setup)
package.
{{% /tab %}}
{{% tab header="LlamaIndex" lang="en" %}}
- [llama-index-llms-google-genai](https://pypi.org/project/llama-index-llms-google-genai/)
package.
- [llama-index-llms-anthropic](https://docs.llamaindex.ai/en/stable/examples/llm/anthropic)
package.
{{% /tab %}}
{{% tab header="ADK" lang="en" %}}
- [google-adk](https://pypi.org/project/google-adk/) package.
{{% /tab %}}
{{< /tabpane >}}
[install-python]: https://wiki.python.org/moin/BeginnersGuide/Download
[install-pip]: https://pip.pypa.io/en/stable/installation/
[install-venv]:
https://packaging.python.org/en/latest/tutorials/installing-packages/#creating-virtual-environments
[install-gcloud]: https://cloud.google.com/sdk/docs/install
[enable-bq-api]:
https://cloud.google.com/bigquery/docs/quickstarts/query-public-dataset-console#before-you-begin
## Step 1: Set up your BigQuery Dataset and Table
In this section, we will create a BigQuery dataset and a table, then insert some
data that needs to be accessed by our agent. BigQuery operations are performed
against your configured Google Cloud project.
1. Create a new BigQuery dataset (replace `YOUR_DATASET_NAME` with your desired
dataset name, e.g., `toolbox_ds`, and optionally specify a location like `US`
or `EU`):
```bash
export BQ_DATASET_NAME="YOUR_DATASET_NAME" # e.g., toolbox_ds
export BQ_LOCATION="US" # e.g., US, EU, asia-northeast1
bq --location=$BQ_LOCATION mk $BQ_DATASET_NAME
```
You can also do this through the [Google Cloud
Console](https://console.cloud.google.com/bigquery).
{{< notice tip >}}
For a real application, ensure that the service account or user running Toolbox
has the necessary IAM permissions (e.g., BigQuery Data Editor, BigQuery User)
on the dataset or project. For this local quickstart with user credentials,
your own permissions will apply.
{{< /notice >}}
1. Next, define the `hotels` table in your new dataset so it can be queried
with the `bq query` command. First, create a file named
`create_hotels_table.sql` with the following content:
```sql
CREATE TABLE IF NOT EXISTS `YOUR_PROJECT_ID.YOUR_DATASET_NAME.hotels` (
id INT64 NOT NULL,
name STRING NOT NULL,
location STRING NOT NULL,
price_tier STRING NOT NULL,
checkin_date DATE NOT NULL,
checkout_date DATE NOT NULL,
booked BOOLEAN NOT NULL
);
```
> **Note:** Replace `YOUR_PROJECT_ID` and `YOUR_DATASET_NAME` in the SQL
> with your actual project ID and dataset name.
Then run the command below to execute the SQL query:
```bash
bq query --project_id=$GOOGLE_CLOUD_PROJECT --dataset_id=$BQ_DATASET_NAME --use_legacy_sql=false < create_hotels_table.sql
```
1. Next, populate the hotels table with some initial data. To do this, create a
file named `insert_hotels_data.sql` and add the following SQL INSERT
statement to it.
```sql
INSERT INTO `YOUR_PROJECT_ID.YOUR_DATASET_NAME.hotels` (id, name, location, price_tier, checkin_date, checkout_date, booked)
VALUES
(1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-20', '2024-04-22', FALSE),
(2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', FALSE),
(3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', FALSE),
(4, 'Radisson Blu Lucerne', 'Lucerne', 'Midscale', '2024-04-05', '2024-04-24', FALSE),
(5, 'Best Western Bern', 'Bern', 'Upper Midscale', '2024-04-01', '2024-04-23', FALSE),
(6, 'InterContinental Geneva', 'Geneva', 'Luxury', '2024-04-23', '2024-04-28', FALSE),
(7, 'Sheraton Zurich', 'Zurich', 'Upper Upscale', '2024-04-02', '2024-04-27', FALSE),
(8, 'Holiday Inn Basel', 'Basel', 'Upper Midscale', '2024-04-09', '2024-04-24', FALSE),
(9, 'Courtyard Zurich', 'Zurich', 'Upscale', '2024-04-03', '2024-04-13', FALSE),
(10, 'Comfort Inn Bern', 'Bern', 'Midscale', '2024-04-04', '2024-04-16', FALSE);
```
> **Note:** Replace `YOUR_PROJECT_ID` and `YOUR_DATASET_NAME` in the SQL
> with your actual project ID and dataset name.
Then run the command below to execute the SQL query:
```bash
bq query --project_id=$GOOGLE_CLOUD_PROJECT --dataset_id=$BQ_DATASET_NAME --use_legacy_sql=false < insert_hotels_data.sql
```
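Optionally, you can sanity-check the inserted rows with the
`google-cloud-bigquery` client installed in the prerequisites. This is a
minimal sketch that assumes the `GOOGLE_CLOUD_PROJECT` and `BQ_DATASET_NAME`
environment variables exported above:
```python
# Optional sanity check: print the hotels inserted above.
import os

from google.cloud import bigquery

client = bigquery.Client(project=os.environ["GOOGLE_CLOUD_PROJECT"])
query = (
    "SELECT id, name, location FROM "
    f"`{os.environ['BQ_DATASET_NAME']}.hotels` ORDER BY id"
)
for row in client.query(query).result():
    print(row.id, row.name, row.location)
```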
## Step 2: Install and configure Toolbox
In this section, we will download Toolbox, configure our tools in a `tools.yaml`
to use BigQuery, and then run the Toolbox server.
1. Download the latest version of Toolbox as a binary:
{{< notice tip >}}
Select the
[correct binary](https://github.com/googleapis/genai-toolbox/releases)
corresponding to your OS and CPU architecture.
{{< /notice >}}
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Write the following into a `tools.yaml` file. You must replace the
`YOUR_PROJECT_ID` and `YOUR_DATASET_NAME` placeholders in the config with your
actual BigQuery project and dataset name. The `location` field is optional;
if not specified, it defaults to 'us'. The table name `hotels` is used
directly in the statements.
{{< notice tip >}}
Authentication with BigQuery is handled via Application Default Credentials
(ADC). Ensure you have run `gcloud auth application-default login`.
{{< /notice >}}
```yaml
sources:
my-bigquery-source:
kind: bigquery
project: YOUR_PROJECT_ID
location: us
tools:
search-hotels-by-name:
kind: bigquery-sql
source: my-bigquery-source
description: Search for hotels based on name.
parameters:
- name: name
type: string
description: The name of the hotel.
statement: SELECT * FROM `YOUR_DATASET_NAME.hotels` WHERE LOWER(name) LIKE LOWER(CONCAT('%', @name, '%'));
search-hotels-by-location:
kind: bigquery-sql
source: my-bigquery-source
description: Search for hotels based on location.
parameters:
- name: location
type: string
description: The location of the hotel.
statement: SELECT * FROM `YOUR_DATASET_NAME.hotels` WHERE LOWER(location) LIKE LOWER(CONCAT('%', @location, '%'));
book-hotel:
kind: bigquery-sql
source: my-bigquery-source
description: >-
Book a hotel by its ID. Returns NULL if the hotel is successfully booked; raises an error if not.
parameters:
- name: hotel_id
type: integer
description: The ID of the hotel to book.
statement: UPDATE `YOUR_DATASET_NAME.hotels` SET booked = TRUE WHERE id = @hotel_id;
update-hotel:
kind: bigquery-sql
source: my-bigquery-source
description: >-
Update a hotel's check-in and check-out dates by its ID. Returns a message indicating whether the hotel was successfully updated or not.
parameters:
- name: checkin_date
type: string
description: The new check-in date of the hotel.
- name: checkout_date
type: string
description: The new check-out date of the hotel.
- name: hotel_id
type: integer
description: The ID of the hotel to update.
statement: >-
UPDATE `YOUR_DATASET_NAME.hotels` SET checkin_date = PARSE_DATE('%Y-%m-%d', @checkin_date), checkout_date = PARSE_DATE('%Y-%m-%d', @checkout_date) WHERE id = @hotel_id;
cancel-hotel:
kind: bigquery-sql
source: my-bigquery-source
description: Cancel a hotel by its ID.
parameters:
- name: hotel_id
type: integer
description: The ID of the hotel to cancel.
statement: UPDATE `YOUR_DATASET_NAME.hotels` SET booked = FALSE WHERE id = @hotel_id;
```
**Important Note on `toolsets`**: The `tools.yaml` content above does not
include a `toolsets` section. The Python agent examples in Step 3 (e.g.,
`await toolbox_client.load_toolset("my-toolset")`) rely on a toolset named
`my-toolset`. To make those examples work, you will need to add a `toolsets`
section to your `tools.yaml` file, for example:
```yaml
# Add this to your tools.yaml if using load_toolset("my-toolset")
# Ensure it's at the same indentation level as 'sources:' and 'tools:'
toolsets:
my-toolset:
- search-hotels-by-name
- search-hotels-by-location
- book-hotel
- update-hotel
- cancel-hotel
```
Alternatively, you can modify the agent code to load tools individually
(e.g., using `await toolbox_client.load_tool("search-hotels-by-name")`).
For more info on tools, check out the [Resources](../../resources/) section
of the docs.
1. Run the Toolbox server, pointing to the `tools.yaml` file created earlier:
```bash
./toolbox --tools-file "tools.yaml"
```
{{< notice note >}}
Toolbox enables dynamic reloading by default. To disable, use the
`--disable-reload` flag.
{{< /notice >}}
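With the server running, you can optionally smoke-test a single tool before
building the full agent. A minimal sketch using the `toolbox-core` package
(installed in Step 3 below), assuming the server's default address of
`http://127.0.0.1:5000`:
```python
# Minimal smoke test: load one tool from the running Toolbox server and call it.
import asyncio

from toolbox_core import ToolboxClient

async def main():
    async with ToolboxClient("http://127.0.0.1:5000") as client:
        # "search-hotels-by-name" and its `name` parameter come from tools.yaml.
        search = await client.load_tool("search-hotels-by-name")
        print(await search(name="Basel"))

asyncio.run(main())
```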
## Step 3: Connect your agent to Toolbox
In this section, we will write and run an agent that will load the Tools
from Toolbox.
{{< notice tip >}} If you prefer to experiment within a Google Colab environment,
you can connect to a
[local runtime](https://research.google.com/colaboratory/local-runtimes.html).
{{< /notice >}}
1. In a new terminal, install the SDK package.
{{< tabpane persist=header >}}
{{< tab header="Core" lang="bash" >}}
pip install toolbox-core
{{< /tab >}}
{{< tab header="Langchain" lang="bash" >}}
pip install toolbox-langchain
{{< /tab >}}
{{< tab header="LlamaIndex" lang="bash" >}}
pip install toolbox-llamaindex
{{< /tab >}}
{{< tab header="ADK" lang="bash" >}}
pip install google-adk
{{< /tab >}}
{{< /tabpane >}}
1. Install other required dependencies:
{{< tabpane persist=header >}}
{{< tab header="Core" lang="bash" >}}
# TODO(developer): replace with correct package if needed
pip install langgraph langchain-google-vertexai
# pip install langchain-google-genai
# pip install langchain-anthropic
{{< /tab >}}
{{< tab header="Langchain" lang="bash" >}}
# TODO(developer): replace with correct package if needed
pip install langgraph langchain-google-vertexai
# pip install langchain-google-genai
# pip install langchain-anthropic
{{< /tab >}}
{{< tab header="LlamaIndex" lang="bash" >}}
# TODO(developer): replace with correct package if needed
pip install llama-index-llms-google-genai
# pip install llama-index-llms-anthropic
{{< /tab >}}
{{< tab header="ADK" lang="bash" >}}
pip install toolbox-core
{{< /tab >}}
{{< /tabpane >}}
1. Create a new file named `hotel_agent.py` and copy the following
code to create an agent:
{{< tabpane persist=header >}}
{{< tab header="Core" lang="python" >}}
import asyncio
from google import genai
from google.genai.types import (
Content,
FunctionDeclaration,
GenerateContentConfig,
Part,
Tool,
)
from toolbox_core import ToolboxClient
prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention the hotel id while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""
queries = [
"Find hotels in Basel with Basel in it's name.",
"Please book the hotel Hilton Basel for me.",
"This is too expensive. Please cancel it.",
"Please book Hyatt Regency for me",
"My check in dates for my booking would be from April 10, 2024 to April 19, 2024.",
]
async def run_application():
async with ToolboxClient("http://127.0.0.1:5000") as toolbox_client:
# The toolbox_tools list contains Python callables (functions/methods) designed for LLM tool-use
# integration. While this example uses Google's genai client, these callables can be adapted for
# various function-calling or agent frameworks. For easier integration with supported frameworks
# (https://github.com/googleapis/mcp-toolbox-python-sdk/tree/main/packages), use the
# provided wrapper packages, which handle framework-specific boilerplate.
toolbox_tools = await toolbox_client.load_toolset("my-toolset")
genai_client = genai.Client(
vertexai=True, project="project-id", location="us-central1"
)
genai_tools = [
Tool(
function_declarations=[
FunctionDeclaration.from_callable_with_api_option(callable=tool)
]
)
for tool in toolbox_tools
]
history = []
for query in queries:
user_prompt_content = Content(
role="user",
parts=[Part.from_text(text=query)],
)
history.append(user_prompt_content)
response = genai_client.models.generate_content(
model="gemini-2.0-flash-001",
contents=history,
config=GenerateContentConfig(
system_instruction=prompt,
tools=genai_tools,
),
)
history.append(response.candidates[0].content)
function_response_parts = []
for function_call in response.function_calls:
fn_name = function_call.name
# Tools in the toolset are sorted alphabetically by name, so the indexes
# below map to: 0=book-hotel, 1=cancel-hotel, 2=search-hotels-by-location,
# 3=search-hotels-by-name, 4=update-hotel.
if fn_name == "search-hotels-by-name":
function_result = await toolbox_tools[3](**function_call.args)
elif fn_name == "search-hotels-by-location":
function_result = await toolbox_tools[2](**function_call.args)
elif fn_name == "book-hotel":
function_result = await toolbox_tools[0](**function_call.args)
elif fn_name == "update-hotel":
function_result = await toolbox_tools[4](**function_call.args)
elif fn_name == "cancel-hotel":
function_result = await toolbox_tools[1](**function_call.args)
else:
raise ValueError("Function name not present.")
function_response = {"result": function_result}
function_response_part = Part.from_function_response(
name=function_call.name,
response=function_response,
)
function_response_parts.append(function_response_part)
if function_response_parts:
tool_response_content = Content(role="tool", parts=function_response_parts)
history.append(tool_response_content)
response2 = genai_client.models.generate_content(
model="gemini-2.0-flash-001",
contents=history,
config=GenerateContentConfig(
tools=genai_tools,
),
)
final_model_response_content = response2.candidates[0].content
history.append(final_model_response_content)
print(response2.text)
asyncio.run(run_application())
{{< /tab >}}
{{< tab header="LangChain" lang="python" >}}
import asyncio
from langgraph.prebuilt import create_react_agent
# TODO(developer): replace this with another import if needed
from langchain_google_vertexai import ChatVertexAI
# from langchain_google_genai import ChatGoogleGenerativeAI
# from langchain_anthropic import ChatAnthropic
from langgraph.checkpoint.memory import MemorySaver
from toolbox_langchain import ToolboxClient
prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""
queries = [
"Find hotels in Basel with Basel in its name.",
"Can you book the Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
]
async def main():
# TODO(developer): replace this with another model if needed
model = ChatVertexAI(model_name="gemini-2.0-flash-001")
# model = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001")
# model = ChatAnthropic(model="claude-3-5-sonnet-20240620")
# Load the tools from the Toolbox server
client = ToolboxClient("http://127.0.0.1:5000")
tools = await client.aload_toolset()
agent = create_react_agent(model, tools, checkpointer=MemorySaver())
config = {"configurable": {"thread_id": "thread-1"}}
for query in queries:
inputs = {"messages": [("user", prompt + query)]}
response = await agent.ainvoke(inputs, stream_mode="values", config=config)
print(response["messages"][-1].content)
asyncio.run(main())
{{< /tab >}}
{{< tab header="LlamaIndex" lang="python" >}}
import asyncio
import os
from llama_index.core.agent.workflow import AgentWorkflow
from llama_index.core.workflow import Context
# TODO(developer): replace this with another import if needed
from llama_index.llms.google_genai import GoogleGenAI
# from llama_index.llms.anthropic import Anthropic
from toolbox_llamaindex import ToolboxClient
prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""
queries = [
"Find hotels in Basel with Basel in it's name.",
"Can you book the Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
]
async def main():
# TODO(developer): replace this with another model if needed
llm = GoogleGenAI(
model="gemini-2.0-flash-001",
vertexai_config={"location": "us-central1"},
)
# llm = GoogleGenAI(
# api_key=os.getenv("GOOGLE_API_KEY"),
# model="gemini-2.0-flash-001",
# )
# llm = Anthropic(
# model="claude-3-7-sonnet-latest",
# api_key=os.getenv("ANTHROPIC_API_KEY")
# )
# Load the tools from the Toolbox server
client = ToolboxClient("http://127.0.0.1:5000")
tools = await client.aload_toolset()
agent = AgentWorkflow.from_tools_or_functions(
tools,
llm=llm,
system_prompt=prompt,
)
ctx = Context(agent)
for query in queries:
response = await agent.arun(user_msg=query, ctx=ctx)
print(f"---- {query} ----")
print(str(response))
asyncio.run(main())
{{< /tab >}}
{{< tab header="ADK" lang="python" >}}
from google.adk.agents import Agent
from google.adk.runners import Runner
from google.adk.sessions import InMemorySessionService
from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService
from google.genai import types # For constructing message content
from toolbox_core import ToolboxSyncClient
import os
os.environ['GOOGLE_GENAI_USE_VERTEXAI'] = 'True'
# TODO(developer): Replace 'YOUR_PROJECT_ID' with your Google Cloud Project ID
os.environ['GOOGLE_CLOUD_PROJECT'] = 'YOUR_PROJECT_ID'
# TODO(developer): Replace 'us-central1' with your Google Cloud Location (region)
os.environ['GOOGLE_CLOUD_LOCATION'] = 'us-central1'
# --- Load Tools from Toolbox ---
# TODO(developer): Ensure the Toolbox server is running at http://127.0.0.1:5000
with ToolboxSyncClient("http://127.0.0.1:5000") as toolbox_client:
# TODO(developer): Replace "my-toolset" with the actual ID of your toolset as configured in your MCP Toolbox server.
agent_toolset = toolbox_client.load_toolset("my-toolset")
# --- Define the Agent's Prompt ---
prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""
# --- Configure the Agent ---
root_agent = Agent(
model='gemini-2.0-flash-001',
name='hotel_agent',
description='A helpful AI assistant that can search and book hotels.',
instruction=prompt,
tools=agent_toolset, # Pass the loaded toolset
)
# --- Initialize Services for Running the Agent ---
session_service = InMemorySessionService()
artifacts_service = InMemoryArtifactService()
# Create a new session for the interaction.
session = session_service.create_session(
state={}, app_name='hotel_agent', user_id='123'
)
runner = Runner(
app_name='hotel_agent',
agent=root_agent,
artifact_service=artifacts_service,
session_service=session_service,
)
# --- Define Queries and Run the Agent ---
queries = [
"Find hotels in Basel with Basel in it's name.",
"Can you book the Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
]
for query in queries:
content = types.Content(role='user', parts=[types.Part(text=query)])
events = runner.run(session_id=session.id,
user_id='123', new_message=content)
responses = (
part.text
for event in events
for part in event.content.parts
if part.text is not None
)
for text in responses:
print(text)
{{< /tab >}}
{{< /tabpane >}}
{{< tabpane text=true persist=header >}}
{{% tab header="Core" lang="en" %}}
To learn more about the Core SDK, check out the [Toolbox Core SDK
documentation.](https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/packages/toolbox-core/README.md)
{{% /tab %}}
{{% tab header="Langchain" lang="en" %}}
To learn more about Agents in LangChain, check out the [LangGraph Agent
documentation.](https://langchain-ai.github.io/langgraph/reference/prebuilt/#langgraph.prebuilt.chat_agent_executor.create_react_agent)
{{% /tab %}}
{{% tab header="LlamaIndex" lang="en" %}}
To learn more about Agents in LlamaIndex, check out the [LlamaIndex
AgentWorkflow
documentation.](https://docs.llamaindex.ai/en/stable/examples/agent/agent_workflow_basic/)
{{% /tab %}}
{{% tab header="ADK" lang="en" %}}
To learn more about Agents in ADK, check out the [ADK
documentation.](https://google.github.io/adk-docs/)
{{% /tab %}}
{{< /tabpane >}}
1. Run your agent, and observe the results:
```sh
python hotel_agent.py
```
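To experiment with a single tool rather than a whole toolset, the Core SDK
also provides a `load_tool` method. The following is a minimal sketch,
assuming the `search-hotels-by-name` tool defined in the `tools.yaml` earlier
accepts a `name` parameter:
```python
import asyncio
from toolbox_core import ToolboxClient

async def main():
    # Connect to the same local Toolbox server started earlier.
    async with ToolboxClient("http://127.0.0.1:5000") as client:
        # load_tool fetches a single tool by name instead of a toolset.
        search_hotels_by_name = await client.load_tool("search-hotels-by-name")
        # The loaded tool is an awaitable callable; arguments are passed as
        # keyword arguments (the "name" parameter here is an assumption based
        # on the sample tools.yaml).
        result = await search_hotels_by_name(name="Basel")
        print(result)

asyncio.run(main())
```
This can be handy for smoke-testing a single tool before wiring it into an agent.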
```
--------------------------------------------------------------------------------
/internal/server/mcp_test.go:
--------------------------------------------------------------------------------
```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package server
import (
"bufio"
"bytes"
"context"
"encoding/json"
"fmt"
"net/http"
"net/http/httptest"
"os"
"reflect"
"strings"
"testing"
"github.com/googleapis/genai-toolbox/internal/log"
"github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc"
"github.com/googleapis/genai-toolbox/internal/telemetry"
"github.com/googleapis/genai-toolbox/internal/tools"
)
const jsonrpcVersion = "2.0"
const protocolVersion20241105 = "2024-11-05"
const protocolVersion20250326 = "2025-03-26"
const protocolVersion20250618 = "2025-06-18"
const serverName = "Toolbox"
var basicInputSchema = map[string]any{
"type": "object",
"properties": map[string]any{},
"required": []any{},
}
var tool2InputSchema = map[string]any{
"type": "object",
"properties": map[string]any{
"param1": map[string]any{"type": "integer", "description": "This is the first parameter."},
"param2": map[string]any{"type": "integer", "description": "This is the second parameter."},
},
"required": []any{"param1", "param2"},
}
var tool3InputSchema = map[string]any{
"type": "object",
"properties": map[string]any{
"my_array": map[string]any{
"type": "array",
"description": "this param is an array of strings",
"items": map[string]any{"type": "string", "description": "string item"},
},
},
"required": []any{"my_array"},
}
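// TestMcpEndpointWithoutInitialized verifies that the MCP endpoint still
// serves ping, tools/list, and tools/call requests when the client skips the
// initialize handshake.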
func TestMcpEndpointWithoutInitialized(t *testing.T) {
mockTools := []MockTool{tool1, tool2, tool3, tool4, tool5}
toolsMap, toolsets := setUpResources(t, mockTools)
r, shutdown := setUpServer(t, "mcp", toolsMap, toolsets)
defer shutdown()
ts := runServer(r, false)
defer ts.Close()
testCases := []struct {
name string
url string
isErr bool
body jsonrpc.JSONRPCRequest
want map[string]any
}{
{
name: "ping",
url: "/",
body: jsonrpc.JSONRPCRequest{
Jsonrpc: jsonrpcVersion,
Id: "ping-test-123",
Request: jsonrpc.Request{
Method: "ping",
},
},
isErr: false,
want: map[string]any{
"jsonrpc": "2.0",
"id": "ping-test-123",
"result": map[string]any{},
},
},
{
name: "tools/list",
url: "/",
body: jsonrpc.JSONRPCRequest{
Jsonrpc: jsonrpcVersion,
Id: "tools-list",
Request: jsonrpc.Request{
Method: "tools/list",
},
},
isErr: false,
want: map[string]any{
"jsonrpc": "2.0",
"id": "tools-list",
"result": map[string]any{
"tools": []any{
map[string]any{
"name": "no_params",
"inputSchema": basicInputSchema,
},
map[string]any{
"name": "some_params",
"inputSchema": tool2InputSchema,
},
map[string]any{
"name": "array_param",
"description": "some description",
"inputSchema": tool3InputSchema,
},
map[string]any{
"name": "unauthorized_tool",
"inputSchema": basicInputSchema,
},
map[string]any{
"name": "require_client_auth_tool",
"inputSchema": basicInputSchema,
},
},
},
},
},
{
name: "missing method",
url: "/",
isErr: true,
body: jsonrpc.JSONRPCRequest{
Jsonrpc: jsonrpcVersion,
Id: "missing-method",
Request: jsonrpc.Request{},
},
want: map[string]any{
"jsonrpc": "2.0",
"id": "missing-method",
"error": map[string]any{
"code": -32601.0,
"message": "method not found",
},
},
},
{
name: "invalid jsonrpc version",
url: "/",
isErr: true,
body: jsonrpc.JSONRPCRequest{
Jsonrpc: "1.0",
Id: "invalid-jsonrpc-version",
Request: jsonrpc.Request{
Method: "foo",
},
},
want: map[string]any{
"jsonrpc": "2.0",
"id": "invalid-jsonrpc-version",
"error": map[string]any{
"code": -32600.0,
"message": "invalid json-rpc version",
},
},
},
{
name: "call tool1 unauthorized tool",
url: "/",
body: jsonrpc.JSONRPCRequest{
Jsonrpc: jsonrpcVersion,
Id: "tools-call-tool1",
Request: jsonrpc.Request{
Method: "tools/call",
},
Params: map[string]any{
"name": "no_params",
},
},
want: map[string]any{
"jsonrpc": "2.0",
"id": "tools-call-tool1",
"result": map[string]any{
"content": []any{
map[string]any{
"type": "text",
"text": `"no_params"`,
},
},
},
},
},
{
name: "call tool4 unauthorized tool",
url: "/",
body: jsonrpc.JSONRPCRequest{
Jsonrpc: jsonrpcVersion,
Id: "tools-call-tool4",
Request: jsonrpc.Request{
Method: "tools/call",
},
Params: map[string]any{
"name": "unauthorized_tool",
},
},
want: map[string]any{
"jsonrpc": "2.0",
"id": "tools-call-tool4",
"error": map[string]any{
"code": -32600.0,
"message": "unauthorized Tool call: Please make sure your specify correct auth headers: unauthorized",
},
},
},
{
name: "call tool5 unauthorized tool",
url: "/",
body: jsonrpc.JSONRPCRequest{
Jsonrpc: jsonrpcVersion,
Id: "tools-call-tool5",
Request: jsonrpc.Request{
Method: "tools/call",
},
Params: map[string]any{
"name": "require_client_auth_tool",
},
},
want: map[string]any{
"jsonrpc": "2.0",
"id": "tools-call-tool5",
"error": map[string]any{
"code": -32600.0,
"message": "missing access token in the 'Authorization' header",
},
},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
reqMarshal, err := json.Marshal(tc.body)
if err != nil {
t.Fatalf("unexpected error during marshaling of body")
}
resp, body, err := runRequest(ts, http.MethodPost, tc.url, bytes.NewBuffer(reqMarshal), nil)
if err != nil {
t.Fatalf("unexpected error during request: %s", err)
}
// Notifications don't expect a response.
if tc.want != nil {
if contentType := resp.Header.Get("Content-type"); contentType != "application/json" {
t.Fatalf("unexpected content-type header: want %s, got %s", "application/json", contentType)
}
var got map[string]any
if err := json.Unmarshal(body, &got); err != nil {
t.Fatalf("unexpected error unmarshalling body: %s", err)
}
if !reflect.DeepEqual(got, tc.want) {
t.Fatalf("unexpected response: got %+v, want %+v", got, tc.want)
}
}
})
}
}
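// runInitializeLifecycle performs the MCP initialization handshake against ts:
// it sends an "initialize" request for the given protocol version, compares
// the response against initializeWant (checking the Mcp-Session-Id header when
// idHeader is set), then sends the "notifications/initialized" notification.
// It returns the session id, if any, so follow-up requests can reuse it.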
func runInitializeLifecycle(t *testing.T, ts *httptest.Server, protocolVersion string, initializeWant map[string]any, idHeader bool) string {
initializeRequestBody := map[string]any{
"jsonrpc": jsonrpcVersion,
"id": "mcp-initialize",
"method": "initialize",
"params": map[string]any{
"protocolVersion": protocolVersion,
},
}
reqMarshal, err := json.Marshal(initializeRequestBody)
if err != nil {
t.Fatalf("unexpected error during marshaling of body")
}
resp, body, err := runRequest(ts, http.MethodPost, "/", bytes.NewBuffer(reqMarshal), nil)
if err != nil {
t.Fatalf("unexpected error during request: %s", err)
}
if contentType := resp.Header.Get("Content-type"); contentType != "application/json" {
t.Fatalf("unexpected content-type header: want %s, got %s", "application/json", contentType)
}
sessionId := resp.Header.Get("Mcp-Session-Id")
if idHeader && sessionId == "" {
t.Fatalf("Mcp-Session-Id header is expected")
}
var got map[string]any
if err := json.Unmarshal(body, &got); err != nil {
t.Fatalf("unexpected error unmarshalling body: %s", err)
}
if !reflect.DeepEqual(got, initializeWant) {
t.Fatalf("unexpected response: got %+v, want %+v", got, initializeWant)
}
header := map[string]string{}
if sessionId != "" {
header["Mcp-Session-Id"] = sessionId
}
initializeNotificationBody := map[string]any{
"jsonrpc": jsonrpcVersion,
"method": "notifications/initialized",
}
notiMarshal, err := json.Marshal(initializeNotificationBody)
if err != nil {
t.Fatalf("unexpected error during marshaling of notifications body")
}
_, _, err = runRequest(ts, http.MethodPost, "/", bytes.NewBuffer(notiMarshal), header)
if err != nil {
t.Fatalf("unexpected error during request: %s", err)
}
return sessionId
}
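// TestMcpEndpoint runs the full MCP lifecycle for each supported protocol
// version: the initialize handshake followed by a battery of requests
// covering ping, tools/list, tools/call, notifications, and error cases.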
func TestMcpEndpoint(t *testing.T) {
mockTools := []MockTool{tool1, tool2, tool3, tool4, tool5}
toolsMap, toolsets := setUpResources(t, mockTools)
r, shutdown := setUpServer(t, "mcp", toolsMap, toolsets)
defer shutdown()
ts := runServer(r, false)
defer ts.Close()
versTestCases := []struct {
name string
protocol string
idHeader bool
initWant map[string]any
}{
{
name: "version 2024-11-05",
protocol: protocolVersion20241105,
idHeader: false,
initWant: map[string]any{
"jsonrpc": "2.0",
"id": "mcp-initialize",
"result": map[string]any{
"protocolVersion": "2024-11-05",
"capabilities": map[string]any{
"tools": map[string]any{"listChanged": false},
},
"serverInfo": map[string]any{"name": serverName, "version": fakeVersionString},
},
},
},
{
name: "version 2025-03-26",
protocol: protocolVersion20250326,
idHeader: true,
initWant: map[string]any{
"jsonrpc": "2.0",
"id": "mcp-initialize",
"result": map[string]any{
"protocolVersion": "2025-03-26",
"capabilities": map[string]any{
"tools": map[string]any{"listChanged": false},
},
"serverInfo": map[string]any{"name": serverName, "version": fakeVersionString},
},
},
},
{
name: "version 2025-06-18",
protocol: protocolVersion20250618,
idHeader: false,
initWant: map[string]any{
"jsonrpc": "2.0",
"id": "mcp-initialize",
"result": map[string]any{
"protocolVersion": "2025-06-18",
"capabilities": map[string]any{
"tools": map[string]any{"listChanged": false},
},
"serverInfo": map[string]any{"name": serverName, "version": fakeVersionString},
},
},
},
}
for _, vtc := range versTestCases {
t.Run(vtc.name, func(t *testing.T) {
sessionId := runInitializeLifecycle(t, ts, vtc.protocol, vtc.initWant, vtc.idHeader)
header := map[string]string{}
if sessionId != "" {
header["Mcp-Session-Id"] = sessionId
}
if vtc.protocol == protocolVersion20250618 {
header["MCP-Protocol-Version"] = vtc.protocol
}
testCases := []struct {
name string
url string
isErr bool
body any
wantStatusCode int
want map[string]any
}{
{
name: "basic notification",
url: "/",
body: jsonrpc.JSONRPCRequest{
Jsonrpc: jsonrpcVersion,
Request: jsonrpc.Request{
Method: "notification",
},
},
wantStatusCode: http.StatusAccepted,
},
{
name: "ping",
url: "/",
body: jsonrpc.JSONRPCRequest{
Jsonrpc: jsonrpcVersion,
Id: "ping-test-123",
Request: jsonrpc.Request{
Method: "ping",
},
},
wantStatusCode: http.StatusOK,
want: map[string]any{
"jsonrpc": "2.0",
"id": "ping-test-123",
"result": map[string]any{},
},
},
{
name: "tools/list",
url: "/",
body: jsonrpc.JSONRPCRequest{
Jsonrpc: jsonrpcVersion,
Id: "tools-list",
Request: jsonrpc.Request{
Method: "tools/list",
},
},
wantStatusCode: http.StatusOK,
want: map[string]any{
"jsonrpc": "2.0",
"id": "tools-list",
"result": map[string]any{
"tools": []any{
map[string]any{
"name": "no_params",
"inputSchema": basicInputSchema,
},
map[string]any{
"name": "some_params",
"inputSchema": tool2InputSchema,
},
map[string]any{
"name": "array_param",
"description": "some description",
"inputSchema": tool3InputSchema,
},
map[string]any{
"name": "unauthorized_tool",
"inputSchema": basicInputSchema,
},
map[string]any{
"name": "require_client_auth_tool",
"inputSchema": basicInputSchema,
},
},
},
},
},
{
name: "tools/list on tool1_only",
url: "/tool1_only",
body: jsonrpc.JSONRPCRequest{
Jsonrpc: jsonrpcVersion,
Id: "tools-list-tool1",
Request: jsonrpc.Request{
Method: "tools/list",
},
},
wantStatusCode: http.StatusOK,
want: map[string]any{
"jsonrpc": "2.0",
"id": "tools-list-tool1",
"result": map[string]any{
"tools": []any{
map[string]any{
"name": "no_params",
"inputSchema": basicInputSchema,
},
},
},
},
},
{
name: "tools/list on invalid tool set",
url: "/foo",
isErr: true,
body: jsonrpc.JSONRPCRequest{
Jsonrpc: jsonrpcVersion,
Id: "tools-list-invalid-toolset",
Request: jsonrpc.Request{
Method: "tools/list",
},
},
wantStatusCode: http.StatusOK,
want: map[string]any{
"jsonrpc": "2.0",
"id": "tools-list-invalid-toolset",
"error": map[string]any{
"code": -32600.0,
"message": "toolset does not exist",
},
},
},
{
name: "missing method",
url: "/",
isErr: true,
body: jsonrpc.JSONRPCRequest{
Jsonrpc: jsonrpcVersion,
Id: "missing-method",
Request: jsonrpc.Request{},
},
wantStatusCode: http.StatusOK,
want: map[string]any{
"jsonrpc": "2.0",
"id": "missing-method",
"error": map[string]any{
"code": -32601.0,
"message": "method not found",
},
},
},
{
name: "invalid method",
url: "/",
isErr: true,
body: jsonrpc.JSONRPCRequest{
Jsonrpc: jsonrpcVersion,
Id: "invalid-method",
Request: jsonrpc.Request{
Method: "foo",
},
},
wantStatusCode: http.StatusOK,
want: map[string]any{
"jsonrpc": "2.0",
"id": "invalid-method",
"error": map[string]any{
"code": -32601.0,
"message": "invalid method foo",
},
},
},
{
name: "invalid jsonrpc version",
url: "/",
isErr: true,
body: jsonrpc.JSONRPCRequest{
Jsonrpc: "1.0",
Id: "invalid-jsonrpc-version",
Request: jsonrpc.Request{
Method: "foo",
},
},
wantStatusCode: http.StatusOK,
want: map[string]any{
"jsonrpc": "2.0",
"id": "invalid-jsonrpc-version",
"error": map[string]any{
"code": -32600.0,
"message": "invalid json-rpc version",
},
},
},
{
name: "batch requests",
url: "/",
isErr: true,
body: []any{
jsonrpc.JSONRPCRequest{
Jsonrpc: "1.0",
Id: "batch-requests1",
Request: jsonrpc.Request{
Method: "foo",
},
},
jsonrpc.JSONRPCRequest{
Jsonrpc: jsonrpcVersion,
Id: "batch-requests2",
Request: jsonrpc.Request{
Method: "tools/list",
},
},
},
wantStatusCode: http.StatusOK,
want: map[string]any{
"jsonrpc": "2.0",
"error": map[string]any{
"code": -32600.0,
"message": "not supporting batch requests",
},
},
},
{
name: "call tool1 unauthorized tool",
url: "/",
body: jsonrpc.JSONRPCRequest{
Jsonrpc: jsonrpcVersion,
Id: "tools-call-tool1",
Request: jsonrpc.Request{
Method: "tools/call",
},
Params: map[string]any{
"name": "no_params",
},
},
wantStatusCode: http.StatusOK,
want: map[string]any{
"jsonrpc": "2.0",
"id": "tools-call-tool1",
"result": map[string]any{
"content": []any{
map[string]any{
"type": "text",
"text": `"no_params"`,
},
},
},
},
},
{
name: "call tool4 unauthorized tool",
url: "/",
body: jsonrpc.JSONRPCRequest{
Jsonrpc: jsonrpcVersion,
Id: "tools-call-tool4",
Request: jsonrpc.Request{
Method: "tools/call",
},
Params: map[string]any{
"name": "unauthorized_tool",
},
},
wantStatusCode: http.StatusUnauthorized,
want: map[string]any{
"jsonrpc": "2.0",
"id": "tools-call-tool4",
"error": map[string]any{
"code": -32600.0,
"message": "unauthorized Tool call: Please make sure your specify correct auth headers: unauthorized",
},
},
},
{
name: "call tool5 unauthorized tool",
url: "/",
body: jsonrpc.JSONRPCRequest{
Jsonrpc: jsonrpcVersion,
Id: "tools-call-tool5",
Request: jsonrpc.Request{
Method: "tools/call",
},
Params: map[string]any{
"name": "require_client_auth_tool",
},
},
wantStatusCode: http.StatusUnauthorized,
want: map[string]any{
"jsonrpc": "2.0",
"id": "tools-call-tool5",
"error": map[string]any{
"code": -32600.0,
"message": "missing access token in the 'Authorization' header",
},
},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
reqMarshal, err := json.Marshal(tc.body)
if err != nil {
t.Fatalf("unexpected error during marshaling of body")
}
if vtc.protocol != protocolVersion20241105 && len(header) == 0 {
t.Fatalf("header is missing")
}
resp, body, err := runRequest(ts, http.MethodPost, tc.url, bytes.NewBuffer(reqMarshal), header)
if err != nil {
t.Fatalf("unexpected error during request: %s", err)
}
if resp.StatusCode != tc.wantStatusCode {
t.Errorf("StatusCode mismatch: got %d, want %d", resp.StatusCode, tc.wantStatusCode)
}
// Notifications don't expect a response.
if tc.want != nil {
if contentType := resp.Header.Get("Content-type"); contentType != "application/json" {
t.Fatalf("unexpected content-type header: want %s, got %s", "application/json", contentType)
}
var got map[string]any
if err := json.Unmarshal(body, &got); err != nil {
t.Fatalf("unexpected error unmarshalling body: %s", err)
}
// On a decode failure the server generates a random UUID for the id, so
// copy it from the response before comparing.
if tc.want["id"] == nil {
tc.want["id"] = got["id"]
}
if !reflect.DeepEqual(got, tc.want) {
t.Fatalf("unexpected response: got %+v, want %+v", got, tc.want)
}
}
})
}
})
}
}
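// TestInvalidProtocolVersionHeader verifies that an unrecognized
// MCP-Protocol-Version header is rejected with a 400 Bad Request.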
func TestInvalidProtocolVersionHeader(t *testing.T) {
toolsMap, toolsets := map[string]tools.Tool{}, map[string]tools.Toolset{}
r, shutdown := setUpServer(t, "mcp", toolsMap, toolsets)
defer shutdown()
ts := runServer(r, false)
defer ts.Close()
header := map[string]string{}
header["MCP-Protocol-Version"] = "foo"
resp, body, err := runRequest(ts, http.MethodPost, "/", nil, header)
// Check the transport error before dereferencing resp.
if err != nil {
t.Fatalf("unexpected error during request: %s", err)
}
if resp.Status != "400 Bad Request" {
t.Fatalf("unexpected status: %s", resp.Status)
}
var got map[string]any
if err := json.Unmarshal(body, &got); err != nil {
t.Fatalf("unexpected error unmarshalling body: %s", err)
}
want := "invalid protocol version: foo"
if got["error"] != want {
t.Fatalf("unexpected error message: got %s, want %s", got["error"], want)
}
}
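// TestDeleteEndpoint verifies that a DELETE request to the MCP endpoint
// returns 200 OK.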
func TestDeleteEndpoint(t *testing.T) {
toolsMap, toolsets := map[string]tools.Tool{}, map[string]tools.Toolset{}
r, shutdown := setUpServer(t, "mcp", toolsMap, toolsets)
defer shutdown()
ts := runServer(r, false)
defer ts.Close()
resp, _, err := runRequest(ts, http.MethodDelete, "/", nil, nil)
// Check the transport error before dereferencing resp.
if err != nil {
t.Fatalf("unexpected error during request: %s", err)
}
if resp.Status != "200 OK" {
t.Fatalf("unexpected status: %s", resp.Status)
}
}
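// TestGetEndpoint verifies that a GET request is rejected with 405 Method Not
// Allowed, since Toolbox does not support streaming over the streamable HTTP
// transport.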
func TestGetEndpoint(t *testing.T) {
toolsMap, toolsets := map[string]tools.Tool{}, map[string]tools.Toolset{}
r, shutdown := setUpServer(t, "mcp", toolsMap, toolsets)
defer shutdown()
ts := runServer(r, false)
defer ts.Close()
resp, body, err := runRequest(ts, http.MethodGet, "/", nil, nil)
// Check the transport error before dereferencing resp.
if err != nil {
t.Fatalf("unexpected error during request: %s", err)
}
if resp.Status != "405 Method Not Allowed" {
t.Fatalf("unexpected status: %s", resp.Status)
}
var got map[string]any
if err := json.Unmarshal(body, &got); err != nil {
t.Fatalf("unexpected error unmarshalling body: %s", err)
}
want := "toolbox does not support streaming in streamable HTTP transport"
if got["error"] != want {
t.Fatalf("unexpected error message: %s", got["error"])
}
}
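// TestSseEndpoint verifies the SSE handshake over both HTTP and TLS: the
// response headers and the initial "endpoint" event, including the scheme
// override applied via the X-Forwarded-Proto header.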
func TestSseEndpoint(t *testing.T) {
r, shutdown := setUpServer(t, "mcp", nil, nil)
defer shutdown()
ts := runServer(r, false)
defer ts.Close()
if !strings.Contains(ts.URL, "http://127.0.0.1") {
t.Fatalf("unexpected url, got %s", ts.URL)
}
tsPort := strings.TrimPrefix(ts.URL, "http://127.0.0.1:")
tls := runServer(r, true)
defer tls.Close()
if !strings.Contains(tls.URL, "https://127.0.0.1") {
t.Fatalf("unexpected url, got %s", tls.URL)
}
tlsPort := strings.TrimPrefix(tls.URL, "https://127.0.0.1:")
contentType := "text/event-stream"
cacheControl := "no-cache"
connection := "keep-alive"
accessControlAllowOrigin := "*"
testCases := []struct {
name string
server *httptest.Server
path string
proto string
event string
}{
{
name: "basic",
server: ts,
path: "/sse",
event: fmt.Sprintf("event: endpoint\ndata: %s/mcp?sessionId=", ts.URL),
},
{
name: "toolset1",
server: ts,
path: "/tool1_only/sse",
event: fmt.Sprintf("event: endpoint\ndata: http://127.0.0.1:%s/mcp/tool1_only?sessionId=", tsPort),
},
{
name: "basic with http proto",
server: ts,
path: "/sse",
proto: "http",
event: fmt.Sprintf("event: endpoint\ndata: http://127.0.0.1:%s/mcp?sessionId=", tsPort),
},
{
name: "basic tls with https proto",
server: ts,
path: "/sse",
proto: "https",
event: fmt.Sprintf("event: endpoint\ndata: https://127.0.0.1:%s/mcp?sessionId=", tsPort),
},
{
name: "basic tls",
server: tls,
path: "/sse",
event: fmt.Sprintf("event: endpoint\ndata: https://127.0.0.1:%s/mcp?sessionId=", tlsPort),
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
resp, err := runSseRequest(tc.server, tc.path, tc.proto)
if err != nil {
t.Fatalf("unable to run sse request: %s", err)
}
defer resp.Body.Close()
if gotContentType := resp.Header.Get("Content-type"); gotContentType != contentType {
t.Fatalf("unexpected content-type header: want %s, got %s", contentType, gotContentType)
}
if gotCacheControl := resp.Header.Get("Cache-Control"); gotCacheControl != cacheControl {
t.Fatalf("unexpected cache-control header: want %s, got %s", cacheControl, gotCacheControl)
}
if gotConnection := resp.Header.Get("Connection"); gotConnection != connection {
t.Fatalf("unexpected connection header: want %s, got %s", connection, gotConnection)
}
if gotAccessControlAllowOrigin := resp.Header.Get("Access-Control-Allow-Origin"); gotAccessControlAllowOrigin != accessControlAllowOrigin {
t.Fatalf("unexpected access-control-allow-origin header: want %s, got %s", accessControlAllowOrigin, gotAccessControlAllowOrigin)
}
buffer := make([]byte, 1024)
n, err := resp.Body.Read(buffer)
if err != nil {
t.Fatalf("unable to read response: %s", err)
}
endpointEvent := string(buffer[:n])
if !strings.Contains(endpointEvent, tc.event) {
t.Fatalf("unexpected event: got %s, want to contain %s", endpointEvent, tc.event)
}
})
}
}
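// runSseRequest opens an SSE connection to the given path, optionally setting
// the X-Forwarded-Proto header. The caller is responsible for closing the
// response body.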
func runSseRequest(ts *httptest.Server, path string, proto string) (*http.Response, error) {
req, err := http.NewRequest(http.MethodGet, ts.URL+path, nil)
if err != nil {
return nil, fmt.Errorf("unable to create request: %w", err)
}
if proto != "" {
req.Header.Set("X-Forwarded-Proto", proto)
}
resp, err := ts.Client().Do(req)
if err != nil {
return nil, fmt.Errorf("unable to send request: %w", err)
}
return resp, nil
}
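// TestStdioSession exercises the stdio transport helpers: readLine must return
// a complete newline-terminated line, and write must JSON-encode the payload
// followed by a newline.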
func TestStdioSession(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
mockTools := []MockTool{tool1, tool2, tool3}
toolsMap, toolsets := setUpResources(t, mockTools)
pr, pw, err := os.Pipe()
if err != nil {
t.Fatalf("error with Pipe: %s", err)
}
testLogger, err := log.NewStdLogger(pw, os.Stderr, "warn")
if err != nil {
t.Fatalf("unable to initialize logger: %s", err)
}
otelShutdown, err := telemetry.SetupOTel(ctx, fakeVersionString, "", false, "toolbox")
if err != nil {
t.Fatalf("unable to setup otel: %s", err)
}
defer func() {
err := otelShutdown(ctx)
if err != nil {
t.Fatalf("error shutting down OpenTelemetry: %s", err)
}
}()
instrumentation, err := telemetry.CreateTelemetryInstrumentation(fakeVersionString)
if err != nil {
t.Fatalf("unable to create custom metrics: %s", err)
}
sseManager := newSseManager(ctx)
resourceManager := NewResourceManager(nil, nil, toolsMap, toolsets)
server := &Server{
version: fakeVersionString,
logger: testLogger,
instrumentation: instrumentation,
sseManager: sseManager,
ResourceMgr: resourceManager,
}
in := bufio.NewReader(pr)
stdioSession := NewStdioSession(server, in, pw)
// test stdioSession.readLine()
input := "test readLine function\n"
_, err = fmt.Fprintf(pw, "%s", input)
if err != nil {
t.Fatalf("error writing into pipe w: %s", err)
}
line, err := stdioSession.readLine(ctx)
if err != nil {
t.Fatalf("error with stdioSession.readLine: %s", err)
}
if line != input {
t.Fatalf("unexpected line: got %s, want %s", line, input)
}
// test stdioSession.write()
write := "test write function"
err = stdioSession.write(ctx, write)
if err != nil {
t.Fatalf("error with stdioSession.write: %s", err)
}
read, err := in.ReadString('\n')
if err != nil {
t.Fatalf("error reading: %s", err)
}
want := fmt.Sprintf(`"%s"`, write) + "\n"
if read != want {
t.Fatalf("unexpected read: got %s, want %s", read, want)
}
}
```