This is page 28 of 35. Use http://codebase.md/googleapis/genai-toolbox?lines=false&page={x} to view the full context.
# Directory Structure
```
├── .ci
│ ├── continuous.release.cloudbuild.yaml
│ ├── generate_release_table.sh
│ ├── integration.cloudbuild.yaml
│ ├── quickstart_test
│ │ ├── go.integration.cloudbuild.yaml
│ │ ├── js.integration.cloudbuild.yaml
│ │ ├── py.integration.cloudbuild.yaml
│ │ ├── run_go_tests.sh
│ │ ├── run_js_tests.sh
│ │ ├── run_py_tests.sh
│ │ └── setup_hotels_sample.sql
│ ├── test_with_coverage.sh
│ └── versioned.release.cloudbuild.yaml
├── .github
│ ├── auto-label.yaml
│ ├── blunderbuss.yml
│ ├── CODEOWNERS
│ ├── header-checker-lint.yml
│ ├── ISSUE_TEMPLATE
│ │ ├── bug_report.yml
│ │ ├── config.yml
│ │ ├── feature_request.yml
│ │ └── question.yml
│ ├── label-sync.yml
│ ├── labels.yaml
│ ├── PULL_REQUEST_TEMPLATE.md
│ ├── release-please.yml
│ ├── renovate.json5
│ ├── sync-repo-settings.yaml
│ └── workflows
│ ├── cloud_build_failure_reporter.yml
│ ├── deploy_dev_docs.yaml
│ ├── deploy_previous_version_docs.yaml
│ ├── deploy_versioned_docs.yaml
│ ├── docs_deploy.yaml
│ ├── docs_preview_clean.yaml
│ ├── docs_preview_deploy.yaml
│ ├── lint.yaml
│ ├── schedule_reporter.yml
│ ├── sync-labels.yaml
│ └── tests.yaml
├── .gitignore
├── .gitmodules
├── .golangci.yaml
├── .hugo
│ ├── archetypes
│ │ └── default.md
│ ├── assets
│ │ ├── icons
│ │ │ └── logo.svg
│ │ └── scss
│ │ ├── _styles_project.scss
│ │ └── _variables_project.scss
│ ├── go.mod
│ ├── go.sum
│ ├── hugo.toml
│ ├── layouts
│ │ ├── _default
│ │ │ └── home.releases.releases
│ │ ├── index.llms-full.txt
│ │ ├── index.llms.txt
│ │ ├── partials
│ │ │ ├── hooks
│ │ │ │ └── head-end.html
│ │ │ ├── navbar-version-selector.html
│ │ │ ├── page-meta-links.html
│ │ │ └── td
│ │ │ └── render-heading.html
│ │ ├── robot.txt
│ │ └── shortcodes
│ │ ├── include.html
│ │ ├── ipynb.html
│ │ └── regionInclude.html
│ ├── package-lock.json
│ ├── package.json
│ └── static
│ ├── favicons
│ │ ├── android-chrome-192x192.png
│ │ ├── android-chrome-512x512.png
│ │ ├── apple-touch-icon.png
│ │ ├── favicon-16x16.png
│ │ ├── favicon-32x32.png
│ │ └── favicon.ico
│ └── js
│ └── w3.js
├── CHANGELOG.md
├── cmd
│ ├── options_test.go
│ ├── options.go
│ ├── root_test.go
│ ├── root.go
│ └── version.txt
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── DEVELOPER.md
├── Dockerfile
├── docs
│ └── en
│ ├── _index.md
│ ├── about
│ │ ├── _index.md
│ │ └── faq.md
│ ├── concepts
│ │ ├── _index.md
│ │ └── telemetry
│ │ ├── index.md
│ │ ├── telemetry_flow.png
│ │ └── telemetry_traces.png
│ ├── getting-started
│ │ ├── _index.md
│ │ ├── colab_quickstart.ipynb
│ │ ├── configure.md
│ │ ├── introduction
│ │ │ ├── _index.md
│ │ │ └── architecture.png
│ │ ├── local_quickstart_go.md
│ │ ├── local_quickstart_js.md
│ │ ├── local_quickstart.md
│ │ ├── mcp_quickstart
│ │ │ ├── _index.md
│ │ │ ├── inspector_tools.png
│ │ │ └── inspector.png
│ │ └── quickstart
│ │ ├── go
│ │ │ ├── genAI
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── genkit
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── langchain
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── openAI
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ └── quickstart_test.go
│ │ ├── golden.txt
│ │ ├── js
│ │ │ ├── genAI
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── genkit
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── langchain
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── llamaindex
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ └── quickstart.test.js
│ │ ├── python
│ │ │ ├── __init__.py
│ │ │ ├── adk
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── core
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── langchain
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── llamaindex
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ └── quickstart_test.py
│ │ └── shared
│ │ ├── cloud_setup.md
│ │ ├── configure_toolbox.md
│ │ └── database_setup.md
│ ├── how-to
│ │ ├── _index.md
│ │ ├── connect_via_geminicli.md
│ │ ├── connect_via_mcp.md
│ │ ├── connect-ide
│ │ │ ├── _index.md
│ │ │ ├── alloydb_pg_admin_mcp.md
│ │ │ ├── alloydb_pg_mcp.md
│ │ │ ├── bigquery_mcp.md
│ │ │ ├── cloud_sql_mssql_admin_mcp.md
│ │ │ ├── cloud_sql_mssql_mcp.md
│ │ │ ├── cloud_sql_mysql_admin_mcp.md
│ │ │ ├── cloud_sql_mysql_mcp.md
│ │ │ ├── cloud_sql_pg_admin_mcp.md
│ │ │ ├── cloud_sql_pg_mcp.md
│ │ │ ├── firestore_mcp.md
│ │ │ ├── looker_mcp.md
│ │ │ ├── mssql_mcp.md
│ │ │ ├── mysql_mcp.md
│ │ │ ├── neo4j_mcp.md
│ │ │ ├── postgres_mcp.md
│ │ │ ├── spanner_mcp.md
│ │ │ └── sqlite_mcp.md
│ │ ├── deploy_docker.md
│ │ ├── deploy_gke.md
│ │ ├── deploy_toolbox.md
│ │ ├── export_telemetry.md
│ │ └── toolbox-ui
│ │ ├── edit-headers.gif
│ │ ├── edit-headers.png
│ │ ├── index.md
│ │ ├── optional-param-checked.png
│ │ ├── optional-param-unchecked.png
│ │ ├── run-tool.gif
│ │ ├── tools.png
│ │ └── toolsets.png
│ ├── reference
│ │ ├── _index.md
│ │ ├── cli.md
│ │ └── prebuilt-tools.md
│ ├── resources
│ │ ├── _index.md
│ │ ├── authServices
│ │ │ ├── _index.md
│ │ │ └── google.md
│ │ ├── sources
│ │ │ ├── _index.md
│ │ │ ├── alloydb-admin.md
│ │ │ ├── alloydb-pg.md
│ │ │ ├── bigquery.md
│ │ │ ├── bigtable.md
│ │ │ ├── cassandra.md
│ │ │ ├── clickhouse.md
│ │ │ ├── cloud-monitoring.md
│ │ │ ├── cloud-sql-admin.md
│ │ │ ├── cloud-sql-mssql.md
│ │ │ ├── cloud-sql-mysql.md
│ │ │ ├── cloud-sql-pg.md
│ │ │ ├── couchbase.md
│ │ │ ├── dataplex.md
│ │ │ ├── dgraph.md
│ │ │ ├── firebird.md
│ │ │ ├── firestore.md
│ │ │ ├── http.md
│ │ │ ├── looker.md
│ │ │ ├── mongodb.md
│ │ │ ├── mssql.md
│ │ │ ├── mysql.md
│ │ │ ├── neo4j.md
│ │ │ ├── oceanbase.md
│ │ │ ├── oracle.md
│ │ │ ├── postgres.md
│ │ │ ├── redis.md
│ │ │ ├── serverless-spark.md
│ │ │ ├── spanner.md
│ │ │ ├── sqlite.md
│ │ │ ├── tidb.md
│ │ │ ├── trino.md
│ │ │ ├── valkey.md
│ │ │ └── yugabytedb.md
│ │ └── tools
│ │ ├── _index.md
│ │ ├── alloydb
│ │ │ ├── _index.md
│ │ │ ├── alloydb-create-cluster.md
│ │ │ ├── alloydb-create-instance.md
│ │ │ ├── alloydb-create-user.md
│ │ │ ├── alloydb-get-cluster.md
│ │ │ ├── alloydb-get-instance.md
│ │ │ ├── alloydb-get-user.md
│ │ │ ├── alloydb-list-clusters.md
│ │ │ ├── alloydb-list-instances.md
│ │ │ ├── alloydb-list-users.md
│ │ │ └── alloydb-wait-for-operation.md
│ │ ├── alloydbainl
│ │ │ ├── _index.md
│ │ │ └── alloydb-ai-nl.md
│ │ ├── bigquery
│ │ │ ├── _index.md
│ │ │ ├── bigquery-analyze-contribution.md
│ │ │ ├── bigquery-conversational-analytics.md
│ │ │ ├── bigquery-execute-sql.md
│ │ │ ├── bigquery-forecast.md
│ │ │ ├── bigquery-get-dataset-info.md
│ │ │ ├── bigquery-get-table-info.md
│ │ │ ├── bigquery-list-dataset-ids.md
│ │ │ ├── bigquery-list-table-ids.md
│ │ │ ├── bigquery-search-catalog.md
│ │ │ └── bigquery-sql.md
│ │ ├── bigtable
│ │ │ ├── _index.md
│ │ │ └── bigtable-sql.md
│ │ ├── cassandra
│ │ │ ├── _index.md
│ │ │ └── cassandra-cql.md
│ │ ├── clickhouse
│ │ │ ├── _index.md
│ │ │ ├── clickhouse-execute-sql.md
│ │ │ ├── clickhouse-list-databases.md
│ │ │ ├── clickhouse-list-tables.md
│ │ │ └── clickhouse-sql.md
│ │ ├── cloudmonitoring
│ │ │ ├── _index.md
│ │ │ └── cloud-monitoring-query-prometheus.md
│ │ ├── cloudsql
│ │ │ ├── _index.md
│ │ │ ├── cloudsqlcreatedatabase.md
│ │ │ ├── cloudsqlcreateusers.md
│ │ │ ├── cloudsqlgetinstances.md
│ │ │ ├── cloudsqllistdatabases.md
│ │ │ ├── cloudsqllistinstances.md
│ │ │ ├── cloudsqlmssqlcreateinstance.md
│ │ │ ├── cloudsqlmysqlcreateinstance.md
│ │ │ ├── cloudsqlpgcreateinstances.md
│ │ │ └── cloudsqlwaitforoperation.md
│ │ ├── couchbase
│ │ │ ├── _index.md
│ │ │ └── couchbase-sql.md
│ │ ├── dataform
│ │ │ ├── _index.md
│ │ │ └── dataform-compile-local.md
│ │ ├── dataplex
│ │ │ ├── _index.md
│ │ │ ├── dataplex-lookup-entry.md
│ │ │ ├── dataplex-search-aspect-types.md
│ │ │ └── dataplex-search-entries.md
│ │ ├── dgraph
│ │ │ ├── _index.md
│ │ │ └── dgraph-dql.md
│ │ ├── firebird
│ │ │ ├── _index.md
│ │ │ ├── firebird-execute-sql.md
│ │ │ └── firebird-sql.md
│ │ ├── firestore
│ │ │ ├── _index.md
│ │ │ ├── firestore-add-documents.md
│ │ │ ├── firestore-delete-documents.md
│ │ │ ├── firestore-get-documents.md
│ │ │ ├── firestore-get-rules.md
│ │ │ ├── firestore-list-collections.md
│ │ │ ├── firestore-query-collection.md
│ │ │ ├── firestore-query.md
│ │ │ ├── firestore-update-document.md
│ │ │ └── firestore-validate-rules.md
│ │ ├── http
│ │ │ ├── _index.md
│ │ │ └── http.md
│ │ ├── looker
│ │ │ ├── _index.md
│ │ │ ├── looker-add-dashboard-element.md
│ │ │ ├── looker-conversational-analytics.md
│ │ │ ├── looker-create-project-file.md
│ │ │ ├── looker-delete-project-file.md
│ │ │ ├── looker-dev-mode.md
│ │ │ ├── looker-get-dashboards.md
│ │ │ ├── looker-get-dimensions.md
│ │ │ ├── looker-get-explores.md
│ │ │ ├── looker-get-filters.md
│ │ │ ├── looker-get-looks.md
│ │ │ ├── looker-get-measures.md
│ │ │ ├── looker-get-models.md
│ │ │ ├── looker-get-parameters.md
│ │ │ ├── looker-get-project-file.md
│ │ │ ├── looker-get-project-files.md
│ │ │ ├── looker-get-projects.md
│ │ │ ├── looker-health-analyze.md
│ │ │ ├── looker-health-pulse.md
│ │ │ ├── looker-health-vacuum.md
│ │ │ ├── looker-make-dashboard.md
│ │ │ ├── looker-make-look.md
│ │ │ ├── looker-query-sql.md
│ │ │ ├── looker-query-url.md
│ │ │ ├── looker-query.md
│ │ │ ├── looker-run-look.md
│ │ │ └── looker-update-project-file.md
│ │ ├── mongodb
│ │ │ ├── _index.md
│ │ │ ├── mongodb-aggregate.md
│ │ │ ├── mongodb-delete-many.md
│ │ │ ├── mongodb-delete-one.md
│ │ │ ├── mongodb-find-one.md
│ │ │ ├── mongodb-find.md
│ │ │ ├── mongodb-insert-many.md
│ │ │ ├── mongodb-insert-one.md
│ │ │ ├── mongodb-update-many.md
│ │ │ └── mongodb-update-one.md
│ │ ├── mssql
│ │ │ ├── _index.md
│ │ │ ├── mssql-execute-sql.md
│ │ │ ├── mssql-list-tables.md
│ │ │ └── mssql-sql.md
│ │ ├── mysql
│ │ │ ├── _index.md
│ │ │ ├── mysql-execute-sql.md
│ │ │ ├── mysql-list-active-queries.md
│ │ │ ├── mysql-list-table-fragmentation.md
│ │ │ ├── mysql-list-tables-missing-unique-indexes.md
│ │ │ ├── mysql-list-tables.md
│ │ │ └── mysql-sql.md
│ │ ├── neo4j
│ │ │ ├── _index.md
│ │ │ ├── neo4j-cypher.md
│ │ │ ├── neo4j-execute-cypher.md
│ │ │ └── neo4j-schema.md
│ │ ├── oceanbase
│ │ │ ├── _index.md
│ │ │ ├── oceanbase-execute-sql.md
│ │ │ └── oceanbase-sql.md
│ │ ├── oracle
│ │ │ ├── _index.md
│ │ │ ├── oracle-execute-sql.md
│ │ │ └── oracle-sql.md
│ │ ├── postgres
│ │ │ ├── _index.md
│ │ │ ├── postgres-execute-sql.md
│ │ │ ├── postgres-list-active-queries.md
│ │ │ ├── postgres-list-available-extensions.md
│ │ │ ├── postgres-list-installed-extensions.md
│ │ │ ├── postgres-list-tables.md
│ │ │ └── postgres-sql.md
│ │ ├── redis
│ │ │ ├── _index.md
│ │ │ └── redis.md
│ │ ├── serverless-spark
│ │ │ ├── _index.md
│ │ │ └── serverless-spark-list-batches.md
│ │ ├── spanner
│ │ │ ├── _index.md
│ │ │ ├── spanner-execute-sql.md
│ │ │ ├── spanner-list-tables.md
│ │ │ └── spanner-sql.md
│ │ ├── sqlite
│ │ │ ├── _index.md
│ │ │ ├── sqlite-execute-sql.md
│ │ │ └── sqlite-sql.md
│ │ ├── tidb
│ │ │ ├── _index.md
│ │ │ ├── tidb-execute-sql.md
│ │ │ └── tidb-sql.md
│ │ ├── trino
│ │ │ ├── _index.md
│ │ │ ├── trino-execute-sql.md
│ │ │ └── trino-sql.md
│ │ ├── utility
│ │ │ ├── _index.md
│ │ │ └── wait.md
│ │ ├── valkey
│ │ │ ├── _index.md
│ │ │ └── valkey.md
│ │ └── yuagbytedb
│ │ ├── _index.md
│ │ └── yugabytedb-sql.md
│ ├── samples
│ │ ├── _index.md
│ │ ├── alloydb
│ │ │ ├── _index.md
│ │ │ ├── ai-nl
│ │ │ │ ├── alloydb_ai_nl.ipynb
│ │ │ │ └── index.md
│ │ │ └── mcp_quickstart.md
│ │ ├── bigquery
│ │ │ ├── _index.md
│ │ │ ├── colab_quickstart_bigquery.ipynb
│ │ │ ├── local_quickstart.md
│ │ │ └── mcp_quickstart
│ │ │ ├── _index.md
│ │ │ ├── inspector_tools.png
│ │ │ └── inspector.png
│ │ └── looker
│ │ ├── _index.md
│ │ ├── looker_gemini_oauth
│ │ │ ├── _index.md
│ │ │ ├── authenticated.png
│ │ │ ├── authorize.png
│ │ │ └── registration.png
│ │ ├── looker_gemini.md
│ │ └── looker_mcp_inspector
│ │ ├── _index.md
│ │ ├── inspector_tools.png
│ │ └── inspector.png
│ └── sdks
│ ├── _index.md
│ ├── go-sdk.md
│ ├── js-sdk.md
│ └── python-sdk.md
├── gemini-extension.json
├── go.mod
├── go.sum
├── internal
│ ├── auth
│ │ ├── auth.go
│ │ └── google
│ │ └── google.go
│ ├── log
│ │ ├── handler.go
│ │ ├── log_test.go
│ │ ├── log.go
│ │ └── logger.go
│ ├── prebuiltconfigs
│ │ ├── prebuiltconfigs_test.go
│ │ ├── prebuiltconfigs.go
│ │ └── tools
│ │ ├── alloydb-postgres-admin.yaml
│ │ ├── alloydb-postgres-observability.yaml
│ │ ├── alloydb-postgres.yaml
│ │ ├── bigquery.yaml
│ │ ├── clickhouse.yaml
│ │ ├── cloud-sql-mssql-admin.yaml
│ │ ├── cloud-sql-mssql-observability.yaml
│ │ ├── cloud-sql-mssql.yaml
│ │ ├── cloud-sql-mysql-admin.yaml
│ │ ├── cloud-sql-mysql-observability.yaml
│ │ ├── cloud-sql-mysql.yaml
│ │ ├── cloud-sql-postgres-admin.yaml
│ │ ├── cloud-sql-postgres-observability.yaml
│ │ ├── cloud-sql-postgres.yaml
│ │ ├── dataplex.yaml
│ │ ├── firestore.yaml
│ │ ├── looker-conversational-analytics.yaml
│ │ ├── looker.yaml
│ │ ├── mssql.yaml
│ │ ├── mysql.yaml
│ │ ├── neo4j.yaml
│ │ ├── oceanbase.yaml
│ │ ├── postgres.yaml
│ │ ├── serverless-spark.yaml
│ │ ├── spanner-postgres.yaml
│ │ ├── spanner.yaml
│ │ └── sqlite.yaml
│ ├── server
│ │ ├── api_test.go
│ │ ├── api.go
│ │ ├── common_test.go
│ │ ├── config.go
│ │ ├── mcp
│ │ │ ├── jsonrpc
│ │ │ │ ├── jsonrpc_test.go
│ │ │ │ └── jsonrpc.go
│ │ │ ├── mcp.go
│ │ │ ├── util
│ │ │ │ └── lifecycle.go
│ │ │ ├── v20241105
│ │ │ │ ├── method.go
│ │ │ │ └── types.go
│ │ │ ├── v20250326
│ │ │ │ ├── method.go
│ │ │ │ └── types.go
│ │ │ └── v20250618
│ │ │ ├── method.go
│ │ │ └── types.go
│ │ ├── mcp_test.go
│ │ ├── mcp.go
│ │ ├── server_test.go
│ │ ├── server.go
│ │ ├── static
│ │ │ ├── assets
│ │ │ │ └── mcptoolboxlogo.png
│ │ │ ├── css
│ │ │ │ └── style.css
│ │ │ ├── index.html
│ │ │ ├── js
│ │ │ │ ├── auth.js
│ │ │ │ ├── loadTools.js
│ │ │ │ ├── mainContent.js
│ │ │ │ ├── navbar.js
│ │ │ │ ├── runTool.js
│ │ │ │ ├── toolDisplay.js
│ │ │ │ ├── tools.js
│ │ │ │ └── toolsets.js
│ │ │ ├── tools.html
│ │ │ └── toolsets.html
│ │ ├── web_test.go
│ │ └── web.go
│ ├── sources
│ │ ├── alloydbadmin
│ │ │ ├── alloydbadmin_test.go
│ │ │ └── alloydbadmin.go
│ │ ├── alloydbpg
│ │ │ ├── alloydb_pg_test.go
│ │ │ └── alloydb_pg.go
│ │ ├── bigquery
│ │ │ ├── bigquery_test.go
│ │ │ └── bigquery.go
│ │ ├── bigtable
│ │ │ ├── bigtable_test.go
│ │ │ └── bigtable.go
│ │ ├── cassandra
│ │ │ ├── cassandra_test.go
│ │ │ └── cassandra.go
│ │ ├── clickhouse
│ │ │ ├── clickhouse_test.go
│ │ │ └── clickhouse.go
│ │ ├── cloudmonitoring
│ │ │ ├── cloud_monitoring_test.go
│ │ │ └── cloud_monitoring.go
│ │ ├── cloudsqladmin
│ │ │ ├── cloud_sql_admin_test.go
│ │ │ └── cloud_sql_admin.go
│ │ ├── cloudsqlmssql
│ │ │ ├── cloud_sql_mssql_test.go
│ │ │ └── cloud_sql_mssql.go
│ │ ├── cloudsqlmysql
│ │ │ ├── cloud_sql_mysql_test.go
│ │ │ └── cloud_sql_mysql.go
│ │ ├── cloudsqlpg
│ │ │ ├── cloud_sql_pg_test.go
│ │ │ └── cloud_sql_pg.go
│ │ ├── couchbase
│ │ │ ├── couchbase_test.go
│ │ │ └── couchbase.go
│ │ ├── dataplex
│ │ │ ├── dataplex_test.go
│ │ │ └── dataplex.go
│ │ ├── dgraph
│ │ │ ├── dgraph_test.go
│ │ │ └── dgraph.go
│ │ ├── dialect.go
│ │ ├── firebird
│ │ │ ├── firebird_test.go
│ │ │ └── firebird.go
│ │ ├── firestore
│ │ │ ├── firestore_test.go
│ │ │ └── firestore.go
│ │ ├── http
│ │ │ ├── http_test.go
│ │ │ └── http.go
│ │ ├── ip_type.go
│ │ ├── looker
│ │ │ ├── looker_test.go
│ │ │ └── looker.go
│ │ ├── mongodb
│ │ │ ├── mongodb_test.go
│ │ │ └── mongodb.go
│ │ ├── mssql
│ │ │ ├── mssql_test.go
│ │ │ └── mssql.go
│ │ ├── mysql
│ │ │ ├── mysql_test.go
│ │ │ └── mysql.go
│ │ ├── neo4j
│ │ │ ├── neo4j_test.go
│ │ │ └── neo4j.go
│ │ ├── oceanbase
│ │ │ ├── oceanbase_test.go
│ │ │ └── oceanbase.go
│ │ ├── oracle
│ │ │ └── oracle.go
│ │ ├── postgres
│ │ │ ├── postgres_test.go
│ │ │ └── postgres.go
│ │ ├── redis
│ │ │ ├── redis_test.go
│ │ │ └── redis.go
│ │ ├── serverlessspark
│ │ │ ├── serverlessspark_test.go
│ │ │ └── serverlessspark.go
│ │ ├── sources.go
│ │ ├── spanner
│ │ │ ├── spanner_test.go
│ │ │ └── spanner.go
│ │ ├── sqlite
│ │ │ ├── sqlite_test.go
│ │ │ └── sqlite.go
│ │ ├── tidb
│ │ │ ├── tidb_test.go
│ │ │ └── tidb.go
│ │ ├── trino
│ │ │ ├── trino_test.go
│ │ │ └── trino.go
│ │ ├── util.go
│ │ ├── valkey
│ │ │ ├── valkey_test.go
│ │ │ └── valkey.go
│ │ └── yugabytedb
│ │ ├── yugabytedb_test.go
│ │ └── yugabytedb.go
│ ├── telemetry
│ │ ├── instrumentation.go
│ │ └── telemetry.go
│ ├── testutils
│ │ └── testutils.go
│ ├── tools
│ │ ├── alloydb
│ │ │ ├── alloydbcreatecluster
│ │ │ │ ├── alloydbcreatecluster_test.go
│ │ │ │ └── alloydbcreatecluster.go
│ │ │ ├── alloydbcreateinstance
│ │ │ │ ├── alloydbcreateinstance_test.go
│ │ │ │ └── alloydbcreateinstance.go
│ │ │ ├── alloydbcreateuser
│ │ │ │ ├── alloydbcreateuser_test.go
│ │ │ │ └── alloydbcreateuser.go
│ │ │ ├── alloydbgetcluster
│ │ │ │ ├── alloydbgetcluster_test.go
│ │ │ │ └── alloydbgetcluster.go
│ │ │ ├── alloydbgetinstance
│ │ │ │ ├── alloydbgetinstance_test.go
│ │ │ │ └── alloydbgetinstance.go
│ │ │ ├── alloydbgetuser
│ │ │ │ ├── alloydbgetuser_test.go
│ │ │ │ └── alloydbgetuser.go
│ │ │ ├── alloydblistclusters
│ │ │ │ ├── alloydblistclusters_test.go
│ │ │ │ └── alloydblistclusters.go
│ │ │ ├── alloydblistinstances
│ │ │ │ ├── alloydblistinstances_test.go
│ │ │ │ └── alloydblistinstances.go
│ │ │ ├── alloydblistusers
│ │ │ │ ├── alloydblistusers_test.go
│ │ │ │ └── alloydblistusers.go
│ │ │ └── alloydbwaitforoperation
│ │ │ ├── alloydbwaitforoperation_test.go
│ │ │ └── alloydbwaitforoperation.go
│ │ ├── alloydbainl
│ │ │ ├── alloydbainl_test.go
│ │ │ └── alloydbainl.go
│ │ ├── bigquery
│ │ │ ├── bigqueryanalyzecontribution
│ │ │ │ ├── bigqueryanalyzecontribution_test.go
│ │ │ │ └── bigqueryanalyzecontribution.go
│ │ │ ├── bigquerycommon
│ │ │ │ ├── table_name_parser_test.go
│ │ │ │ ├── table_name_parser.go
│ │ │ │ └── util.go
│ │ │ ├── bigqueryconversationalanalytics
│ │ │ │ ├── bigqueryconversationalanalytics_test.go
│ │ │ │ └── bigqueryconversationalanalytics.go
│ │ │ ├── bigqueryexecutesql
│ │ │ │ ├── bigqueryexecutesql_test.go
│ │ │ │ └── bigqueryexecutesql.go
│ │ │ ├── bigqueryforecast
│ │ │ │ ├── bigqueryforecast_test.go
│ │ │ │ └── bigqueryforecast.go
│ │ │ ├── bigquerygetdatasetinfo
│ │ │ │ ├── bigquerygetdatasetinfo_test.go
│ │ │ │ └── bigquerygetdatasetinfo.go
│ │ │ ├── bigquerygettableinfo
│ │ │ │ ├── bigquerygettableinfo_test.go
│ │ │ │ └── bigquerygettableinfo.go
│ │ │ ├── bigquerylistdatasetids
│ │ │ │ ├── bigquerylistdatasetids_test.go
│ │ │ │ └── bigquerylistdatasetids.go
│ │ │ ├── bigquerylisttableids
│ │ │ │ ├── bigquerylisttableids_test.go
│ │ │ │ └── bigquerylisttableids.go
│ │ │ ├── bigquerysearchcatalog
│ │ │ │ ├── bigquerysearchcatalog_test.go
│ │ │ │ └── bigquerysearchcatalog.go
│ │ │ └── bigquerysql
│ │ │ ├── bigquerysql_test.go
│ │ │ └── bigquerysql.go
│ │ ├── bigtable
│ │ │ ├── bigtable_test.go
│ │ │ └── bigtable.go
│ │ ├── cassandra
│ │ │ └── cassandracql
│ │ │ ├── cassandracql_test.go
│ │ │ └── cassandracql.go
│ │ ├── clickhouse
│ │ │ ├── clickhouseexecutesql
│ │ │ │ ├── clickhouseexecutesql_test.go
│ │ │ │ └── clickhouseexecutesql.go
│ │ │ ├── clickhouselistdatabases
│ │ │ │ ├── clickhouselistdatabases_test.go
│ │ │ │ └── clickhouselistdatabases.go
│ │ │ ├── clickhouselisttables
│ │ │ │ ├── clickhouselisttables_test.go
│ │ │ │ └── clickhouselisttables.go
│ │ │ └── clickhousesql
│ │ │ ├── clickhousesql_test.go
│ │ │ └── clickhousesql.go
│ │ ├── cloudmonitoring
│ │ │ ├── cloudmonitoring_test.go
│ │ │ └── cloudmonitoring.go
│ │ ├── cloudsql
│ │ │ ├── cloudsqlcreatedatabase
│ │ │ │ ├── cloudsqlcreatedatabase_test.go
│ │ │ │ └── cloudsqlcreatedatabase.go
│ │ │ ├── cloudsqlcreateusers
│ │ │ │ ├── cloudsqlcreateusers_test.go
│ │ │ │ └── cloudsqlcreateusers.go
│ │ │ ├── cloudsqlgetinstances
│ │ │ │ ├── cloudsqlgetinstances_test.go
│ │ │ │ └── cloudsqlgetinstances.go
│ │ │ ├── cloudsqllistdatabases
│ │ │ │ ├── cloudsqllistdatabases_test.go
│ │ │ │ └── cloudsqllistdatabases.go
│ │ │ ├── cloudsqllistinstances
│ │ │ │ ├── cloudsqllistinstances_test.go
│ │ │ │ └── cloudsqllistinstances.go
│ │ │ └── cloudsqlwaitforoperation
│ │ │ ├── cloudsqlwaitforoperation_test.go
│ │ │ └── cloudsqlwaitforoperation.go
│ │ ├── cloudsqlmssql
│ │ │ └── cloudsqlmssqlcreateinstance
│ │ │ ├── cloudsqlmssqlcreateinstance_test.go
│ │ │ └── cloudsqlmssqlcreateinstance.go
│ │ ├── cloudsqlmysql
│ │ │ └── cloudsqlmysqlcreateinstance
│ │ │ ├── cloudsqlmysqlcreateinstance_test.go
│ │ │ └── cloudsqlmysqlcreateinstance.go
│ │ ├── cloudsqlpg
│ │ │ └── cloudsqlpgcreateinstances
│ │ │ ├── cloudsqlpgcreateinstances_test.go
│ │ │ └── cloudsqlpgcreateinstances.go
│ │ ├── common_test.go
│ │ ├── common.go
│ │ ├── couchbase
│ │ │ ├── couchbase_test.go
│ │ │ └── couchbase.go
│ │ ├── dataform
│ │ │ └── dataformcompilelocal
│ │ │ ├── dataformcompilelocal_test.go
│ │ │ └── dataformcompilelocal.go
│ │ ├── dataplex
│ │ │ ├── dataplexlookupentry
│ │ │ │ ├── dataplexlookupentry_test.go
│ │ │ │ └── dataplexlookupentry.go
│ │ │ ├── dataplexsearchaspecttypes
│ │ │ │ ├── dataplexsearchaspecttypes_test.go
│ │ │ │ └── dataplexsearchaspecttypes.go
│ │ │ └── dataplexsearchentries
│ │ │ ├── dataplexsearchentries_test.go
│ │ │ └── dataplexsearchentries.go
│ │ ├── dgraph
│ │ │ ├── dgraph_test.go
│ │ │ └── dgraph.go
│ │ ├── firebird
│ │ │ ├── firebirdexecutesql
│ │ │ │ ├── firebirdexecutesql_test.go
│ │ │ │ └── firebirdexecutesql.go
│ │ │ └── firebirdsql
│ │ │ ├── firebirdsql_test.go
│ │ │ └── firebirdsql.go
│ │ ├── firestore
│ │ │ ├── firestoreadddocuments
│ │ │ │ ├── firestoreadddocuments_test.go
│ │ │ │ └── firestoreadddocuments.go
│ │ │ ├── firestoredeletedocuments
│ │ │ │ ├── firestoredeletedocuments_test.go
│ │ │ │ └── firestoredeletedocuments.go
│ │ │ ├── firestoregetdocuments
│ │ │ │ ├── firestoregetdocuments_test.go
│ │ │ │ └── firestoregetdocuments.go
│ │ │ ├── firestoregetrules
│ │ │ │ ├── firestoregetrules_test.go
│ │ │ │ └── firestoregetrules.go
│ │ │ ├── firestorelistcollections
│ │ │ │ ├── firestorelistcollections_test.go
│ │ │ │ └── firestorelistcollections.go
│ │ │ ├── firestorequery
│ │ │ │ ├── firestorequery_test.go
│ │ │ │ └── firestorequery.go
│ │ │ ├── firestorequerycollection
│ │ │ │ ├── firestorequerycollection_test.go
│ │ │ │ └── firestorequerycollection.go
│ │ │ ├── firestoreupdatedocument
│ │ │ │ ├── firestoreupdatedocument_test.go
│ │ │ │ └── firestoreupdatedocument.go
│ │ │ ├── firestorevalidaterules
│ │ │ │ ├── firestorevalidaterules_test.go
│ │ │ │ └── firestorevalidaterules.go
│ │ │ └── util
│ │ │ ├── converter_test.go
│ │ │ ├── converter.go
│ │ │ ├── validator_test.go
│ │ │ └── validator.go
│ │ ├── http
│ │ │ ├── http_test.go
│ │ │ └── http.go
│ │ ├── http_method.go
│ │ ├── looker
│ │ │ ├── lookeradddashboardelement
│ │ │ │ ├── lookeradddashboardelement_test.go
│ │ │ │ └── lookeradddashboardelement.go
│ │ │ ├── lookercommon
│ │ │ │ ├── lookercommon_test.go
│ │ │ │ └── lookercommon.go
│ │ │ ├── lookerconversationalanalytics
│ │ │ │ ├── lookerconversationalanalytics_test.go
│ │ │ │ └── lookerconversationalanalytics.go
│ │ │ ├── lookercreateprojectfile
│ │ │ │ ├── lookercreateprojectfile_test.go
│ │ │ │ └── lookercreateprojectfile.go
│ │ │ ├── lookerdeleteprojectfile
│ │ │ │ ├── lookerdeleteprojectfile_test.go
│ │ │ │ └── lookerdeleteprojectfile.go
│ │ │ ├── lookerdevmode
│ │ │ │ ├── lookerdevmode_test.go
│ │ │ │ └── lookerdevmode.go
│ │ │ ├── lookergetdashboards
│ │ │ │ ├── lookergetdashboards_test.go
│ │ │ │ └── lookergetdashboards.go
│ │ │ ├── lookergetdimensions
│ │ │ │ ├── lookergetdimensions_test.go
│ │ │ │ └── lookergetdimensions.go
│ │ │ ├── lookergetexplores
│ │ │ │ ├── lookergetexplores_test.go
│ │ │ │ └── lookergetexplores.go
│ │ │ ├── lookergetfilters
│ │ │ │ ├── lookergetfilters_test.go
│ │ │ │ └── lookergetfilters.go
│ │ │ ├── lookergetlooks
│ │ │ │ ├── lookergetlooks_test.go
│ │ │ │ └── lookergetlooks.go
│ │ │ ├── lookergetmeasures
│ │ │ │ ├── lookergetmeasures_test.go
│ │ │ │ └── lookergetmeasures.go
│ │ │ ├── lookergetmodels
│ │ │ │ ├── lookergetmodels_test.go
│ │ │ │ └── lookergetmodels.go
│ │ │ ├── lookergetparameters
│ │ │ │ ├── lookergetparameters_test.go
│ │ │ │ └── lookergetparameters.go
│ │ │ ├── lookergetprojectfile
│ │ │ │ ├── lookergetprojectfile_test.go
│ │ │ │ └── lookergetprojectfile.go
│ │ │ ├── lookergetprojectfiles
│ │ │ │ ├── lookergetprojectfiles_test.go
│ │ │ │ └── lookergetprojectfiles.go
│ │ │ ├── lookergetprojects
│ │ │ │ ├── lookergetprojects_test.go
│ │ │ │ └── lookergetprojects.go
│ │ │ ├── lookerhealthanalyze
│ │ │ │ ├── lookerhealthanalyze_test.go
│ │ │ │ └── lookerhealthanalyze.go
│ │ │ ├── lookerhealthpulse
│ │ │ │ ├── lookerhealthpulse_test.go
│ │ │ │ └── lookerhealthpulse.go
│ │ │ ├── lookerhealthvacuum
│ │ │ │ ├── lookerhealthvacuum_test.go
│ │ │ │ └── lookerhealthvacuum.go
│ │ │ ├── lookermakedashboard
│ │ │ │ ├── lookermakedashboard_test.go
│ │ │ │ └── lookermakedashboard.go
│ │ │ ├── lookermakelook
│ │ │ │ ├── lookermakelook_test.go
│ │ │ │ └── lookermakelook.go
│ │ │ ├── lookerquery
│ │ │ │ ├── lookerquery_test.go
│ │ │ │ └── lookerquery.go
│ │ │ ├── lookerquerysql
│ │ │ │ ├── lookerquerysql_test.go
│ │ │ │ └── lookerquerysql.go
│ │ │ ├── lookerqueryurl
│ │ │ │ ├── lookerqueryurl_test.go
│ │ │ │ └── lookerqueryurl.go
│ │ │ ├── lookerrunlook
│ │ │ │ ├── lookerrunlook_test.go
│ │ │ │ └── lookerrunlook.go
│ │ │ └── lookerupdateprojectfile
│ │ │ ├── lookerupdateprojectfile_test.go
│ │ │ └── lookerupdateprojectfile.go
│ │ ├── mongodb
│ │ │ ├── mongodbaggregate
│ │ │ │ ├── mongodbaggregate_test.go
│ │ │ │ └── mongodbaggregate.go
│ │ │ ├── mongodbdeletemany
│ │ │ │ ├── mongodbdeletemany_test.go
│ │ │ │ └── mongodbdeletemany.go
│ │ │ ├── mongodbdeleteone
│ │ │ │ ├── mongodbdeleteone_test.go
│ │ │ │ └── mongodbdeleteone.go
│ │ │ ├── mongodbfind
│ │ │ │ ├── mongodbfind_test.go
│ │ │ │ └── mongodbfind.go
│ │ │ ├── mongodbfindone
│ │ │ │ ├── mongodbfindone_test.go
│ │ │ │ └── mongodbfindone.go
│ │ │ ├── mongodbinsertmany
│ │ │ │ ├── mongodbinsertmany_test.go
│ │ │ │ └── mongodbinsertmany.go
│ │ │ ├── mongodbinsertone
│ │ │ │ ├── mongodbinsertone_test.go
│ │ │ │ └── mongodbinsertone.go
│ │ │ ├── mongodbupdatemany
│ │ │ │ ├── mongodbupdatemany_test.go
│ │ │ │ └── mongodbupdatemany.go
│ │ │ └── mongodbupdateone
│ │ │ ├── mongodbupdateone_test.go
│ │ │ └── mongodbupdateone.go
│ │ ├── mssql
│ │ │ ├── mssqlexecutesql
│ │ │ │ ├── mssqlexecutesql_test.go
│ │ │ │ └── mssqlexecutesql.go
│ │ │ ├── mssqllisttables
│ │ │ │ ├── mssqllisttables_test.go
│ │ │ │ └── mssqllisttables.go
│ │ │ └── mssqlsql
│ │ │ ├── mssqlsql_test.go
│ │ │ └── mssqlsql.go
│ │ ├── mysql
│ │ │ ├── mysqlcommon
│ │ │ │ └── mysqlcommon.go
│ │ │ ├── mysqlexecutesql
│ │ │ │ ├── mysqlexecutesql_test.go
│ │ │ │ └── mysqlexecutesql.go
│ │ │ ├── mysqllistactivequeries
│ │ │ │ ├── mysqllistactivequeries_test.go
│ │ │ │ └── mysqllistactivequeries.go
│ │ │ ├── mysqllisttablefragmentation
│ │ │ │ ├── mysqllisttablefragmentation_test.go
│ │ │ │ └── mysqllisttablefragmentation.go
│ │ │ ├── mysqllisttables
│ │ │ │ ├── mysqllisttables_test.go
│ │ │ │ └── mysqllisttables.go
│ │ │ ├── mysqllisttablesmissinguniqueindexes
│ │ │ │ ├── mysqllisttablesmissinguniqueindexes_test.go
│ │ │ │ └── mysqllisttablesmissinguniqueindexes.go
│ │ │ └── mysqlsql
│ │ │ ├── mysqlsql_test.go
│ │ │ └── mysqlsql.go
│ │ ├── neo4j
│ │ │ ├── neo4jcypher
│ │ │ │ ├── neo4jcypher_test.go
│ │ │ │ └── neo4jcypher.go
│ │ │ ├── neo4jexecutecypher
│ │ │ │ ├── classifier
│ │ │ │ │ ├── classifier_test.go
│ │ │ │ │ └── classifier.go
│ │ │ │ ├── neo4jexecutecypher_test.go
│ │ │ │ └── neo4jexecutecypher.go
│ │ │ └── neo4jschema
│ │ │ ├── cache
│ │ │ │ ├── cache_test.go
│ │ │ │ └── cache.go
│ │ │ ├── helpers
│ │ │ │ ├── helpers_test.go
│ │ │ │ └── helpers.go
│ │ │ ├── neo4jschema_test.go
│ │ │ ├── neo4jschema.go
│ │ │ └── types
│ │ │ └── types.go
│ │ ├── oceanbase
│ │ │ ├── oceanbaseexecutesql
│ │ │ │ ├── oceanbaseexecutesql_test.go
│ │ │ │ └── oceanbaseexecutesql.go
│ │ │ └── oceanbasesql
│ │ │ ├── oceanbasesql_test.go
│ │ │ └── oceanbasesql.go
│ │ ├── oracle
│ │ │ ├── oracleexecutesql
│ │ │ │ └── oracleexecutesql.go
│ │ │ └── oraclesql
│ │ │ └── oraclesql.go
│ │ ├── parameters_test.go
│ │ ├── parameters.go
│ │ ├── postgres
│ │ │ ├── postgresexecutesql
│ │ │ │ ├── postgresexecutesql_test.go
│ │ │ │ └── postgresexecutesql.go
│ │ │ ├── postgreslistactivequeries
│ │ │ │ ├── postgreslistactivequeries_test.go
│ │ │ │ └── postgreslistactivequeries.go
│ │ │ ├── postgreslistavailableextensions
│ │ │ │ ├── postgreslistavailableextensions_test.go
│ │ │ │ └── postgreslistavailableextensions.go
│ │ │ ├── postgreslistinstalledextensions
│ │ │ │ ├── postgreslistinstalledextensions_test.go
│ │ │ │ └── postgreslistinstalledextensions.go
│ │ │ ├── postgreslisttables
│ │ │ │ ├── postgreslisttables_test.go
│ │ │ │ └── postgreslisttables.go
│ │ │ └── postgressql
│ │ │ ├── postgressql_test.go
│ │ │ └── postgressql.go
│ │ ├── redis
│ │ │ ├── redis_test.go
│ │ │ └── redis.go
│ │ ├── serverlessspark
│ │ │ └── serverlesssparklistbatches
│ │ │ ├── serverlesssparklistbatches_test.go
│ │ │ └── serverlesssparklistbatches.go
│ │ ├── spanner
│ │ │ ├── spannerexecutesql
│ │ │ │ ├── spannerexecutesql_test.go
│ │ │ │ └── spannerexecutesql.go
│ │ │ ├── spannerlisttables
│ │ │ │ ├── spannerlisttables_test.go
│ │ │ │ └── spannerlisttables.go
│ │ │ └── spannersql
│ │ │ ├── spanner_test.go
│ │ │ └── spannersql.go
│ │ ├── sqlite
│ │ │ ├── sqliteexecutesql
│ │ │ │ ├── sqliteexecutesql_test.go
│ │ │ │ └── sqliteexecutesql.go
│ │ │ └── sqlitesql
│ │ │ ├── sqlitesql_test.go
│ │ │ └── sqlitesql.go
│ │ ├── tidb
│ │ │ ├── tidbexecutesql
│ │ │ │ ├── tidbexecutesql_test.go
│ │ │ │ └── tidbexecutesql.go
│ │ │ └── tidbsql
│ │ │ ├── tidbsql_test.go
│ │ │ └── tidbsql.go
│ │ ├── tools_test.go
│ │ ├── tools.go
│ │ ├── toolsets.go
│ │ ├── trino
│ │ │ ├── trinoexecutesql
│ │ │ │ ├── trinoexecutesql_test.go
│ │ │ │ └── trinoexecutesql.go
│ │ │ └── trinosql
│ │ │ ├── trinosql_test.go
│ │ │ └── trinosql.go
│ │ ├── utility
│ │ │ └── wait
│ │ │ ├── wait_test.go
│ │ │ └── wait.go
│ │ ├── valkey
│ │ │ ├── valkey_test.go
│ │ │ └── valkey.go
│ │ └── yugabytedbsql
│ │ ├── yugabytedbsql_test.go
│ │ └── yugabytedbsql.go
│ └── util
│ └── util.go
├── LICENSE
├── logo.png
├── main.go
├── MCP-TOOLBOX-EXTENSION.md
├── README.md
└── tests
├── alloydb
│ ├── alloydb_integration_test.go
│ └── alloydb_wait_for_operation_test.go
├── alloydbainl
│ └── alloydb_ai_nl_integration_test.go
├── alloydbpg
│ └── alloydb_pg_integration_test.go
├── auth.go
├── bigquery
│ └── bigquery_integration_test.go
├── bigtable
│ └── bigtable_integration_test.go
├── cassandra
│ └── cassandra_integration_test.go
├── clickhouse
│ └── clickhouse_integration_test.go
├── cloudmonitoring
│ └── cloud_monitoring_integration_test.go
├── cloudsql
│ ├── cloud_sql_create_database_test.go
│ ├── cloud_sql_create_users_test.go
│ ├── cloud_sql_get_instances_test.go
│ ├── cloud_sql_list_databases_test.go
│ ├── cloudsql_list_instances_test.go
│ └── cloudsql_wait_for_operation_test.go
├── cloudsqlmssql
│ ├── cloud_sql_mssql_create_instance_integration_test.go
│ └── cloud_sql_mssql_integration_test.go
├── cloudsqlmysql
│ ├── cloud_sql_mysql_create_instance_integration_test.go
│ └── cloud_sql_mysql_integration_test.go
├── cloudsqlpg
│ ├── cloud_sql_pg_create_instances_test.go
│ └── cloud_sql_pg_integration_test.go
├── common.go
├── couchbase
│ └── couchbase_integration_test.go
├── dataform
│ └── dataform_integration_test.go
├── dataplex
│ └── dataplex_integration_test.go
├── dgraph
│ └── dgraph_integration_test.go
├── firebird
│ └── firebird_integration_test.go
├── firestore
│ └── firestore_integration_test.go
├── http
│ └── http_integration_test.go
├── looker
│ └── looker_integration_test.go
├── mongodb
│ └── mongodb_integration_test.go
├── mssql
│ └── mssql_integration_test.go
├── mysql
│ └── mysql_integration_test.go
├── neo4j
│ └── neo4j_integration_test.go
├── oceanbase
│ └── oceanbase_integration_test.go
├── option.go
├── oracle
│ └── oracle_integration_test.go
├── postgres
│ └── postgres_integration_test.go
├── redis
│ └── redis_test.go
├── server.go
├── serverlessspark
│ └── serverless_spark_integration_test.go
├── source.go
├── spanner
│ └── spanner_integration_test.go
├── sqlite
│ └── sqlite_integration_test.go
├── tidb
│ └── tidb_integration_test.go
├── tool.go
├── trino
│ └── trino_integration_test.go
├── utility
│ └── wait_integration_test.go
├── valkey
│ └── valkey_test.go
└── yugabytedb
└── yugabytedb_integration_test.go
```
# Files
--------------------------------------------------------------------------------
/tests/spanner/spanner_integration_test.go:
--------------------------------------------------------------------------------
```go
// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package spanner
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"os"
"regexp"
"strings"
"testing"
"time"
"cloud.google.com/go/spanner"
database "cloud.google.com/go/spanner/admin/database/apiv1"
"cloud.google.com/go/spanner/admin/database/apiv1/databasepb"
"github.com/google/uuid"
"github.com/googleapis/genai-toolbox/internal/testutils"
"github.com/googleapis/genai-toolbox/internal/tools"
"github.com/googleapis/genai-toolbox/tests"
)
var (
SpannerSourceKind = "spanner"
SpannerToolKind = "spanner-sql"
SpannerProject = os.Getenv("SPANNER_PROJECT")
SpannerDatabase = os.Getenv("SPANNER_DATABASE")
SpannerInstance = os.Getenv("SPANNER_INSTANCE")
)
func getSpannerVars(t *testing.T) map[string]any {
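// Switching on the empty string hits the first case whose env var is unset,
// failing the test before any Spanner connection is attempted.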
switch "" {
case SpannerProject:
t.Fatal("'SPANNER_PROJECT' not set")
case SpannerDatabase:
t.Fatal("'SPANNER_DATABASE' not set")
case SpannerInstance:
t.Fatal("'SPANNER_INSTANCE' not set")
}
return map[string]any{
"kind": SpannerSourceKind,
"project": SpannerProject,
"instance": SpannerInstance,
"database": SpannerDatabase,
}
}
func initSpannerClients(ctx context.Context, project, instance, dbname string) (*spanner.Client, *database.DatabaseAdminClient, error) {
// Configure the connection to the database
db := fmt.Sprintf("projects/%s/instances/%s/databases/%s", project, instance, dbname)
// Configure session pool to automatically clean inactive transactions
sessionPoolConfig := spanner.SessionPoolConfig{
TrackSessionHandles: true,
InactiveTransactionRemovalOptions: spanner.InactiveTransactionRemovalOptions{
ActionOnInactiveTransaction: spanner.WarnAndClose,
},
}
// Create Spanner client (for queries)
dataClient, err := spanner.NewClientWithConfig(ctx, db, spanner.ClientConfig{SessionPoolConfig: sessionPoolConfig})
if err != nil {
return nil, nil, fmt.Errorf("unable to create new Spanner client: %w", err)
}
// Create Spanner admin client (for creating databases)
adminClient, err := database.NewDatabaseAdminClient(ctx)
if err != nil {
return nil, nil, fmt.Errorf("unable to create new Spanner admin client: %w", err)
}
return dataClient, adminClient, nil
}
func TestSpannerToolEndpoints(t *testing.T) {
sourceConfig := getSpannerVars(t)
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Minute)
defer cancel()
var args []string
// Create Spanner client
dataClient, adminClient, err := initSpannerClients(ctx, SpannerProject, SpannerInstance, SpannerDatabase)
if err != nil {
t.Fatalf("unable to create Spanner client: %s", err)
}
// Create unique table names with UUID suffixes so parallel test runs don't collide
tableNameParam := "param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
tableNameAuth := "auth_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
tableNameTemplateParam := "template_param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
// set up data for param tool
createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := getSpannerParamToolInfo(tableNameParam)
dbString := fmt.Sprintf(
"projects/%s/instances/%s/databases/%s",
SpannerProject,
SpannerInstance,
SpannerDatabase,
)
teardownTable1 := setupSpannerTable(t, ctx, adminClient, dataClient, createParamTableStmt, insertParamTableStmt, tableNameParam, dbString, paramTestParams)
defer teardownTable1(t)
// set up data for auth tool
createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := getSpannerAuthToolInfo(tableNameAuth)
teardownTable2 := setupSpannerTable(t, ctx, adminClient, dataClient, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, dbString, authTestParams)
defer teardownTable2(t)
// set up data for template param tool
createStatementTmpl := fmt.Sprintf("CREATE TABLE %s (id INT64, name STRING(MAX), age INT64) PRIMARY KEY (id)", tableNameTemplateParam)
teardownTableTmpl := setupSpannerTable(t, ctx, adminClient, dataClient, createStatementTmpl, "", tableNameTemplateParam, dbString, nil)
defer teardownTableTmpl(t)
// Write config into a file and pass it to command
toolsFile := tests.GetToolsConfig(sourceConfig, SpannerToolKind, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, authToolStmt)
toolsFile = addSpannerExecuteSqlConfig(t, toolsFile)
toolsFile = addSpannerReadOnlyConfig(t, toolsFile)
toolsFile = addTemplateParamConfig(t, toolsFile)
toolsFile = addSpannerListTablesConfig(t, toolsFile)
cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
if err != nil {
t.Fatalf("command initialization returned an error: %s", err)
}
defer cleanup()
waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
defer cancel()
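// Block until the toolbox server logs readiness, or give up after the 10s wait timeout.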
out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
if err != nil {
t.Logf("toolbox command logs: \n%s", out)
t.Fatalf("toolbox didn't start successfully: %s", err)
}
// Get configs for tests
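// Expected JSON payloads; Spanner INT64 values come back as quoted strings in the results.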
select1Want := "[{\"\":\"1\"}]"
invokeParamWant := "[{\"id\":\"1\",\"name\":\"Alice\"},{\"id\":\"3\",\"name\":\"Sid\"}]"
accessSchemaWant := "[{\"schema_name\":\"INFORMATION_SCHEMA\"}]"
toolInvokeMyToolById4Want := `[{"id":"4","name":null}]`
mcpMyFailToolWant := `"jsonrpc":"2.0","id":"invoke-fail-tool","result":{"content":[{"type":"text","text":"unable to execute client: unable to parse row: spanner: code = \"InvalidArgument\", desc = \"Syntax error: Unexpected identifier \\\\\\\"SELEC\\\\\\\" [at 1:1]\\\\nSELEC 1;\\\\n^\"`
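// The fail-tool expectation is heavily escaped: the Spanner error text is embedded as a JSON string inside the MCP text content, which is itself JSON-encoded in the JSON-RPC response.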
mcpMyToolId3NameAliceWant := `{"jsonrpc":"2.0","id":"my-tool","result":{"content":[{"type":"text","text":"{\"id\":\"1\",\"name\":\"Alice\"}"},{"type":"text","text":"{\"id\":\"3\",\"name\":\"Sid\"}"}]}}`
mcpSelect1Want := `{"jsonrpc":"2.0","id":"invoke my-auth-required-tool","result":{"content":[{"type":"text","text":"{\"\":\"1\"}"}]}}`
tmplSelectAllWant := "[{\"age\":\"21\",\"id\":\"1\",\"name\":\"Alex\"},{\"age\":\"100\",\"id\":\"2\",\"name\":\"Alice\"}]"
tmplSelectId1Want := "[{\"age\":\"21\",\"id\":\"1\",\"name\":\"Alex\"}]"
// Run tests
tests.RunToolGetTest(t)
tests.RunToolInvokeTest(t, select1Want,
tests.WithMyToolId3NameAliceWant(invokeParamWant),
tests.WithMyArrayToolWant(invokeParamWant),
tests.WithMyToolById4Want(toolInvokeMyToolById4Want),
)
tests.RunMCPToolCallMethod(t, mcpMyFailToolWant, mcpSelect1Want, tests.WithMcpMyToolId3NameAliceWant(mcpMyToolId3NameAliceWant))
tests.RunToolInvokeWithTemplateParameters(
t, tableNameTemplateParam,
tests.WithSelectAllWant(tmplSelectAllWant),
tests.WithTmplSelectId1Want(tmplSelectId1Want),
tests.DisableDdlTest(),
)
runSpannerSchemaToolInvokeTest(t, accessSchemaWant)
runSpannerExecuteSqlToolInvokeTest(t, select1Want, invokeParamWant, tableNameParam, tableNameAuth)
runSpannerListTablesTest(t, tableNameParam, tableNameAuth, tableNameTemplateParam)
}
// getSpannerParamToolInfo returns the statements and params for my-tool for the spanner-sql kind
func getSpannerParamToolInfo(tableName string) (string, string, string, string, string, string, map[string]any) {
createStatement := fmt.Sprintf("CREATE TABLE %s (id INT64, name STRING(MAX)) PRIMARY KEY (id)", tableName)
insertStatement := fmt.Sprintf("INSERT INTO %s (id, name) VALUES (1, @name1), (2, @name2), (3, @name3), (4, @name4)", tableName)
toolStatement := fmt.Sprintf("SELECT * FROM %s WHERE id = @id OR name = @name", tableName)
idToolStatement := fmt.Sprintf("SELECT * FROM %s WHERE id = @id", tableName)
nameToolStatement := fmt.Sprintf("SELECT * FROM %s WHERE name = @name", tableName)
arrayToolStatement := fmt.Sprintf("SELECT * FROM %s WHERE id IN UNNEST(@idArray) AND name IN UNNEST(@nameArray)", tableName)
params := map[string]any{"name1": "Alice", "name2": "Jane", "name3": "Sid", "name4": nil}
return createStatement, insertStatement, toolStatement, idToolStatement, nameToolStatement, arrayToolStatement, params
}
// getSpannerAuthToolInfo returns the statements and params for my-auth-tool for the spanner-sql kind
func getSpannerAuthToolInfo(tableName string) (string, string, string, map[string]any) {
createStatement := fmt.Sprintf("CREATE TABLE %s (id INT64, name STRING(MAX), email STRING(MAX)) PRIMARY KEY (id)", tableName)
insertStatement := fmt.Sprintf("INSERT INTO %s (id, name, email) VALUES (1, @name1, @email1), (2, @name2, @email2)", tableName)
toolStatement := fmt.Sprintf("SELECT name FROM %s WHERE email = @email", tableName)
params := map[string]any{
"name1": "Alice",
"email1": tests.ServiceAccountEmail,
"name2": "Jane",
"email2": "[email protected]",
}
return createStatement, insertStatement, toolStatement, params
}
// setupSpannerTable creates a table and inserts test data compatible with
// the spanner-sql tool
func setupSpannerTable(t *testing.T, ctx context.Context, adminClient *database.DatabaseAdminClient, dataClient *spanner.Client, createStatement, insertStatement, tableName, dbString string, params map[string]any) func(*testing.T) {
// Create table
op, err := adminClient.UpdateDatabaseDdl(ctx, &databasepb.UpdateDatabaseDdlRequest{
Database: dbString,
Statements: []string{createStatement},
})
if err != nil {
t.Fatalf("unable to start create table operation %s: %s", tableName, err)
}
err = op.Wait(ctx)
if err != nil {
t.Fatalf("unable to create test table %s: %s", tableName, err)
}
// Insert test data
if insertStatement != "" {
_, err = dataClient.ReadWriteTransaction(ctx, func(ctx context.Context, txn *spanner.ReadWriteTransaction) error {
stmt := spanner.Statement{
SQL: insertStatement,
Params: params,
}
_, err := txn.Update(ctx, stmt)
return err
})
if err != nil {
t.Fatalf("unable to insert test data: %s", err)
}
}
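// Return a teardown closure for the caller to defer; it drops the table created above.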
return func(t *testing.T) {
// tear down test
op, err = adminClient.UpdateDatabaseDdl(ctx, &databasepb.UpdateDatabaseDdlRequest{
Database: dbString,
Statements: []string{fmt.Sprintf("DROP TABLE %s", tableName)},
})
if err != nil {
t.Errorf("unable to start drop %s operation: %s", tableName, err)
return
}
opErr := op.Wait(ctx)
if opErr != nil {
t.Errorf("Teardown failed: %s", opErr)
}
}
}
// addSpannerExecuteSqlConfig adds the tool configs for `spanner-execute-sql`
func addSpannerExecuteSqlConfig(t *testing.T, config map[string]any) map[string]any {
tools, ok := config["tools"].(map[string]any)
if !ok {
t.Fatalf("unable to get tools from config")
}
tools["my-exec-sql-tool-read-only"] = map[string]any{
"kind": "spanner-execute-sql",
"source": "my-instance",
"description": "Tool to execute sql",
"readOnly": true,
}
tools["my-exec-sql-tool"] = map[string]any{
"kind": "spanner-execute-sql",
"source": "my-instance",
"description": "Tool to execute sql",
}
tools["my-auth-exec-sql-tool"] = map[string]any{
"kind": "spanner-execute-sql",
"source": "my-instance",
"description": "Tool to execute sql",
"authRequired": []string{
"my-google-auth",
},
}
config["tools"] = tools
return config
}
func addSpannerReadOnlyConfig(t *testing.T, config map[string]any) map[string]any {
tools, ok := config["tools"].(map[string]any)
if !ok {
t.Fatalf("unable to get tools from config")
}
tools["access-schema-read-only"] = map[string]any{
"kind": "spanner-sql",
"source": "my-instance",
"description": "Tool to access information schema in read-only mode.",
"statement": "SELECT schema_name FROM `INFORMATION_SCHEMA`.SCHEMATA WHERE schema_name='INFORMATION_SCHEMA';",
"readOnly": true,
}
tools["access-schema"] = map[string]any{
"kind": "spanner-sql",
"source": "my-instance",
"description": "Tool to access information schema.",
"statement": "SELECT schema_name FROM `INFORMATION_SCHEMA`.SCHEMATA WHERE schema_name='INFORMATION_SCHEMA';",
}
config["tools"] = tools
return config
}
// addSpannerListTablesConfig adds the spanner-list-tables tool configuration
func addSpannerListTablesConfig(t *testing.T, config map[string]any) map[string]any {
tools, ok := config["tools"].(map[string]any)
if !ok {
t.Fatalf("unable to get tools from config")
}
// Add spanner-list-tables tool
tools["list-tables-tool"] = map[string]any{
"kind": "spanner-list-tables",
"source": "my-instance",
"description": "Lists tables with their schema information",
}
config["tools"] = tools
return config
}
func addTemplateParamConfig(t *testing.T, config map[string]any) map[string]any {
toolsMap, ok := config["tools"].(map[string]any)
if !ok {
t.Fatalf("unable to get tools from config")
}
toolsMap["insert-table-templateParams-tool"] = map[string]any{
"kind": "spanner-sql",
"source": "my-instance",
"description": "Insert tool with template parameters",
"statement": "INSERT INTO {{.tableName}} ({{array .columns}}) VALUES ({{.values}})",
"templateParameters": []tools.Parameter{
tools.NewStringParameter("tableName", "some description"),
tools.NewArrayParameter("columns", "The columns to insert into", tools.NewStringParameter("column", "A column name that will be returned from the query.")),
tools.NewStringParameter("values", "The values to insert as a comma separated string"),
},
}
toolsMap["select-templateParams-tool"] = map[string]any{
"kind": "spanner-sql",
"source": "my-instance",
"description": "Create table tool with template parameters",
"statement": "SELECT * FROM {{.tableName}}",
"templateParameters": []tools.Parameter{
tools.NewStringParameter("tableName", "some description"),
},
}
toolsMap["select-templateParams-combined-tool"] = map[string]any{
"kind": "spanner-sql",
"source": "my-instance",
"description": "Create table tool with template parameters",
"statement": "SELECT * FROM {{.tableName}} WHERE id = @id",
"parameters": []tools.Parameter{tools.NewIntParameter("id", "the id of the user")},
"templateParameters": []tools.Parameter{
tools.NewStringParameter("tableName", "some description"),
},
}
toolsMap["select-fields-templateParams-tool"] = map[string]any{
"kind": "spanner-sql",
"source": "my-instance",
"description": "Create table tool with template parameters",
"statement": "SELECT {{array .fields}} FROM {{.tableName}}",
"templateParameters": []tools.Parameter{
tools.NewStringParameter("tableName", "some description"),
tools.NewArrayParameter("fields", "The fields to select from", tools.NewStringParameter("field", "A field that will be returned from the query.")),
},
}
toolsMap["select-filter-templateParams-combined-tool"] = map[string]any{
"kind": "spanner-sql",
"source": "my-instance",
"description": "Create table tool with template parameters",
"statement": "SELECT * FROM {{.tableName}} WHERE {{.columnFilter}} = @name",
"parameters": []tools.Parameter{tools.NewStringParameter("name", "the name of the user")},
"templateParameters": []tools.Parameter{
tools.NewStringParameter("tableName", "some description"),
tools.NewStringParameter("columnFilter", "some description"),
},
}
config["tools"] = toolsMap
return config
}
func runSpannerExecuteSqlToolInvokeTest(t *testing.T, select1Want, invokeParamWant, tableNameParam, tableNameAuth string) {
// Get ID token
idToken, err := tests.GetGoogleIdToken(tests.ClientId)
if err != nil {
t.Fatalf("error getting Google ID token: %s", err)
}
// Test tool invoke endpoint
invokeTcs := []struct {
name string
api string
requestHeader map[string]string
requestBody io.Reader
want string
isErr bool
}{
{
name: "invoke my-exec-sql-tool-read-only",
api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool-read-only/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)),
want: select1Want,
isErr: false,
},
{
name: "invoke my-exec-sql-tool-read-only with data present in table",
api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool-read-only/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"sql\":\"SELECT * FROM %s WHERE id = 3 OR name = 'Alice'\"}", tableNameParam))),
want: invokeParamWant,
isErr: false,
},
{
name: "invoke my-exec-sql-tool-read-only create table",
api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool-read-only/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(`{"sql":"CREATE TABLE t (id SERIAL PRIMARY KEY, name TEXT)"}`)),
isErr: true,
},
{
name: "invoke my-exec-sql-tool-read-only drop table",
api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool-read-only/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(`{"sql":"DROP TABLE t"}`)),
isErr: true,
},
{
name: "invoke my-exec-sql-tool-read-only insert entry",
api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool-read-only/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"sql\":\"INSERT INTO %s (id, name) VALUES (4, 'test_name')\"}", tableNameParam))),
isErr: true,
},
{
name: "invoke my-exec-sql-tool without body",
api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(`{}`)),
isErr: true,
},
{
name: "invoke my-exec-sql-tool",
api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)),
want: select1Want,
isErr: false,
},
{
name: "invoke my-exec-sql-tool create table",
api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(`{"sql":"CREATE TABLE t (id SERIAL PRIMARY KEY, name TEXT)"}`)),
isErr: true,
},
{
name: "invoke my-exec-sql-tool drop table",
api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(`{"sql":"DROP TABLE t"}`)),
isErr: true,
},
{
name: "invoke my-exec-sql-tool insert entry",
api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"sql\":\"INSERT INTO %s (id, name) VALUES (5, 'test_name')\"}", tableNameParam))),
want: "null",
isErr: false,
},
{
name: "invoke my-exec-sql-tool without body",
api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(`{}`)),
isErr: true,
},
{
name: "Invoke my-auth-exec-sql-tool with auth token",
api: "http://127.0.0.1:5000/api/tool/my-auth-exec-sql-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": idToken},
requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)),
isErr: false,
want: select1Want,
},
{
name: "Invoke my-auth-exec-sql-tool with invalid auth token",
api: "http://127.0.0.1:5000/api/tool/my-auth-exec-sql-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"},
requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)),
isErr: true,
},
{
name: "Invoke my-auth-exec-sql-tool without auth token",
api: "http://127.0.0.1:5000/api/tool/my-auth-exec-sql-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)),
isErr: true,
},
}
for _, tc := range invokeTcs {
t.Run(tc.name, func(t *testing.T) {
// Send Tool invocation request
req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
if err != nil {
t.Fatalf("unable to create request: %s", err)
}
req.Header.Add("Content-type", "application/json")
for k, v := range tc.requestHeader {
req.Header.Add(k, v)
}
resp, err := http.DefaultClient.Do(req)
if err != nil {
t.Fatalf("unable to send request: %s", err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
if tc.isErr {
return
}
bodyBytes, _ := io.ReadAll(resp.Body)
t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
}
// Check response body
var body map[string]interface{}
err = json.NewDecoder(resp.Body).Decode(&body)
if err != nil {
t.Fatalf("error parsing response body")
}
got, ok := body["result"].(string)
if !ok {
t.Fatalf("unable to find result in response body")
}
if got != tc.want {
t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
}
})
}
}
// Helper function to verify table list results
func verifyTableListResult(t *testing.T, body map[string]interface{}, expectedTables []string, expectedSimpleFormat bool) {
// Parse the result
result, ok := body["result"].(string)
if !ok {
t.Fatalf("unable to find result in response body")
}
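// The result field is itself a JSON-encoded array, so decode it a second time.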
var tables []interface{}
err := json.Unmarshal([]byte(result), &tables)
if err != nil {
t.Fatalf("unable to parse result as JSON array: %s", err)
}
// If we expect specific tables, verify they exist
if len(expectedTables) > 0 {
tableNames := make(map[string]bool)
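// Detailed-format entries carry an object_details blob with these keys; the simple format only reports the table name.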
requiredKeys := []string{"schema_name", "object_name", "object_type", "columns", "constraints", "indexes"}
if expectedSimpleFormat {
requiredKeys = []string{"name"}
}
for _, table := range tables {
tableMap, ok := table.(map[string]interface{})
if !ok {
continue
}
// Parse object_details JSON string into map[string]interface{}
if objectDetailsStr, ok := tableMap["object_details"].(string); ok {
var objectDetails map[string]interface{}
if err := json.Unmarshal([]byte(objectDetailsStr), &objectDetails); err != nil {
t.Errorf("failed to parse object_details JSON: %v for %v", err, objectDetailsStr)
continue
}
for _, reqKey := range requiredKeys {
if _, hasKey := objectDetails[reqKey]; !hasKey {
t.Errorf("missing required key '%s', for object_details: %v", reqKey, objectDetails)
}
}
}
if name, ok := tableMap["object_name"].(string); ok {
tableNames[name] = true
}
}
for _, expected := range expectedTables {
if !tableNames[expected] {
t.Errorf("expected table %s not found in results", expected)
}
}
}
}
// runSpannerListTablesTest tests the spanner-list-tables tool
func runSpannerListTablesTest(t *testing.T, tableNameParam, tableNameAuth, tableNameTemplateParam string) {
invokeTcs := []struct {
name string
requestBody io.Reader
expectedTables []string // empty means don't check specific tables
useSimpleFormat bool
}{
{
name: "list all tables with detailed format",
requestBody: bytes.NewBuffer([]byte(`{}`)),
expectedTables: []string{tableNameParam, tableNameAuth, tableNameTemplateParam},
},
{
name: "list tables with simple format",
requestBody: bytes.NewBuffer([]byte(`{"output_format": "simple"}`)),
expectedTables: []string{tableNameParam, tableNameAuth, tableNameTemplateParam},
useSimpleFormat: true,
},
{
name: "list specific tables",
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"table_names": "%s,%s"}`, tableNameParam, tableNameAuth))),
expectedTables: []string{tableNameParam, tableNameAuth},
},
{
name: "list non-existent table",
requestBody: bytes.NewBuffer([]byte(`{"table_names": "non_existent_table_xyz"}`)),
expectedTables: []string{},
},
}
for _, tc := range invokeTcs {
t.Run(tc.name, func(t *testing.T) {
// Use RunRequest helper function from tests package
url := "http://127.0.0.1:5000/api/tool/list-tables-tool/invoke"
headers := map[string]string{}
resp, respBody := tests.RunRequest(t, http.MethodPost, url, tc.requestBody, headers)
if resp.StatusCode != http.StatusOK {
t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(respBody))
}
// Check response body
var body map[string]interface{}
err := json.Unmarshal(respBody, &body)
if err != nil {
t.Fatalf("error parsing response body: %s", err)
}
verifyTableListResult(t, body, tc.expectedTables, tc.useSimpleFormat)
})
}
}
func runSpannerSchemaToolInvokeTest(t *testing.T, accessSchemaWant string) {
invokeTcs := []struct {
name string
api string
requestHeader map[string]string
requestBody io.Reader
want string
isErr bool
}{
{
name: "invoke list-tables-read-only",
api: "http://127.0.0.1:5000/api/tool/access-schema-read-only/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(`{}`)),
want: accessSchemaWant,
isErr: false,
},
{
name: "invoke list-tables",
api: "http://127.0.0.1:5000/api/tool/access-schema/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(`{}`)),
isErr: true,
},
}
for _, tc := range invokeTcs {
t.Run(tc.name, func(t *testing.T) {
// Send Tool invocation request
req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
if err != nil {
t.Fatalf("unable to create request: %s", err)
}
req.Header.Add("Content-type", "application/json")
for k, v := range tc.requestHeader {
req.Header.Add(k, v)
}
resp, err := http.DefaultClient.Do(req)
if err != nil {
t.Fatalf("unable to send request: %s", err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
if tc.isErr {
return
}
bodyBytes, _ := io.ReadAll(resp.Body)
t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
}
// Check response body
var body map[string]interface{}
err = json.NewDecoder(resp.Body).Decode(&body)
if err != nil {
t.Fatalf("error parsing response body")
}
got, ok := body["result"].(string)
if !ok {
t.Fatalf("unable to find result in response body")
}
if got != tc.want {
t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
}
})
}
}
```
--------------------------------------------------------------------------------
/tests/dataplex/dataplex_integration_test.go:
--------------------------------------------------------------------------------
```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package dataplex
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"os"
"regexp"
"strings"
"testing"
"time"
bigqueryapi "cloud.google.com/go/bigquery"
dataplex "cloud.google.com/go/dataplex/apiv1"
dataplexpb "cloud.google.com/go/dataplex/apiv1/dataplexpb"
"github.com/google/uuid"
"github.com/googleapis/genai-toolbox/internal/testutils"
"github.com/googleapis/genai-toolbox/tests"
"golang.org/x/oauth2/google"
"google.golang.org/api/googleapi"
"google.golang.org/api/iterator"
"google.golang.org/api/option"
)
var (
DataplexSourceKind = "dataplex"
DataplexSearchEntriesToolKind = "dataplex-search-entries"
DataplexLookupEntryToolKind = "dataplex-lookup-entry"
DataplexSearchAspectTypesToolKind = "dataplex-search-aspect-types"
DataplexProject = os.Getenv("DATAPLEX_PROJECT")
)
func getDataplexVars(t *testing.T) map[string]any {
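// The switch on "" fails the test early if a required environment variable is unset.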
switch "" {
case DataplexProject:
t.Fatal("'DATAPLEX_PROJECT' not set")
}
return map[string]any{
"kind": DataplexSourceKind,
"project": DataplexProject,
}
}
// Copied over from bigquery.go
func initBigQueryConnection(ctx context.Context, project string) (*bigqueryapi.Client, error) {
cred, err := google.FindDefaultCredentials(ctx, bigqueryapi.Scope)
if err != nil {
return nil, fmt.Errorf("failed to find default Google Cloud credentials with scope %q: %w", bigqueryapi.Scope, err)
}
client, err := bigqueryapi.NewClient(ctx, project, option.WithCredentials(cred))
if err != nil {
return nil, fmt.Errorf("failed to create BigQuery client for project %q: %w", project, err)
}
return client, nil
}
func initDataplexConnection(ctx context.Context) (*dataplex.CatalogClient, error) {
cred, err := google.FindDefaultCredentials(ctx)
if err != nil {
return nil, fmt.Errorf("failed to find default Google Cloud credentials: %w", err)
}
client, err := dataplex.NewCatalogClient(ctx, option.WithCredentials(cred))
if err != nil {
return nil, fmt.Errorf("failed to create Dataplex client %w", err)
}
return client, nil
}
func TestDataplexToolEndpoints(t *testing.T) {
sourceConfig := getDataplexVars(t)
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Minute)
defer cancel()
var args []string
bigqueryClient, err := initBigQueryConnection(ctx, DataplexProject)
if err != nil {
t.Fatalf("unable to create Cloud SQL connection pool: %s", err)
}
dataplexClient, err := initDataplexConnection(ctx)
if err != nil {
t.Fatalf("unable to create Dataplex connection: %s", err)
}
// create resources with UUID
datasetName := fmt.Sprintf("temp_toolbox_test_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
tableName := fmt.Sprintf("param_table_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
aspectTypeId := fmt.Sprintf("param-aspect-type-%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
teardownTable1 := setupBigQueryTable(t, ctx, bigqueryClient, datasetName, tableName)
teardownAspectType1 := setupDataplexThirdPartyAspectType(t, ctx, dataplexClient, aspectTypeId)
time.Sleep(2 * time.Minute) // wait for table and aspect type to be ingested
defer teardownTable1(t)
defer teardownAspectType1(t)
toolsFile := getDataplexToolsConfig(sourceConfig)
cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
if err != nil {
t.Fatalf("command initialization returned an error: %s", err)
}
defer cleanup()
waitCtx, cancel := context.WithTimeout(ctx, 3*time.Minute)
defer cancel()
out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
if err != nil {
t.Logf("toolbox command logs: \n%s", out)
t.Fatalf("toolbox didn't start successfully: %s", err)
}
runDataplexToolGetTest(t)
runDataplexSearchEntriesToolInvokeTest(t, tableName, datasetName)
runDataplexLookupEntryToolInvokeTest(t, tableName, datasetName)
runDataplexSearchAspectTypesToolInvokeTest(t, aspectTypeId)
}
func setupBigQueryTable(t *testing.T, ctx context.Context, client *bigqueryapi.Client, datasetName string, tableName string) func(*testing.T) {
// Create dataset
dataset := client.Dataset(datasetName)
_, err := dataset.Metadata(ctx)
if err != nil {
apiErr, ok := err.(*googleapi.Error)
if !ok || apiErr.Code != 404 {
t.Fatalf("Failed to check dataset %q existence: %v", datasetName, err)
}
metadataToCreate := &bigqueryapi.DatasetMetadata{Name: datasetName}
if err := dataset.Create(ctx, metadataToCreate); err != nil {
t.Fatalf("Failed to create dataset %q: %v", datasetName, err)
}
}
// Create table
tab := client.Dataset(datasetName).Table(tableName)
meta := &bigqueryapi.TableMetadata{}
if err := tab.Create(ctx, meta); err != nil {
t.Fatalf("Create table job for %s failed: %v", tableName, err)
}
return func(t *testing.T) {
// tear down table
dropSQL := fmt.Sprintf("drop table %s.%s", datasetName, tableName)
dropJob, err := client.Query(dropSQL).Run(ctx)
if err != nil {
t.Errorf("Failed to start drop table job for %s: %v", tableName, err)
return
}
dropStatus, err := dropJob.Wait(ctx)
if err != nil {
t.Errorf("Failed to wait for drop table job for %s: %v", tableName, err)
return
}
if err := dropStatus.Err(); err != nil {
t.Errorf("Error dropping table %s: %v", tableName, err)
}
// tear down dataset
datasetToTeardown := client.Dataset(datasetName)
tablesIterator := datasetToTeardown.Tables(ctx)
_, err = tablesIterator.Next()
if err == iterator.Done {
if err := datasetToTeardown.Delete(ctx); err != nil {
t.Errorf("Failed to delete dataset %s: %v", datasetName, err)
}
} else if err != nil {
t.Errorf("Failed to list tables in dataset %s to check emptiness: %v.", datasetName, err)
}
}
}
func setupDataplexThirdPartyAspectType(t *testing.T, ctx context.Context, client *dataplex.CatalogClient, aspectTypeId string) func(*testing.T) {
parent := fmt.Sprintf("projects/%s/locations/us", DataplexProject)
createAspectTypeReq := &dataplexpb.CreateAspectTypeRequest{
Parent: parent,
AspectTypeId: aspectTypeId,
AspectType: &dataplexpb.AspectType{
Name: fmt.Sprintf("%s/aspectTypes/%s", parent, aspectTypeId),
MetadataTemplate: &dataplexpb.AspectType_MetadataTemplate{
Name: "UserSchema",
Type: "record",
},
},
}
_, err := client.CreateAspectType(ctx, createAspectTypeReq)
if err != nil {
t.Fatalf("Failed to create aspect type %s: %v", aspectTypeId, err)
}
return func(t *testing.T) {
// tear down aspect type
deleteAspectTypeReq := &dataplexpb.DeleteAspectTypeRequest{
Name: fmt.Sprintf("%s/aspectTypes/%s", parent, aspectTypeId),
}
if _, err := client.DeleteAspectType(ctx, deleteAspectTypeReq); err != nil {
t.Errorf("Failed to delete aspect type %s: %v", aspectTypeId, err)
}
}
}
func getDataplexToolsConfig(sourceConfig map[string]any) map[string]any {
// Write config into a file and pass it to command
toolsFile := map[string]any{
"sources": map[string]any{
"my-dataplex-instance": sourceConfig,
},
"authServices": map[string]any{
"my-google-auth": map[string]any{
"kind": "google",
"clientId": tests.ClientId,
},
},
"tools": map[string]any{
"my-dataplex-search-entries-tool": map[string]any{
"kind": DataplexSearchEntriesToolKind,
"source": "my-dataplex-instance",
"description": "Simple dataplex search entries tool to test end to end functionality.",
},
"my-auth-dataplex-search-entries-tool": map[string]any{
"kind": DataplexSearchEntriesToolKind,
"source": "my-dataplex-instance",
"description": "Simple dataplex search entries tool to test end to end functionality.",
"authRequired": []string{"my-google-auth"},
},
"my-dataplex-lookup-entry-tool": map[string]any{
"kind": DataplexLookupEntryToolKind,
"source": "my-dataplex-instance",
"description": "Simple dataplex lookup entry tool to test end to end functionality.",
},
"my-auth-dataplex-lookup-entry-tool": map[string]any{
"kind": DataplexLookupEntryToolKind,
"source": "my-dataplex-instance",
"description": "Simple dataplex lookup entry tool to test end to end functionality.",
"authRequired": []string{"my-google-auth"},
},
"my-dataplex-search-aspect-types-tool": map[string]any{
"kind": DataplexSearchAspectTypesToolKind,
"source": "my-dataplex-instance",
"description": "Simple dataplex search aspect types tool to test end to end functionality.",
},
"my-auth-dataplex-search-aspect-types-tool": map[string]any{
"kind": DataplexSearchAspectTypesToolKind,
"source": "my-dataplex-instance",
"description": "Simple dataplex search aspect types tool to test end to end functionality.",
"authRequired": []string{"my-google-auth"},
},
},
}
return toolsFile
}
func runDataplexToolGetTest(t *testing.T) {
testCases := []struct {
name string
toolName string
expectedParams []string
}{
{
name: "get my-dataplex-search-entries-tool",
toolName: "my-dataplex-search-entries-tool",
expectedParams: []string{"pageSize", "query", "orderBy"},
},
{
name: "get my-dataplex-lookup-entry-tool",
toolName: "my-dataplex-lookup-entry-tool",
expectedParams: []string{"name", "view", "aspectTypes", "entry"},
},
{
name: "get my-dataplex-search-aspect-types-tool",
toolName: "my-dataplex-search-aspect-types-tool",
expectedParams: []string{"pageSize", "query", "orderBy"},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
resp, err := http.Get(fmt.Sprintf("http://127.0.0.1:5000/api/tool/%s/", tc.toolName))
if err != nil {
t.Fatalf("error when sending a request: %s", err)
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
t.Fatalf("response status code is not 200")
}
var body map[string]interface{}
err = json.NewDecoder(resp.Body).Decode(&body)
if err != nil {
t.Fatalf("error parsing response body")
}
got, ok := body["tools"]
if !ok {
t.Fatalf("unable to find tools in response body")
}
toolsMap, ok := got.(map[string]interface{})
if !ok {
t.Fatalf("expected 'tools' to be a map, got %T", got)
}
tool, ok := toolsMap[tc.toolName].(map[string]interface{})
if !ok {
t.Fatalf("expected tool %q to be a map, got %T", tc.toolName, toolsMap[tc.toolName])
}
params, ok := tool["parameters"].([]interface{})
if !ok {
t.Fatalf("expected 'parameters' to be a slice, got %T", tool["parameters"])
}
paramSet := make(map[string]struct{})
for _, param := range params {
paramMap, ok := param.(map[string]interface{})
if ok {
if name, ok := paramMap["name"].(string); ok {
paramSet[name] = struct{}{}
}
}
}
var missing []string
for _, want := range tc.expectedParams {
if _, found := paramSet[want]; !found {
missing = append(missing, want)
}
}
if len(missing) > 0 {
t.Fatalf("missing parameters for tool %q: %v", tc.toolName, missing)
}
})
}
}
func runDataplexSearchEntriesToolInvokeTest(t *testing.T, tableName string, datasetName string) {
idToken, err := tests.GetGoogleIdToken(tests.ClientId)
if err != nil {
t.Fatalf("error getting Google ID token: %s", err)
}
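// Each case hits the search-entries tool; the query uses Dataplex search syntax
// ("displayname=<table> system=bigquery parent:<dataset>") to pin the result to the
// BigQuery table created during setup.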
testCases := []struct {
name string
api string
requestHeader map[string]string
requestBody io.Reader
wantStatusCode int
expectResult bool
wantContentKey string
}{
{
name: "Success - Entry Found",
api: "http://127.0.0.1:5000/api/tool/my-dataplex-search-entries-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"query\":\"displayname=%s system=bigquery parent:%s\"}", tableName, datasetName))),
wantStatusCode: 200,
expectResult: true,
wantContentKey: "dataplex_entry",
},
{
name: "Success with Authorization - Entry Found",
api: "http://127.0.0.1:5000/api/tool/my-auth-dataplex-search-entries-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": idToken},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"query\":\"displayname=%s system=bigquery parent:%s\"}", tableName, datasetName))),
wantStatusCode: 200,
expectResult: true,
wantContentKey: "dataplex_entry",
},
{
name: "Failure - Invalid Authorization Token",
api: "http://127.0.0.1:5000/api/tool/my-auth-dataplex-search-entries-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": "invalid_token"},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"query\":\"displayname=%s system=bigquery parent:%s\"}", tableName, datasetName))),
wantStatusCode: 401,
expectResult: false,
wantContentKey: "dataplex_entry",
},
{
name: "Failure - Without Authorization Token",
api: "http://127.0.0.1:5000/api/tool/my-auth-dataplex-search-entries-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"query\":\"displayname=%s system=bigquery parent:%s\"}", tableName, datasetName))),
wantStatusCode: 401,
expectResult: false,
wantContentKey: "dataplex_entry",
},
{
name: "Failure - Entry Not Found",
api: "http://127.0.0.1:5000/api/tool/my-dataplex-search-entries-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(`{"query":"displayname=\"\" system=bigquery parent:\"\""}`)),
wantStatusCode: 200,
expectResult: false,
wantContentKey: "",
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
if err != nil {
t.Fatalf("unable to create request: %s", err)
}
req.Header.Add("Content-type", "application/json")
for k, v := range tc.requestHeader {
req.Header.Add(k, v)
}
resp, err := http.DefaultClient.Do(req)
if err != nil {
t.Fatalf("unable to send request: %s", err)
}
defer resp.Body.Close()
if resp.StatusCode != tc.wantStatusCode {
bodyBytes, _ := io.ReadAll(resp.Body)
t.Fatalf("response status code got %d, want %d\nResponse body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
}
var result map[string]interface{}
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
t.Fatalf("error parsing response body: %s", err)
}
resultStr, ok := result["result"].(string)
if !ok {
if result["result"] == nil && !tc.expectResult {
return
}
t.Fatalf("expected 'result' field to be a string, got %T", result["result"])
}
if !tc.expectResult && (resultStr == "" || resultStr == "[]") {
return
}
var entries []interface{}
if err := json.Unmarshal([]byte(resultStr), &entries); err != nil {
t.Fatalf("error unmarshalling result string: %v", err)
}
if tc.expectResult {
if len(entries) != 1 {
t.Fatalf("expected exactly one entry, but got %d", len(entries))
}
entry, ok := entries[0].(map[string]interface{})
if !ok {
t.Fatalf("expected first entry to be a map, got %T", entries[0])
}
if _, ok := entry[tc.wantContentKey]; !ok {
t.Fatalf("expected entry to have key '%s', but it was not found in %v", tc.wantContentKey, entry)
}
} else {
if len(entries) != 0 {
t.Fatalf("expected 0 entries, but got %d", len(entries))
}
}
})
}
}
func runDataplexLookupEntryToolInvokeTest(t *testing.T, tableName string, datasetName string) {
idToken, err := tests.GetGoogleIdToken(tests.ClientId)
if err != nil {
t.Fatalf("error getting Google ID token: %s", err)
}
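// The "view" parameter controls how much of the entry is returned: the basic-view
// case (view 1) expects no aspects, while the custom-view cases (view 3) must list
// the aspectTypes to include, mirroring the dataplexpb EntryView enum.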
testCases := []struct {
name string
wantStatusCode int
api string
requestHeader map[string]string
requestBody io.Reader
expectResult bool
wantContentKey string
dontWantContentKey string
aspectCheck bool
reqBodyMap map[string]any
}{
{
name: "Success - Entry Found",
api: "http://127.0.0.1:5000/api/tool/my-dataplex-lookup-entry-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"name\":\"projects/%s/locations/us\", \"entry\":\"projects/%s/locations/us/entryGroups/@bigquery/entries/bigquery.googleapis.com/projects/%s/datasets/%s\"}", DataplexProject, DataplexProject, DataplexProject, datasetName))),
wantStatusCode: 200,
expectResult: true,
wantContentKey: "name",
},
{
name: "Success - Entry Found with Authorization",
api: "http://127.0.0.1:5000/api/tool/my-auth-dataplex-lookup-entry-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": idToken},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"name\":\"projects/%s/locations/us\", \"entry\":\"projects/%s/locations/us/entryGroups/@bigquery/entries/bigquery.googleapis.com/projects/%s/datasets/%s\"}", DataplexProject, DataplexProject, DataplexProject, datasetName))),
wantStatusCode: 200,
expectResult: true,
wantContentKey: "name",
},
{
name: "Failure - Invalid Authorization Token",
api: "http://127.0.0.1:5000/api/tool/my-auth-dataplex-lookup-entry-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": "invalid_token"},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"name\":\"projects/%s/locations/us\", \"entry\":\"projects/%s/locations/us/entryGroups/@bigquery/entries/bigquery.googleapis.com/projects/%s/datasets/%s\"}", DataplexProject, DataplexProject, DataplexProject, datasetName))),
wantStatusCode: 401,
expectResult: false,
wantContentKey: "name",
},
{
name: "Failure - Without Authorization Token",
api: "http://127.0.0.1:5000/api/tool/my-auth-dataplex-lookup-entry-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"name\":\"projects/%s/locations/us\", \"entry\":\"projects/%s/locations/us/entryGroups/@bigquery/entries/bigquery.googleapis.com/projects/%s/datasets/%s\"}", DataplexProject, DataplexProject, DataplexProject, datasetName))),
wantStatusCode: 401,
expectResult: false,
wantContentKey: "name",
},
{
name: "Failure - Entry Not Found or Permission Denied",
api: "http://127.0.0.1:5000/api/tool/my-dataplex-lookup-entry-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"name\":\"projects/%s/locations/us\", \"entry\":\"projects/%s/locations/us/entryGroups/@bigquery/entries/bigquery.googleapis.com/projects/%s/datasets/%s\"}", DataplexProject, DataplexProject, DataplexProject, "non-existent-dataset"))),
wantStatusCode: 400,
expectResult: false,
},
{
name: "Success - Entry Found with Basic View",
api: "http://127.0.0.1:5000/api/tool/my-dataplex-lookup-entry-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"name\":\"projects/%s/locations/us\", \"entry\":\"projects/%s/locations/us/entryGroups/@bigquery/entries/bigquery.googleapis.com/projects/%s/datasets/%s/tables/%s\", \"view\": %d}", DataplexProject, DataplexProject, DataplexProject, datasetName, tableName, 1))),
wantStatusCode: 200,
expectResult: true,
wantContentKey: "name",
dontWantContentKey: "aspects",
},
{
name: "Failure - Entry with Custom View without Aspect Types",
api: "http://127.0.0.1:5000/api/tool/my-dataplex-lookup-entry-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"name\":\"projects/%s/locations/us\", \"entry\":\"projects/%s/locations/us/entryGroups/@bigquery/entries/bigquery.googleapis.com/projects/%s/datasets/%s/tables/%s\", \"view\": %d}", DataplexProject, DataplexProject, DataplexProject, datasetName, tableName, 3))),
wantStatusCode: 400,
expectResult: false,
},
{
name: "Success - Entry Found with only Schema Aspect",
api: "http://127.0.0.1:5000/api/tool/my-dataplex-lookup-entry-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"name\":\"projects/%s/locations/us\", \"entry\":\"projects/%s/locations/us/entryGroups/@bigquery/entries/bigquery.googleapis.com/projects/%s/datasets/%s/tables/%s\", \"aspectTypes\":[\"projects/dataplex-types/locations/global/aspectTypes/schema\"], \"view\": %d}", DataplexProject, DataplexProject, DataplexProject, datasetName, tableName, 3))),
wantStatusCode: 200,
expectResult: true,
wantContentKey: "aspects",
aspectCheck: true,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
if err != nil {
t.Fatalf("unable to create request: %s", err)
}
req.Header.Add("Content-type", "application/json")
for k, v := range tc.requestHeader {
req.Header.Add(k, v)
}
resp, err := http.DefaultClient.Do(req)
if err != nil {
t.Fatalf("unable to send request: %s", err)
}
defer resp.Body.Close()
if resp.StatusCode != tc.wantStatusCode {
bodyBytes, _ := io.ReadAll(resp.Body)
t.Fatalf("Response status code got %d, want %d\nResponse body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
}
var result map[string]interface{}
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
t.Fatalf("Error parsing response body: %v", err)
}
if tc.expectResult {
resultStr, ok := result["result"].(string)
if !ok {
t.Fatalf("Expected 'result' field to be a string on success, got %T", result["result"])
}
if resultStr == "" || resultStr == "{}" || resultStr == "null" {
t.Fatal("Expected an entry, but got empty result")
}
var entry map[string]interface{}
if err := json.Unmarshal([]byte(resultStr), &entry); err != nil {
t.Fatalf("Error unmarshalling result string into entry map: %v", err)
}
if _, ok := entry[tc.wantContentKey]; !ok {
t.Fatalf("Expected entry to have key '%s', but it was not found in %v", tc.wantContentKey, entry)
}
if _, ok := entry[tc.dontWantContentKey]; ok {
t.Fatalf("Expected entry to not have key '%s', but it was found in %v", tc.dontWantContentKey, entry)
}
if tc.aspectCheck {
// Check length of aspects
aspects, ok := entry["aspects"].(map[string]interface{})
if !ok {
t.Fatalf("Expected 'aspects' to be a map, got %T", aspects)
}
if len(aspects) != 1 {
t.Fatalf("Expected exactly one aspect, but got %d", len(aspects))
}
}
} else { // Handle expected error response
_, ok := result["error"]
if !ok {
t.Fatalf("Expected 'error' field in response, got %v", result)
}
}
})
}
}
func runDataplexSearchAspectTypesToolInvokeTest(t *testing.T, aspectTypeId string) {
idToken, err := tests.GetGoogleIdToken(tests.ClientId)
if err != nil {
t.Fatalf("error getting Google ID token: %s", err)
}
testCases := []struct {
name string
api string
requestHeader map[string]string
requestBody io.Reader
wantStatusCode int
expectResult bool
wantContentKey string
}{
{
name: "Success - Aspect Type Found",
api: "http://127.0.0.1:5000/api/tool/my-dataplex-search-aspect-types-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"query\":\"name:%s_aspectType\"}", aspectTypeId))),
wantStatusCode: 200,
expectResult: true,
wantContentKey: "metadata_template",
},
{
name: "Success - Aspect Type Found with Authorization",
api: "http://127.0.0.1:5000/api/tool/my-auth-dataplex-search-aspect-types-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": idToken},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"query\":\"name:%s_aspectType\"}", aspectTypeId))),
wantStatusCode: 200,
expectResult: true,
wantContentKey: "metadata_template",
},
{
name: "Failure - Aspect Type Not Found",
api: "http://127.0.0.1:5000/api/tool/my-dataplex-search-aspect-types-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(`"{\"query\":\"name:_aspectType\"}"`)),
wantStatusCode: 400,
expectResult: false,
},
{
name: "Failure - Invalid Authorization Token",
api: "http://127.0.0.1:5000/api/tool/my-auth-dataplex-search-aspect-types-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": "invalid_token"},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"query\":\"name:%s_aspectType\"}", aspectTypeId))),
wantStatusCode: 401,
expectResult: false,
},
{
name: "Failure - No Authorization Token",
api: "http://127.0.0.1:5000/api/tool/my-auth-dataplex-search-aspect-types-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"query\":\"name:%s_aspectType\"}", aspectTypeId))),
wantStatusCode: 401,
expectResult: false,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
if err != nil {
t.Fatalf("unable to create request: %s", err)
}
req.Header.Add("Content-type", "application/json")
for k, v := range tc.requestHeader {
req.Header.Add(k, v)
}
resp, err := http.DefaultClient.Do(req)
if err != nil {
t.Fatalf("unable to send request: %s", err)
}
defer resp.Body.Close()
if resp.StatusCode != tc.wantStatusCode {
t.Fatalf("response status code is not %d. It is %d", tc.wantStatusCode, resp.StatusCode)
}
var result map[string]interface{}
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
t.Fatalf("error parsing response body: %s", err)
}
resultStr, ok := result["result"].(string)
if !ok {
if result["result"] == nil && !tc.expectResult {
return
}
t.Fatalf("expected 'result' field to be a string, got %T", result["result"])
}
if !tc.expectResult && (resultStr == "" || resultStr == "[]") {
return
}
var entries []interface{}
if err := json.Unmarshal([]byte(resultStr), &entries); err != nil {
t.Fatalf("error unmarshalling result string: %v", err)
}
if tc.expectResult {
if len(entries) != 1 {
t.Fatalf("expected exactly one entry, but got %d", len(entries))
}
entry, ok := entries[0].(map[string]interface{})
if !ok {
t.Fatalf("expected entry to be a map, got %T", entries[0])
}
if _, ok := entry[tc.wantContentKey]; !ok {
t.Fatalf("expected entry to have key '%s', but it was not found in %v", tc.wantContentKey, entry)
}
} else {
if len(entries) != 0 {
t.Fatalf("expected 0 entries, but got %d", len(entries))
}
}
})
}
}
```
--------------------------------------------------------------------------------
/tests/common.go:
--------------------------------------------------------------------------------
```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package tests contains end to end tests meant to verify the Toolbox Server
// works as expected when executed as a binary.
package tests
import (
"context"
"database/sql"
"fmt"
"strings"
"testing"
"github.com/goccy/go-yaml"
"github.com/google/go-cmp/cmp"
"github.com/googleapis/genai-toolbox/internal/server"
"github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmysql"
"github.com/googleapis/genai-toolbox/internal/testutils"
"github.com/googleapis/genai-toolbox/internal/tools"
"github.com/jackc/pgx/v5/pgxpool"
)
// GetToolsConfig returns a mock tools config file
func GetToolsConfig(sourceConfig map[string]any, toolKind, paramToolStatement, idParamToolStmt, nameParamToolStmt, arrayToolStatement, authToolStatement string) map[string]any {
// Write config into a file and pass it to command
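// For example, the "my-simple-tool" entry below is the in-memory equivalent of:
//   tools:
//     my-simple-tool:
//       kind: <toolKind>
//       source: my-instance
//       description: Simple tool to test end to end functionality.
//       statement: SELECT 1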
toolsFile := map[string]any{
"sources": map[string]any{
"my-instance": sourceConfig,
},
"authServices": map[string]any{
"my-google-auth": map[string]any{
"kind": "google",
"clientId": ClientId,
},
},
"tools": map[string]any{
"my-simple-tool": map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Simple tool to test end to end functionality.",
"statement": "SELECT 1",
},
"my-tool": map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Tool to test invocation with params.",
"statement": paramToolStatement,
"parameters": []any{
map[string]any{
"name": "id",
"type": "integer",
"description": "user ID",
},
map[string]any{
"name": "name",
"type": "string",
"description": "user name",
},
},
},
"my-tool-by-id": map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Tool to test invocation with params.",
"statement": idParamToolStmt,
"parameters": []any{
map[string]any{
"name": "id",
"type": "integer",
"description": "user ID",
},
},
},
"my-tool-by-name": map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Tool to test invocation with params.",
"statement": nameParamToolStmt,
"parameters": []any{
map[string]any{
"name": "name",
"type": "string",
"description": "user name",
"required": false,
},
},
},
"my-array-tool": map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Tool to test invocation with array params.",
"statement": arrayToolStatement,
"parameters": []any{
map[string]any{
"name": "idArray",
"type": "array",
"description": "ID array",
"items": map[string]any{
"name": "id",
"type": "integer",
"description": "ID",
},
},
map[string]any{
"name": "nameArray",
"type": "array",
"description": "user name array",
"items": map[string]any{
"name": "name",
"type": "string",
"description": "user name",
},
},
},
},
"my-auth-tool": map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Tool to test authenticated parameters.",
// statement to auto-fill authenticated parameter
"statement": authToolStatement,
"parameters": []map[string]any{
{
"name": "email",
"type": "string",
"description": "user email",
"authServices": []map[string]string{
{
"name": "my-google-auth",
"field": "email",
},
},
},
},
},
"my-auth-required-tool": map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Tool to test auth required invocation.",
"statement": "SELECT 1",
"authRequired": []string{
"my-google-auth",
},
},
"my-fail-tool": map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Tool to test statement with incorrect syntax.",
"statement": "SELEC 1;",
},
},
}
return toolsFile
}
// AddExecuteSqlConfig adds the `execute-sql` tools to the config
func AddExecuteSqlConfig(t *testing.T, config map[string]any, toolKind string) map[string]any {
tools, ok := config["tools"].(map[string]any)
if !ok {
t.Fatalf("unable to get tools from config")
}
tools["my-exec-sql-tool"] = map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Tool to execute sql",
}
tools["my-auth-exec-sql-tool"] = map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Tool to execute sql",
"authRequired": []string{
"my-google-auth",
},
}
config["tools"] = tools
return config
}
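// AddTemplateParamConfig adds tools that exercise template parameters to the config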
func AddTemplateParamConfig(t *testing.T, config map[string]any, toolKind, tmplSelectCombined, tmplSelectFilterCombined string, tmplSelectAll string) map[string]any {
toolsMap, ok := config["tools"].(map[string]any)
if !ok {
t.Fatalf("unable to get tools from config")
}
selectAll := "SELECT * FROM {{.tableName}} ORDER BY id"
if tmplSelectAll != "" {
selectAll = tmplSelectAll
}
toolsMap["create-table-templateParams-tool"] = map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Create table tool with template parameters",
"statement": "CREATE TABLE {{.tableName}} ({{array .columns}})",
"templateParameters": []tools.Parameter{
tools.NewStringParameter("tableName", "some description"),
tools.NewArrayParameter("columns", "The columns to create", tools.NewStringParameter("column", "A column name that will be created")),
},
}
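// Illustrative invocation body for the tool above (hypothetical values):
//   {"tableName": "t", "columns": ["id SERIAL PRIMARY KEY", "name TEXT"]}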
toolsMap["insert-table-templateParams-tool"] = map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Insert tool with template parameters",
"statement": "INSERT INTO {{.tableName}} ({{array .columns}}) VALUES ({{.values}})",
"templateParameters": []tools.Parameter{
tools.NewStringParameter("tableName", "some description"),
tools.NewArrayParameter("columns", "The columns to insert into", tools.NewStringParameter("column", "A column name that will be returned from the query.")),
tools.NewStringParameter("values", "The values to insert as a comma separated string"),
},
}
toolsMap["select-templateParams-tool"] = map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Create table tool with template parameters",
"statement": selectAll,
"templateParameters": []tools.Parameter{
tools.NewStringParameter("tableName", "some description"),
},
}
toolsMap["select-templateParams-combined-tool"] = map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Create table tool with template parameters",
"statement": tmplSelectCombined,
"parameters": []tools.Parameter{tools.NewIntParameter("id", "the id of the user")},
"templateParameters": []tools.Parameter{
tools.NewStringParameter("tableName", "some description"),
},
}
toolsMap["select-fields-templateParams-tool"] = map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Create table tool with template parameters",
"statement": "SELECT {{array .fields}} FROM {{.tableName}} ORDER BY id",
"templateParameters": []tools.Parameter{
tools.NewStringParameter("tableName", "some description"),
tools.NewArrayParameter("fields", "The fields to select from", tools.NewStringParameter("field", "A field that will be returned from the query.")),
},
}
toolsMap["select-filter-templateParams-combined-tool"] = map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Create table tool with template parameters",
"statement": tmplSelectFilterCombined,
"parameters": []tools.Parameter{tools.NewStringParameter("name", "the name of the user")},
"templateParameters": []tools.Parameter{
tools.NewStringParameter("tableName", "some description"),
tools.NewStringParameter("columnFilter", "some description"),
},
}
toolsMap["drop-table-templateParams-tool"] = map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Drop table tool with template parameters",
"statement": "DROP TABLE IF EXISTS {{.tableName}}",
"templateParameters": []tools.Parameter{
tools.NewStringParameter("tableName", "some description"),
},
}
config["tools"] = toolsMap
return config
}
// AddMySqlExecuteSqlConfig adds the `mysql-execute-sql` tools to the config
func AddMySqlExecuteSqlConfig(t *testing.T, config map[string]any) map[string]any {
tools, ok := config["tools"].(map[string]any)
if !ok {
t.Fatalf("unable to get tools from config")
}
tools["my-exec-sql-tool"] = map[string]any{
"kind": "mysql-execute-sql",
"source": "my-instance",
"description": "Tool to execute sql",
}
tools["my-auth-exec-sql-tool"] = map[string]any{
"kind": "mysql-execute-sql",
"source": "my-instance",
"description": "Tool to execute sql",
"authRequired": []string{
"my-google-auth",
},
}
config["tools"] = tools
return config
}
// AddMySQLPrebuiltToolConfig adds the mysql prebuilt tools to the config
func AddMySQLPrebuiltToolConfig(t *testing.T, config map[string]any) map[string]any {
tools, ok := config["tools"].(map[string]any)
if !ok {
t.Fatalf("unable to get tools from config")
}
tools["list_tables"] = map[string]any{
"kind": "mysql-list-tables",
"source": "my-instance",
"description": "Lists tables in the database.",
}
tools["list_active_queries"] = map[string]any{
"kind": "mysql-list-active-queries",
"source": "my-instance",
"description": "Lists active queries in the database.",
}
tools["list_tables_missing_unique_indexes"] = map[string]any{
"kind": "mysql-list-tables-missing-unique-indexes",
"source": "my-instance",
"description": "Lists tables that do not have primary or unique indexes in the database.",
}
tools["list_table_fragmentation"] = map[string]any{
"kind": "mysql-list-table-fragmentation",
"source": "my-instance",
"description": "Lists table fragmentation in the database.",
}
config["tools"] = tools
return config
}
// AddMSSQLExecuteSqlConfig adds the `mssql-execute-sql` tools to the config
func AddMSSQLExecuteSqlConfig(t *testing.T, config map[string]any) map[string]any {
tools, ok := config["tools"].(map[string]any)
if !ok {
t.Fatalf("unable to get tools from config")
}
tools["my-exec-sql-tool"] = map[string]any{
"kind": "mssql-execute-sql",
"source": "my-instance",
"description": "Tool to execute sql",
}
tools["my-auth-exec-sql-tool"] = map[string]any{
"kind": "mssql-execute-sql",
"source": "my-instance",
"description": "Tool to execute sql",
"authRequired": []string{
"my-google-auth",
},
}
config["tools"] = tools
return config
}
// AddMSSQLPrebuiltToolConfig adds the mssql prebuilt tools to the config
func AddMSSQLPrebuiltToolConfig(t *testing.T, config map[string]any) map[string]any {
tools, ok := config["tools"].(map[string]any)
if !ok {
t.Fatalf("unable to get tools from config")
}
tools["list_tables"] = map[string]any{
"kind": "mssql-list-tables",
"source": "my-instance",
"description": "Lists tables in the database.",
}
config["tools"] = tools
return config
}
// GetPostgresSQLParamToolInfo returns statements and param for my-tool postgres-sql kind
func GetPostgresSQLParamToolInfo(tableName string) (string, string, string, string, string, string, []any) {
createStatement := fmt.Sprintf("CREATE TABLE %s (id SERIAL PRIMARY KEY, name TEXT);", tableName)
insertStatement := fmt.Sprintf("INSERT INTO %s (name) VALUES ($1), ($2), ($3), ($4);", tableName)
toolStatement := fmt.Sprintf("SELECT * FROM %s WHERE id = $1 OR name = $2;", tableName)
idParamStatement := fmt.Sprintf("SELECT * FROM %s WHERE id = $1;", tableName)
nameParamStatement := fmt.Sprintf("SELECT * FROM %s WHERE name = $1;", tableName)
arrayToolStatement := fmt.Sprintf("SELECT * FROM %s WHERE id = ANY($1) AND name = ANY($2);", tableName)
params := []any{"Alice", "Jane", "Sid", nil}
return createStatement, insertStatement, toolStatement, idParamStatement, nameParamStatement, arrayToolStatement, params
}
// GetPostgresSQLAuthToolInfo returns statements and param of my-auth-tool for postgres-sql kind
func GetPostgresSQLAuthToolInfo(tableName string) (string, string, string, []any) {
createStatement := fmt.Sprintf("CREATE TABLE %s (id SERIAL PRIMARY KEY, name TEXT, email TEXT);", tableName)
insertStatement := fmt.Sprintf("INSERT INTO %s (name, email) VALUES ($1, $2), ($3, $4)", tableName)
toolStatement := fmt.Sprintf("SELECT name FROM %s WHERE email = $1;", tableName)
params := []any{"Alice", ServiceAccountEmail, "Jane", "[email protected]"}
return createStatement, insertStatement, toolStatement, params
}
// GetPostgresSQLTmplToolStatement returns statements and param for template parameter test cases for postgres-sql kind
func GetPostgresSQLTmplToolStatement() (string, string) {
tmplSelectCombined := "SELECT * FROM {{.tableName}} WHERE id = $1"
tmplSelectFilterCombined := "SELECT * FROM {{.tableName}} WHERE {{.columnFilter}} = $1"
return tmplSelectCombined, tmplSelectFilterCombined
}
// GetMSSQLParamToolInfo returns statements and param for my-tool mssql-sql kind
func GetMSSQLParamToolInfo(tableName string) (string, string, string, string, string, string, []any) {
createStatement := fmt.Sprintf("CREATE TABLE %s (id INT IDENTITY(1,1) PRIMARY KEY, name VARCHAR(255));", tableName)
insertStatement := fmt.Sprintf("INSERT INTO %s (name) VALUES (@alice), (@jane), (@sid), (@nil);", tableName)
toolStatement := fmt.Sprintf("SELECT * FROM %s WHERE id = @id OR name = @p2;", tableName)
idParamStatement := fmt.Sprintf("SELECT * FROM %s WHERE id = @id;", tableName)
nameParamStatement := fmt.Sprintf("SELECT * FROM %s WHERE name = @name;", tableName)
arrayToolStatement := fmt.Sprintf("SELECT * FROM %s WHERE id = ANY(@idArray) OR name = ANY(@p2);", tableName)
params := []any{sql.Named("alice", "Alice"), sql.Named("jane", "Jane"), sql.Named("sid", "Sid"), sql.Named("nil", nil)}
return createStatement, insertStatement, toolStatement, idParamStatement, nameParamStatement, arrayToolStatement, params
}
// GetMSSQLAuthToolInfo returns statements and param of my-auth-tool for mssql-sql kind
func GetMSSQLAuthToolInfo(tableName string) (string, string, string, []any) {
createStatement := fmt.Sprintf("CREATE TABLE %s (id INT IDENTITY(1,1) PRIMARY KEY, name VARCHAR(255), email VARCHAR(255));", tableName)
insertStatement := fmt.Sprintf("INSERT INTO %s (name, email) VALUES (@alice, @aliceemail), (@jane, @janeemail);", tableName)
toolStatement := fmt.Sprintf("SELECT name FROM %s WHERE email = @email;", tableName)
params := []any{sql.Named("alice", "Alice"), sql.Named("aliceemail", ServiceAccountEmail), sql.Named("jane", "Jane"), sql.Named("janeemail", "[email protected]")}
return createStatement, insertStatement, toolStatement, params
}
// GetMSSQLTmplToolStatement returns statements and param for template parameter test cases for mssql-sql kind
func GetMSSQLTmplToolStatement() (string, string) {
tmplSelectCombined := "SELECT * FROM {{.tableName}} WHERE id = @id"
tmplSelectFilterCombined := "SELECT * FROM {{.tableName}} WHERE {{.columnFilter}} = @name"
return tmplSelectCombined, tmplSelectFilterCombined
}
// GetMySQLParamToolInfo returns statements and param for my-tool mysql-sql kind
func GetMySQLParamToolInfo(tableName string) (string, string, string, string, string, string, []any) {
createStatement := fmt.Sprintf("CREATE TABLE %s (id INT NOT NULL AUTO_INCREMENT PRIMARY KEY, name VARCHAR(255));", tableName)
insertStatement := fmt.Sprintf("INSERT INTO %s (name) VALUES (?), (?), (?), (?);", tableName)
toolStatement := fmt.Sprintf("SELECT * FROM %s WHERE id = ? OR name = ?;", tableName)
idParamStatement := fmt.Sprintf("SELECT * FROM %s WHERE id = ?;", tableName)
nameParamStatement := fmt.Sprintf("SELECT * FROM %s WHERE name = ?;", tableName)
arrayToolStatement := fmt.Sprintf("SELECT * FROM %s WHERE id = ANY(?) AND name = ANY(?);", tableName)
params := []any{"Alice", "Jane", "Sid", nil}
return createStatement, insertStatement, toolStatement, idParamStatement, nameParamStatement, arrayToolStatement, params
}
// GetMySQLAuthToolInfo returns statements and param of my-auth-tool for mysql-sql kind
func GetMySQLAuthToolInfo(tableName string) (string, string, string, []any) {
createStatement := fmt.Sprintf("CREATE TABLE %s (id INT NOT NULL AUTO_INCREMENT PRIMARY KEY, name VARCHAR(255), email VARCHAR(255));", tableName)
insertStatement := fmt.Sprintf("INSERT INTO %s (name, email) VALUES (?, ?), (?, ?)", tableName)
toolStatement := fmt.Sprintf("SELECT name FROM %s WHERE email = ?;", tableName)
params := []any{"Alice", ServiceAccountEmail, "Jane", "[email protected]"}
return createStatement, insertStatement, toolStatement, params
}
// GetMySQLTmplToolStatement returns statements and param for template parameter test cases for mysql-sql kind
func GetMySQLTmplToolStatement() (string, string) {
tmplSelectCombined := "SELECT * FROM {{.tableName}} WHERE id = ?"
tmplSelectFilterCombined := "SELECT * FROM {{.tableName}} WHERE {{.columnFilter}} = ?"
return tmplSelectCombined, tmplSelectFilterCombined
}
// GetPostgresWants returns the expected wants for postgres
func GetPostgresWants() (string, string, string, string) {
select1Want := "[{\"?column?\":1}]"
mcpMyFailToolWant := `{"jsonrpc":"2.0","id":"invoke-fail-tool","result":{"content":[{"type":"text","text":"unable to execute query: ERROR: syntax error at or near \"SELEC\" (SQLSTATE 42601)"}],"isError":true}}`
createTableStatement := `"CREATE TABLE t (id SERIAL PRIMARY KEY, name TEXT)"`
mcpSelect1Want := `{"jsonrpc":"2.0","id":"invoke my-auth-required-tool","result":{"content":[{"type":"text","text":"{\"?column?\":1}"}]}}`
return select1Want, mcpMyFailToolWant, createTableStatement, mcpSelect1Want
}
// GetMSSQLWants returns the expected wants for mssql
func GetMSSQLWants() (string, string, string, string) {
select1Want := "[{\"\":1}]"
mcpMyFailToolWant := `{"jsonrpc":"2.0","id":"invoke-fail-tool","result":{"content":[{"type":"text","text":"unable to execute query: mssql: Could not find stored procedure 'SELEC'."}],"isError":true}}`
createTableStatement := `"CREATE TABLE t (id INT IDENTITY(1,1) PRIMARY KEY, name NVARCHAR(MAX))"`
mcpSelect1Want := `{"jsonrpc":"2.0","id":"invoke my-auth-required-tool","result":{"content":[{"type":"text","text":"{\"\":1}"}]}}`
return select1Want, mcpMyFailToolWant, createTableStatement, mcpSelect1Want
}
// GetMySQLWants returns the expected wants for mysql
func GetMySQLWants() (string, string, string, string) {
select1Want := "[{\"1\":1}]"
mcpMyFailToolWant := `{"jsonrpc":"2.0","id":"invoke-fail-tool","result":{"content":[{"type":"text","text":"unable to execute query: Error 1064 (42000): You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to use near 'SELEC 1' at line 1"}],"isError":true}}`
createTableStatement := `"CREATE TABLE t (id SERIAL PRIMARY KEY, name TEXT)"`
mcpSelect1Want := `{"jsonrpc":"2.0","id":"invoke my-auth-required-tool","result":{"content":[{"type":"text","text":"{\"1\":1}"}]}}`
return select1Want, mcpMyFailToolWant, createTableStatement, mcpSelect1Want
}
// SetupPostgresSQLTable creates and inserts data into a table of tool
// compatible with postgres-sql tool
func SetupPostgresSQLTable(t *testing.T, ctx context.Context, pool *pgxpool.Pool, createStatement, insertStatement, tableName string, params []any) func(*testing.T) {
err := pool.Ping(ctx)
if err != nil {
t.Fatalf("unable to connect to test database: %s", err)
}
// Create table
_, err = pool.Query(ctx, createStatement)
if err != nil {
t.Fatalf("unable to create test table %s: %s", tableName, err)
}
// Insert test data
_, err = pool.Query(ctx, insertStatement, params...)
if err != nil {
t.Fatalf("unable to insert test data: %s", err)
}
return func(t *testing.T) {
// tear down test
_, err = pool.Exec(ctx, fmt.Sprintf("DROP TABLE %s;", tableName))
if err != nil {
t.Errorf("Teardown failed: %s", err)
}
}
}
// SetupMsSQLTable creates and inserts data into a table of tool
// compatible with mssql-sql tool
func SetupMsSQLTable(t *testing.T, ctx context.Context, pool *sql.DB, createStatement, insertStatement, tableName string, params []any) func(*testing.T) {
err := pool.PingContext(ctx)
if err != nil {
t.Fatalf("unable to connect to test database: %s", err)
}
// Create table
_, err = pool.QueryContext(ctx, createStatement)
if err != nil {
t.Fatalf("unable to create test table %s: %s", tableName, err)
}
// Insert test data
_, err = pool.QueryContext(ctx, insertStatement, params...)
if err != nil {
t.Fatalf("unable to insert test data: %s", err)
}
return func(t *testing.T) {
// tear down test
_, err = pool.ExecContext(ctx, fmt.Sprintf("DROP TABLE %s;", tableName))
if err != nil {
t.Errorf("Teardown failed: %s", err)
}
}
}
// SetupMySQLTable creates and inserts data into a table of tool
// compatible with mysql-sql tool
func SetupMySQLTable(t *testing.T, ctx context.Context, pool *sql.DB, createStatement, insertStatement, tableName string, params []any) func(*testing.T) {
err := pool.PingContext(ctx)
if err != nil {
t.Fatalf("unable to connect to test database: %s", err)
}
// Create table
_, err = pool.QueryContext(ctx, createStatement)
if err != nil {
t.Fatalf("unable to create test table %s: %s", tableName, err)
}
// Insert test data
_, err = pool.QueryContext(ctx, insertStatement, params...)
if err != nil {
t.Fatalf("unable to insert test data: %s", err)
}
return func(t *testing.T) {
// tear down test
_, err = pool.ExecContext(ctx, fmt.Sprintf("DROP TABLE %s;", tableName))
if err != nil {
t.Errorf("Teardown failed: %s", err)
}
}
}
// GetRedisValkeyWants returns the expected wants for redis and valkey
func GetRedisValkeyWants() (string, string, string, string, string, string, string) {
select1Want := "[\"PONG\"]"
mcpMyFailToolWant := `unknown command 'SELEC 1;', with args beginning with: \""}]}}`
invokeParamWant := "[{\"id\":\"1\",\"name\":\"Alice\"},{\"id\":\"3\",\"name\":\"Sid\"}]"
invokeIdNullWant := `[{"id":"4","name":""}]`
nullWant := `["null"]`
mcpSelect1Want := `{"jsonrpc":"2.0","id":"invoke my-auth-required-tool","result":{"content":[{"type":"text","text":"\"PONG\""}]}}`
mcpInvokeParamWant := `{"jsonrpc":"2.0","id":"my-tool","result":{"content":[{"type":"text","text":"{\"id\":\"1\",\"name\":\"Alice\"}"},{"type":"text","text":"{\"id\":\"3\",\"name\":\"Sid\"}"}]}}`
return select1Want, mcpMyFailToolWant, invokeParamWant, invokeIdNullWant, nullWant, mcpSelect1Want, mcpInvokeParamWant
}
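// GetRedisValkeyToolsConfig returns a mock tools config for redis/valkey command tools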
func GetRedisValkeyToolsConfig(sourceConfig map[string]any, toolKind string) map[string]any {
toolsFile := map[string]any{
"sources": map[string]any{
"my-instance": sourceConfig,
},
"authServices": map[string]any{
"my-google-auth": map[string]any{
"kind": "google",
"clientId": ClientId,
},
},
"tools": map[string]any{
"my-simple-tool": map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Simple tool to test end to end functionality.",
"commands": [][]string{{"PING"}},
},
"my-tool": map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Tool to test invocation with params.",
"commands": [][]string{{"HGETALL", "row1"}, {"HGETALL", "row3"}},
"parameters": []any{
map[string]any{
"name": "id",
"type": "integer",
"description": "user ID",
},
map[string]any{
"name": "name",
"type": "string",
"description": "user name",
},
},
},
"my-tool-by-id": map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Tool to test invocation with params.",
"commands": [][]string{{"HGETALL", "row4"}},
"parameters": []any{
map[string]any{
"name": "id",
"type": "integer",
"description": "user ID",
},
},
},
"my-tool-by-name": map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Tool to test invocation with params.",
"commands": [][]string{{"GET", "null"}},
"parameters": []any{
map[string]any{
"name": "name",
"type": "string",
"description": "user name",
"required": false,
},
},
},
"my-array-tool": map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Tool to test invocation with array params.",
"commands": [][]string{{"HGETALL", "row1"}, {"$cmdArray"}},
"parameters": []any{
map[string]any{
"name": "cmdArray",
"type": "array",
"description": "cmd array",
"items": map[string]any{
"name": "cmd",
"type": "string",
"description": "field",
},
},
},
},
"my-auth-tool": map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Tool to test authenticated parameters.",
// statement to auto-fill authenticated parameter
"commands": [][]string{{"HGETALL", "$email"}},
"parameters": []map[string]any{
{
"name": "email",
"type": "string",
"description": "user email",
"authServices": []map[string]string{
{
"name": "my-google-auth",
"field": "email",
},
},
},
},
},
"my-auth-required-tool": map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Tool to test auth required invocation.",
"commands": [][]string{{"PING"}},
"authRequired": []string{
"my-google-auth",
},
},
"my-fail-tool": map[string]any{
"kind": toolKind,
"source": "my-instance",
"description": "Tool to test statement with incorrect syntax.",
"commands": [][]string{{"SELEC 1;"}},
},
},
}
return toolsFile
}
// TestCloudSQLMySQL_IPTypeParsingFromYAML verifies the IPType field parsing from YAML
// for the cloud-sql-mysql source, mimicking the structure of tests in cloudsql_mysql_test.go.
func TestCloudSQLMySQL_IPTypeParsingFromYAML(t *testing.T) {
tcs := []struct {
desc string
in string
want server.SourceConfigs
}{
{
desc: "IPType Defaulting to Public",
in: `
sources:
my-mysql-instance:
kind: cloud-sql-mysql
project: my-project
region: my-region
instance: my-instance
database: my_db
user: my_user
password: my_pass
`,
want: server.SourceConfigs{
"my-mysql-instance": cloudsqlmysql.Config{
Name: "my-mysql-instance",
Kind: cloudsqlmysql.SourceKind,
Project: "my-project",
Region: "my-region",
Instance: "my-instance",
IPType: "public", // Default value
Database: "my_db",
User: "my_user",
Password: "my_pass",
},
},
},
{
desc: "IPType Explicit Public",
in: `
sources:
my-mysql-instance:
kind: cloud-sql-mysql
project: my-project
region: my-region
instance: my-instance
ipType: Public
database: my_db
user: my_user
password: my_pass
`,
want: server.SourceConfigs{
"my-mysql-instance": cloudsqlmysql.Config{
Name: "my-mysql-instance",
Kind: cloudsqlmysql.SourceKind,
Project: "my-project",
Region: "my-region",
Instance: "my-instance",
IPType: "public",
Database: "my_db",
User: "my_user",
Password: "my_pass",
},
},
},
{
desc: "IPType Explicit Private",
in: `
sources:
my-mysql-instance:
kind: cloud-sql-mysql
project: my-project
region: my-region
instance: my-instance
ipType: private
database: my_db
user: my_user
password: my_pass
`,
want: server.SourceConfigs{
"my-mysql-instance": cloudsqlmysql.Config{
Name: "my-mysql-instance",
Kind: cloudsqlmysql.SourceKind,
Project: "my-project",
Region: "my-region",
Instance: "my-instance",
IPType: "private",
Database: "my_db",
User: "my_user",
Password: "my_pass",
},
},
},
}
for _, tc := range tcs {
t.Run(tc.desc, func(t *testing.T) {
got := struct {
Sources server.SourceConfigs `yaml:"sources"`
}{}
// Parse contents
err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
if err != nil {
t.Fatalf("unable to unmarshal: %s", err)
}
if !cmp.Equal(tc.want, got.Sources) {
t.Fatalf("incorrect parse: diff (-want +got):\n%s", cmp.Diff(tc.want, got.Sources))
}
})
}
}
// Finds and drops all tables in a postgres database.
func CleanupPostgresTables(t *testing.T, ctx context.Context, pool *pgxpool.Pool) {
query := `
SELECT table_name FROM information_schema.tables
WHERE table_schema = 'public' AND table_type = 'BASE TABLE';`
rows, err := pool.Query(ctx, query)
if err != nil {
t.Fatalf("Failed to query for all tables in 'public' schema: %v", err)
}
defer rows.Close()
var tablesToDrop []string
for rows.Next() {
var tableName string
if err := rows.Scan(&tableName); err != nil {
t.Errorf("Failed to scan table name: %v", err)
continue
}
tablesToDrop = append(tablesToDrop, fmt.Sprintf("public.%q", tableName))
}
if len(tablesToDrop) == 0 {
return
}
dropQuery := fmt.Sprintf("DROP TABLE IF EXISTS %s CASCADE;", strings.Join(tablesToDrop, ", "))
if _, err := pool.Exec(ctx, dropQuery); err != nil {
t.Fatalf("Failed to drop all tables in 'public' schema: %v", err)
}
}
// Finds and drops all tables in a mysql database.
func CleanupMySQLTables(t *testing.T, ctx context.Context, pool *sql.DB) {
query := `
SELECT table_name FROM information_schema.tables
WHERE table_schema = DATABASE() AND table_type = 'BASE TABLE';`
rows, err := pool.QueryContext(ctx, query)
if err != nil {
t.Fatalf("Failed to query for all MySQL tables: %v", err)
}
defer rows.Close()
var tablesToDrop []string
for rows.Next() {
var tableName string
if err := rows.Scan(&tableName); err != nil {
t.Errorf("Failed to scan MySQL table name: %v", err)
continue
}
tablesToDrop = append(tablesToDrop, fmt.Sprintf("`%s`", tableName))
}
if len(tablesToDrop) == 0 {
return
}
// Disable foreign key checks, drop all tables and re-enable
if _, err := pool.ExecContext(ctx, "SET FOREIGN_KEY_CHECKS = 0;"); err != nil {
t.Fatalf("Failed to disable MySQL foreign key checks: %v", err)
}
dropQuery := fmt.Sprintf("DROP TABLE IF EXISTS %s;", strings.Join(tablesToDrop, ", "))
if _, err := pool.ExecContext(ctx, dropQuery); err != nil {
// Try to re-enable checks even if drop fails
if _, err := pool.ExecContext(ctx, "SET FOREIGN_KEY_CHECKS = 1;"); err != nil {
t.Logf("Also failed to re-enable foreign key checks: %v", err)
}
t.Fatalf("Failed to drop all MySQL tables: %v", err)
}
// Re-enable foreign key checks
if _, err := pool.ExecContext(ctx, "SET FOREIGN_KEY_CHECKS = 1;"); err != nil {
t.Fatalf("Failed to re-enable MySQL foreign key checks: %v", err)
}
}
// Finds and drops all tables in an mssql database.
func CleanupMSSQLTables(t *testing.T, ctx context.Context, pool *sql.DB) {
disableConstraintsCmd := "EXEC sp_MSforeachtable 'ALTER TABLE ? NOCHECK CONSTRAINT ALL'"
if _, err := pool.ExecContext(ctx, disableConstraintsCmd); err != nil {
t.Fatalf("Failed to disable MSSQL constraints: %v", err)
}
// drop 'U' (User Tables)
dropTablesCmd := "EXEC sp_MSforeachtable 'DROP TABLE ?', @whereand = 'AND O.Type = ''U'''"
if _, err := pool.ExecContext(ctx, dropTablesCmd); err != nil {
t.Fatalf("Failed to drop all MSSQL tables: %v", err)
}
}
```
--------------------------------------------------------------------------------
/internal/prebuiltconfigs/tools/looker.yaml:
--------------------------------------------------------------------------------
```yaml
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
sources:
looker-source:
kind: looker
base_url: ${LOOKER_BASE_URL}
client_id: ${LOOKER_CLIENT_ID:}
client_secret: ${LOOKER_CLIENT_SECRET:}
verify_ssl: ${LOOKER_VERIFY_SSL:true}
timeout: 600s
use_client_oauth: ${LOOKER_USE_CLIENT_OAUTH:false}
show_hidden_models: ${LOOKER_SHOW_HIDDEN_MODELS:true}
show_hidden_explores: ${LOOKER_SHOW_HIDDEN_EXPLORES:true}
show_hidden_fields: ${LOOKER_SHOW_HIDDEN_FIELDS:true}
tools:
get_models:
kind: looker-get-models
source: looker-source
description: |
The get_models tool retrieves the list of LookML models in the Looker system.
It takes no parameters.
get_explores:
kind: looker-get-explores
source: looker-source
description: |
The get_explores tool retrieves the list of explores defined in a LookML model
in the Looker system.
It takes one parameter, the model_name looked up from get_models.
get_dimensions:
kind: looker-get-dimensions
source: looker-source
description: |
The get_dimensions tool retrieves the list of dimensions defined in
an explore.
It takes two parameters, the model_name looked up from get_models and the
explore_name looked up from get_explores.
If this returns a suggestions field for a dimension, the contents of suggestions
can be used as filters for this field. If this returns a suggest_explore and
suggest_dimension, a query against that explore and dimension can be used to find
valid filters for this field.
get_measures:
kind: looker-get-measures
source: looker-source
description: |
The get_measures tool retrieves the list of measures defined in
an explore.
It takes two parameters, the model_name looked up from get_models and the
explore_name looked up from get_explores.
If this returns a suggestions field for a measure, the contents of suggestions
can be used as filters for this field. If this returns a suggest_explore and
suggest_dimension, a query against that explore and dimension can be used to find
valid filters for this field.
get_filters:
kind: looker-get-filters
source: looker-source
description: |
The get_filters tool retrieves the list of filters defined in
an explore.
It takes two parameters, the model_name looked up from get_models and the
explore_name looked up from get_explores.
get_parameters:
kind: looker-get-parameters
source: looker-source
description: |
The get_parameters tool retrieves the list of parameters defined in
an explore.
It takes two parameters, the model_name looked up from get_models and the
explore_name looked up from get_explores.
query:
kind: looker-query
source: looker-source
description: |
Query Tool
This tool is used to run a query against the LookML model. The
model, explore, and fields list must be specified. Pivots,
filters and sorts are optional.
The model can be found from the get_models tool. The explore
can be found from the get_explores tool passing in the model.
The fields can be found from the get_dimensions, get_measures,
get_filters, and get_parameters tools, passing in the model
and the explore.
Provide a model_id and explore_name, then a list
of fields. Optionally a list of pivots can be provided.
The pivots must also be included in the fields list.
Filters are provided as a map of {"field.id": "condition",
"field.id2": "condition2", ...}. Do not put the field.id in
quotes. Filter expressions can be found at
https://cloud.google.com/looker/docs/filter-expressions. There
is one mistake in that documentation, however: use `not null` instead of `-NULL`.
Sorts can be specified like [ "field.id desc 0" ].
An optional row limit can be added. If not provided, the limit
will default to 500. "-1" can be specified for unlimited.
An optional query timezone can be added. The query_timezone
will default to that of the workstation where this MCP server
is running, or Etc/UTC if that can't be determined. Not all
models support custom timezones.
The result of the query tool is JSON.
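# Illustrative sketch only (comments, not part of the tool definition):
# following the conventions above, a query request might combine
#   fields:  ["orders.created_date", "orders.count"]            (hypothetical field ids)
#   filters: {"orders.created_date": "last 30 days", "orders.status": "not null"}
#   sorts:   ["orders.count desc 0"]
#   limit:   "500"
# together with the model and explore discovered via get_models and
# get_explores. Exact parameter names are defined by the looker-query tool.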
query_sql:
kind: looker-query-sql
source: looker-source
description: |
Query SQL Tool
This tool is used to generate the SQL that Looker would
run against the underlying database. The parameters are
the same as the query tool.
The result of the query sql tool is SQL text.
query_url:
kind: looker-query-url
source: looker-source
description: |
Query URL Tool
This tool is used to generate the URL of a query in Looker.
The user can then explore the query further inside Looker.
The tool also returns the query_id and slug. The parameters
are the same as the query tool with an additional vis_config
parameter.
The vis_config is optional. If provided, it will be used to
control the default visualization for the query. Here are
some notes on making visualizations.
### Cartesian Charts (Area, Bar, Column, Line, Scatter)
These chart types share a large number of configuration options.
**General**
* `type`: The type of visualization (`looker_area`, `looker_bar`, `looker_column`, `looker_line`, `looker_scatter`).
* `series_types`: Override the chart type for individual series.
* `show_view_names`: Display view names in labels and tooltips (`true`/`false`).
* `series_labels`: Provide custom names for series.
**Styling & Colors**
* `colors`: An array of color values to be used for the chart series.
* `series_colors`: A mapping of series names to specific color values.
* `color_application`: Advanced controls for color palette application (collection, palette, reverse, etc.).
* `font_size`: Font size for labels (e.g., '12px').
**Legend**
* `hide_legend`: Show or hide the chart legend (`true`/`false`).
* `legend_position`: Placement of the legend (`'center'`, `'left'`, `'right'`).
**Axes**
* `swap_axes`: Swap the X and Y axes (`true`/`false`).
* `x_axis_scale`: Scale of the x-axis (`'auto'`, `'ordinal'`, `'linear'`, `'time'`).
* `x_axis_reversed`, `y_axis_reversed`: Reverse the direction of an axis (`true`/`false`).
* `x_axis_gridlines`, `y_axis_gridlines`: Display gridlines for an axis (`true`/`false`).
* `show_x_axis_label`, `show_y_axis_label`: Show or hide the axis title (`true`/`false`).
* `show_x_axis_ticks`, `show_y_axis_ticks`: Show or hide axis tick marks (`true`/`false`).
* `x_axis_label`, `y_axis_label`: Set a custom title for an axis.
* `x_axis_datetime_label`: A format string for datetime labels on the x-axis (e.g., `'%Y-%m'`).
* `x_padding_left`, `x_padding_right`: Adjust padding on the ends of the x-axis.
* `x_axis_label_rotation`, `x_axis_label_rotation_bar`: Set rotation for x-axis labels.
* `x_axis_zoom`, `y_axis_zoom`: Enable zooming on an axis (`true`/`false`).
* `y_axes`: An array of configuration objects for multiple y-axes.
**Data & Series**
* `stacking`: How to stack series (`''` for none, `'normal'`, `'percent'`).
* `ordering`: Order of series in a stack (`'none'`, etc.).
* `limit_displayed_rows`: Enable or disable limiting the number of rows displayed (`true`/`false`).
* `limit_displayed_rows_values`: Configuration for the row limit (e.g., `{ "first_last": "first", "show_hide": "show", "num_rows": 10 }`).
* `discontinuous_nulls`: How to render null values in line charts (`true`/`false`).
* `point_style`: Style for points on line and area charts (`'none'`, `'circle'`, `'circle_outline'`).
* `series_point_styles`: Override point styles for individual series.
* `interpolation`: Line interpolation style (`'linear'`, `'monotone'`, `'step'`, etc.).
* `show_value_labels`: Display values on data points (`true`/`false`).
* `label_value_format`: A format string for value labels.
* `show_totals_labels`: Display total labels on stacked charts (`true`/`false`).
* `totals_color`: Color for total labels.
* `show_silhouette`: Display a "silhouette" of hidden series in stacked charts (`true`/`false`).
* `hidden_series`: An array of series names to hide from the visualization.
**Scatter/Bubble Specific**
* `size_by_field`: The field used to determine the size of bubbles.
* `color_by_field`: The field used to determine the color of bubbles.
* `plot_size_by_field`: Whether to display the size-by field in the legend.
* `cluster_points`: Group nearby points into clusters (`true`/`false`).
* `quadrants_enabled`: Display quadrants on the chart (`true`/`false`).
* `quadrant_properties`: Configuration for quadrant labels and colors.
* `custom_quadrant_value_x`, `custom_quadrant_value_y`: Set quadrant boundaries as a percentage.
* `custom_quadrant_point_x`, `custom_quadrant_point_y`: Set quadrant boundaries to a specific value.
**Miscellaneous**
* `reference_lines`: Configuration for displaying reference lines.
* `trend_lines`: Configuration for displaying trend lines.
* `trellis`: Configuration for creating trellis (small multiple) charts.
* `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering interactions.
### Boxplot
* Inherits most of the Cartesian chart options.
* `type`: Must be `looker_boxplot`.
### Funnel
* `type`: Must be `looker_funnel`.
* `orientation`: How data is read (`'automatic'`, `'dataInRows'`, `'dataInColumns'`).
* `percentType`: How percentages are calculated (`'percentOfMaxValue'`, `'percentOfPriorRow'`).
* `labelPosition`, `valuePosition`, `percentPosition`: Placement of labels (`'left'`, `'right'`, `'inline'`, `'hidden'`).
* `labelColor`, `labelColorEnabled`: Set a custom color for labels.
* `labelOverlap`: Allow labels to overlap (`true`/`false`).
* `barColors`: An array of colors for the funnel steps.
* `color_application`: Advanced color palette controls.
* `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering.
### Pie / Donut
* Pie charts must have exactly one dimension and one numerical measure.
* `type`: Must be `looker_pie`.
* `value_labels`: Where to display values (`'legend'`, `'labels'`).
* `label_type`: The format of data labels (`'labPer'`, `'labVal'`, `'lab'`, `'val'`, `'per'`).
* `start_angle`, `end_angle`: The start and end angles of the pie chart.
* `inner_radius`: The inner radius, used to create a donut chart.
* `series_colors`, `series_labels`: Override colors and labels for specific slices.
* `color_application`: Advanced color palette controls.
* `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering.
* `advanced_vis_config`: A string containing JSON for advanced Highcharts configuration.
### Waterfall
* Inherits most of the Cartesian chart options.
* `type`: Must be `looker_waterfall`.
* `up_color`: Color for positive (increasing) values.
* `down_color`: Color for negative (decreasing) values.
* `total_color`: Color for the total bar.
### Word Cloud
* `type`: Must be `looker_wordcloud`.
* `rotation`: Enable random word rotation (`true`/`false`).
* `colors`: An array of colors for the words.
* `color_application`: Advanced color palette controls.
* `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering.
These are some sample vis_config settings.
A bar chart -
{{
"defaults_version": 1,
"label_density": 25,
"legend_position": "center",
"limit_displayed_rows": false,
"ordering": "none",
"plot_size_by_field": false,
"point_style": "none",
"show_null_labels": false,
"show_silhouette": false,
"show_totals_labels": false,
"show_value_labels": false,
"show_view_names": false,
"show_x_axis_label": true,
"show_x_axis_ticks": true,
"show_y_axis_labels": true,
"show_y_axis_ticks": true,
"stacking": "normal",
"totals_color": "#808080",
"trellis": "",
"type": "looker_bar",
"x_axis_gridlines": false,
"x_axis_reversed": false,
"x_axis_scale": "auto",
"x_axis_zoom": true,
"y_axis_combined": true,
"y_axis_gridlines": true,
"y_axis_reversed": false,
"y_axis_scale_mode": "linear",
"y_axis_tick_density": "default",
"y_axis_tick_density_custom": 5,
"y_axis_zoom": true
}}
A column chart with an optional advanced_vis_config -
{{
"advanced_vis_config": "{ chart: { type: 'pie', spacingBottom: 50, spacingLeft: 50, spacingRight: 50, spacingTop: 50, }, legend: { enabled: false, }, plotOptions: { pie: { dataLabels: { enabled: true, format: '\u003cb\u003e{key}\u003c/b\u003e\u003cspan style=\"font-weight: normal\"\u003e - {percentage:.2f}%\u003c/span\u003e', }, showInLegend: false, }, }, series: [], }",
"colors": [
"grey"
],
"defaults_version": 1,
"hidden_fields": [],
"label_density": 25,
"legend_position": "center",
"limit_displayed_rows": false,
"note_display": "below",
"note_state": "collapsed",
"note_text": "Unsold inventory only",
"ordering": "none",
"plot_size_by_field": false,
"point_style": "none",
"series_colors": {},
"show_null_labels": false,
"show_silhouette": false,
"show_totals_labels": false,
"show_value_labels": true,
"show_view_names": false,
"show_x_axis_label": true,
"show_x_axis_ticks": true,
"show_y_axis_labels": true,
"show_y_axis_ticks": true,
"stacking": "normal",
"totals_color": "#808080",
"trellis": "",
"type": "looker_column",
"x_axis_gridlines": false,
"x_axis_reversed": false,
"x_axis_scale": "auto",
"x_axis_zoom": true,
"y_axes": [],
"y_axis_combined": true,
"y_axis_gridlines": true,
"y_axis_reversed": false,
"y_axis_scale_mode": "linear",
"y_axis_tick_density": "default",
"y_axis_tick_density_custom": 5,
"y_axis_zoom": true
}}
A line chart -
{{
"defaults_version": 1,
"hidden_pivots": {},
"hidden_series": [],
"interpolation": "linear",
"label_density": 25,
"legend_position": "center",
"limit_displayed_rows": false,
"plot_size_by_field": false,
"point_style": "none",
"series_types": {},
"show_null_points": true,
"show_value_labels": false,
"show_view_names": false,
"show_x_axis_label": true,
"show_x_axis_ticks": true,
"show_y_axis_labels": true,
"show_y_axis_ticks": true,
"stacking": "",
"trellis": "",
"type": "looker_line",
"x_axis_gridlines": false,
"x_axis_reversed": false,
"x_axis_scale": "auto",
"y_axis_combined": true,
"y_axis_gridlines": true,
"y_axis_reversed": false,
"y_axis_scale_mode": "linear",
"y_axis_tick_density": "default",
"y_axis_tick_density_custom": 5
}}
An area chart -
{{
"defaults_version": 1,
"interpolation": "linear",
"label_density": 25,
"legend_position": "center",
"limit_displayed_rows": false,
"plot_size_by_field": false,
"point_style": "none",
"series_types": {},
"show_null_points": true,
"show_silhouette": false,
"show_totals_labels": false,
"show_value_labels": false,
"show_view_names": false,
"show_x_axis_label": true,
"show_x_axis_ticks": true,
"show_y_axis_labels": true,
"show_y_axis_ticks": true,
"stacking": "normal",
"totals_color": "#808080",
"trellis": "",
"type": "looker_area",
"x_axis_gridlines": false,
"x_axis_reversed": false,
"x_axis_scale": "auto",
"x_axis_zoom": true,
"y_axis_combined": true,
"y_axis_gridlines": true,
"y_axis_reversed": false,
"y_axis_scale_mode": "linear",
"y_axis_tick_density": "default",
"y_axis_tick_density_custom": 5,
"y_axis_zoom": true
}}
A scatter plot -
{{
"cluster_points": false,
"custom_quadrant_point_x": 5,
"custom_quadrant_point_y": 5,
"custom_value_label_column": "",
"custom_x_column": "",
"custom_y_column": "",
"defaults_version": 1,
"hidden_fields": [],
"hidden_pivots": {},
"hidden_points_if_no": [],
"hidden_series": [],
"interpolation": "linear",
"label_density": 25,
"legend_position": "center",
"limit_displayed_rows": false,
"limit_displayed_rows_values": {
"first_last": "first",
"num_rows": 0,
"show_hide": "hide"
},
"plot_size_by_field": false,
"point_style": "circle",
"quadrant_properties": {
"0": {
"color": "",
"label": "Quadrant 1"
},
"1": {
"color": "",
"label": "Quadrant 2"
},
"2": {
"color": "",
"label": "Quadrant 3"
},
"3": {
"color": "",
"label": "Quadrant 4"
}
},
"quadrants_enabled": false,
"series_labels": {},
"series_types": {},
"show_null_points": false,
"show_value_labels": false,
"show_view_names": true,
"show_x_axis_label": true,
"show_x_axis_ticks": true,
"show_y_axis_labels": true,
"show_y_axis_ticks": true,
"size_by_field": "roi",
"stacking": "normal",
"swap_axes": true,
"trellis": "",
"type": "looker_scatter",
"x_axis_gridlines": false,
"x_axis_reversed": false,
"x_axis_scale": "auto",
"x_axis_zoom": true,
"y_axes": [
{
"label": "",
"orientation": "bottom",
"series": [
{
"axisId": "Channel_0 - average_of_roi_first",
"id": "Channel_0 - average_of_roi_first",
"name": "Channel_0"
},
{
"axisId": "Channel_1 - average_of_roi_first",
"id": "Channel_1 - average_of_roi_first",
"name": "Channel_1"
},
{
"axisId": "Channel_2 - average_of_roi_first",
"id": "Channel_2 - average_of_roi_first",
"name": "Channel_2"
},
{
"axisId": "Channel_3 - average_of_roi_first",
"id": "Channel_3 - average_of_roi_first",
"name": "Channel_3"
},
{
"axisId": "Channel_4 - average_of_roi_first",
"id": "Channel_4 - average_of_roi_first",
"name": "Channel_4"
}
],
"showLabels": true,
"showValues": true,
"tickDensity": "custom",
"tickDensityCustom": 100,
"type": "linear",
"unpinAxis": false
}
],
"y_axis_combined": true,
"y_axis_gridlines": true,
"y_axis_reversed": false,
"y_axis_scale_mode": "linear",
"y_axis_tick_density": "default",
"y_axis_tick_density_custom": 5,
"y_axis_zoom": true
}}
A single record visualization -
{{
"defaults_version": 1,
"show_view_names": false,
"type": "looker_single_record"
}}
A single value visualization -
{{
"comparison_reverse_colors": false,
"comparison_type": "value", "conditional_formatting_include_nulls": false, "conditional_formatting_include_totals": false,
"custom_color": "#1A73E8",
"custom_color_enabled": true,
"defaults_version": 1,
"enable_conditional_formatting": false,
"series_types": {},
"show_comparison": false,
"show_comparison_label": true,
"show_single_value_title": true,
"single_value_title": "Total Clicks",
"type": "single_value"
}}
A pie chart -
{{
"defaults_version": 1,
"label_density": 25,
"label_type": "labPer",
"legend_position": "center",
"limit_displayed_rows": false,
"ordering": "none",
"plot_size_by_field": false,
"point_style": "none",
"series_types": {},
"show_null_labels": false,
"show_silhouette": false,
"show_totals_labels": false,
"show_value_labels": false,
"show_view_names": false,
"show_x_axis_label": true,
"show_x_axis_ticks": true,
"show_y_axis_labels": true,
"show_y_axis_ticks": true,
"stacking": "",
"totals_color": "#808080",
"trellis": "",
"type": "looker_pie",
"value_labels": "legend",
"x_axis_gridlines": false,
"x_axis_reversed": false,
"x_axis_scale": "auto",
"y_axis_combined": true,
"y_axis_gridlines": true,
"y_axis_reversed": false,
"y_axis_scale_mode": "linear",
"y_axis_tick_density": "default",
"y_axis_tick_density_custom": 5
}}
The result is a JSON object with the id, slug, the url, and
the long_url.
get_looks:
kind: looker-get-looks
source: looker-source
description: |
get_looks Tool
This tool is used to search for saved looks in a Looker instance.
String search params use case-insensitive matching. String search
params can contain % and '_' as SQL LIKE pattern match wildcard
expressions. example="dan%" will match "danger" and "Danzig" but
not "David" example="D_m%" will match "Damage" and "dump".
Most search params can accept "IS NULL" and "NOT NULL" as special
expressions to match or exclude (respectively) rows where the
column is null.
The limit and offset are used to paginate the results.
The result of the get_looks tool is a list of json objects.
run_look:
kind: looker-run-look
source: looker-source
description: |
run_look Tool
This tool runs the query associated with a look and returns
the data in a JSON structure. It accepts the look_id as the
parameter.
make_look:
kind: looker-make-look
source: looker-source
description: |
make_look Tool
This tool creates a new look in Looker, using the query
parameters and the vis_config specified.
Most of the parameters are the same as the query_url
tool. In addition, there is a title and a description
that must be provided.
The newly created look will be created in the user's
personal folder in Looker. The look name must be unique.
The result is a json document with a link to the newly
created look.
get_dashboards:
kind: looker-get-dashboards
source: looker-source
description: |
get_dashboards Tool
This tool is used to search for saved dashboards in a Looker instance.
String search params use case-insensitive matching. String search
params can contain % and '_' as SQL LIKE pattern match wildcard
expressions. example="dan%" will match "danger" and "Danzig" but
not "David" example="D_m%" will match "Damage" and "dump".
Most search params can accept "IS NULL" and "NOT NULL" as special
expressions to match or exclude (respectively) rows where the
column is null.
The limit and offset are used to paginate the results.
The result of the get_dashboards tool is a list of json objects.
make_dashboard:
kind: looker-make-dashboard
source: looker-source
description: |
make_dashboard Tool
This tool creates a new dashboard in Looker. The dashboard is
initially empty and the add_dashboard_element tool is used to
add content to the dashboard.
The newly created dashboard will be created in the user's
personal folder in Looker. The dashboard name must be unique.
The result is a json document with a link to the newly
created dashboard and the id of the dashboard. Use the id
when calling add_dashboard_element.
add_dashboard_element:
kind: looker-add-dashboard-element
source: looker-source
description: |
add_dashboard_element Tool
This tool creates a new tile in a Looker dashboard using
the query parameters and the vis_config specified.
Most of the parameters are the same as the query_url
tool. In addition, there is a title that may be provided.
The dashboard_id must be specified. That is obtained
from calling make_dashboard.
This tool can be called many times for one dashboard_id
and the resulting tiles will be added in order.
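# Illustrative workflow sketch (comments only): a dashboard is typically
# assembled by calling make_dashboard once to obtain a dashboard_id, then
# calling add_dashboard_element repeatedly with that dashboard_id plus the
# query parameters and vis_config for each tile; tiles appear in call order.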
health_pulse:
kind: looker-health-pulse
source: looker-source
description: |
health-pulse Tool
This tool takes the pulse of a Looker instance by taking
one of the following actions:
1. `check_db_connections`,
2. `check_dashboard_performance`,
3. `check_dashboard_errors`,
4. `check_explore_performance`,
5. `check_schedule_failures`, or
6. `check_legacy_features`
health_analyze:
kind: looker-health-analyze
source: looker-source
description: |
health-analyze Tool
This tool calculates the usage of projects, models and explores.
It accepts 6 parameters:
1. `action`: can be "projects", "models", or "explores"
2. `project`: the project to analyze (optional)
3. `model`: the model to analyze (optional)
4. `explore`: the explore to analyze (optional)
5. `timeframe`: the lookback period in days, default is 90
6. `min_queries`: the minimum number of queries to consider a resource as active, default is 1
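# Illustrative sketch only (comments, not part of the tool definition):
# using the parameters above, an explore-level usage analysis over the last
# 30 days might pass action: "explores", model: "thelook" (hypothetical name),
# timeframe: 30, min_queries: 5; the optional project/explore parameters can
# simply be omitted.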
health_vacuum:
kind: looker-health-vacuum
source: looker-source
description: |
health-vacuum Tool
This tool suggests models or explores that can be removed
because they are unused.
It accepts 6 parameters:
1. `action`: can be "models" or "explores"
2. `project`: the project to vacuum (optional)
3. `model`: the model to vacuum (optional)
4. `explore`: the explore to vacuum (optional)
5. `timeframe`: the lookback period in days, default is 90
6. `min_queries`: the minimum number of queries to consider a resource as active, default is 1
The result is a list of objects that are candidates for deletion.
dev_mode:
kind: looker-dev-mode
source: looker-source
description: |
dev_mode Tool
Passing true to this tool switches the session to dev mode. Passing false to this tool switches the
session to production mode.
get_projects:
kind: looker-get-projects
source: looker-source
description: |
get_projects Tool
This tool returns the project_id and project_name for
all the LookML projects on the Looker instance.
get_project_files:
kind: looker-get-project-files
source: looker-source
description: |
get_project_files Tool
Given a project_id, this tool returns the details about
the LookML files that make up that project.
get_project_file:
kind: looker-get-project-file
source: looker-source
description: |
get_project_file Tool
Given a project_id and a file path within the project, this tool returns
the contents of the LookML file.
create_project_file:
kind: looker-create-project-file
source: looker-source
description: |
create_project_file Tool
Given a project_id and a file path within the project, as well as the content
of a LookML file, this tool will create a new file within the project.
This tool must be called after the dev_mode tool has changed the session to
dev mode.
update_project_file:
kind: looker-update-project-file
source: looker-source
description: |
update_project_file Tool
Given a project_id and a file path within the project, as well as the content
of a LookML file, this tool will modify the file within the project.
This tool must be called after the dev_mode tool has changed the session to
dev mode.
delete_project_file:
kind: looker-delete-project-file
source: looker-source
description: |
delete_project_file Tool
Given a project_id and a file path within the project, this tool will delete
the file from the project.
This tool must be called after the dev_mode tool has changed the session to
dev mode.
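# Illustrative workflow sketch (comments only): the project-file tools are
# expected to be used in sequence, for example
#   1. dev_mode (true)                      -> switch the session to dev mode
#   2. get_projects, then get_project_files -> locate the project and file path
#   3. create_project_file / update_project_file with the LookML content
#   4. dev_mode (false)                     -> return to production mode
# Exact argument names are defined by the individual tools above.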
toolsets:
looker_tools:
- get_models
- get_explores
- get_dimensions
- get_measures
- get_filters
- get_parameters
- query
- query_sql
- query_url
- get_looks
- run_look
- make_look
- get_dashboards
- make_dashboard
- add_dashboard_element
- health_pulse
- health_analyze
- health_vacuum
- dev_mode
- get_projects
- get_project_files
- get_project_file
- create_project_file
- update_project_file
- delete_project_file
```