This is page 37 of 49. Use http://codebase.md/googleapis/genai-toolbox?lines=true&page={x} to view the full context.
# Directory Structure
```
├── .ci
│ ├── continuous.release.cloudbuild.yaml
│ ├── generate_release_table.sh
│ ├── integration.cloudbuild.yaml
│ ├── quickstart_test
│ │ ├── go.integration.cloudbuild.yaml
│ │ ├── js.integration.cloudbuild.yaml
│ │ ├── py.integration.cloudbuild.yaml
│ │ ├── run_go_tests.sh
│ │ ├── run_js_tests.sh
│ │ ├── run_py_tests.sh
│ │ └── setup_hotels_sample.sql
│ ├── test_with_coverage.sh
│ └── versioned.release.cloudbuild.yaml
├── .github
│ ├── auto-label.yaml
│ ├── blunderbuss.yml
│ ├── CODEOWNERS
│ ├── header-checker-lint.yml
│ ├── ISSUE_TEMPLATE
│ │ ├── bug_report.yml
│ │ ├── config.yml
│ │ ├── feature_request.yml
│ │ └── question.yml
│ ├── label-sync.yml
│ ├── labels.yaml
│ ├── PULL_REQUEST_TEMPLATE.md
│ ├── release-please.yml
│ ├── renovate.json5
│ ├── sync-repo-settings.yaml
│ └── workflows
│ ├── cloud_build_failure_reporter.yml
│ ├── deploy_dev_docs.yaml
│ ├── deploy_previous_version_docs.yaml
│ ├── deploy_versioned_docs.yaml
│ ├── docs_deploy.yaml
│ ├── docs_preview_clean.yaml
│ ├── docs_preview_deploy.yaml
│ ├── lint.yaml
│ ├── schedule_reporter.yml
│ ├── sync-labels.yaml
│ └── tests.yaml
├── .gitignore
├── .gitmodules
├── .golangci.yaml
├── .hugo
│ ├── archetypes
│ │ └── default.md
│ ├── assets
│ │ ├── icons
│ │ │ └── logo.svg
│ │ └── scss
│ │ ├── _styles_project.scss
│ │ └── _variables_project.scss
│ ├── go.mod
│ ├── go.sum
│ ├── hugo.toml
│ ├── layouts
│ │ ├── _default
│ │ │ └── home.releases.releases
│ │ ├── index.llms-full.txt
│ │ ├── index.llms.txt
│ │ ├── partials
│ │ │ ├── hooks
│ │ │ │ └── head-end.html
│ │ │ ├── navbar-version-selector.html
│ │ │ ├── page-meta-links.html
│ │ │ └── td
│ │ │ └── render-heading.html
│ │ ├── robot.txt
│ │ └── shortcodes
│ │ ├── include.html
│ │ ├── ipynb.html
│ │ └── regionInclude.html
│ ├── package-lock.json
│ ├── package.json
│ └── static
│ ├── favicons
│ │ ├── android-chrome-192x192.png
│ │ ├── android-chrome-512x512.png
│ │ ├── apple-touch-icon.png
│ │ ├── favicon-16x16.png
│ │ ├── favicon-32x32.png
│ │ └── favicon.ico
│ └── js
│ └── w3.js
├── CHANGELOG.md
├── cmd
│ ├── options_test.go
│ ├── options.go
│ ├── root_test.go
│ ├── root.go
│ └── version.txt
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── DEVELOPER.md
├── Dockerfile
├── docs
│ └── en
│ ├── _index.md
│ ├── about
│ │ ├── _index.md
│ │ └── faq.md
│ ├── concepts
│ │ ├── _index.md
│ │ └── telemetry
│ │ ├── index.md
│ │ ├── telemetry_flow.png
│ │ └── telemetry_traces.png
│ ├── getting-started
│ │ ├── _index.md
│ │ ├── colab_quickstart.ipynb
│ │ ├── configure.md
│ │ ├── introduction
│ │ │ ├── _index.md
│ │ │ └── architecture.png
│ │ ├── local_quickstart_go.md
│ │ ├── local_quickstart_js.md
│ │ ├── local_quickstart.md
│ │ ├── mcp_quickstart
│ │ │ ├── _index.md
│ │ │ ├── inspector_tools.png
│ │ │ └── inspector.png
│ │ └── quickstart
│ │ ├── go
│ │ │ ├── genAI
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── genkit
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── langchain
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── openAI
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ └── quickstart_test.go
│ │ ├── golden.txt
│ │ ├── js
│ │ │ ├── genAI
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── genkit
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── langchain
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── llamaindex
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ └── quickstart.test.js
│ │ ├── python
│ │ │ ├── __init__.py
│ │ │ ├── adk
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── core
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── langchain
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── llamaindex
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ └── quickstart_test.py
│ │ └── shared
│ │ ├── cloud_setup.md
│ │ ├── configure_toolbox.md
│ │ └── database_setup.md
│ ├── how-to
│ │ ├── _index.md
│ │ ├── connect_via_geminicli.md
│ │ ├── connect_via_mcp.md
│ │ ├── connect-ide
│ │ │ ├── _index.md
│ │ │ ├── alloydb_pg_admin_mcp.md
│ │ │ ├── alloydb_pg_mcp.md
│ │ │ ├── bigquery_mcp.md
│ │ │ ├── cloud_sql_mssql_admin_mcp.md
│ │ │ ├── cloud_sql_mssql_mcp.md
│ │ │ ├── cloud_sql_mysql_admin_mcp.md
│ │ │ ├── cloud_sql_mysql_mcp.md
│ │ │ ├── cloud_sql_pg_admin_mcp.md
│ │ │ ├── cloud_sql_pg_mcp.md
│ │ │ ├── firestore_mcp.md
│ │ │ ├── looker_mcp.md
│ │ │ ├── mssql_mcp.md
│ │ │ ├── mysql_mcp.md
│ │ │ ├── neo4j_mcp.md
│ │ │ ├── postgres_mcp.md
│ │ │ ├── spanner_mcp.md
│ │ │ └── sqlite_mcp.md
│ │ ├── deploy_docker.md
│ │ ├── deploy_gke.md
│ │ ├── deploy_toolbox.md
│ │ ├── export_telemetry.md
│ │ └── toolbox-ui
│ │ ├── edit-headers.gif
│ │ ├── edit-headers.png
│ │ ├── index.md
│ │ ├── optional-param-checked.png
│ │ ├── optional-param-unchecked.png
│ │ ├── run-tool.gif
│ │ ├── tools.png
│ │ └── toolsets.png
│ ├── reference
│ │ ├── _index.md
│ │ ├── cli.md
│ │ └── prebuilt-tools.md
│ ├── resources
│ │ ├── _index.md
│ │ ├── authServices
│ │ │ ├── _index.md
│ │ │ └── google.md
│ │ ├── sources
│ │ │ ├── _index.md
│ │ │ ├── alloydb-admin.md
│ │ │ ├── alloydb-pg.md
│ │ │ ├── bigquery.md
│ │ │ ├── bigtable.md
│ │ │ ├── cassandra.md
│ │ │ ├── clickhouse.md
│ │ │ ├── cloud-monitoring.md
│ │ │ ├── cloud-sql-admin.md
│ │ │ ├── cloud-sql-mssql.md
│ │ │ ├── cloud-sql-mysql.md
│ │ │ ├── cloud-sql-pg.md
│ │ │ ├── couchbase.md
│ │ │ ├── dataplex.md
│ │ │ ├── dgraph.md
│ │ │ ├── firebird.md
│ │ │ ├── firestore.md
│ │ │ ├── http.md
│ │ │ ├── looker.md
│ │ │ ├── mongodb.md
│ │ │ ├── mssql.md
│ │ │ ├── mysql.md
│ │ │ ├── neo4j.md
│ │ │ ├── oceanbase.md
│ │ │ ├── oracle.md
│ │ │ ├── postgres.md
│ │ │ ├── redis.md
│ │ │ ├── serverless-spark.md
│ │ │ ├── spanner.md
│ │ │ ├── sqlite.md
│ │ │ ├── tidb.md
│ │ │ ├── trino.md
│ │ │ ├── valkey.md
│ │ │ └── yugabytedb.md
│ │ └── tools
│ │ ├── _index.md
│ │ ├── alloydb
│ │ │ ├── _index.md
│ │ │ ├── alloydb-create-cluster.md
│ │ │ ├── alloydb-create-instance.md
│ │ │ ├── alloydb-create-user.md
│ │ │ ├── alloydb-get-cluster.md
│ │ │ ├── alloydb-get-instance.md
│ │ │ ├── alloydb-get-user.md
│ │ │ ├── alloydb-list-clusters.md
│ │ │ ├── alloydb-list-instances.md
│ │ │ ├── alloydb-list-users.md
│ │ │ └── alloydb-wait-for-operation.md
│ │ ├── alloydbainl
│ │ │ ├── _index.md
│ │ │ └── alloydb-ai-nl.md
│ │ ├── bigquery
│ │ │ ├── _index.md
│ │ │ ├── bigquery-analyze-contribution.md
│ │ │ ├── bigquery-conversational-analytics.md
│ │ │ ├── bigquery-execute-sql.md
│ │ │ ├── bigquery-forecast.md
│ │ │ ├── bigquery-get-dataset-info.md
│ │ │ ├── bigquery-get-table-info.md
│ │ │ ├── bigquery-list-dataset-ids.md
│ │ │ ├── bigquery-list-table-ids.md
│ │ │ ├── bigquery-search-catalog.md
│ │ │ └── bigquery-sql.md
│ │ ├── bigtable
│ │ │ ├── _index.md
│ │ │ └── bigtable-sql.md
│ │ ├── cassandra
│ │ │ ├── _index.md
│ │ │ └── cassandra-cql.md
│ │ ├── clickhouse
│ │ │ ├── _index.md
│ │ │ ├── clickhouse-execute-sql.md
│ │ │ ├── clickhouse-list-databases.md
│ │ │ ├── clickhouse-list-tables.md
│ │ │ └── clickhouse-sql.md
│ │ ├── cloudmonitoring
│ │ │ ├── _index.md
│ │ │ └── cloud-monitoring-query-prometheus.md
│ │ ├── cloudsql
│ │ │ ├── _index.md
│ │ │ ├── cloudsqlcreatedatabase.md
│ │ │ ├── cloudsqlcreateusers.md
│ │ │ ├── cloudsqlgetinstances.md
│ │ │ ├── cloudsqllistdatabases.md
│ │ │ ├── cloudsqllistinstances.md
│ │ │ ├── cloudsqlmssqlcreateinstance.md
│ │ │ ├── cloudsqlmysqlcreateinstance.md
│ │ │ ├── cloudsqlpgcreateinstances.md
│ │ │ └── cloudsqlwaitforoperation.md
│ │ ├── couchbase
│ │ │ ├── _index.md
│ │ │ └── couchbase-sql.md
│ │ ├── dataform
│ │ │ ├── _index.md
│ │ │ └── dataform-compile-local.md
│ │ ├── dataplex
│ │ │ ├── _index.md
│ │ │ ├── dataplex-lookup-entry.md
│ │ │ ├── dataplex-search-aspect-types.md
│ │ │ └── dataplex-search-entries.md
│ │ ├── dgraph
│ │ │ ├── _index.md
│ │ │ └── dgraph-dql.md
│ │ ├── firebird
│ │ │ ├── _index.md
│ │ │ ├── firebird-execute-sql.md
│ │ │ └── firebird-sql.md
│ │ ├── firestore
│ │ │ ├── _index.md
│ │ │ ├── firestore-add-documents.md
│ │ │ ├── firestore-delete-documents.md
│ │ │ ├── firestore-get-documents.md
│ │ │ ├── firestore-get-rules.md
│ │ │ ├── firestore-list-collections.md
│ │ │ ├── firestore-query-collection.md
│ │ │ ├── firestore-query.md
│ │ │ ├── firestore-update-document.md
│ │ │ └── firestore-validate-rules.md
│ │ ├── http
│ │ │ ├── _index.md
│ │ │ └── http.md
│ │ ├── looker
│ │ │ ├── _index.md
│ │ │ ├── looker-add-dashboard-element.md
│ │ │ ├── looker-conversational-analytics.md
│ │ │ ├── looker-create-project-file.md
│ │ │ ├── looker-delete-project-file.md
│ │ │ ├── looker-dev-mode.md
│ │ │ ├── looker-get-connection-databases.md
│ │ │ ├── looker-get-connection-schemas.md
│ │ │ ├── looker-get-connection-table-columns.md
│ │ │ ├── looker-get-connection-tables.md
│ │ │ ├── looker-get-connections.md
│ │ │ ├── looker-get-dashboards.md
│ │ │ ├── looker-get-dimensions.md
│ │ │ ├── looker-get-explores.md
│ │ │ ├── looker-get-filters.md
│ │ │ ├── looker-get-looks.md
│ │ │ ├── looker-get-measures.md
│ │ │ ├── looker-get-models.md
│ │ │ ├── looker-get-parameters.md
│ │ │ ├── looker-get-project-file.md
│ │ │ ├── looker-get-project-files.md
│ │ │ ├── looker-get-projects.md
│ │ │ ├── looker-health-analyze.md
│ │ │ ├── looker-health-pulse.md
│ │ │ ├── looker-health-vacuum.md
│ │ │ ├── looker-make-dashboard.md
│ │ │ ├── looker-make-look.md
│ │ │ ├── looker-query-sql.md
│ │ │ ├── looker-query-url.md
│ │ │ ├── looker-query.md
│ │ │ ├── looker-run-look.md
│ │ │ └── looker-update-project-file.md
│ │ ├── mongodb
│ │ │ ├── _index.md
│ │ │ ├── mongodb-aggregate.md
│ │ │ ├── mongodb-delete-many.md
│ │ │ ├── mongodb-delete-one.md
│ │ │ ├── mongodb-find-one.md
│ │ │ ├── mongodb-find.md
│ │ │ ├── mongodb-insert-many.md
│ │ │ ├── mongodb-insert-one.md
│ │ │ ├── mongodb-update-many.md
│ │ │ └── mongodb-update-one.md
│ │ ├── mssql
│ │ │ ├── _index.md
│ │ │ ├── mssql-execute-sql.md
│ │ │ ├── mssql-list-tables.md
│ │ │ └── mssql-sql.md
│ │ ├── mysql
│ │ │ ├── _index.md
│ │ │ ├── mysql-execute-sql.md
│ │ │ ├── mysql-list-active-queries.md
│ │ │ ├── mysql-list-table-fragmentation.md
│ │ │ ├── mysql-list-tables-missing-unique-indexes.md
│ │ │ ├── mysql-list-tables.md
│ │ │ └── mysql-sql.md
│ │ ├── neo4j
│ │ │ ├── _index.md
│ │ │ ├── neo4j-cypher.md
│ │ │ ├── neo4j-execute-cypher.md
│ │ │ └── neo4j-schema.md
│ │ ├── oceanbase
│ │ │ ├── _index.md
│ │ │ ├── oceanbase-execute-sql.md
│ │ │ └── oceanbase-sql.md
│ │ ├── oracle
│ │ │ ├── _index.md
│ │ │ ├── oracle-execute-sql.md
│ │ │ └── oracle-sql.md
│ │ ├── postgres
│ │ │ ├── _index.md
│ │ │ ├── postgres-execute-sql.md
│ │ │ ├── postgres-list-active-queries.md
│ │ │ ├── postgres-list-available-extensions.md
│ │ │ ├── postgres-list-installed-extensions.md
│ │ │ ├── postgres-list-tables.md
│ │ │ ├── postgres-list-views.md
│ │ │ └── postgres-sql.md
│ │ ├── redis
│ │ │ ├── _index.md
│ │ │ └── redis.md
│ │ ├── serverless-spark
│ │ │ ├── _index.md
│ │ │ ├── serverless-spark-get-batch.md
│ │ │ └── serverless-spark-list-batches.md
│ │ ├── spanner
│ │ │ ├── _index.md
│ │ │ ├── spanner-execute-sql.md
│ │ │ ├── spanner-list-tables.md
│ │ │ └── spanner-sql.md
│ │ ├── sqlite
│ │ │ ├── _index.md
│ │ │ ├── sqlite-execute-sql.md
│ │ │ └── sqlite-sql.md
│ │ ├── tidb
│ │ │ ├── _index.md
│ │ │ ├── tidb-execute-sql.md
│ │ │ └── tidb-sql.md
│ │ ├── trino
│ │ │ ├── _index.md
│ │ │ ├── trino-execute-sql.md
│ │ │ └── trino-sql.md
│ │ ├── utility
│ │ │ ├── _index.md
│ │ │ └── wait.md
│ │ ├── valkey
│ │ │ ├── _index.md
│ │ │ └── valkey.md
│ │ └── yuagbytedb
│ │ ├── _index.md
│ │ └── yugabytedb-sql.md
│ ├── samples
│ │ ├── _index.md
│ │ ├── alloydb
│ │ │ ├── _index.md
│ │ │ ├── ai-nl
│ │ │ │ ├── alloydb_ai_nl.ipynb
│ │ │ │ └── index.md
│ │ │ └── mcp_quickstart.md
│ │ ├── bigquery
│ │ │ ├── _index.md
│ │ │ ├── colab_quickstart_bigquery.ipynb
│ │ │ ├── local_quickstart.md
│ │ │ └── mcp_quickstart
│ │ │ ├── _index.md
│ │ │ ├── inspector_tools.png
│ │ │ └── inspector.png
│ │ └── looker
│ │ ├── _index.md
│ │ ├── looker_gemini_oauth
│ │ │ ├── _index.md
│ │ │ ├── authenticated.png
│ │ │ ├── authorize.png
│ │ │ └── registration.png
│ │ ├── looker_gemini.md
│ │ └── looker_mcp_inspector
│ │ ├── _index.md
│ │ ├── inspector_tools.png
│ │ └── inspector.png
│ └── sdks
│ ├── _index.md
│ ├── go-sdk.md
│ ├── js-sdk.md
│ └── python-sdk.md
├── gemini-extension.json
├── go.mod
├── go.sum
├── internal
│ ├── auth
│ │ ├── auth.go
│ │ └── google
│ │ └── google.go
│ ├── log
│ │ ├── handler.go
│ │ ├── log_test.go
│ │ ├── log.go
│ │ └── logger.go
│ ├── prebuiltconfigs
│ │ ├── prebuiltconfigs_test.go
│ │ ├── prebuiltconfigs.go
│ │ └── tools
│ │ ├── alloydb-postgres-admin.yaml
│ │ ├── alloydb-postgres-observability.yaml
│ │ ├── alloydb-postgres.yaml
│ │ ├── bigquery.yaml
│ │ ├── clickhouse.yaml
│ │ ├── cloud-sql-mssql-admin.yaml
│ │ ├── cloud-sql-mssql-observability.yaml
│ │ ├── cloud-sql-mssql.yaml
│ │ ├── cloud-sql-mysql-admin.yaml
│ │ ├── cloud-sql-mysql-observability.yaml
│ │ ├── cloud-sql-mysql.yaml
│ │ ├── cloud-sql-postgres-admin.yaml
│ │ ├── cloud-sql-postgres-observability.yaml
│ │ ├── cloud-sql-postgres.yaml
│ │ ├── dataplex.yaml
│ │ ├── firestore.yaml
│ │ ├── looker-conversational-analytics.yaml
│ │ ├── looker.yaml
│ │ ├── mssql.yaml
│ │ ├── mysql.yaml
│ │ ├── neo4j.yaml
│ │ ├── oceanbase.yaml
│ │ ├── postgres.yaml
│ │ ├── serverless-spark.yaml
│ │ ├── spanner-postgres.yaml
│ │ ├── spanner.yaml
│ │ └── sqlite.yaml
│ ├── server
│ │ ├── api_test.go
│ │ ├── api.go
│ │ ├── common_test.go
│ │ ├── config.go
│ │ ├── mcp
│ │ │ ├── jsonrpc
│ │ │ │ ├── jsonrpc_test.go
│ │ │ │ └── jsonrpc.go
│ │ │ ├── mcp.go
│ │ │ ├── util
│ │ │ │ └── lifecycle.go
│ │ │ ├── v20241105
│ │ │ │ ├── method.go
│ │ │ │ └── types.go
│ │ │ ├── v20250326
│ │ │ │ ├── method.go
│ │ │ │ └── types.go
│ │ │ └── v20250618
│ │ │ ├── method.go
│ │ │ └── types.go
│ │ ├── mcp_test.go
│ │ ├── mcp.go
│ │ ├── server_test.go
│ │ ├── server.go
│ │ ├── static
│ │ │ ├── assets
│ │ │ │ └── mcptoolboxlogo.png
│ │ │ ├── css
│ │ │ │ └── style.css
│ │ │ ├── index.html
│ │ │ ├── js
│ │ │ │ ├── auth.js
│ │ │ │ ├── loadTools.js
│ │ │ │ ├── mainContent.js
│ │ │ │ ├── navbar.js
│ │ │ │ ├── runTool.js
│ │ │ │ ├── toolDisplay.js
│ │ │ │ ├── tools.js
│ │ │ │ └── toolsets.js
│ │ │ ├── tools.html
│ │ │ └── toolsets.html
│ │ ├── web_test.go
│ │ └── web.go
│ ├── sources
│ │ ├── alloydbadmin
│ │ │ ├── alloydbadmin_test.go
│ │ │ └── alloydbadmin.go
│ │ ├── alloydbpg
│ │ │ ├── alloydb_pg_test.go
│ │ │ └── alloydb_pg.go
│ │ ├── bigquery
│ │ │ ├── bigquery_test.go
│ │ │ └── bigquery.go
│ │ ├── bigtable
│ │ │ ├── bigtable_test.go
│ │ │ └── bigtable.go
│ │ ├── cassandra
│ │ │ ├── cassandra_test.go
│ │ │ └── cassandra.go
│ │ ├── clickhouse
│ │ │ ├── clickhouse_test.go
│ │ │ └── clickhouse.go
│ │ ├── cloudmonitoring
│ │ │ ├── cloud_monitoring_test.go
│ │ │ └── cloud_monitoring.go
│ │ ├── cloudsqladmin
│ │ │ ├── cloud_sql_admin_test.go
│ │ │ └── cloud_sql_admin.go
│ │ ├── cloudsqlmssql
│ │ │ ├── cloud_sql_mssql_test.go
│ │ │ └── cloud_sql_mssql.go
│ │ ├── cloudsqlmysql
│ │ │ ├── cloud_sql_mysql_test.go
│ │ │ └── cloud_sql_mysql.go
│ │ ├── cloudsqlpg
│ │ │ ├── cloud_sql_pg_test.go
│ │ │ └── cloud_sql_pg.go
│ │ ├── couchbase
│ │ │ ├── couchbase_test.go
│ │ │ └── couchbase.go
│ │ ├── dataplex
│ │ │ ├── dataplex_test.go
│ │ │ └── dataplex.go
│ │ ├── dgraph
│ │ │ ├── dgraph_test.go
│ │ │ └── dgraph.go
│ │ ├── dialect.go
│ │ ├── firebird
│ │ │ ├── firebird_test.go
│ │ │ └── firebird.go
│ │ ├── firestore
│ │ │ ├── firestore_test.go
│ │ │ └── firestore.go
│ │ ├── http
│ │ │ ├── http_test.go
│ │ │ └── http.go
│ │ ├── ip_type.go
│ │ ├── looker
│ │ │ ├── looker_test.go
│ │ │ └── looker.go
│ │ ├── mongodb
│ │ │ ├── mongodb_test.go
│ │ │ └── mongodb.go
│ │ ├── mssql
│ │ │ ├── mssql_test.go
│ │ │ └── mssql.go
│ │ ├── mysql
│ │ │ ├── mysql_test.go
│ │ │ └── mysql.go
│ │ ├── neo4j
│ │ │ ├── neo4j_test.go
│ │ │ └── neo4j.go
│ │ ├── oceanbase
│ │ │ ├── oceanbase_test.go
│ │ │ └── oceanbase.go
│ │ ├── oracle
│ │ │ └── oracle.go
│ │ ├── postgres
│ │ │ ├── postgres_test.go
│ │ │ └── postgres.go
│ │ ├── redis
│ │ │ ├── redis_test.go
│ │ │ └── redis.go
│ │ ├── serverlessspark
│ │ │ ├── serverlessspark_test.go
│ │ │ └── serverlessspark.go
│ │ ├── sources.go
│ │ ├── spanner
│ │ │ ├── spanner_test.go
│ │ │ └── spanner.go
│ │ ├── sqlite
│ │ │ ├── sqlite_test.go
│ │ │ └── sqlite.go
│ │ ├── tidb
│ │ │ ├── tidb_test.go
│ │ │ └── tidb.go
│ │ ├── trino
│ │ │ ├── trino_test.go
│ │ │ └── trino.go
│ │ ├── util.go
│ │ ├── valkey
│ │ │ ├── valkey_test.go
│ │ │ └── valkey.go
│ │ └── yugabytedb
│ │ ├── yugabytedb_test.go
│ │ └── yugabytedb.go
│ ├── telemetry
│ │ ├── instrumentation.go
│ │ └── telemetry.go
│ ├── testutils
│ │ └── testutils.go
│ ├── tools
│ │ ├── alloydb
│ │ │ ├── alloydbcreatecluster
│ │ │ │ ├── alloydbcreatecluster_test.go
│ │ │ │ └── alloydbcreatecluster.go
│ │ │ ├── alloydbcreateinstance
│ │ │ │ ├── alloydbcreateinstance_test.go
│ │ │ │ └── alloydbcreateinstance.go
│ │ │ ├── alloydbcreateuser
│ │ │ │ ├── alloydbcreateuser_test.go
│ │ │ │ └── alloydbcreateuser.go
│ │ │ ├── alloydbgetcluster
│ │ │ │ ├── alloydbgetcluster_test.go
│ │ │ │ └── alloydbgetcluster.go
│ │ │ ├── alloydbgetinstance
│ │ │ │ ├── alloydbgetinstance_test.go
│ │ │ │ └── alloydbgetinstance.go
│ │ │ ├── alloydbgetuser
│ │ │ │ ├── alloydbgetuser_test.go
│ │ │ │ └── alloydbgetuser.go
│ │ │ ├── alloydblistclusters
│ │ │ │ ├── alloydblistclusters_test.go
│ │ │ │ └── alloydblistclusters.go
│ │ │ ├── alloydblistinstances
│ │ │ │ ├── alloydblistinstances_test.go
│ │ │ │ └── alloydblistinstances.go
│ │ │ ├── alloydblistusers
│ │ │ │ ├── alloydblistusers_test.go
│ │ │ │ └── alloydblistusers.go
│ │ │ └── alloydbwaitforoperation
│ │ │ ├── alloydbwaitforoperation_test.go
│ │ │ └── alloydbwaitforoperation.go
│ │ ├── alloydbainl
│ │ │ ├── alloydbainl_test.go
│ │ │ └── alloydbainl.go
│ │ ├── bigquery
│ │ │ ├── bigqueryanalyzecontribution
│ │ │ │ ├── bigqueryanalyzecontribution_test.go
│ │ │ │ └── bigqueryanalyzecontribution.go
│ │ │ ├── bigquerycommon
│ │ │ │ ├── table_name_parser_test.go
│ │ │ │ ├── table_name_parser.go
│ │ │ │ └── util.go
│ │ │ ├── bigqueryconversationalanalytics
│ │ │ │ ├── bigqueryconversationalanalytics_test.go
│ │ │ │ └── bigqueryconversationalanalytics.go
│ │ │ ├── bigqueryexecutesql
│ │ │ │ ├── bigqueryexecutesql_test.go
│ │ │ │ └── bigqueryexecutesql.go
│ │ │ ├── bigqueryforecast
│ │ │ │ ├── bigqueryforecast_test.go
│ │ │ │ └── bigqueryforecast.go
│ │ │ ├── bigquerygetdatasetinfo
│ │ │ │ ├── bigquerygetdatasetinfo_test.go
│ │ │ │ └── bigquerygetdatasetinfo.go
│ │ │ ├── bigquerygettableinfo
│ │ │ │ ├── bigquerygettableinfo_test.go
│ │ │ │ └── bigquerygettableinfo.go
│ │ │ ├── bigquerylistdatasetids
│ │ │ │ ├── bigquerylistdatasetids_test.go
│ │ │ │ └── bigquerylistdatasetids.go
│ │ │ ├── bigquerylisttableids
│ │ │ │ ├── bigquerylisttableids_test.go
│ │ │ │ └── bigquerylisttableids.go
│ │ │ ├── bigquerysearchcatalog
│ │ │ │ ├── bigquerysearchcatalog_test.go
│ │ │ │ └── bigquerysearchcatalog.go
│ │ │ └── bigquerysql
│ │ │ ├── bigquerysql_test.go
│ │ │ └── bigquerysql.go
│ │ ├── bigtable
│ │ │ ├── bigtable_test.go
│ │ │ └── bigtable.go
│ │ ├── cassandra
│ │ │ └── cassandracql
│ │ │ ├── cassandracql_test.go
│ │ │ └── cassandracql.go
│ │ ├── clickhouse
│ │ │ ├── clickhouseexecutesql
│ │ │ │ ├── clickhouseexecutesql_test.go
│ │ │ │ └── clickhouseexecutesql.go
│ │ │ ├── clickhouselistdatabases
│ │ │ │ ├── clickhouselistdatabases_test.go
│ │ │ │ └── clickhouselistdatabases.go
│ │ │ ├── clickhouselisttables
│ │ │ │ ├── clickhouselisttables_test.go
│ │ │ │ └── clickhouselisttables.go
│ │ │ └── clickhousesql
│ │ │ ├── clickhousesql_test.go
│ │ │ └── clickhousesql.go
│ │ ├── cloudmonitoring
│ │ │ ├── cloudmonitoring_test.go
│ │ │ └── cloudmonitoring.go
│ │ ├── cloudsql
│ │ │ ├── cloudsqlcreatedatabase
│ │ │ │ ├── cloudsqlcreatedatabase_test.go
│ │ │ │ └── cloudsqlcreatedatabase.go
│ │ │ ├── cloudsqlcreateusers
│ │ │ │ ├── cloudsqlcreateusers_test.go
│ │ │ │ └── cloudsqlcreateusers.go
│ │ │ ├── cloudsqlgetinstances
│ │ │ │ ├── cloudsqlgetinstances_test.go
│ │ │ │ └── cloudsqlgetinstances.go
│ │ │ ├── cloudsqllistdatabases
│ │ │ │ ├── cloudsqllistdatabases_test.go
│ │ │ │ └── cloudsqllistdatabases.go
│ │ │ ├── cloudsqllistinstances
│ │ │ │ ├── cloudsqllistinstances_test.go
│ │ │ │ └── cloudsqllistinstances.go
│ │ │ └── cloudsqlwaitforoperation
│ │ │ ├── cloudsqlwaitforoperation_test.go
│ │ │ └── cloudsqlwaitforoperation.go
│ │ ├── cloudsqlmssql
│ │ │ └── cloudsqlmssqlcreateinstance
│ │ │ ├── cloudsqlmssqlcreateinstance_test.go
│ │ │ └── cloudsqlmssqlcreateinstance.go
│ │ ├── cloudsqlmysql
│ │ │ └── cloudsqlmysqlcreateinstance
│ │ │ ├── cloudsqlmysqlcreateinstance_test.go
│ │ │ └── cloudsqlmysqlcreateinstance.go
│ │ ├── cloudsqlpg
│ │ │ └── cloudsqlpgcreateinstances
│ │ │ ├── cloudsqlpgcreateinstances_test.go
│ │ │ └── cloudsqlpgcreateinstances.go
│ │ ├── common_test.go
│ │ ├── common.go
│ │ ├── couchbase
│ │ │ ├── couchbase_test.go
│ │ │ └── couchbase.go
│ │ ├── dataform
│ │ │ └── dataformcompilelocal
│ │ │ ├── dataformcompilelocal_test.go
│ │ │ └── dataformcompilelocal.go
│ │ ├── dataplex
│ │ │ ├── dataplexlookupentry
│ │ │ │ ├── dataplexlookupentry_test.go
│ │ │ │ └── dataplexlookupentry.go
│ │ │ ├── dataplexsearchaspecttypes
│ │ │ │ ├── dataplexsearchaspecttypes_test.go
│ │ │ │ └── dataplexsearchaspecttypes.go
│ │ │ └── dataplexsearchentries
│ │ │ ├── dataplexsearchentries_test.go
│ │ │ └── dataplexsearchentries.go
│ │ ├── dgraph
│ │ │ ├── dgraph_test.go
│ │ │ └── dgraph.go
│ │ ├── firebird
│ │ │ ├── firebirdexecutesql
│ │ │ │ ├── firebirdexecutesql_test.go
│ │ │ │ └── firebirdexecutesql.go
│ │ │ └── firebirdsql
│ │ │ ├── firebirdsql_test.go
│ │ │ └── firebirdsql.go
│ │ ├── firestore
│ │ │ ├── firestoreadddocuments
│ │ │ │ ├── firestoreadddocuments_test.go
│ │ │ │ └── firestoreadddocuments.go
│ │ │ ├── firestoredeletedocuments
│ │ │ │ ├── firestoredeletedocuments_test.go
│ │ │ │ └── firestoredeletedocuments.go
│ │ │ ├── firestoregetdocuments
│ │ │ │ ├── firestoregetdocuments_test.go
│ │ │ │ └── firestoregetdocuments.go
│ │ │ ├── firestoregetrules
│ │ │ │ ├── firestoregetrules_test.go
│ │ │ │ └── firestoregetrules.go
│ │ │ ├── firestorelistcollections
│ │ │ │ ├── firestorelistcollections_test.go
│ │ │ │ └── firestorelistcollections.go
│ │ │ ├── firestorequery
│ │ │ │ ├── firestorequery_test.go
│ │ │ │ └── firestorequery.go
│ │ │ ├── firestorequerycollection
│ │ │ │ ├── firestorequerycollection_test.go
│ │ │ │ └── firestorequerycollection.go
│ │ │ ├── firestoreupdatedocument
│ │ │ │ ├── firestoreupdatedocument_test.go
│ │ │ │ └── firestoreupdatedocument.go
│ │ │ ├── firestorevalidaterules
│ │ │ │ ├── firestorevalidaterules_test.go
│ │ │ │ └── firestorevalidaterules.go
│ │ │ └── util
│ │ │ ├── converter_test.go
│ │ │ ├── converter.go
│ │ │ ├── validator_test.go
│ │ │ └── validator.go
│ │ ├── http
│ │ │ ├── http_test.go
│ │ │ └── http.go
│ │ ├── http_method.go
│ │ ├── looker
│ │ │ ├── lookeradddashboardelement
│ │ │ │ ├── lookeradddashboardelement_test.go
│ │ │ │ └── lookeradddashboardelement.go
│ │ │ ├── lookercommon
│ │ │ │ ├── lookercommon_test.go
│ │ │ │ └── lookercommon.go
│ │ │ ├── lookerconversationalanalytics
│ │ │ │ ├── lookerconversationalanalytics_test.go
│ │ │ │ └── lookerconversationalanalytics.go
│ │ │ ├── lookercreateprojectfile
│ │ │ │ ├── lookercreateprojectfile_test.go
│ │ │ │ └── lookercreateprojectfile.go
│ │ │ ├── lookerdeleteprojectfile
│ │ │ │ ├── lookerdeleteprojectfile_test.go
│ │ │ │ └── lookerdeleteprojectfile.go
│ │ │ ├── lookerdevmode
│ │ │ │ ├── lookerdevmode_test.go
│ │ │ │ └── lookerdevmode.go
│ │ │ ├── lookergetconnectiondatabases
│ │ │ │ ├── lookergetconnectiondatabases_test.go
│ │ │ │ └── lookergetconnectiondatabases.go
│ │ │ ├── lookergetconnections
│ │ │ │ ├── lookergetconnections_test.go
│ │ │ │ └── lookergetconnections.go
│ │ │ ├── lookergetconnectionschemas
│ │ │ │ ├── lookergetconnectionschemas_test.go
│ │ │ │ └── lookergetconnectionschemas.go
│ │ │ ├── lookergetconnectiontablecolumns
│ │ │ │ ├── lookergetconnectiontablecolumns_test.go
│ │ │ │ └── lookergetconnectiontablecolumns.go
│ │ │ ├── lookergetconnectiontables
│ │ │ │ ├── lookergetconnectiontables_test.go
│ │ │ │ └── lookergetconnectiontables.go
│ │ │ ├── lookergetdashboards
│ │ │ │ ├── lookergetdashboards_test.go
│ │ │ │ └── lookergetdashboards.go
│ │ │ ├── lookergetdimensions
│ │ │ │ ├── lookergetdimensions_test.go
│ │ │ │ └── lookergetdimensions.go
│ │ │ ├── lookergetexplores
│ │ │ │ ├── lookergetexplores_test.go
│ │ │ │ └── lookergetexplores.go
│ │ │ ├── lookergetfilters
│ │ │ │ ├── lookergetfilters_test.go
│ │ │ │ └── lookergetfilters.go
│ │ │ ├── lookergetlooks
│ │ │ │ ├── lookergetlooks_test.go
│ │ │ │ └── lookergetlooks.go
│ │ │ ├── lookergetmeasures
│ │ │ │ ├── lookergetmeasures_test.go
│ │ │ │ └── lookergetmeasures.go
│ │ │ ├── lookergetmodels
│ │ │ │ ├── lookergetmodels_test.go
│ │ │ │ └── lookergetmodels.go
│ │ │ ├── lookergetparameters
│ │ │ │ ├── lookergetparameters_test.go
│ │ │ │ └── lookergetparameters.go
│ │ │ ├── lookergetprojectfile
│ │ │ │ ├── lookergetprojectfile_test.go
│ │ │ │ └── lookergetprojectfile.go
│ │ │ ├── lookergetprojectfiles
│ │ │ │ ├── lookergetprojectfiles_test.go
│ │ │ │ └── lookergetprojectfiles.go
│ │ │ ├── lookergetprojects
│ │ │ │ ├── lookergetprojects_test.go
│ │ │ │ └── lookergetprojects.go
│ │ │ ├── lookerhealthanalyze
│ │ │ │ ├── lookerhealthanalyze_test.go
│ │ │ │ └── lookerhealthanalyze.go
│ │ │ ├── lookerhealthpulse
│ │ │ │ ├── lookerhealthpulse_test.go
│ │ │ │ └── lookerhealthpulse.go
│ │ │ ├── lookerhealthvacuum
│ │ │ │ ├── lookerhealthvacuum_test.go
│ │ │ │ └── lookerhealthvacuum.go
│ │ │ ├── lookermakedashboard
│ │ │ │ ├── lookermakedashboard_test.go
│ │ │ │ └── lookermakedashboard.go
│ │ │ ├── lookermakelook
│ │ │ │ ├── lookermakelook_test.go
│ │ │ │ └── lookermakelook.go
│ │ │ ├── lookerquery
│ │ │ │ ├── lookerquery_test.go
│ │ │ │ └── lookerquery.go
│ │ │ ├── lookerquerysql
│ │ │ │ ├── lookerquerysql_test.go
│ │ │ │ └── lookerquerysql.go
│ │ │ ├── lookerqueryurl
│ │ │ │ ├── lookerqueryurl_test.go
│ │ │ │ └── lookerqueryurl.go
│ │ │ ├── lookerrunlook
│ │ │ │ ├── lookerrunlook_test.go
│ │ │ │ └── lookerrunlook.go
│ │ │ └── lookerupdateprojectfile
│ │ │ ├── lookerupdateprojectfile_test.go
│ │ │ └── lookerupdateprojectfile.go
│ │ ├── mongodb
│ │ │ ├── mongodbaggregate
│ │ │ │ ├── mongodbaggregate_test.go
│ │ │ │ └── mongodbaggregate.go
│ │ │ ├── mongodbdeletemany
│ │ │ │ ├── mongodbdeletemany_test.go
│ │ │ │ └── mongodbdeletemany.go
│ │ │ ├── mongodbdeleteone
│ │ │ │ ├── mongodbdeleteone_test.go
│ │ │ │ └── mongodbdeleteone.go
│ │ │ ├── mongodbfind
│ │ │ │ ├── mongodbfind_test.go
│ │ │ │ └── mongodbfind.go
│ │ │ ├── mongodbfindone
│ │ │ │ ├── mongodbfindone_test.go
│ │ │ │ └── mongodbfindone.go
│ │ │ ├── mongodbinsertmany
│ │ │ │ ├── mongodbinsertmany_test.go
│ │ │ │ └── mongodbinsertmany.go
│ │ │ ├── mongodbinsertone
│ │ │ │ ├── mongodbinsertone_test.go
│ │ │ │ └── mongodbinsertone.go
│ │ │ ├── mongodbupdatemany
│ │ │ │ ├── mongodbupdatemany_test.go
│ │ │ │ └── mongodbupdatemany.go
│ │ │ └── mongodbupdateone
│ │ │ ├── mongodbupdateone_test.go
│ │ │ └── mongodbupdateone.go
│ │ ├── mssql
│ │ │ ├── mssqlexecutesql
│ │ │ │ ├── mssqlexecutesql_test.go
│ │ │ │ └── mssqlexecutesql.go
│ │ │ ├── mssqllisttables
│ │ │ │ ├── mssqllisttables_test.go
│ │ │ │ └── mssqllisttables.go
│ │ │ └── mssqlsql
│ │ │ ├── mssqlsql_test.go
│ │ │ └── mssqlsql.go
│ │ ├── mysql
│ │ │ ├── mysqlcommon
│ │ │ │ └── mysqlcommon.go
│ │ │ ├── mysqlexecutesql
│ │ │ │ ├── mysqlexecutesql_test.go
│ │ │ │ └── mysqlexecutesql.go
│ │ │ ├── mysqllistactivequeries
│ │ │ │ ├── mysqllistactivequeries_test.go
│ │ │ │ └── mysqllistactivequeries.go
│ │ │ ├── mysqllisttablefragmentation
│ │ │ │ ├── mysqllisttablefragmentation_test.go
│ │ │ │ └── mysqllisttablefragmentation.go
│ │ │ ├── mysqllisttables
│ │ │ │ ├── mysqllisttables_test.go
│ │ │ │ └── mysqllisttables.go
│ │ │ ├── mysqllisttablesmissinguniqueindexes
│ │ │ │ ├── mysqllisttablesmissinguniqueindexes_test.go
│ │ │ │ └── mysqllisttablesmissinguniqueindexes.go
│ │ │ └── mysqlsql
│ │ │ ├── mysqlsql_test.go
│ │ │ └── mysqlsql.go
│ │ ├── neo4j
│ │ │ ├── neo4jcypher
│ │ │ │ ├── neo4jcypher_test.go
│ │ │ │ └── neo4jcypher.go
│ │ │ ├── neo4jexecutecypher
│ │ │ │ ├── classifier
│ │ │ │ │ ├── classifier_test.go
│ │ │ │ │ └── classifier.go
│ │ │ │ ├── neo4jexecutecypher_test.go
│ │ │ │ └── neo4jexecutecypher.go
│ │ │ └── neo4jschema
│ │ │ ├── cache
│ │ │ │ ├── cache_test.go
│ │ │ │ └── cache.go
│ │ │ ├── helpers
│ │ │ │ ├── helpers_test.go
│ │ │ │ └── helpers.go
│ │ │ ├── neo4jschema_test.go
│ │ │ ├── neo4jschema.go
│ │ │ └── types
│ │ │ └── types.go
│ │ ├── oceanbase
│ │ │ ├── oceanbaseexecutesql
│ │ │ │ ├── oceanbaseexecutesql_test.go
│ │ │ │ └── oceanbaseexecutesql.go
│ │ │ └── oceanbasesql
│ │ │ ├── oceanbasesql_test.go
│ │ │ └── oceanbasesql.go
│ │ ├── oracle
│ │ │ ├── oracleexecutesql
│ │ │ │ └── oracleexecutesql.go
│ │ │ └── oraclesql
│ │ │ └── oraclesql.go
│ │ ├── parameters_test.go
│ │ ├── parameters.go
│ │ ├── postgres
│ │ │ ├── postgresexecutesql
│ │ │ │ ├── postgresexecutesql_test.go
│ │ │ │ └── postgresexecutesql.go
│ │ │ ├── postgreslistactivequeries
│ │ │ │ ├── postgreslistactivequeries_test.go
│ │ │ │ └── postgreslistactivequeries.go
│ │ │ ├── postgreslistavailableextensions
│ │ │ │ ├── postgreslistavailableextensions_test.go
│ │ │ │ └── postgreslistavailableextensions.go
│ │ │ ├── postgreslistinstalledextensions
│ │ │ │ ├── postgreslistinstalledextensions_test.go
│ │ │ │ └── postgreslistinstalledextensions.go
│ │ │ ├── postgreslisttables
│ │ │ │ ├── postgreslisttables_test.go
│ │ │ │ └── postgreslisttables.go
│ │ │ ├── postgreslistviews
│ │ │ │ ├── postgreslistviews_test.go
│ │ │ │ └── postgreslistviews.go
│ │ │ └── postgressql
│ │ │ ├── postgressql_test.go
│ │ │ └── postgressql.go
│ │ ├── redis
│ │ │ ├── redis_test.go
│ │ │ └── redis.go
│ │ ├── serverlessspark
│ │ │ ├── serverlesssparkgetbatch
│ │ │ │ ├── serverlesssparkgetbatch_test.go
│ │ │ │ └── serverlesssparkgetbatch.go
│ │ │ └── serverlesssparklistbatches
│ │ │ ├── serverlesssparklistbatches_test.go
│ │ │ └── serverlesssparklistbatches.go
│ │ ├── spanner
│ │ │ ├── spannerexecutesql
│ │ │ │ ├── spannerexecutesql_test.go
│ │ │ │ └── spannerexecutesql.go
│ │ │ ├── spannerlisttables
│ │ │ │ ├── spannerlisttables_test.go
│ │ │ │ └── spannerlisttables.go
│ │ │ └── spannersql
│ │ │ ├── spanner_test.go
│ │ │ └── spannersql.go
│ │ ├── sqlite
│ │ │ ├── sqliteexecutesql
│ │ │ │ ├── sqliteexecutesql_test.go
│ │ │ │ └── sqliteexecutesql.go
│ │ │ └── sqlitesql
│ │ │ ├── sqlitesql_test.go
│ │ │ └── sqlitesql.go
│ │ ├── tidb
│ │ │ ├── tidbexecutesql
│ │ │ │ ├── tidbexecutesql_test.go
│ │ │ │ └── tidbexecutesql.go
│ │ │ └── tidbsql
│ │ │ ├── tidbsql_test.go
│ │ │ └── tidbsql.go
│ │ ├── tools_test.go
│ │ ├── tools.go
│ │ ├── toolsets.go
│ │ ├── trino
│ │ │ ├── trinoexecutesql
│ │ │ │ ├── trinoexecutesql_test.go
│ │ │ │ └── trinoexecutesql.go
│ │ │ └── trinosql
│ │ │ ├── trinosql_test.go
│ │ │ └── trinosql.go
│ │ ├── utility
│ │ │ └── wait
│ │ │ ├── wait_test.go
│ │ │ └── wait.go
│ │ ├── valkey
│ │ │ ├── valkey_test.go
│ │ │ └── valkey.go
│ │ └── yugabytedbsql
│ │ ├── yugabytedbsql_test.go
│ │ └── yugabytedbsql.go
│ └── util
│ └── util.go
├── LICENSE
├── logo.png
├── main.go
├── MCP-TOOLBOX-EXTENSION.md
├── README.md
└── tests
├── alloydb
│ ├── alloydb_integration_test.go
│ └── alloydb_wait_for_operation_test.go
├── alloydbainl
│ └── alloydb_ai_nl_integration_test.go
├── alloydbpg
│ └── alloydb_pg_integration_test.go
├── auth.go
├── bigquery
│ └── bigquery_integration_test.go
├── bigtable
│ └── bigtable_integration_test.go
├── cassandra
│ └── cassandra_integration_test.go
├── clickhouse
│ └── clickhouse_integration_test.go
├── cloudmonitoring
│ └── cloud_monitoring_integration_test.go
├── cloudsql
│ ├── cloud_sql_create_database_test.go
│ ├── cloud_sql_create_users_test.go
│ ├── cloud_sql_get_instances_test.go
│ ├── cloud_sql_list_databases_test.go
│ ├── cloudsql_list_instances_test.go
│ └── cloudsql_wait_for_operation_test.go
├── cloudsqlmssql
│ ├── cloud_sql_mssql_create_instance_integration_test.go
│ └── cloud_sql_mssql_integration_test.go
├── cloudsqlmysql
│ ├── cloud_sql_mysql_create_instance_integration_test.go
│ └── cloud_sql_mysql_integration_test.go
├── cloudsqlpg
│ ├── cloud_sql_pg_create_instances_test.go
│ └── cloud_sql_pg_integration_test.go
├── common.go
├── couchbase
│ └── couchbase_integration_test.go
├── dataform
│ └── dataform_integration_test.go
├── dataplex
│ └── dataplex_integration_test.go
├── dgraph
│ └── dgraph_integration_test.go
├── firebird
│ └── firebird_integration_test.go
├── firestore
│ └── firestore_integration_test.go
├── http
│ └── http_integration_test.go
├── looker
│ └── looker_integration_test.go
├── mongodb
│ └── mongodb_integration_test.go
├── mssql
│ └── mssql_integration_test.go
├── mysql
│ └── mysql_integration_test.go
├── neo4j
│ └── neo4j_integration_test.go
├── oceanbase
│ └── oceanbase_integration_test.go
├── option.go
├── oracle
│ └── oracle_integration_test.go
├── postgres
│ └── postgres_integration_test.go
├── redis
│ └── redis_test.go
├── server.go
├── serverlessspark
│ └── serverless_spark_integration_test.go
├── source.go
├── spanner
│ └── spanner_integration_test.go
├── sqlite
│ └── sqlite_integration_test.go
├── tidb
│ └── tidb_integration_test.go
├── tool.go
├── trino
│ └── trino_integration_test.go
├── utility
│ └── wait_integration_test.go
├── valkey
│ └── valkey_test.go
└── yugabytedb
└── yugabytedb_integration_test.go
```
# Files
--------------------------------------------------------------------------------
/tests/mongodb/mongodb_integration_test.go:
--------------------------------------------------------------------------------
```go
1 | // Copyright 2025 Google LLC
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package mongodb
16 |
17 | import (
18 | "bytes"
19 | "context"
20 | "encoding/json"
21 | "fmt"
22 | "io"
23 | "net/http"
24 | "os"
25 | "regexp"
26 | "testing"
27 | "time"
28 |
29 | "github.com/googleapis/genai-toolbox/internal/testutils"
30 | "github.com/googleapis/genai-toolbox/tests"
31 | "go.mongodb.org/mongo-driver/mongo"
32 | "go.mongodb.org/mongo-driver/mongo/options"
33 | )
34 |
35 | var (
36 | MongoDbSourceKind = "mongodb"
37 | MongoDbToolKind = "mongodb-find"
38 | MongoDbUri = os.Getenv("MONGODB_URI")
39 | MongoDbDatabase = os.Getenv("MONGODB_DATABASE")
40 | ServiceAccountEmail = os.Getenv("SERVICE_ACCOUNT_EMAIL")
41 | )
42 |
43 | func getMongoDBVars(t *testing.T) map[string]any {
44 | switch "" {
45 | case MongoDbUri:
46 | t.Fatal("'MongoDbUri' not set")
47 | case MongoDbDatabase:
48 | t.Fatal("'MongoDbDatabase' not set")
49 | }
50 | return map[string]any{
51 | "kind": MongoDbSourceKind,
52 | "uri": MongoDbUri,
53 | }
54 | }
55 |
56 | func initMongoDbDatabase(ctx context.Context, uri, database string) (*mongo.Database, error) {
57 | // Create a new mongodb Database
58 | client, err := mongo.Connect(ctx, options.Client().ApplyURI(uri))
59 | if err != nil {
60 | return nil, fmt.Errorf("unable to connect to mongodb: %s", err)
61 | }
62 | err = client.Ping(ctx, nil)
63 | if err != nil {
64 | return nil, fmt.Errorf("unable to connect to mongodb: %s", err)
65 | }
66 | return client.Database(database), nil
67 | }
68 |
69 | func TestMongoDBToolEndpoints(t *testing.T) {
70 | sourceConfig := getMongoDBVars(t)
71 | ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
72 | defer cancel()
73 |
74 | var args []string
75 |
76 | database, err := initMongoDbDatabase(ctx, MongoDbUri, MongoDbDatabase)
77 | if err != nil {
78 | t.Fatalf("unable to create MongoDB connection: %s", err)
79 | }
80 |
81 | // set up data for param tool
82 | teardownDB := setupMongoDB(t, ctx, database)
83 | defer teardownDB(t)
84 |
85 | // Write config into a file and pass it to command
86 | toolsFile := getMongoDBToolsConfig(sourceConfig, MongoDbToolKind)
87 |
88 | cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
89 | if err != nil {
90 | t.Fatalf("command initialization returned an error: %s", err)
91 | }
92 | defer cleanup()
93 |
94 | waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
95 | defer cancel()
96 | out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
97 | if err != nil {
98 | t.Logf("toolbox command logs: \n%s", out)
99 | t.Fatalf("toolbox didn't start successfully: %s", err)
100 | }
101 |
102 | // Get configs for tests
103 | select1Want := `[{"_id":3,"id":3,"name":"Sid"}]`
104 | myToolId3NameAliceWant := `[{"_id":5,"id":3,"name":"Alice"}]`
105 | myToolById4Want := `[{"_id":4,"id":4,"name":null}]`
106 | mcpMyFailToolWant := `invalid JSON input: missing colon after key `
107 | mcpMyToolId3NameAliceWant := `{"jsonrpc":"2.0","id":"my-simple-tool","result":{"content":[{"type":"text","text":"{\"_id\":5,\"id\":3,\"name\":\"Alice\"}"}]}}`
108 |
109 | // Run tests
110 | tests.RunToolGetTest(t)
111 | tests.RunToolInvokeTest(t, select1Want,
112 | tests.WithMyToolId3NameAliceWant(myToolId3NameAliceWant),
113 | tests.WithMyArrayToolWant(myToolId3NameAliceWant),
114 | tests.WithMyToolById4Want(myToolById4Want),
115 | )
116 | tests.RunMCPToolCallMethod(t, mcpMyFailToolWant, select1Want,
117 | tests.WithMcpMyToolId3NameAliceWant(mcpMyToolId3NameAliceWant),
118 | )
119 |
120 | delete1Want := "1"
121 | deleteManyWant := "2"
122 | runToolDeleteInvokeTest(t, delete1Want, deleteManyWant)
123 |
124 | insert1Want := `["68666e1035bb36bf1b4d47fb"]`
125 | insertManyWant := `["68667a6436ec7d0363668db7","68667a6436ec7d0363668db8","68667a6436ec7d0363668db9"]`
126 | runToolInsertInvokeTest(t, insert1Want, insertManyWant)
127 |
128 | update1Want := "1"
129 | updateManyWant := "[2,0,2]"
130 | runToolUpdateInvokeTest(t, update1Want, updateManyWant)
131 |
132 | aggregate1Want := `[{"id":2}]`
133 | aggregateManyWant := `[{"id":500},{"id":501}]`
134 | runToolAggregateInvokeTest(t, aggregate1Want, aggregateManyWant)
135 | }
136 |
137 | func runToolDeleteInvokeTest(t *testing.T, delete1Want, deleteManyWant string) {
138 | // Test tool invoke endpoint
139 | invokeTcs := []struct {
140 | name string
141 | api string
142 | requestHeader map[string]string
143 | requestBody io.Reader
144 | want string
145 | isErr bool
146 | }{
147 | {
148 | name: "invoke my-delete-one-tool",
149 | api: "http://127.0.0.1:5000/api/tool/my-delete-one-tool/invoke",
150 | requestHeader: map[string]string{},
151 | requestBody: bytes.NewBuffer([]byte(`{ "id" : 100 }`)),
152 | want: delete1Want,
153 | isErr: false,
154 | },
155 | {
156 | name: "invoke my-delete-many-tool",
157 | api: "http://127.0.0.1:5000/api/tool/my-delete-many-tool/invoke",
158 | requestHeader: map[string]string{},
159 | requestBody: bytes.NewBuffer([]byte(`{ "id" : 101 }`)),
160 | want: deleteManyWant,
161 | isErr: false,
162 | },
163 | }
164 |
165 | for _, tc := range invokeTcs {
166 |
167 | t.Run(tc.name, func(t *testing.T) {
168 | // Send Tool invocation request
169 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
170 | if err != nil {
171 | t.Fatalf("unable to create request: %s", err)
172 | }
173 | req.Header.Add("Content-type", "application/json")
174 | for k, v := range tc.requestHeader {
175 | req.Header.Add(k, v)
176 | }
177 | resp, err := http.DefaultClient.Do(req)
178 | if err != nil {
179 | t.Fatalf("unable to send request: %s", err)
180 | }
181 | defer resp.Body.Close()
182 |
183 | if resp.StatusCode != http.StatusOK {
184 | if tc.isErr {
185 | return
186 | }
187 | bodyBytes, _ := io.ReadAll(resp.Body)
188 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
189 | }
190 |
191 | // Check response body
192 | var body map[string]interface{}
193 | err = json.NewDecoder(resp.Body).Decode(&body)
194 | if err != nil {
195 | t.Fatalf("error parsing response body")
196 | }
197 |
198 | got, ok := body["result"].(string)
199 | if !ok {
200 | t.Fatalf("unable to find result in response body")
201 | }
202 |
203 | if got != tc.want {
204 | t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
205 | }
206 | })
207 | }
208 | }
209 |
210 | func runToolInsertInvokeTest(t *testing.T, insert1Want, insertManyWant string) {
211 | // Test tool invoke endpoint
212 | invokeTcs := []struct {
213 | name string
214 | api string
215 | requestHeader map[string]string
216 | requestBody io.Reader
217 | want string
218 | isErr bool
219 | }{
220 | {
221 | name: "invoke my-insert-one-tool",
222 | api: "http://127.0.0.1:5000/api/tool/my-insert-one-tool/invoke",
223 | requestHeader: map[string]string{},
224 | requestBody: bytes.NewBuffer([]byte(`{ "data" : "{ \"_id\": { \"$oid\": \"68666e1035bb36bf1b4d47fb\" }, \"id\" : 200 }" }"`)),
225 | want: insert1Want,
226 | isErr: false,
227 | },
228 | {
229 | name: "invoke my-insert-many-tool",
230 | api: "http://127.0.0.1:5000/api/tool/my-insert-many-tool/invoke",
231 | requestHeader: map[string]string{},
232 | requestBody: bytes.NewBuffer([]byte(`{ "data" : "[{ \"_id\": { \"$oid\": \"68667a6436ec7d0363668db7\"} , \"id\" : 201 }, { \"_id\" : { \"$oid\": \"68667a6436ec7d0363668db8\"}, \"id\" : 202 }, { \"_id\": { \"$oid\": \"68667a6436ec7d0363668db9\"}, \"id\": 203 }]" }`)),
233 | want: insertManyWant,
234 | isErr: false,
235 | },
236 | }
237 |
238 | for _, tc := range invokeTcs {
239 |
240 | t.Run(tc.name, func(t *testing.T) {
241 | // Send Tool invocation request
242 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
243 | if err != nil {
244 | t.Fatalf("unable to create request: %s", err)
245 | }
246 | req.Header.Add("Content-type", "application/json")
247 | for k, v := range tc.requestHeader {
248 | req.Header.Add(k, v)
249 | }
250 | resp, err := http.DefaultClient.Do(req)
251 | if err != nil {
252 | t.Fatalf("unable to send request: %s", err)
253 | }
254 | defer resp.Body.Close()
255 |
256 | if resp.StatusCode != http.StatusOK {
257 | if tc.isErr {
258 | return
259 | }
260 | bodyBytes, _ := io.ReadAll(resp.Body)
261 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
262 | }
263 |
264 | // Check response body
265 | var body map[string]interface{}
266 | err = json.NewDecoder(resp.Body).Decode(&body)
267 | if err != nil {
268 | t.Fatalf("error parsing response body")
269 | }
270 |
271 | got, ok := body["result"].(string)
272 | if !ok {
273 | t.Fatalf("unable to find result in response body")
274 | }
275 |
276 | if got != tc.want {
277 | t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
278 | }
279 | })
280 | }
281 | }
282 |
283 | func runToolUpdateInvokeTest(t *testing.T, update1Want, updateManyWant string) {
284 | // Test tool invoke endpoint
285 | invokeTcs := []struct {
286 | name string
287 | api string
288 | requestHeader map[string]string
289 | requestBody io.Reader
290 | want string
291 | isErr bool
292 | }{
293 | {
294 | name: "invoke my-update-one-tool",
295 | api: "http://127.0.0.1:5000/api/tool/my-update-one-tool/invoke",
296 | requestHeader: map[string]string{},
297 | requestBody: bytes.NewBuffer([]byte(`{ "id": 300, "name": "Bob" }`)),
298 | want: update1Want,
299 | isErr: false,
300 | },
301 | {
302 | name: "invoke my-update-many-tool",
303 | api: "http://127.0.0.1:5000/api/tool/my-update-many-tool/invoke",
304 | requestHeader: map[string]string{},
305 | requestBody: bytes.NewBuffer([]byte(`{ "id": 400, "name" : "Alice" }`)),
306 | want: updateManyWant,
307 | isErr: false,
308 | },
309 | }
310 |
311 | for _, tc := range invokeTcs {
312 |
313 | t.Run(tc.name, func(t *testing.T) {
314 | // Send Tool invocation request
315 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
316 | if err != nil {
317 | t.Fatalf("unable to create request: %s", err)
318 | }
319 | req.Header.Add("Content-type", "application/json")
320 | for k, v := range tc.requestHeader {
321 | req.Header.Add(k, v)
322 | }
323 | resp, err := http.DefaultClient.Do(req)
324 | if err != nil {
325 | t.Fatalf("unable to send request: %s", err)
326 | }
327 | defer resp.Body.Close()
328 |
329 | if resp.StatusCode != http.StatusOK {
330 | if tc.isErr {
331 | return
332 | }
333 | bodyBytes, _ := io.ReadAll(resp.Body)
334 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
335 | }
336 |
337 | // Check response body
338 | var body map[string]interface{}
339 | err = json.NewDecoder(resp.Body).Decode(&body)
340 | if err != nil {
341 | t.Fatalf("error parsing response body")
342 | }
343 |
344 | got, ok := body["result"].(string)
345 | if !ok {
346 | t.Fatalf("unable to find result in response body")
347 | }
348 |
349 | if got != tc.want {
350 | t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
351 | }
352 | })
353 | }
354 | }
355 | func runToolAggregateInvokeTest(t *testing.T, aggregate1Want string, aggregateManyWant string) {
356 | // Test tool invoke endpoint
357 | invokeTcs := []struct {
358 | name string
359 | api string
360 | requestHeader map[string]string
361 | requestBody io.Reader
362 | want string
363 | isErr bool
364 | }{
365 | {
366 | name: "invoke my-aggregate-tool",
367 | api: "http://127.0.0.1:5000/api/tool/my-aggregate-tool/invoke",
368 | requestHeader: map[string]string{},
369 | requestBody: bytes.NewBuffer([]byte(`{ "name": "Jane" }`)),
370 | want: aggregate1Want,
371 | isErr: false,
372 | },
373 | {
374 | name: "invoke my-aggregate-tool",
375 | api: "http://127.0.0.1:5000/api/tool/my-aggregate-tool/invoke",
376 | requestHeader: map[string]string{},
377 | requestBody: bytes.NewBuffer([]byte(`{ "name" : "ToBeAggregated" }`)),
378 | want: aggregateManyWant,
379 | isErr: false,
380 | },
381 | {
382 | name: "invoke my-read-only-aggregate-tool",
383 | api: "http://127.0.0.1:5000/api/tool/my-read-only-aggregate-tool/invoke",
384 | requestHeader: map[string]string{},
385 | requestBody: bytes.NewBuffer([]byte(`{ "name" : "ToBeAggregated" }`)),
386 | want: "",
387 | isErr: true,
388 | },
389 | {
390 | name: "invoke my-read-write-aggregate-tool",
391 | api: "http://127.0.0.1:5000/api/tool/my-read-write-aggregate-tool/invoke",
392 | requestHeader: map[string]string{},
393 | requestBody: bytes.NewBuffer([]byte(`{ "name" : "ToBeAggregated" }`)),
394 | want: "[]",
395 | isErr: false,
396 | },
397 | }
398 |
399 | for _, tc := range invokeTcs {
400 |
401 | t.Run(tc.name, func(t *testing.T) {
402 | // Send Tool invocation request
403 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
404 | if err != nil {
405 | t.Fatalf("unable to create request: %s", err)
406 | }
407 | req.Header.Add("Content-type", "application/json")
408 | for k, v := range tc.requestHeader {
409 | req.Header.Add(k, v)
410 | }
411 | resp, err := http.DefaultClient.Do(req)
412 | if err != nil {
413 | t.Fatalf("unable to send request: %s", err)
414 | }
415 | defer resp.Body.Close()
416 |
417 | if resp.StatusCode != http.StatusOK {
418 | if tc.isErr {
419 | return
420 | }
421 | bodyBytes, _ := io.ReadAll(resp.Body)
422 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
423 | }
424 |
425 | // Check response body
426 | var body map[string]interface{}
427 | err = json.NewDecoder(resp.Body).Decode(&body)
428 | if err != nil {
429 | t.Fatalf("error parsing response body")
430 | }
431 |
432 | got, ok := body["result"].(string)
433 | if !ok {
434 | t.Fatalf("unable to find result in response body")
435 | }
436 |
437 | if got != tc.want {
438 | t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
439 | }
440 | })
441 | }
442 | }
443 |
444 | func setupMongoDB(t *testing.T, ctx context.Context, database *mongo.Database) func(*testing.T) {
445 | collectionName := "test_collection"
446 |
447 | documents := []map[string]any{
448 | {"_id": 1, "id": 1, "name": "Alice", "email": ServiceAccountEmail},
449 | {"_id": 1, "id": 2, "name": "FakeAlice", "email": "[email protected]"},
450 | {"_id": 2, "id": 2, "name": "Jane"},
451 | {"_id": 3, "id": 3, "name": "Sid"},
452 | {"_id": 4, "id": 4, "name": nil},
453 | {"_id": 5, "id": 3, "name": "Alice", "email": "[email protected]"},
454 | {"_id": 6, "id": 100, "name": "ToBeDeleted", "email": "[email protected]"},
455 | {"_id": 7, "id": 101, "name": "ToBeDeleted", "email": "[email protected]"},
456 | {"_id": 8, "id": 101, "name": "ToBeDeleted", "email": "[email protected]"},
457 | {"_id": 9, "id": 300, "name": "ToBeUpdatedToBob", "email": "[email protected]"},
458 | {"_id": 10, "id": 400, "name": "ToBeUpdatedToAlice", "email": "[email protected]"},
459 | {"_id": 11, "id": 400, "name": "ToBeUpdatedToAlice", "email": "[email protected]"},
460 | {"_id": 12, "id": 500, "name": "ToBeAggregated", "email": "[email protected]"},
461 | {"_id": 13, "id": 501, "name": "ToBeAggregated", "email": "[email protected]"},
462 | }
463 | for _, doc := range documents {
464 | _, err := database.Collection(collectionName).InsertOne(ctx, doc)
465 | if err != nil {
466 | t.Fatalf("unable to insert test data: %s", err)
467 | }
468 | }
469 |
470 | return func(t *testing.T) {
471 | // tear down test
472 | err := database.Collection(collectionName).Drop(ctx)
473 | if err != nil {
474 | t.Errorf("Teardown failed: %s", err)
475 | }
476 | }
477 |
478 | }
479 |
480 | func getMongoDBToolsConfig(sourceConfig map[string]any, toolKind string) map[string]any {
481 | toolsFile := map[string]any{
482 | "sources": map[string]any{
483 | "my-instance": sourceConfig,
484 | },
485 | "authServices": map[string]any{
486 | "my-google-auth": map[string]any{
487 | "kind": "google",
488 | "clientId": tests.ClientId,
489 | },
490 | },
491 | "tools": map[string]any{
492 | "my-simple-tool": map[string]any{
493 | "kind": "mongodb-find-one",
494 | "source": "my-instance",
495 | "description": "Simple tool to test end to end functionality.",
496 | "collection": "test_collection",
497 | "filterPayload": `{ "_id" : 3 }`,
498 | "filterParams": []any{},
499 | "projectPayload": `{ "_id": 1, "id": 1, "name" : 1 }`,
500 | "database": MongoDbDatabase,
501 | "limit": 1,
502 | "sort": `{ "id": 1 }`,
503 | },
504 | "my-tool": map[string]any{
505 | "kind": toolKind,
506 | "source": "my-instance",
507 | "description": "Tool to test invocation with params.",
508 | "authRequired": []string{},
509 | "collection": "test_collection",
510 | "filterPayload": `{ "id" : {{ .id }}, "name" : {{json .name }} }`,
511 | "filterParams": []map[string]any{
512 | {
513 | "name": "id",
514 | "type": "integer",
515 | "description": "user id",
516 | },
517 | {
518 | "name": "name",
519 | "type": "string",
520 | "description": "user name",
521 | },
522 | },
523 | "projectPayload": `{ "_id": 1, "id": 1, "name" : 1 }`,
524 | "database": MongoDbDatabase,
525 | },
526 | "my-tool-by-id": map[string]any{
527 | "kind": toolKind,
528 | "source": "my-instance",
529 | "description": "Tool to test invocation with params.",
530 | "authRequired": []string{},
531 | "collection": "test_collection",
532 | "filterPayload": `{ "id" : {{ .id }} }`,
533 | "filterParams": []map[string]any{
534 | {
535 | "name": "id",
536 | "type": "integer",
537 | "description": "user id",
538 | },
539 | },
540 | "projectPayload": `{ "_id": 1, "id": 1, "name" : 1 }`,
541 | "database": MongoDbDatabase,
542 | },
543 | "my-tool-by-name": map[string]any{
544 | "kind": toolKind,
545 | "source": "my-instance",
546 | "description": "Tool to test invocation with params.",
547 | "authRequired": []string{},
548 | "collection": "test_collection",
549 | "filterPayload": `{ "name" : {{ .name }} }`,
550 | "filterParams": []map[string]any{
551 | {
552 | "name": "name",
553 | "type": "string",
554 | "description": "user name",
555 | "required": false,
556 | },
557 | },
558 | "projectPayload": `{ "_id": 1, "id": 1, "name" : 1 }`,
559 | "database": MongoDbDatabase,
560 | },
561 | "my-array-tool": map[string]any{
562 | "kind": toolKind,
563 | "source": "my-instance",
564 | "description": "Tool to test invocation with array.",
565 | "authRequired": []string{},
566 | "collection": "test_collection",
567 | "filterPayload": `{ "name": { "$in": {{json .nameArray}} }, "_id": 5 })`,
568 | "filterParams": []map[string]any{
569 | {
570 | "name": "nameArray",
571 | "type": "array",
572 | "description": "user names",
573 | "items": map[string]any{
574 | "name": "username",
575 | "type": "string",
576 | "description": "string item"},
577 | },
578 | },
579 | "projectPayload": `{ "_id": 1, "id": 1, "name" : 1 }`,
580 | "database": MongoDbDatabase,
581 | },
582 | "my-auth-tool": map[string]any{
583 | "kind": toolKind,
584 | "source": "my-instance",
585 | "description": "Tool to test authenticated parameters.",
586 | "authRequired": []string{},
587 | "collection": "test_collection",
588 | "filterPayload": `{ "email" : {{json .email }} }`,
589 | "filterParams": []map[string]any{
590 | {
591 | "name": "email",
592 | "type": "string",
593 | "description": "user email",
594 | "authServices": []map[string]string{
595 | {
596 | "name": "my-google-auth",
597 | "field": "email",
598 | },
599 | },
600 | },
601 | },
602 | "projectPayload": `{ "_id": 0, "name" : 1 }`,
603 | "database": MongoDbDatabase,
604 | },
605 | "my-auth-required-tool": map[string]any{
606 | "kind": toolKind,
607 | "source": "my-instance",
608 | "description": "Tool to test auth required invocation.",
609 | "authRequired": []string{
610 | "my-google-auth",
611 | },
612 | "collection": "test_collection",
613 | "filterPayload": `{ "_id": 3, "id": 3 }`,
614 | "filterParams": []any{},
615 | "database": MongoDbDatabase,
616 | },
617 | "my-fail-tool": map[string]any{
618 | "kind": toolKind,
619 | "source": "my-instance",
620 | "description": "Tool to test statement with incorrect syntax.",
621 | "authRequired": []string{},
622 | "collection": "test_collection",
623 | "filterPayload": `{ "id" ; 1 }"}`,
624 | "filterParams": []any{},
625 | "database": MongoDbDatabase,
626 | },
627 | "my-delete-one-tool": map[string]any{
628 | "kind": "mongodb-delete-one",
629 | "source": "my-instance",
630 | "description": "Tool to test deleting an entry.",
631 | "authRequired": []string{},
632 | "collection": "test_collection",
633 | "filterPayload": `{ "id" : 100 }"}`,
634 | "filterParams": []any{},
635 | "database": MongoDbDatabase,
636 | },
637 | "my-delete-many-tool": map[string]any{
638 | "kind": "mongodb-delete-many",
639 | "source": "my-instance",
640 | "description": "Tool to test deleting multiple entries.",
641 | "authRequired": []string{},
642 | "collection": "test_collection",
643 | "filterPayload": `{ "id" : 101 }"}`,
644 | "filterParams": []any{},
645 | "database": MongoDbDatabase,
646 | },
647 | "my-insert-one-tool": map[string]any{
648 | "kind": "mongodb-insert-one",
649 | "source": "my-instance",
650 | "description": "Tool to test inserting an entry.",
651 | "authRequired": []string{},
652 | "collection": "test_collection",
653 | "canonical": true,
654 | "database": MongoDbDatabase,
655 | },
656 | "my-insert-many-tool": map[string]any{
657 | "kind": "mongodb-insert-many",
658 | "source": "my-instance",
659 | "description": "Tool to test inserting multiple entries.",
660 | "authRequired": []string{},
661 | "collection": "test_collection",
662 | "canonical": true,
663 | "database": MongoDbDatabase,
664 | },
665 | "my-update-one-tool": map[string]any{
666 | "kind": "mongodb-update-one",
667 | "source": "my-instance",
668 | "description": "Tool to test updating an entry.",
669 | "authRequired": []string{},
670 | "collection": "test_collection",
671 | "canonical": true,
672 | "filterPayload": `{ "id" : {{ .id }} }`,
673 | "filterParams": []map[string]any{
674 | {
675 | "name": "id",
676 | "type": "integer",
677 | "description": "id",
678 | },
679 | },
680 | "updatePayload": `{ "$set" : { "name": {{json .name}} } }`,
681 | "updateParams": []map[string]any{
682 | {
683 | "name": "name",
684 | "type": "string",
685 | "description": "user name",
686 | },
687 | },
688 | "database": MongoDbDatabase,
689 | },
690 | "my-update-many-tool": map[string]any{
691 | "kind": "mongodb-update-many",
692 | "source": "my-instance",
693 | "description": "Tool to test updating multiple entries.",
694 | "authRequired": []string{},
695 | "collection": "test_collection",
696 | "canonical": true,
697 | "filterPayload": `{ "id" : {{ .id }} }`,
698 | "filterParams": []map[string]any{
699 | {
700 | "name": "id",
701 | "type": "integer",
702 | "description": "id",
703 | },
704 | },
705 | "updatePayload": `{ "$set" : { "name": {{json .name}} } }`,
706 | "updateParams": []map[string]any{
707 | {
708 | "name": "name",
709 | "type": "string",
710 | "description": "user name",
711 | },
712 | },
713 | "database": MongoDbDatabase,
714 | },
715 | "my-aggregate-tool": map[string]any{
716 | "kind": "mongodb-aggregate",
717 | "source": "my-instance",
718 | "description": "Tool to test an aggregation.",
719 | "authRequired": []string{},
720 | "collection": "test_collection",
721 | "canonical": true,
722 | "pipelinePayload": `[{ "$match" : { "name": {{json .name}} } }, { "$project" : { "id" : 1, "_id" : 0 }}]`,
723 | "pipelineParams": []map[string]any{
724 | {
725 | "name": "name",
726 | "type": "string",
727 | "description": "user name",
728 | },
729 | },
730 | "database": MongoDbDatabase,
731 | },
732 | "my-read-only-aggregate-tool": map[string]any{
733 | "kind": "mongodb-aggregate",
734 | "source": "my-instance",
735 | "description": "Tool to test an aggregation.",
736 | "authRequired": []string{},
737 | "collection": "test_collection",
738 | "canonical": true,
739 | "readOnly": true,
740 | "pipelinePayload": `[{ "$match" : { "name": {{json .name}} } }, { "$out" : "target_collection" }]`,
741 | "pipelineParams": []map[string]any{
742 | {
743 | "name": "name",
744 | "type": "string",
745 | "description": "user name",
746 | },
747 | },
748 | "database": MongoDbDatabase,
749 | },
750 | "my-read-write-aggregate-tool": map[string]any{
751 | "kind": "mongodb-aggregate",
752 | "source": "my-instance",
753 | "description": "Tool to test an aggregation.",
754 | "authRequired": []string{},
755 | "collection": "test_collection",
756 | "canonical": true,
757 | "readOnly": false,
758 | "pipelinePayload": `[{ "$match" : { "name": {{json .name}} } }, { "$out" : "target_collection" }]`,
759 | "pipelineParams": []map[string]any{
760 | {
761 | "name": "name",
762 | "type": "string",
763 | "description": "user name",
764 | },
765 | },
766 | "database": MongoDbDatabase,
767 | },
768 | },
769 | }
770 |
771 | return toolsFile
772 |
773 | }
774 |
```
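The `filterPayload`, `updatePayload`, and `pipelinePayload` values in the config above are Go `text/template` strings that reference tool parameters (for example `{{json .email}}`). The sketch below is a minimal, hypothetical stand-in for how such a payload could be rendered; the `json` helper here is an assumption for illustration, not the toolbox's own implementation.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"text/template"
)

func main() {
	// Hypothetical "json" helper: marshal the parameter value so strings are
	// quoted and escaped safely inside the payload.
	funcs := template.FuncMap{
		"json": func(v any) (string, error) {
			b, err := json.Marshal(v)
			return string(b), err
		},
	}

	// Same shape as the email filter payload used in the config above.
	tmpl := template.Must(template.New("filter").Funcs(funcs).Parse(`{ "email" : {{json .email}} }`))

	var buf bytes.Buffer
	if err := tmpl.Execute(&buf, map[string]any{"email": "alice@example.com"}); err != nil {
		panic(err)
	}
	fmt.Println(buf.String()) // { "email" : "alice@example.com" }
}
```

Rendering produces a plain JSON filter document, which the MongoDB tools then parse and send to the server.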
--------------------------------------------------------------------------------
/internal/tools/spanner/spannerlisttables/spannerlisttables.go:
--------------------------------------------------------------------------------
```go
1 | // Copyright 2025 Google LLC
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package spannerlisttables
16 |
17 | import (
18 | "context"
19 | "fmt"
20 | "strings"
21 |
22 | "cloud.google.com/go/spanner"
23 | yaml "github.com/goccy/go-yaml"
24 | "github.com/googleapis/genai-toolbox/internal/sources"
25 | spannerdb "github.com/googleapis/genai-toolbox/internal/sources/spanner"
26 | "github.com/googleapis/genai-toolbox/internal/tools"
27 | "google.golang.org/api/iterator"
28 | )
29 |
30 | const kind string = "spanner-list-tables"
31 |
32 | func init() {
33 | if !tools.Register(kind, newConfig) {
34 | panic(fmt.Sprintf("tool kind %q already registered", kind))
35 | }
36 | }
37 |
38 | func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
39 | actual := Config{Name: name}
40 | if err := decoder.DecodeContext(ctx, &actual); err != nil {
41 | return nil, err
42 | }
43 | return actual, nil
44 | }
45 |
46 | type compatibleSource interface {
47 | SpannerClient() *spanner.Client
48 | DatabaseDialect() string
49 | }
50 |
51 | // validate compatible sources are still compatible
52 | var _ compatibleSource = &spannerdb.Source{}
53 |
54 | var compatibleSources = [...]string{spannerdb.SourceKind}
55 |
56 | type Config struct {
57 | Name string `yaml:"name" validate:"required"`
58 | Kind string `yaml:"kind" validate:"required"`
59 | Source string `yaml:"source" validate:"required"`
60 | Description string `yaml:"description"`
61 | AuthRequired []string `yaml:"authRequired"`
62 | }
63 |
64 | // validate interface
65 | var _ tools.ToolConfig = Config{}
66 |
67 | func (cfg Config) ToolConfigKind() string {
68 | return kind
69 | }
70 |
71 | func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
72 | // verify source exists
73 | rawS, ok := srcs[cfg.Source]
74 | if !ok {
75 | return nil, fmt.Errorf("no source named %q configured", cfg.Source)
76 | }
77 |
78 | // verify the source is compatible
79 | s, ok := rawS.(compatibleSource)
80 | if !ok {
81 | return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
82 | }
83 |
84 | // Define parameters for the tool
85 | allParameters := tools.Parameters{
86 | tools.NewStringParameterWithDefault(
87 | "table_names",
88 | "",
89 | "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed.",
90 | ),
91 | tools.NewStringParameterWithDefault(
92 | "output_format",
93 | "detailed",
94 | "Optional: Use 'simple' to return table names only or use 'detailed' to return the full information schema.",
95 | ),
96 | }
97 |
98 | description := cfg.Description
99 | if description == "" {
100 | description = "Lists detailed schema information (object type, columns, constraints, indexes) as JSON for user-created tables. Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
101 | }
102 | mcpManifest := tools.GetMcpManifest(cfg.Name, description, cfg.AuthRequired, allParameters)
103 |
104 | // finish tool setup
105 | t := Tool{
106 | Name: cfg.Name,
107 | Kind: kind,
108 | AllParams: allParameters,
109 | AuthRequired: cfg.AuthRequired,
110 | Client: s.SpannerClient(),
111 | dialect: s.DatabaseDialect(),
112 | manifest: tools.Manifest{Description: description, Parameters: allParameters.Manifest(), AuthRequired: cfg.AuthRequired},
113 | mcpManifest: mcpManifest,
114 | }
115 | return t, nil
116 | }
117 |
118 | // validate interface
119 | var _ tools.Tool = Tool{}
120 |
121 | type Tool struct {
122 | Name string `yaml:"name"`
123 | Kind string `yaml:"kind"`
124 | AuthRequired []string `yaml:"authRequired"`
125 | AllParams tools.Parameters `yaml:"allParams"`
126 | Client *spanner.Client
127 | dialect string
128 | manifest tools.Manifest
129 | mcpManifest tools.McpManifest
130 | }
131 |
132 | // processRows iterates over the spanner.RowIterator and converts each row to a map[string]any.
133 | func processRows(iter *spanner.RowIterator) ([]any, error) {
134 | var out []any
135 | defer iter.Stop()
136 |
137 | for {
138 | row, err := iter.Next()
139 | if err == iterator.Done {
140 | break
141 | }
142 | if err != nil {
143 | return nil, fmt.Errorf("unable to parse row: %w", err)
144 | }
145 |
146 | vMap := make(map[string]any)
147 | cols := row.ColumnNames()
148 | for i, c := range cols {
149 | vMap[c] = row.ColumnValue(i)
150 | }
151 | out = append(out, vMap)
152 | }
153 | return out, nil
154 | }
155 |
156 | func (t Tool) getStatement() string {
157 | switch strings.ToLower(t.dialect) {
158 | case "postgresql":
159 | return postgresqlStatement
160 | case "googlesql":
161 | return googleSQLStatement
162 | default:
163 | // Default to GoogleSQL
164 | return googleSQLStatement
165 | }
166 | }
167 |
168 | func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
169 | paramsMap := params.AsMap()
170 |
171 | // Get the appropriate SQL statement based on dialect
172 | statement := t.getStatement()
173 |
174 | // Prepare parameters based on dialect
175 | var stmtParams map[string]interface{}
176 |
177 | tableNames, _ := paramsMap["table_names"].(string)
178 | outputFormat, _ := paramsMap["output_format"].(string)
179 | if outputFormat == "" {
180 | outputFormat = "detailed"
181 | }
182 |
183 | switch strings.ToLower(t.dialect) {
184 | case "postgresql":
185 | // PostgreSQL uses positional parameters ($1, $2)
186 | stmtParams = map[string]interface{}{
187 | "p1": tableNames,
188 | "p2": outputFormat,
189 | }
190 |
191 | case "googlesql":
192 | // GoogleSQL uses named parameters (@table_names, @output_format)
193 | stmtParams = map[string]interface{}{
194 | "table_names": tableNames,
195 | "output_format": outputFormat,
196 | }
197 | default:
198 | return nil, fmt.Errorf("unsupported dialect: %s", t.dialect)
199 | }
200 |
201 | stmt := spanner.Statement{
202 | SQL: statement,
203 | Params: stmtParams,
204 | }
205 |
206 | // Execute the query (read-only)
207 | iter := t.Client.Single().Query(ctx, stmt)
208 | results, err := processRows(iter)
209 | if err != nil {
210 | return nil, fmt.Errorf("unable to execute query: %w", err)
211 | }
212 |
213 | return results, nil
214 | }
215 |
216 | func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
217 | return tools.ParseParams(t.AllParams, data, claims)
218 | }
219 |
220 | func (t Tool) Manifest() tools.Manifest {
221 | return t.manifest
222 | }
223 |
224 | func (t Tool) McpManifest() tools.McpManifest {
225 | return t.mcpManifest
226 | }
227 |
228 | func (t Tool) Authorized(verifiedAuthServices []string) bool {
229 | return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
230 | }
231 |
232 | func (t Tool) RequiresClientAuthorization() bool {
233 | return false
234 | }
235 |
236 | // PostgreSQL statement for listing tables
237 | const postgresqlStatement = `
238 | WITH table_info_cte AS (
239 | SELECT
240 | T.TABLE_SCHEMA,
241 | T.TABLE_NAME,
242 | T.TABLE_TYPE,
243 | T.PARENT_TABLE_NAME,
244 | T.ON_DELETE_ACTION
245 | FROM INFORMATION_SCHEMA.TABLES AS T
246 | WHERE
247 | T.TABLE_SCHEMA = 'public'
248 | AND T.TABLE_TYPE = 'BASE TABLE'
249 | AND (
250 | NULLIF(TRIM($1), '') IS NULL OR
251 | T.TABLE_NAME IN (
252 | SELECT table_name
253 | FROM UNNEST(regexp_split_to_array($1, '\s*,\s*')) AS table_name)
254 | )
255 | ),
256 |
257 | columns_info_cte AS (
258 | SELECT
259 | C.TABLE_SCHEMA,
260 | C.TABLE_NAME,
261 | ARRAY_AGG(
262 | CONCAT(
263 | '{',
264 | '"column_name":"', COALESCE(REPLACE(C.COLUMN_NAME, '"', '\"'), ''), '",',
265 | '"data_type":"', COALESCE(REPLACE(C.SPANNER_TYPE, '"', '\"'), ''), '",',
266 | '"ordinal_position":', C.ORDINAL_POSITION::TEXT, ',',
267 | '"is_not_nullable":', CASE WHEN C.IS_NULLABLE = 'NO' THEN 'true' ELSE 'false' END, ',',
268 | '"column_default":', CASE WHEN C.COLUMN_DEFAULT IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(C.COLUMN_DEFAULT::text, '"', '\"'), '"') END,
269 | '}'
270 | ) ORDER BY C.ORDINAL_POSITION
271 | ) AS columns_json_array_elements
272 | FROM INFORMATION_SCHEMA.COLUMNS AS C
273 | WHERE C.TABLE_SCHEMA = 'public'
274 | AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE C.TABLE_SCHEMA = TI.TABLE_SCHEMA AND C.TABLE_NAME = TI.TABLE_NAME)
275 | GROUP BY C.TABLE_SCHEMA, C.TABLE_NAME
276 | ),
277 |
278 | constraint_columns_agg_cte AS (
279 | SELECT
280 | CONSTRAINT_CATALOG,
281 | CONSTRAINT_SCHEMA,
282 | CONSTRAINT_NAME,
283 | ARRAY_AGG(REPLACE(COLUMN_NAME, '"', '\"') ORDER BY ORDINAL_POSITION) AS column_names_json_list
284 | FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
285 | WHERE CONSTRAINT_SCHEMA = 'public'
286 | GROUP BY CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME
287 | ),
288 |
289 | constraints_info_cte AS (
290 | SELECT
291 | TC.TABLE_SCHEMA,
292 | TC.TABLE_NAME,
293 | ARRAY_AGG(
294 | CONCAT(
295 | '{',
296 | '"constraint_name":"', COALESCE(REPLACE(TC.CONSTRAINT_NAME, '"', '\"'), ''), '",',
297 | '"constraint_type":"', COALESCE(REPLACE(TC.CONSTRAINT_TYPE, '"', '\"'), ''), '",',
298 | '"constraint_definition":',
299 | CASE TC.CONSTRAINT_TYPE
300 | WHEN 'CHECK' THEN CASE WHEN CC.CHECK_CLAUSE IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(CC.CHECK_CLAUSE, '"', '\"'), '"') END
301 | WHEN 'PRIMARY KEY' THEN CONCAT('"', 'PRIMARY KEY (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
302 | WHEN 'UNIQUE' THEN CONCAT('"', 'UNIQUE (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
303 | WHEN 'FOREIGN KEY' THEN CONCAT('"', 'FOREIGN KEY (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ') REFERENCES ',
304 | COALESCE(REPLACE(RefKeyTable.TABLE_NAME, '"', '\"'), ''),
305 | ' (', array_to_string(COALESCE(RefKeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
306 | ELSE 'null'
307 | END, ',',
308 | '"constraint_columns":["', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ','), '"],',
309 | '"foreign_key_referenced_table":', CASE WHEN RefKeyTable.TABLE_NAME IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(RefKeyTable.TABLE_NAME, '"', '\"'), '"') END, ',',
310 | '"foreign_key_referenced_columns":["', array_to_string(COALESCE(RefKeyCols.column_names_json_list, ARRAY[]::text[]), ','), '"]',
311 | '}'
312 | ) ORDER BY TC.CONSTRAINT_NAME
313 | ) AS constraints_json_array_elements
314 | FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC
315 | LEFT JOIN INFORMATION_SCHEMA.CHECK_CONSTRAINTS AS CC
316 | ON TC.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
317 | LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS RC
318 | ON TC.CONSTRAINT_CATALOG = RC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = RC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = RC.CONSTRAINT_NAME
319 | LEFT JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS RefConstraint
320 | ON RC.UNIQUE_CONSTRAINT_CATALOG = RefConstraint.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefConstraint.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefConstraint.CONSTRAINT_NAME
321 | LEFT JOIN INFORMATION_SCHEMA.TABLES AS RefKeyTable
322 | ON RefConstraint.TABLE_CATALOG = RefKeyTable.TABLE_CATALOG AND RefConstraint.TABLE_SCHEMA = RefKeyTable.TABLE_SCHEMA AND RefConstraint.TABLE_NAME = RefKeyTable.TABLE_NAME
323 | LEFT JOIN constraint_columns_agg_cte AS KeyCols
324 | ON TC.CONSTRAINT_CATALOG = KeyCols.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = KeyCols.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = KeyCols.CONSTRAINT_NAME
325 | LEFT JOIN constraint_columns_agg_cte AS RefKeyCols
326 | ON RC.UNIQUE_CONSTRAINT_CATALOG = RefKeyCols.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefKeyCols.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefKeyCols.CONSTRAINT_NAME AND TC.CONSTRAINT_TYPE = 'FOREIGN KEY'
327 | WHERE TC.TABLE_SCHEMA = 'public'
328 | AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE TC.TABLE_SCHEMA = TI.TABLE_SCHEMA AND TC.TABLE_NAME = TI.TABLE_NAME)
329 | GROUP BY TC.TABLE_SCHEMA, TC.TABLE_NAME
330 | ),
331 |
332 | index_key_columns_agg_cte AS (
333 | SELECT
334 | TABLE_CATALOG,
335 | TABLE_SCHEMA,
336 | TABLE_NAME,
337 | INDEX_NAME,
338 | ARRAY_AGG(
339 | CONCAT(
340 | '{"column_name":"', COALESCE(REPLACE(COLUMN_NAME, '"', '\"'), ''), '",',
341 | '"ordering":"', COALESCE(REPLACE(COLUMN_ORDERING, '"', '\"'), ''), '"}'
342 | ) ORDER BY ORDINAL_POSITION
343 | ) AS key_column_json_details
344 | FROM INFORMATION_SCHEMA.INDEX_COLUMNS
345 | WHERE ORDINAL_POSITION IS NOT NULL
346 | AND TABLE_SCHEMA = 'public'
347 | GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
348 | ),
349 |
350 | index_storing_columns_agg_cte AS (
351 | SELECT
352 | TABLE_CATALOG,
353 | TABLE_SCHEMA,
354 | TABLE_NAME,
355 | INDEX_NAME,
356 | ARRAY_AGG(CONCAT('"', REPLACE(COLUMN_NAME, '"', '\"'), '"') ORDER BY COLUMN_NAME) AS storing_column_json_names
357 | FROM INFORMATION_SCHEMA.INDEX_COLUMNS
358 | WHERE ORDINAL_POSITION IS NULL
359 | AND TABLE_SCHEMA = 'public'
360 | GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
361 | ),
362 |
363 | indexes_info_cte AS (
364 | SELECT
365 | I.TABLE_SCHEMA,
366 | I.TABLE_NAME,
367 | ARRAY_AGG(
368 | CONCAT(
369 | '{',
370 | '"index_name":"', COALESCE(REPLACE(I.INDEX_NAME, '"', '\"'), ''), '",',
371 | '"index_type":"', COALESCE(REPLACE(I.INDEX_TYPE, '"', '\"'), ''), '",',
372 | '"is_unique":', CASE WHEN I.IS_UNIQUE = 'YES' THEN 'true' ELSE 'false' END, ',',
373 | '"is_null_filtered":', CASE WHEN I.IS_NULL_FILTERED = 'YES' THEN 'true' ELSE 'false' END, ',',
374 | '"interleaved_in_table":', CASE WHEN I.PARENT_TABLE_NAME IS NULL OR I.PARENT_TABLE_NAME = '' THEN 'null' ELSE CONCAT('"', REPLACE(I.PARENT_TABLE_NAME, '"', '\"'), '"') END, ',',
375 | '"index_key_columns":[', COALESCE(array_to_string(KeyIndexCols.key_column_json_details, ','), ''), '],',
376 | '"storing_columns":[', COALESCE(array_to_string(StoringIndexCols.storing_column_json_names, ','), ''), ']',
377 | '}'
378 | ) ORDER BY I.INDEX_NAME
379 | ) AS indexes_json_array_elements
380 | FROM INFORMATION_SCHEMA.INDEXES AS I
381 | LEFT JOIN index_key_columns_agg_cte AS KeyIndexCols
382 | ON I.TABLE_CATALOG = KeyIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = KeyIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = KeyIndexCols.TABLE_NAME AND I.INDEX_NAME = KeyIndexCols.INDEX_NAME
383 | LEFT JOIN index_storing_columns_agg_cte AS StoringIndexCols
384 | ON I.TABLE_CATALOG = StoringIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = StoringIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = StoringIndexCols.TABLE_NAME AND I.INDEX_NAME = StoringIndexCols.INDEX_NAME
385 | AND I.INDEX_TYPE IN ('LOCAL', 'GLOBAL')
386 | WHERE I.TABLE_SCHEMA = 'public'
387 | AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE I.TABLE_SCHEMA = TI.TABLE_SCHEMA AND I.TABLE_NAME = TI.TABLE_NAME)
388 | GROUP BY I.TABLE_SCHEMA, I.TABLE_NAME
389 | )
390 |
391 | SELECT
392 | TI.TABLE_SCHEMA AS schema_name,
393 | TI.TABLE_NAME AS object_name,
394 | CASE
395 | WHEN $2 = 'simple' THEN
396 | -- IF format is 'simple', return basic JSON
397 | CONCAT('{"name":"', COALESCE(REPLACE(TI.TABLE_NAME, '"', '\"'), ''), '"}')
398 | ELSE
399 | CONCAT(
400 | '{',
401 | '"schema_name":"', COALESCE(REPLACE(TI.TABLE_SCHEMA, '"', '\"'), ''), '",',
402 | '"object_name":"', COALESCE(REPLACE(TI.TABLE_NAME, '"', '\"'), ''), '",',
403 | '"object_type":"', COALESCE(REPLACE(TI.TABLE_TYPE, '"', '\"'), ''), '",',
404 | '"columns":[', COALESCE(array_to_string(CI.columns_json_array_elements, ','), ''), '],',
405 | '"constraints":[', COALESCE(array_to_string(CONSI.constraints_json_array_elements, ','), ''), '],',
406 | '"indexes":[', COALESCE(array_to_string(II.indexes_json_array_elements, ','), ''), ']',
407 | '}'
408 | )
409 | END AS object_details
410 | FROM table_info_cte AS TI
411 | LEFT JOIN columns_info_cte AS CI
412 | ON TI.TABLE_SCHEMA = CI.TABLE_SCHEMA AND TI.TABLE_NAME = CI.TABLE_NAME
413 | LEFT JOIN constraints_info_cte AS CONSI
414 | ON TI.TABLE_SCHEMA = CONSI.TABLE_SCHEMA AND TI.TABLE_NAME = CONSI.TABLE_NAME
415 | LEFT JOIN indexes_info_cte AS II
416 | ON TI.TABLE_SCHEMA = II.TABLE_SCHEMA AND TI.TABLE_NAME = II.TABLE_NAME
417 | ORDER BY TI.TABLE_SCHEMA, TI.TABLE_NAME`
418 |
419 | // GoogleSQL statement for listing tables
420 | const googleSQLStatement = `
421 | WITH FilterTableNames AS (
422 | SELECT DISTINCT TRIM(name) AS TABLE_NAME
423 | FROM UNNEST(IF(@table_names = '' OR @table_names IS NULL, ['%'], SPLIT(@table_names, ','))) AS name
424 | ),
425 |
426 | -- 1. Table Information
427 | table_info_cte AS (
428 | SELECT
429 | T.TABLE_SCHEMA,
430 | T.TABLE_NAME,
431 | T.TABLE_TYPE,
432 | T.PARENT_TABLE_NAME, -- For interleaved tables
433 | T.ON_DELETE_ACTION -- For interleaved tables
434 | FROM INFORMATION_SCHEMA.TABLES AS T
435 | WHERE
436 | T.TABLE_SCHEMA = ''
437 | AND T.TABLE_TYPE = 'BASE TABLE'
438 | AND (EXISTS (SELECT 1 FROM FilterTableNames WHERE FilterTableNames.TABLE_NAME = '%') OR T.TABLE_NAME IN (SELECT TABLE_NAME FROM FilterTableNames))
439 | ),
440 |
441 | -- 2. Column Information (with JSON string for each column)
442 | columns_info_cte AS (
443 | SELECT
444 | C.TABLE_SCHEMA,
445 | C.TABLE_NAME,
446 | ARRAY_AGG(
447 | CONCAT(
448 | '{',
449 | '"column_name":"', IFNULL(C.COLUMN_NAME, ''), '",',
450 | '"data_type":"', IFNULL(C.SPANNER_TYPE, ''), '",',
451 | '"ordinal_position":', CAST(C.ORDINAL_POSITION AS STRING), ',',
452 | '"is_not_nullable":', IF(C.IS_NULLABLE = 'NO', 'true', 'false'), ',',
453 | '"column_default":', IF(C.COLUMN_DEFAULT IS NULL, 'null', CONCAT('"', C.COLUMN_DEFAULT, '"')),
454 | '}'
455 | ) ORDER BY C.ORDINAL_POSITION
456 | ) AS columns_json_array_elements
457 | FROM INFORMATION_SCHEMA.COLUMNS AS C
458 | WHERE EXISTS (SELECT 1 FROM table_info_cte TI WHERE C.TABLE_SCHEMA = TI.TABLE_SCHEMA AND C.TABLE_NAME = TI.TABLE_NAME)
459 | GROUP BY C.TABLE_SCHEMA, C.TABLE_NAME
460 | ),
461 |
462 | -- Helper CTE for aggregating constraint columns
463 | constraint_columns_agg_cte AS (
464 | SELECT
465 | CONSTRAINT_CATALOG,
466 | CONSTRAINT_SCHEMA,
467 | CONSTRAINT_NAME,
468 | ARRAY_AGG(REPLACE(COLUMN_NAME, '"', '\"') ORDER BY ORDINAL_POSITION) AS column_names_json_list
469 | FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
470 | GROUP BY CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME
471 | ),
472 |
473 | -- 3. Constraint Information (with JSON string for each constraint)
474 | constraints_info_cte AS (
475 | SELECT
476 | TC.TABLE_SCHEMA,
477 | TC.TABLE_NAME,
478 | ARRAY_AGG(
479 | CONCAT(
480 | '{',
481 | '"constraint_name":"', IFNULL(TC.CONSTRAINT_NAME, ''), '",',
482 | '"constraint_type":"', IFNULL(TC.CONSTRAINT_TYPE, ''), '",',
483 | '"constraint_definition":',
484 | CASE TC.CONSTRAINT_TYPE
485 | WHEN 'CHECK' THEN IF(CC.CHECK_CLAUSE IS NULL, 'null', CONCAT('"', CC.CHECK_CLAUSE, '"'))
486 | WHEN 'PRIMARY KEY' THEN CONCAT('"', 'PRIMARY KEY (', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ', '), ')', '"')
487 | WHEN 'UNIQUE' THEN CONCAT('"', 'UNIQUE (', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ', '), ')', '"')
488 | WHEN 'FOREIGN KEY' THEN CONCAT('"', 'FOREIGN KEY (', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ', '), ') REFERENCES ',
489 | IFNULL(RefKeyTable.TABLE_NAME, ''),
490 | ' (', ARRAY_TO_STRING(COALESCE(RefKeyCols.column_names_json_list, []), ', '), ')', '"')
491 | ELSE 'null'
492 | END, ',',
493 | '"constraint_columns":["', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ','), '"],',
494 | '"foreign_key_referenced_table":', IF(RefKeyTable.TABLE_NAME IS NULL, 'null', CONCAT('"', RefKeyTable.TABLE_NAME, '"')), ',',
495 | '"foreign_key_referenced_columns":["', ARRAY_TO_STRING(COALESCE(RefKeyCols.column_names_json_list, []), ','), '"]',
496 | '}'
497 | ) ORDER BY TC.CONSTRAINT_NAME
498 | ) AS constraints_json_array_elements
499 | FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC
500 | LEFT JOIN INFORMATION_SCHEMA.CHECK_CONSTRAINTS AS CC
501 | ON TC.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
502 | LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS RC
503 | ON TC.CONSTRAINT_CATALOG = RC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = RC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = RC.CONSTRAINT_NAME
504 | LEFT JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS RefConstraint
505 | ON RC.UNIQUE_CONSTRAINT_CATALOG = RefConstraint.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefConstraint.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefConstraint.CONSTRAINT_NAME
506 | LEFT JOIN INFORMATION_SCHEMA.TABLES AS RefKeyTable
507 | ON RefConstraint.TABLE_CATALOG = RefKeyTable.TABLE_CATALOG AND RefConstraint.TABLE_SCHEMA = RefKeyTable.TABLE_SCHEMA AND RefConstraint.TABLE_NAME = RefKeyTable.TABLE_NAME
508 | LEFT JOIN constraint_columns_agg_cte AS KeyCols
509 | ON TC.CONSTRAINT_CATALOG = KeyCols.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = KeyCols.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = KeyCols.CONSTRAINT_NAME
510 | LEFT JOIN constraint_columns_agg_cte AS RefKeyCols
511 | ON RC.UNIQUE_CONSTRAINT_CATALOG = RefKeyCols.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefKeyCols.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefKeyCols.CONSTRAINT_NAME AND TC.CONSTRAINT_TYPE = 'FOREIGN KEY'
512 | WHERE EXISTS (SELECT 1 FROM table_info_cte TI WHERE TC.TABLE_SCHEMA = TI.TABLE_SCHEMA AND TC.TABLE_NAME = TI.TABLE_NAME)
513 | GROUP BY TC.TABLE_SCHEMA, TC.TABLE_NAME
514 | ),
515 |
516 | -- Helper CTE for aggregating index key columns (as JSON strings)
517 | index_key_columns_agg_cte AS (
518 | SELECT
519 | TABLE_CATALOG,
520 | TABLE_SCHEMA,
521 | TABLE_NAME,
522 | INDEX_NAME,
523 | ARRAY_AGG(
524 | CONCAT(
525 | '{"column_name":"', IFNULL(COLUMN_NAME, ''), '",',
526 | '"ordering":"', IFNULL(COLUMN_ORDERING, ''), '"}'
527 | ) ORDER BY ORDINAL_POSITION
528 | ) AS key_column_json_details
529 | FROM INFORMATION_SCHEMA.INDEX_COLUMNS
530 | WHERE ORDINAL_POSITION IS NOT NULL -- Key columns
531 | GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
532 | ),
533 |
534 | -- Helper CTE for aggregating index storing columns (as JSON strings)
535 | index_storing_columns_agg_cte AS (
536 | SELECT
537 | TABLE_CATALOG,
538 | TABLE_SCHEMA,
539 | TABLE_NAME,
540 | INDEX_NAME,
541 | ARRAY_AGG(CONCAT('"', COLUMN_NAME, '"') ORDER BY COLUMN_NAME) AS storing_column_json_names
542 | FROM INFORMATION_SCHEMA.INDEX_COLUMNS
543 | WHERE ORDINAL_POSITION IS NULL -- Storing columns
544 | GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
545 | ),
546 |
547 | -- 4. Index Information (with JSON string for each index)
548 | indexes_info_cte AS (
549 | SELECT
550 | I.TABLE_SCHEMA,
551 | I.TABLE_NAME,
552 | ARRAY_AGG(
553 | CONCAT(
554 | '{',
555 | '"index_name":"', IFNULL(I.INDEX_NAME, ''), '",',
556 | '"index_type":"', IFNULL(I.INDEX_TYPE, ''), '",',
557 | '"is_unique":', IF(I.IS_UNIQUE, 'true', 'false'), ',',
558 | '"is_null_filtered":', IF(I.IS_NULL_FILTERED, 'true', 'false'), ',',
559 | '"interleaved_in_table":', IF(I.PARENT_TABLE_NAME IS NULL, 'null', CONCAT('"', I.PARENT_TABLE_NAME, '"')), ',',
560 | '"index_key_columns":[', ARRAY_TO_STRING(COALESCE(KeyIndexCols.key_column_json_details, []), ','), '],',
561 | '"storing_columns":[', ARRAY_TO_STRING(COALESCE(StoringIndexCols.storing_column_json_names, []), ','), ']',
562 | '}'
563 | ) ORDER BY I.INDEX_NAME
564 | ) AS indexes_json_array_elements
565 | FROM INFORMATION_SCHEMA.INDEXES AS I
566 | LEFT JOIN index_key_columns_agg_cte AS KeyIndexCols
567 | ON I.TABLE_CATALOG = KeyIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = KeyIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = KeyIndexCols.TABLE_NAME AND I.INDEX_NAME = KeyIndexCols.INDEX_NAME
568 | LEFT JOIN index_storing_columns_agg_cte AS StoringIndexCols
569 | ON I.TABLE_CATALOG = StoringIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = StoringIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = StoringIndexCols.TABLE_NAME AND I.INDEX_NAME = StoringIndexCols.INDEX_NAME AND I.INDEX_TYPE = 'INDEX'
570 | WHERE EXISTS (SELECT 1 FROM table_info_cte TI WHERE I.TABLE_SCHEMA = TI.TABLE_SCHEMA AND I.TABLE_NAME = TI.TABLE_NAME)
571 | GROUP BY I.TABLE_SCHEMA, I.TABLE_NAME
572 | )
573 |
574 | -- Final SELECT to build the JSON output
575 | SELECT
576 | TI.TABLE_SCHEMA AS schema_name,
577 | TI.TABLE_NAME AS object_name,
578 | CASE
579 | WHEN @output_format = 'simple' THEN
580 | -- IF format is 'simple', return basic JSON
581 | CONCAT('{"name":"', IFNULL(REPLACE(TI.TABLE_NAME, '"', '\"'), ''), '"}')
582 | ELSE
583 | CONCAT(
584 | '{',
585 | '"schema_name":"', IFNULL(TI.TABLE_SCHEMA, ''), '",',
586 | '"object_name":"', IFNULL(TI.TABLE_NAME, ''), '",',
587 | '"object_type":"', IFNULL(TI.TABLE_TYPE, ''), '",',
588 | '"columns":[', ARRAY_TO_STRING(COALESCE(CI.columns_json_array_elements, []), ','), '],',
589 | '"constraints":[', ARRAY_TO_STRING(COALESCE(CONSI.constraints_json_array_elements, []), ','), '],',
590 | '"indexes":[', ARRAY_TO_STRING(COALESCE(II.indexes_json_array_elements, []), ','), ']',
591 | '}'
592 | )
593 | END AS object_details
594 | FROM table_info_cte AS TI
595 | LEFT JOIN columns_info_cte AS CI
596 | ON TI.TABLE_SCHEMA = CI.TABLE_SCHEMA AND TI.TABLE_NAME = CI.TABLE_NAME
597 | LEFT JOIN constraints_info_cte AS CONSI
598 | ON TI.TABLE_SCHEMA = CONSI.TABLE_SCHEMA AND TI.TABLE_NAME = CONSI.TABLE_NAME
599 | LEFT JOIN indexes_info_cte AS II
600 | ON TI.TABLE_SCHEMA = II.TABLE_SCHEMA AND TI.TABLE_NAME = II.TABLE_NAME
601 | ORDER BY TI.TABLE_SCHEMA, TI.TABLE_NAME`
602 |
```
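A design point worth noting in `Invoke` above: a Spanner database created with the PostgreSQL dialect exposes positional query parameters as `p1`, `p2`, and so on, while a GoogleSQL-dialect database uses named parameters such as `@table_names`. The helper below is a hypothetical sketch (the name `buildListTablesStmt` is not part of the package) that isolates just that mapping using the public `cloud.google.com/go/spanner` API.

```go
package main

import (
	"fmt"
	"strings"

	"cloud.google.com/go/spanner"
)

// buildListTablesStmt binds the same two inputs either positionally
// (PostgreSQL dialect: $1/$2 are supplied as p1/p2) or by name (GoogleSQL).
func buildListTablesStmt(dialect, sql, tableNames, outputFormat string) (spanner.Statement, error) {
	switch strings.ToLower(dialect) {
	case "postgresql":
		return spanner.Statement{
			SQL:    sql,
			Params: map[string]interface{}{"p1": tableNames, "p2": outputFormat},
		}, nil
	case "googlesql":
		return spanner.Statement{
			SQL:    sql,
			Params: map[string]interface{}{"table_names": tableNames, "output_format": outputFormat},
		}, nil
	default:
		return spanner.Statement{}, fmt.Errorf("unsupported dialect: %s", dialect)
	}
}

func main() {
	// "SELECT 1" stands in for the full googleSQLStatement above.
	stmt, err := buildListTablesStmt("googlesql", "SELECT 1", "Albums,Singers", "simple")
	if err != nil {
		panic(err)
	}
	fmt.Println(stmt.Params)
}
```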
--------------------------------------------------------------------------------
/tests/postgres/postgres_integration_test.go:
--------------------------------------------------------------------------------
```go
1 | // Copyright 2024 Google LLC
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package postgres
16 |
17 | import (
18 | "bytes"
19 | "context"
20 | "encoding/json"
21 | "fmt"
22 | "io"
23 | "net/http"
24 | "net/url"
25 | "os"
26 | "reflect"
27 | "regexp"
28 | "sort"
29 | "strings"
30 | "sync"
31 | "testing"
32 | "time"
33 |
34 | "github.com/google/go-cmp/cmp"
35 | "github.com/google/uuid"
36 | "github.com/googleapis/genai-toolbox/internal/testutils"
37 | "github.com/googleapis/genai-toolbox/tests"
38 | "github.com/jackc/pgx/v5/pgxpool"
39 | )
40 |
41 | var (
42 | PostgresSourceKind = "postgres"
43 | PostgresToolKind = "postgres-sql"
44 | PostgresListTablesToolKind = "postgres-list-tables"
45 | PostgresListActiveQueriesToolKind = "postgres-list-active-queries"
46 | PostgresListInstalledExtensionsToolKind = "postgres-list-installed-extensions"
47 | PostgresListAvailableExtensionsToolKind = "postgres-list-available-extensions"
48 | PostgresListViewsToolKind = "postgres-list-views"
49 | PostgresDatabase = os.Getenv("POSTGRES_DATABASE")
50 | PostgresHost = os.Getenv("POSTGRES_HOST")
51 | PostgresPort = os.Getenv("POSTGRES_PORT")
52 | PostgresUser = os.Getenv("POSTGRES_USER")
53 | PostgresPass = os.Getenv("POSTGRES_PASS")
54 | )
55 |
56 | func getPostgresVars(t *testing.T) map[string]any {
57 | switch "" {
58 | case PostgresDatabase:
59 | t.Fatal("'POSTGRES_DATABASE' not set")
60 | case PostgresHost:
61 | t.Fatal("'POSTGRES_HOST' not set")
62 | case PostgresPort:
63 | t.Fatal("'POSTGRES_PORT' not set")
64 | case PostgresUser:
65 | t.Fatal("'POSTGRES_USER' not set")
66 | case PostgresPass:
67 | t.Fatal("'POSTGRES_PASS' not set")
68 | }
69 |
70 | return map[string]any{
71 | "kind": PostgresSourceKind,
72 | "host": PostgresHost,
73 | "port": PostgresPort,
74 | "database": PostgresDatabase,
75 | "user": PostgresUser,
76 | "password": PostgresPass,
77 | }
78 | }
79 |
80 | func addPrebuiltToolConfig(t *testing.T, config map[string]any) map[string]any {
81 | tools, ok := config["tools"].(map[string]any)
82 | if !ok {
83 | t.Fatalf("unable to get tools from config")
84 | }
85 | tools["list_tables"] = map[string]any{
86 | "kind": PostgresListTablesToolKind,
87 | "source": "my-instance",
88 | "description": "Lists tables in the database.",
89 | }
90 | tools["list_active_queries"] = map[string]any{
91 | "kind": PostgresListActiveQueriesToolKind,
92 | "source": "my-instance",
93 | "description": "Lists active queries in the database.",
94 | }
95 |
96 | tools["list_installed_extensions"] = map[string]any{
97 | "kind": PostgresListInstalledExtensionsToolKind,
98 | "source": "my-instance",
99 | "description": "Lists installed extensions in the database.",
100 | }
101 |
102 | tools["list_available_extensions"] = map[string]any{
103 | "kind": PostgresListAvailableExtensionsToolKind,
104 | "source": "my-instance",
105 | "description": "Lists available extensions in the database.",
106 | }
107 |
108 | tools["list_views"] = map[string]any{
109 | "kind": PostgresListViewsToolKind,
110 | "source": "my-instance",
111 | }
112 |
113 | config["tools"] = tools
114 | return config
115 | }
116 |
117 | // Copied over from postgres.go
118 | func initPostgresConnectionPool(host, port, user, pass, dbname string) (*pgxpool.Pool, error) {
119 | // urlExample := "postgres://username:password@localhost:5432/database_name"
120 | url := &url.URL{
121 | Scheme: "postgres",
122 | User: url.UserPassword(user, pass),
123 | Host: fmt.Sprintf("%s:%s", host, port),
124 | Path: dbname,
125 | }
126 | pool, err := pgxpool.New(context.Background(), url.String())
127 | if err != nil {
128 | return nil, fmt.Errorf("Unable to create connection pool: %w", err)
129 | }
130 |
131 | return pool, nil
132 | }
133 |
134 | func TestPostgres(t *testing.T) {
135 | sourceConfig := getPostgresVars(t)
136 | ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
137 | defer cancel()
138 |
139 | var args []string
140 |
141 | pool, err := initPostgresConnectionPool(PostgresHost, PostgresPort, PostgresUser, PostgresPass, PostgresDatabase)
142 | if err != nil {
143 | t.Fatalf("unable to create postgres connection pool: %s", err)
144 | }
145 |
146 | // cleanup test environment
147 | tests.CleanupPostgresTables(t, ctx, pool)
148 |
149 | // create table name with UUID
150 | tableNameParam := "param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
151 | tableNameAuth := "auth_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
152 | tableNameTemplateParam := "template_param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
153 |
154 | // set up data for param tool
155 | createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := tests.GetPostgresSQLParamToolInfo(tableNameParam)
156 | teardownTable1 := tests.SetupPostgresSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
157 | defer teardownTable1(t)
158 |
159 | // set up data for auth tool
160 | createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := tests.GetPostgresSQLAuthToolInfo(tableNameAuth)
161 | teardownTable2 := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
162 | defer teardownTable2(t)
163 |
164 | // Write config into a file and pass it to command
165 | toolsFile := tests.GetToolsConfig(sourceConfig, PostgresToolKind, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, authToolStmt)
166 | toolsFile = tests.AddExecuteSqlConfig(t, toolsFile, "postgres-execute-sql")
167 | tmplSelectCombined, tmplSelectFilterCombined := tests.GetPostgresSQLTmplToolStatement()
168 | toolsFile = tests.AddTemplateParamConfig(t, toolsFile, PostgresToolKind, tmplSelectCombined, tmplSelectFilterCombined, "")
169 |
170 | toolsFile = addPrebuiltToolConfig(t, toolsFile)
171 |
172 | cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
173 | if err != nil {
174 | t.Fatalf("command initialization returned an error: %s", err)
175 | }
176 | defer cleanup()
177 |
178 | waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
179 | defer cancel()
180 | out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
181 | if err != nil {
182 | t.Logf("toolbox command logs: \n%s", out)
183 | t.Fatalf("toolbox didn't start successfully: %s", err)
184 | }
185 |
186 | // Get configs for tests
187 | select1Want, mcpMyFailToolWant, createTableStatement, mcpSelect1Want := tests.GetPostgresWants()
188 |
189 | // Run tests
190 | tests.RunToolGetTest(t)
191 | tests.RunToolInvokeTest(t, select1Want)
192 | tests.RunMCPToolCallMethod(t, mcpMyFailToolWant, mcpSelect1Want)
193 | tests.RunExecuteSqlToolInvokeTest(t, createTableStatement, select1Want)
194 | tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam)
195 |
196 | // Run specific Postgres tool tests
197 | runPostgresListTablesTest(t, tableNameParam, tableNameAuth)
198 | runPostgresListViewsTest(t, ctx, pool, tableNameParam)
199 | runPostgresListActiveQueriesTest(t, ctx, pool)
200 | runPostgresListAvailableExtensionsTest(t)
201 | runPostgresListInstalledExtensionsTest(t)
202 | }
203 |
204 | func runPostgresListTablesTest(t *testing.T, tableNameParam, tableNameAuth string) {
205 | // TableNameParam columns to construct want
206 | paramTableColumns := fmt.Sprintf(`[
207 | {"data_type": "integer", "column_name": "id", "column_default": "nextval('%s_id_seq'::regclass)", "is_not_nullable": true, "ordinal_position": 1, "column_comment": null},
208 | {"data_type": "text", "column_name": "name", "column_default": null, "is_not_nullable": false, "ordinal_position": 2, "column_comment": null}
209 | ]`, tableNameParam)
210 |
211 | // TableNameAuth columns to construct want
212 | authTableColumns := fmt.Sprintf(`[
213 | {"data_type": "integer", "column_name": "id", "column_default": "nextval('%s_id_seq'::regclass)", "is_not_nullable": true, "ordinal_position": 1, "column_comment": null},
214 | {"data_type": "text", "column_name": "name", "column_default": null, "is_not_nullable": false, "ordinal_position": 2, "column_comment": null},
215 | {"data_type": "text", "column_name": "email", "column_default": null, "is_not_nullable": false, "ordinal_position": 3, "column_comment": null}
216 | ]`, tableNameAuth)
217 |
218 | const (
219 | // Template to construct detailed output want
220 | detailedObjectTemplate = `{
221 | "object_name": "%[1]s", "schema_name": "public",
222 | "object_details": {
223 | "owner": "%[3]s", "comment": null,
224 | "indexes": [{"is_primary": true, "is_unique": true, "index_name": "%[1]s_pkey", "index_method": "btree", "index_columns": ["id"], "index_definition": "CREATE UNIQUE INDEX %[1]s_pkey ON public.%[1]s USING btree (id)"}],
225 | "triggers": [], "columns": %[2]s, "object_name": "%[1]s", "object_type": "TABLE", "schema_name": "public",
226 | "constraints": [{"constraint_name": "%[1]s_pkey", "constraint_type": "PRIMARY KEY", "constraint_columns": ["id"], "constraint_definition": "PRIMARY KEY (id)", "foreign_key_referenced_table": null, "foreign_key_referenced_columns": null}]
227 | }
228 | }`
229 |
230 | // Template to construct simple output want
231 | simpleObjectTemplate = `{"object_name":"%s", "schema_name":"public", "object_details":{"name":"%s"}}`
232 | )
233 |
234 | // Helper to build json for detailed want
235 | getDetailedWant := func(tableName, columnJSON string) string {
236 | return fmt.Sprintf(detailedObjectTemplate, tableName, columnJSON, PostgresUser)
237 | }
238 |
239 | // Helper to build template for simple want
240 | getSimpleWant := func(tableName string) string {
241 | return fmt.Sprintf(simpleObjectTemplate, tableName, tableName)
242 | }
243 |
244 | invokeTcs := []struct {
245 | name string
246 | api string
247 | requestBody io.Reader
248 | wantStatusCode int
249 | want string
250 | isAllTables bool
251 | }{
252 | {
253 | name: "invoke list_tables all tables detailed output",
254 | api: "http://127.0.0.1:5000/api/tool/list_tables/invoke",
255 | requestBody: bytes.NewBuffer([]byte(`{"table_names": ""}`)),
256 | wantStatusCode: http.StatusOK,
257 | want: fmt.Sprintf("[%s,%s]", getDetailedWant(tableNameAuth, authTableColumns), getDetailedWant(tableNameParam, paramTableColumns)),
258 | isAllTables: true,
259 | },
260 | {
261 | name: "invoke list_tables all tables simple output",
262 | api: "http://127.0.0.1:5000/api/tool/list_tables/invoke",
263 | requestBody: bytes.NewBuffer([]byte(`{"table_names": "", "output_format": "simple"}`)),
264 | wantStatusCode: http.StatusOK,
265 | want: fmt.Sprintf("[%s,%s]", getSimpleWant(tableNameAuth), getSimpleWant(tableNameParam)),
266 | isAllTables: true,
267 | },
268 | {
269 | name: "invoke list_tables detailed output",
270 | api: "http://127.0.0.1:5000/api/tool/list_tables/invoke",
271 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"table_names": "%s"}`, tableNameAuth))),
272 | wantStatusCode: http.StatusOK,
273 | want: fmt.Sprintf("[%s]", getDetailedWant(tableNameAuth, authTableColumns)),
274 | },
275 | {
276 | name: "invoke list_tables simple output",
277 | api: "http://127.0.0.1:5000/api/tool/list_tables/invoke",
278 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"table_names": "%s", "output_format": "simple"}`, tableNameAuth))),
279 | wantStatusCode: http.StatusOK,
280 | want: fmt.Sprintf("[%s]", getSimpleWant(tableNameAuth)),
281 | },
282 | {
283 | name: "invoke list_tables with invalid output format",
284 | api: "http://127.0.0.1:5000/api/tool/list_tables/invoke",
285 | requestBody: bytes.NewBuffer([]byte(`{"table_names": "", "output_format": "abcd"}`)),
286 | wantStatusCode: http.StatusBadRequest,
287 | },
288 | {
289 | name: "invoke list_tables with malformed table_names parameter",
290 | api: "http://127.0.0.1:5000/api/tool/list_tables/invoke",
291 | requestBody: bytes.NewBuffer([]byte(`{"table_names": 12345, "output_format": "detailed"}`)),
292 | wantStatusCode: http.StatusBadRequest,
293 | },
294 | {
295 | name: "invoke list_tables with multiple table names",
296 | api: "http://127.0.0.1:5000/api/tool/list_tables/invoke",
297 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"table_names": "%s,%s"}`, tableNameParam, tableNameAuth))),
298 | wantStatusCode: http.StatusOK,
299 | want: fmt.Sprintf("[%s,%s]", getDetailedWant(tableNameAuth, authTableColumns), getDetailedWant(tableNameParam, paramTableColumns)),
300 | },
301 | {
302 | name: "invoke list_tables with non-existent table",
303 | api: "http://127.0.0.1:5000/api/tool/list_tables/invoke",
304 | requestBody: bytes.NewBuffer([]byte(`{"table_names": "non_existent_table"}`)),
305 | wantStatusCode: http.StatusOK,
306 | want: `null`,
307 | },
308 | {
309 | name: "invoke list_tables with one existing and one non-existent table",
310 | api: "http://127.0.0.1:5000/api/tool/list_tables/invoke",
311 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"table_names": "%s,non_existent_table"}`, tableNameParam))),
312 | wantStatusCode: http.StatusOK,
313 | want: fmt.Sprintf("[%s]", getDetailedWant(tableNameParam, paramTableColumns)),
314 | },
315 | }
316 | for _, tc := range invokeTcs {
317 | t.Run(tc.name, func(t *testing.T) {
318 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
319 | if err != nil {
320 | t.Fatalf("unable to create request: %s", err)
321 | }
322 | req.Header.Add("Content-type", "application/json")
323 | resp, err := http.DefaultClient.Do(req)
324 | if err != nil {
325 | t.Fatalf("unable to send request: %s", err)
326 | }
327 | defer resp.Body.Close()
328 |
329 | if resp.StatusCode != tc.wantStatusCode {
330 | bodyBytes, _ := io.ReadAll(resp.Body)
331 | t.Fatalf("unexpected status code: got %d, want %d, body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
332 | }
333 |
334 | if tc.wantStatusCode == http.StatusOK {
335 | var bodyWrapper map[string]json.RawMessage
336 | respBytes, err := io.ReadAll(resp.Body)
337 | if err != nil {
338 | t.Fatalf("error reading response body: %s", err)
339 | }
340 |
341 | if err := json.Unmarshal(respBytes, &bodyWrapper); err != nil {
342 | t.Fatalf("error parsing response wrapper: %s, body: %s", err, string(respBytes))
343 | }
344 |
345 | resultJSON, ok := bodyWrapper["result"]
346 | if !ok {
347 | t.Fatal("unable to find 'result' in response body")
348 | }
349 |
350 | var resultString string
351 | if err := json.Unmarshal(resultJSON, &resultString); err != nil {
352 | t.Fatalf("'result' is not a JSON-encoded string: %s", err)
353 | }
354 |
355 | var got, want []any
356 |
357 | if err := json.Unmarshal([]byte(resultString), &got); err != nil {
358 | t.Fatalf("failed to unmarshal actual result string: %v", err)
359 | }
360 | if err := json.Unmarshal([]byte(tc.want), &want); err != nil {
361 | t.Fatalf("failed to unmarshal expected want string: %v", err)
362 | }
363 |
364 | // Checking only the default public schema where the test tables are created to avoid brittle tests.
365 | if tc.isAllTables {
366 | var filteredGot []any
367 | for _, item := range got {
368 | if tableMap, ok := item.(map[string]interface{}); ok {
369 | if schema, ok := tableMap["schema_name"]; ok && schema == "public" {
370 | filteredGot = append(filteredGot, item)
371 | }
372 | }
373 | }
374 | got = filteredGot
375 | }
376 |
377 | sort.SliceStable(got, func(i, j int) bool {
378 | return fmt.Sprintf("%v", got[i]) < fmt.Sprintf("%v", got[j])
379 | })
380 | sort.SliceStable(want, func(i, j int) bool {
381 | return fmt.Sprintf("%v", want[i]) < fmt.Sprintf("%v", want[j])
382 | })
383 |
384 | if !reflect.DeepEqual(got, want) {
385 | t.Errorf("Unexpected result: got %#v, want: %#v", got, want)
386 | }
387 | }
388 | })
389 | }
390 | }
391 |
392 | func runPostgresListActiveQueriesTest(t *testing.T, ctx context.Context, pool *pgxpool.Pool) {
393 | type queryListDetails struct {
394 | ProcessId any `json:"pid"`
395 | User string `json:"user"`
396 | Datname string `json:"datname"`
397 | ApplicationName string `json:"application_name"`
398 | ClientAddress string `json:"client_addr"`
399 | State string `json:"state"`
400 | WaitEventType string `json:"wait_event_type"`
401 | WaitEvent string `json:"wait_event"`
402 | BackendStart any `json:"backend_start"`
403 | TransactionStart any `json:"xact_start"`
404 | QueryStart any `json:"query_start"`
405 | QueryDuration any `json:"query_duration"`
406 | Query string `json:"query"`
407 | }
408 |
409 | singleQueryWanted := queryListDetails{
410 | ProcessId: any(nil),
411 | User: "",
412 | Datname: "",
413 | ApplicationName: "",
414 | ClientAddress: "",
415 | State: "",
416 | WaitEventType: "",
417 | WaitEvent: "",
418 | BackendStart: any(nil),
419 | TransactionStart: any(nil),
420 | QueryStart: any(nil),
421 | QueryDuration: any(nil),
422 | Query: "SELECT pg_sleep(10);",
423 | }
424 |
425 | invokeTcs := []struct {
426 | name string
427 | requestBody io.Reader
428 | clientSleepSecs int
429 | waitSecsBeforeCheck int
430 | wantStatusCode int
431 | want any
432 | }{
433 | // exclude background monitoring apps such as "wal_uploader"
434 | {
435 | name: "invoke list_active_queries when the system is idle",
436 | requestBody: bytes.NewBufferString(`{"exclude_application_names": "wal_uploader"}`),
437 | clientSleepSecs: 0,
438 | waitSecsBeforeCheck: 0,
439 | wantStatusCode: http.StatusOK,
440 | want: []queryListDetails(nil),
441 | },
442 | {
443 | name: "invoke list_active_queries when there is 1 ongoing but lower than the threshold",
444 | requestBody: bytes.NewBufferString(`{"min_duration": "100 seconds", "exclude_application_names": "wal_uploader"}`),
445 | clientSleepSecs: 1,
446 | waitSecsBeforeCheck: 1,
447 | wantStatusCode: http.StatusOK,
448 | want: []queryListDetails(nil),
449 | },
450 | {
451 | name: "invoke list_active_queries when 1 ongoing query should show up",
452 | requestBody: bytes.NewBufferString(`{"min_duration": "1 seconds", "exclude_application_names": "wal_uploader"}`),
453 | clientSleepSecs: 10,
454 | waitSecsBeforeCheck: 5,
455 | wantStatusCode: http.StatusOK,
456 | want: []queryListDetails{singleQueryWanted},
457 | },
458 | }
459 |
460 | var wg sync.WaitGroup
461 | for _, tc := range invokeTcs {
462 | t.Run(tc.name, func(t *testing.T) {
463 | if tc.clientSleepSecs > 0 {
464 | wg.Add(1)
465 |
466 | go func() {
467 | defer wg.Done()
468 |
469 | err := pool.Ping(ctx)
470 | if err != nil {
471 | t.Errorf("unable to connect to test database: %s", err)
472 | return
473 | }
474 | _, err = pool.Exec(ctx, fmt.Sprintf("SELECT pg_sleep(%d);", tc.clientSleepSecs))
475 | if err != nil {
476 | t.Errorf("Executing 'SELECT pg_sleep' failed: %s", err)
477 | }
478 | }()
479 | }
480 |
481 | if tc.waitSecsBeforeCheck > 0 {
482 | time.Sleep(time.Duration(tc.waitSecsBeforeCheck) * time.Second)
483 | }
484 |
485 | const api = "http://127.0.0.1:5000/api/tool/list_active_queries/invoke"
486 | req, err := http.NewRequest(http.MethodPost, api, tc.requestBody)
487 | if err != nil {
488 | t.Fatalf("unable to create request: %v", err)
489 | }
490 | req.Header.Add("Content-type", "application/json")
491 |
492 | resp, err := http.DefaultClient.Do(req)
493 | if err != nil {
494 | t.Fatalf("unable to send request: %v", err)
495 | }
496 | defer resp.Body.Close()
497 |
498 | if resp.StatusCode != tc.wantStatusCode {
499 | body, _ := io.ReadAll(resp.Body)
500 | t.Fatalf("wrong status code: got %d, want %d, body: %s", resp.StatusCode, tc.wantStatusCode, string(body))
501 | }
502 | if tc.wantStatusCode != http.StatusOK {
503 | return
504 | }
505 |
506 | var bodyWrapper struct {
507 | Result json.RawMessage `json:"result"`
508 | }
509 | if err := json.NewDecoder(resp.Body).Decode(&bodyWrapper); err != nil {
510 | t.Fatalf("error decoding response wrapper: %v", err)
511 | }
512 |
513 | var resultString string
514 | if err := json.Unmarshal(bodyWrapper.Result, &resultString); err != nil {
515 | resultString = string(bodyWrapper.Result)
516 | }
517 |
518 | var got any
519 | var details []queryListDetails
520 | if err := json.Unmarshal([]byte(resultString), &details); err != nil {
521 | t.Fatalf("failed to unmarshal nested ObjectDetails string: %v", err)
522 | }
523 | got = details
524 |
525 | if diff := cmp.Diff(tc.want, got, cmp.Comparer(func(a, b queryListDetails) bool {
526 | return a.Query == b.Query
527 | })); diff != "" {
528 | t.Errorf("Unexpected result: got %#v, want: %#v", got, tc.want)
529 | }
530 | })
531 | }
532 | wg.Wait()
533 | }
534 |
535 | func setUpPostgresViews(t *testing.T, ctx context.Context, pool *pgxpool.Pool, viewName, tableName string) func() {
536 | createView := fmt.Sprintf("CREATE VIEW %s AS SELECT name FROM %s", viewName, tableName)
537 | _, err := pool.Exec(ctx, createView)
538 | if err != nil {
539 | t.Fatalf("failed to create view: %v", err)
540 | }
541 | return func() {
542 | dropView := fmt.Sprintf("DROP VIEW %s", viewName)
543 | _, err := pool.Exec(ctx, dropView)
544 | if err != nil {
545 | t.Fatalf("failed to drop view: %v", err)
546 | }
547 | }
548 | }
549 |
550 | func runPostgresListViewsTest(t *testing.T, ctx context.Context, pool *pgxpool.Pool, tableName string) {
551 | viewName1 := "test_view_1" + strings.ReplaceAll(uuid.New().String(), "-", "")
552 | dropViewfunc1 := setUpPostgresViews(t, ctx, pool, viewName1, tableName)
553 | defer dropViewfunc1()
554 |
555 | invokeTcs := []struct {
556 | name string
557 | requestBody io.Reader
558 | wantStatusCode int
559 | want string
560 | }{
561 | {
562 | name: "invoke list_views with newly created view",
563 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"viewname": "%s"}`, viewName1))),
564 | wantStatusCode: http.StatusOK,
565 | want: fmt.Sprintf(`[{"schemaname":"public","viewname":"%s","viewowner":"postgres"}]`, viewName1),
566 | },
567 | {
568 | name: "invoke list_views with non-existent_view",
569 | requestBody: bytes.NewBuffer([]byte(`{"viewname": "non_existent_view"}`)),
570 | wantStatusCode: http.StatusOK,
571 | want: `null`,
572 | },
573 | }
574 | for _, tc := range invokeTcs {
575 | t.Run(tc.name, func(t *testing.T) {
576 | const api = "http://127.0.0.1:5000/api/tool/list_views/invoke"
577 | req, err := http.NewRequest(http.MethodPost, api, tc.requestBody)
578 | if err != nil {
579 | t.Fatalf("unable to create request: %v", err)
580 | }
581 | req.Header.Add("Content-type", "application/json")
582 | resp, err := http.DefaultClient.Do(req)
583 | if err != nil {
584 | t.Fatalf("unable to send request: %v", err)
585 | }
586 | defer resp.Body.Close()
587 |
588 | if resp.StatusCode != tc.wantStatusCode {
589 | body, _ := io.ReadAll(resp.Body)
590 | t.Fatalf("wrong status code: got %d, want %d, body: %s", resp.StatusCode, tc.wantStatusCode, string(body))
591 | }
592 | if tc.wantStatusCode != http.StatusOK {
593 | return
594 | }
595 |
596 | var bodyWrapper struct {
597 | Result json.RawMessage `json:"result"`
598 | }
599 | if err := json.NewDecoder(resp.Body).Decode(&bodyWrapper); err != nil {
600 | t.Fatalf("error decoding response wrapper: %v", err)
601 | }
602 |
603 | var resultString string
604 | if err := json.Unmarshal(bodyWrapper.Result, &resultString); err != nil {
605 | resultString = string(bodyWrapper.Result)
606 | }
607 |
608 | var got, want any
609 | if err := json.Unmarshal([]byte(resultString), &got); err != nil {
610 | t.Fatalf("failed to unmarshal nested result string: %v", err)
611 | }
612 | if err := json.Unmarshal([]byte(tc.want), &want); err != nil {
613 | t.Fatalf("failed to unmarshal want string: %v", err)
614 | }
615 |
616 | if diff := cmp.Diff(want, got); diff != "" {
617 | t.Errorf("Unexpected result (-want +got):\n%s", diff)
618 | }
619 | })
620 | }
621 | }
622 |
623 | func runPostgresListAvailableExtensionsTest(t *testing.T) {
624 | invokeTcs := []struct {
625 | name string
626 | api string
627 | requestBody io.Reader
628 | wantStatusCode int
629 | }{
630 | {
631 | name: "invoke list_available_extensions output",
632 | api: "http://127.0.0.1:5000/api/tool/list_available_extensions/invoke",
633 | wantStatusCode: http.StatusOK,
634 | requestBody: bytes.NewBuffer([]byte(`{}`)),
635 | },
636 | }
637 | for _, tc := range invokeTcs {
638 | t.Run(tc.name, func(t *testing.T) {
639 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
640 | if err != nil {
641 | t.Fatalf("unable to create request: %s", err)
642 | }
643 | req.Header.Add("Content-type", "application/json")
644 | resp, err := http.DefaultClient.Do(req)
645 | if err != nil {
646 | t.Fatalf("unable to send request: %s", err)
647 | }
648 | defer resp.Body.Close()
649 |
650 | if resp.StatusCode != tc.wantStatusCode {
651 | bodyBytes, _ := io.ReadAll(resp.Body)
652 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
653 | }
654 |
655 | // Intentionally not adding the output check as the output depends on the postgres instance where the functional test runs.
656 | // Adding the check will make the test flaky.
657 | })
658 | }
659 | }
660 |
661 | func runPostgresListInstalledExtensionsTest(t *testing.T) {
662 | invokeTcs := []struct {
663 | name string
664 | api string
665 | requestBody io.Reader
666 | wantStatusCode int
667 | }{
668 | {
669 | name: "invoke list_installed_extensions output",
670 | api: "http://127.0.0.1:5000/api/tool/list_installed_extensions/invoke",
671 | wantStatusCode: http.StatusOK,
672 | requestBody: bytes.NewBuffer([]byte(`{}`)),
673 | },
674 | }
675 | for _, tc := range invokeTcs {
676 | t.Run(tc.name, func(t *testing.T) {
677 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
678 | if err != nil {
679 | t.Fatalf("unable to create request: %s", err)
680 | }
681 | req.Header.Add("Content-type", "application/json")
682 | resp, err := http.DefaultClient.Do(req)
683 | if err != nil {
684 | t.Fatalf("unable to send request: %s", err)
685 | }
686 | defer resp.Body.Close()
687 |
688 | if resp.StatusCode != tc.wantStatusCode {
689 | bodyBytes, _ := io.ReadAll(resp.Body)
690 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
691 | }
692 |
693 | // Intentionally not adding the output check as the output depends on the postgres instance where the functional test runs.
694 | // Adding the check will make the test flaky.
695 | })
696 | }
697 | }
698 |
```
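One detail of the assertions above that is easy to miss: the invoke endpoint wraps its payload as `{"result": ...}`, and for these tools the `result` field is itself a JSON-encoded string, so the tests decode twice. Below is a minimal, self-contained sketch of that pattern, using a made-up response body rather than one captured from a real server.

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Illustrative body only; a real response contains the full table details.
	body := []byte(`{"result": "[{\"schema_name\":\"public\",\"object_name\":\"auth_table\"}]"}`)

	var wrapper struct {
		Result json.RawMessage `json:"result"`
	}
	if err := json.Unmarshal(body, &wrapper); err != nil {
		panic(err)
	}

	// First pass: the result is usually a JSON-encoded string...
	var inner string
	if err := json.Unmarshal(wrapper.Result, &inner); err != nil {
		// ...but fall back to the raw bytes, as the tests above do.
		inner = string(wrapper.Result)
	}

	// Second pass: decode the inner document into structured data.
	var rows []map[string]any
	if err := json.Unmarshal([]byte(inner), &rows); err != nil {
		panic(err)
	}
	fmt.Println(rows[0]["object_name"]) // auth_table
}
```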
--------------------------------------------------------------------------------
/.ci/integration.cloudbuild.yaml:
--------------------------------------------------------------------------------
```yaml
1 | # Copyright 2024 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | steps:
16 | - id: "install-dependencies"
17 | name: golang:1
18 | waitFor: ["-"]
19 | env:
20 | - "GOPATH=/gopath"
21 | volumes:
22 | - name: "go"
23 | path: "/gopath"
24 | script: |
25 | go get -d ./...
26 |
27 | - id: "compile-test-binary"
28 | name: golang:1
29 | waitFor: ["install-dependencies"]
30 | env:
31 | - "GOPATH=/gopath"
32 | volumes:
33 | - name: "go"
34 | path: "/gopath"
35 | script: |
36 | go test -c -race -cover \
37 | -coverpkg=./internal/sources/...,./internal/tools/... ./tests/...
38 | chmod +x .ci/test_with_coverage.sh
39 |
40 | - id: "cloud-sql-pg"
41 | name: golang:1
42 | waitFor: ["compile-test-binary"]
43 | entrypoint: /bin/bash
44 | env:
45 | - "GOPATH=/gopath"
46 | - "CLOUD_SQL_POSTGRES_PROJECT=$PROJECT_ID"
47 | - "CLOUD_SQL_POSTGRES_INSTANCE=$_CLOUD_SQL_POSTGRES_INSTANCE"
48 | - "CLOUD_SQL_POSTGRES_DATABASE=$_DATABASE_NAME"
49 | - "CLOUD_SQL_POSTGRES_REGION=$_REGION"
50 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
51 | secretEnv:
52 | ["CLOUD_SQL_POSTGRES_USER", "CLOUD_SQL_POSTGRES_PASS", "CLIENT_ID"]
53 | volumes:
54 | - name: "go"
55 | path: "/gopath"
56 | args:
57 | - -c
58 | - |
59 | .ci/test_with_coverage.sh \
60 | "Cloud SQL Postgres" \
61 | cloudsqlpg \
62 | postgressql \
63 | postgresexecutesql
64 |
65 | - id: "alloydb"
66 | name: golang:1
67 | waitFor: ["compile-test-binary"]
68 | entrypoint: /bin/bash
69 | env:
70 | - "GOPATH=/gopath"
71 | - "ALLOYDB_PROJECT=$PROJECT_ID"
72 | - "ALLOYDB_CLUSTER=$_ALLOYDB_POSTGRES_CLUSTER"
73 | - "ALLOYDB_INSTANCE=$_ALLOYDB_POSTGRES_INSTANCE"
74 | - "ALLOYDB_REGION=$_REGION"
75 | secretEnv: ["ALLOYDB_POSTGRES_USER"]
76 | volumes:
77 | - name: "go"
78 | path: "/gopath"
79 | args:
80 | - -c
81 | - |
82 | .ci/test_with_coverage.sh \
83 | "AlloyDB" \
84 | alloydb \
85 | alloydb
86 |
87 | - id: "alloydb-pg"
88 | name: golang:1
89 | waitFor: ["compile-test-binary"]
90 | entrypoint: /bin/bash
91 | env:
92 | - "GOPATH=/gopath"
93 | - "ALLOYDB_POSTGRES_PROJECT=$PROJECT_ID"
94 | - "ALLOYDB_POSTGRES_CLUSTER=$_ALLOYDB_POSTGRES_CLUSTER"
95 | - "ALLOYDB_POSTGRES_INSTANCE=$_ALLOYDB_POSTGRES_INSTANCE"
96 | - "ALLOYDB_POSTGRES_DATABASE=$_DATABASE_NAME"
97 | - "ALLOYDB_POSTGRES_REGION=$_REGION"
98 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
99 | secretEnv: ["ALLOYDB_POSTGRES_USER", "ALLOYDB_POSTGRES_PASS", "CLIENT_ID"]
100 | volumes:
101 | - name: "go"
102 | path: "/gopath"
103 | args:
104 | - -c
105 | - |
106 | .ci/test_with_coverage.sh \
107 | "AlloyDB Postgres" \
108 | alloydbpg \
109 | postgressql \
110 | postgresexecutesql
111 |
112 | - id: "alloydb-ai-nl"
113 | name: golang:1
114 | waitFor: ["compile-test-binary"]
115 | entrypoint: /bin/bash
116 | env:
117 | - "GOPATH=/gopath"
118 | - "ALLOYDB_AI_NL_PROJECT=$PROJECT_ID"
119 | - "ALLOYDB_AI_NL_CLUSTER=$_ALLOYDB_AI_NL_CLUSTER"
120 | - "ALLOYDB_AI_NL_INSTANCE=$_ALLOYDB_AI_NL_INSTANCE"
121 | - "ALLOYDB_AI_NL_DATABASE=$_DATABASE_NAME"
122 | - "ALLOYDB_AI_NL_REGION=$_REGION"
123 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
124 | secretEnv: ["ALLOYDB_AI_NL_USER", "ALLOYDB_AI_NL_PASS", "CLIENT_ID"]
125 | volumes:
126 | - name: "go"
127 | path: "/gopath"
128 | args:
129 | - -c
130 | - |
131 | .ci/test_with_coverage.sh \
132 | "AlloyDB AI NL" \
133 | alloydbainl \
134 | alloydbainl
135 |
136 | - id: "bigtable"
137 | name: golang:1
138 | waitFor: ["compile-test-binary"]
139 | entrypoint: /bin/bash
140 | env:
141 | - "GOPATH=/gopath"
142 | - "BIGTABLE_PROJECT=$PROJECT_ID"
143 | - "BIGTABLE_INSTANCE=$_BIGTABLE_INSTANCE"
144 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
145 | secretEnv: ["CLIENT_ID"]
146 | volumes:
147 | - name: "go"
148 | path: "/gopath"
149 | args:
150 | - -c
151 | - |
152 | .ci/test_with_coverage.sh \
153 | "Bigtable" \
154 | bigtable \
155 | bigtable
156 |
157 | - id: "bigquery"
158 | name: golang:1
159 | waitFor: ["compile-test-binary"]
160 | entrypoint: /bin/bash
161 | env:
162 | - "GOPATH=/gopath"
163 | - "BIGQUERY_PROJECT=$PROJECT_ID"
164 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
165 | secretEnv: ["CLIENT_ID"]
166 | volumes:
167 | - name: "go"
168 | path: "/gopath"
169 | args:
170 | - -c
171 | - |
172 | .ci/test_with_coverage.sh \
173 | "BigQuery" \
174 | bigquery \
175 | bigquery
176 |
177 | - id: "dataplex"
178 | name: golang:1
179 | waitFor: ["compile-test-binary"]
180 | entrypoint: /bin/bash
181 | env:
182 | - "GOPATH=/gopath"
183 | - "DATAPLEX_PROJECT=$PROJECT_ID"
184 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
185 | secretEnv: ["CLIENT_ID"]
186 | volumes:
187 | - name: "go"
188 | path: "/gopath"
189 | args:
190 | - -c
191 | - |
192 | .ci/test_with_coverage.sh \
193 | "Dataplex" \
194 | dataplex \
195 | dataplex
196 |
197 | - id: "dataform"
198 | name: golang:1
199 | waitFor: ["compile-test-binary"]
200 | entrypoint: /bin/bash
201 | env:
202 | - "GOPATH=/gopath"
203 | secretEnv: ["CLIENT_ID"]
204 | volumes:
205 | - name: "go"
206 | path: "/gopath"
207 | args:
208 | - -c
209 | - |
210 | apt-get update && apt-get install -y npm && \
211 | npm install -g @dataform/cli && \
212 | .ci/test_with_coverage.sh \
213 | "Dataform" \
214 | dataform \
215 | dataform
216 |
217 | - id: "postgres"
218 | name: golang:1
219 | waitFor: ["compile-test-binary"]
220 | entrypoint: /bin/bash
221 | env:
222 | - "GOPATH=/gopath"
223 | - "POSTGRES_DATABASE=$_DATABASE_NAME"
224 | - "POSTGRES_HOST=$_POSTGRES_HOST"
225 | - "POSTGRES_PORT=$_POSTGRES_PORT"
226 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
227 | secretEnv: ["POSTGRES_USER", "POSTGRES_PASS", "CLIENT_ID"]
228 | volumes:
229 | - name: "go"
230 | path: "/gopath"
231 | args:
232 | - -c
233 | - |
234 | .ci/test_with_coverage.sh \
235 | "Postgres" \
236 | postgres \
237 | postgressql \
238 | postgresexecutesql
239 |
240 | - id: "spanner"
241 | name: golang:1
242 | waitFor: ["compile-test-binary"]
243 | entrypoint: /bin/bash
244 | env:
245 | - "GOPATH=/gopath"
246 | - "SPANNER_PROJECT=$PROJECT_ID"
247 | - "SPANNER_DATABASE=$_DATABASE_NAME"
248 | - "SPANNER_INSTANCE=$_SPANNER_INSTANCE"
249 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
250 | secretEnv: ["CLIENT_ID"]
251 | volumes:
252 | - name: "go"
253 | path: "/gopath"
254 | args:
255 | - -c
256 | - |
257 | .ci/test_with_coverage.sh \
258 | "Spanner" \
259 | spanner \
260 | spanner
261 |
262 | - id: "neo4j"
263 | name: golang:1
264 | waitFor: ["compile-test-binary"]
265 | entrypoint: /bin/bash
266 | env:
267 | - "GOPATH=/gopath"
268 | - "NEO4J_DATABASE=$_NEO4J_DATABASE"
269 | - "NEO4J_URI=$_NEO4J_URI"
270 | secretEnv: ["NEO4J_USER", "NEO4J_PASS"]
271 | volumes:
272 | - name: "go"
273 | path: "/gopath"
274 | args:
275 | - -c
276 | - |
277 | .ci/test_with_coverage.sh \
278 | "Neo4j" \
279 | neo4j \
280 | neo4j
281 |
282 | - id: "cloud-sql-mssql"
283 | name: golang:1
284 | waitFor: ["compile-test-binary"]
285 | entrypoint: /bin/bash
286 | env:
287 | - "GOPATH=/gopath"
288 | - "CLOUD_SQL_MSSQL_PROJECT=$PROJECT_ID"
289 | - "CLOUD_SQL_MSSQL_INSTANCE=$_CLOUD_SQL_MSSQL_INSTANCE"
290 | - "CLOUD_SQL_MSSQL_IP=$_CLOUD_SQL_MSSQL_IP"
291 | - "CLOUD_SQL_MSSQL_DATABASE=$_DATABASE_NAME"
292 | - "CLOUD_SQL_MSSQL_REGION=$_REGION"
293 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
294 | secretEnv: ["CLOUD_SQL_MSSQL_USER", "CLOUD_SQL_MSSQL_PASS", "CLIENT_ID"]
295 | volumes:
296 | - name: "go"
297 | path: "/gopath"
298 | args:
299 | - -c
300 | - |
301 | .ci/test_with_coverage.sh \
302 | "Cloud SQL MSSQL" \
303 | cloudsqlmssql \
304 | mssql
305 |
306 | - id: "cloud-sql-mysql"
307 | name: golang:1
308 | waitFor: ["compile-test-binary"]
309 | entrypoint: /bin/bash
310 | env:
311 | - "GOPATH=/gopath"
312 | - "CLOUD_SQL_MYSQL_PROJECT=$PROJECT_ID"
313 | - "CLOUD_SQL_MYSQL_INSTANCE=$_CLOUD_SQL_MYSQL_INSTANCE"
314 | - "CLOUD_SQL_MYSQL_DATABASE=$_DATABASE_NAME"
315 | - "CLOUD_SQL_MYSQL_REGION=$_REGION"
316 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
317 | secretEnv: ["CLOUD_SQL_MYSQL_USER", "CLOUD_SQL_MYSQL_PASS", "CLIENT_ID"]
318 | volumes:
319 | - name: "go"
320 | path: "/gopath"
321 | args:
322 | - -c
323 | - |
324 | .ci/test_with_coverage.sh \
325 | "Cloud SQL MySQL" \
326 | cloudsqlmysql \
327 | mysql || echo "Integration tests failed." # ignore test failures
328 |
329 | - id: "mysql"
330 | name: golang:1
331 | waitFor: ["compile-test-binary"]
332 | entrypoint: /bin/bash
333 | env:
334 | - "GOPATH=/gopath"
335 | - "MYSQL_DATABASE=$_DATABASE_NAME"
336 | - "MYSQL_HOST=$_MYSQL_HOST"
337 | - "MYSQL_PORT=$_MYSQL_PORT"
338 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
339 | secretEnv: ["MYSQL_USER", "MYSQL_PASS", "CLIENT_ID"]
340 | volumes:
341 | - name: "go"
342 | path: "/gopath"
343 | args:
344 | - -c
345 | - |
346 | .ci/test_with_coverage.sh \
347 | "MySQL" \
348 | mysql \
349 | mysql || echo "Integration tests failed." # ignore test failures
350 |
351 | - id: "mssql"
352 | name: golang:1
353 | waitFor: ["compile-test-binary"]
354 | entrypoint: /bin/bash
355 | env:
356 | - "GOPATH=/gopath"
357 | - "MSSQL_DATABASE=$_DATABASE_NAME"
358 | - "MSSQL_HOST=$_MSSQL_HOST"
359 | - "MSSQL_PORT=$_MSSQL_PORT"
360 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
361 | secretEnv: ["MSSQL_USER", "MSSQL_PASS", "CLIENT_ID"]
362 | volumes:
363 | - name: "go"
364 | path: "/gopath"
365 | args:
366 | - -c
367 | - |
368 | .ci/test_with_coverage.sh \
369 | "MSSQL" \
370 | mssql \
371 | mssql
372 |
373 | # - id: "dgraph"
374 | # name: golang:1
375 | # waitFor: ["compile-test-binary"]
376 | # entrypoint: /bin/bash
377 | # env:
378 | # - "GOPATH=/gopath"
379 | # - "DGRAPH_URL=$_DGRAPHURL"
380 | # volumes:
381 | # - name: "go"
382 | # path: "/gopath"
383 | # args:
384 | # - -c
385 | # - |
386 | # .ci/test_with_coverage.sh \
387 | # "Dgraph" \
388 | # dgraph \
389 | # dgraph
390 |
391 | - id: "http"
392 | name: golang:1
393 | waitFor: ["compile-test-binary"]
394 | entrypoint: /bin/bash
395 | env:
396 | - "GOPATH=/gopath"
397 | secretEnv: ["CLIENT_ID"]
398 | volumes:
399 | - name: "go"
400 | path: "/gopath"
401 | args:
402 | - -c
403 | - |
404 | .ci/test_with_coverage.sh \
405 | "HTTP" \
406 | http \
407 | http
408 |
409 | - id: "sqlite"
410 | name: golang:1
411 | waitFor: ["compile-test-binary"]
412 | entrypoint: /bin/bash
413 | env:
414 | - "GOPATH=/gopath"
415 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
416 | volumes:
417 | - name: "go"
418 | path: "/gopath"
419 | secretEnv: ["CLIENT_ID"]
420 | args:
421 | - -c
422 | - |
423 | .ci/test_with_coverage.sh \
424 | "SQLite" \
425 | sqlite \
426 | sqlite
427 |
428 | - id: "couchbase"
429 | name: golang:1
430 | waitFor: ["compile-test-binary"]
431 | entrypoint: /bin/bash
432 | env:
433 | - "GOPATH=/gopath"
434 | - "COUCHBASE_SCOPE=$_COUCHBASE_SCOPE"
435 | - "COUCHBASE_BUCKET=$_COUCHBASE_BUCKET"
436 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
437 | secretEnv:
438 | ["COUCHBASE_CONNECTION", "COUCHBASE_USER", "COUCHBASE_PASS", "CLIENT_ID"]
439 | volumes:
440 | - name: "go"
441 | path: "/gopath"
442 | args:
443 | - -c
444 | - |
445 | .ci/test_with_coverage.sh \
446 | "Couchbase" \
447 | couchbase \
448 | couchbase
449 |
450 | - id: "redis"
451 | name: golang:1
452 | waitFor: ["compile-test-binary"]
453 | entrypoint: /bin/bash
454 | env:
455 | - "GOPATH=/gopath"
456 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
457 | secretEnv: ["REDIS_ADDRESS", "REDIS_PASS", "CLIENT_ID"]
458 | volumes:
459 | - name: "go"
460 | path: "/gopath"
461 | args:
462 | - -c
463 | - |
464 | .ci/test_with_coverage.sh \
465 | "Redis" \
466 | redis \
467 | redis
468 |
469 | - id: "valkey"
470 | name: golang:1
471 | waitFor: ["compile-test-binary"]
472 | entrypoint: /bin/bash
473 | env:
474 | - "GOPATH=/gopath"
475 | - "VALKEY_DATABASE=$_VALKEY_DATABASE"
476 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
477 | secretEnv: ["VALKEY_ADDRESS", "CLIENT_ID"]
478 | volumes:
479 | - name: "go"
480 | path: "/gopath"
481 | args:
482 | - -c
483 | - |
484 | .ci/test_with_coverage.sh \
485 | "Valkey" \
486 | valkey \
487 | valkey
488 |
489 | - id: "oceanbase"
490 | name: golang:1
491 | waitFor: ["compile-test-binary"]
492 | entrypoint: /bin/bash
493 | env:
494 | - "GOPATH=/gopath"
495 | - "OCEANBASE_PORT=$_OCEANBASE_PORT"
496 | - "OCEANBASE_DATABASE=$_OCEANBASE_DATABASE"
497 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
498 | secretEnv:
499 | ["CLIENT_ID", "OCEANBASE_HOST", "OCEANBASE_USER", "OCEANBASE_PASSWORD"]
500 | volumes:
501 | - name: "go"
502 | path: "/gopath"
503 | args:
504 | - -c
505 | - |
506 | .ci/test_with_coverage.sh \
507 | "OceanBase" \
508 | oceanbase \
509 | oceanbase
510 |
511 | - id: "firestore"
512 | name: golang:1
513 | waitFor: ["compile-test-binary"]
514 | entrypoint: /bin/bash
515 | env:
516 | - "GOPATH=/gopath"
517 | - "FIRESTORE_PROJECT=$PROJECT_ID"
518 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
519 | secretEnv: ["CLIENT_ID"]
520 | volumes:
521 | - name: "go"
522 | path: "/gopath"
523 | args:
524 | - -c
525 | - |
526 | .ci/test_with_coverage.sh \
527 | "Firestore" \
528 | firestore \
529 | firestore
530 |
531 | - id: "looker"
532 | name: golang:1
533 | waitFor: ["compile-test-binary"]
534 | entrypoint: /bin/bash
535 | env:
536 | - "GOPATH=/gopath"
537 | - "FIRESTORE_PROJECT=$PROJECT_ID"
538 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
539 | - "LOOKER_VERIFY_SSL=$_LOOKER_VERIFY_SSL"
540 | - "LOOKER_PROJECT=$_LOOKER_PROJECT"
541 | - "LOOKER_LOCATION=$_LOOKER_LOCATION"
542 | secretEnv:
543 | [
544 | "CLIENT_ID",
545 | "LOOKER_BASE_URL",
546 | "LOOKER_CLIENT_ID",
547 | "LOOKER_CLIENT_SECRET",
548 | ]
549 | volumes:
550 | - name: "go"
551 | path: "/gopath"
552 | args:
553 | - -c
554 | - |
555 | .ci/test_with_coverage.sh \
556 | "Looker" \
557 | looker \
558 | looker
559 |
560 | - id: "cloud-sql"
561 | name: golang:1
562 | waitFor: ["compile-test-binary"]
563 | entrypoint: /bin/bash
564 | env:
565 | - "GOPATH=/gopath"
566 | secretEnv: ["CLIENT_ID"]
567 | volumes:
568 | - name: "go"
569 | path: "/gopath"
570 | args:
571 | - -c
572 | - |
573 | .ci/test_with_coverage.sh \
574 | "Cloud SQL Wait for Operation" \
575 | cloudsql \
576 | cloudsql
577 |
578 | - id: "tidb"
579 | name: golang:1
580 | waitFor: ["compile-test-binary"]
581 | entrypoint: /bin/bash
582 | env:
583 | - "GOPATH=/gopath"
584 | - "TIDB_DATABASE=$_DATABASE_NAME"
585 | - "TIDB_HOST=$_TIDB_HOST"
586 | - "TIDB_PORT=$_TIDB_PORT"
587 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
588 | secretEnv: ["CLIENT_ID", "TIDB_USER", "TIDB_PASS"]
589 | volumes:
590 | - name: "go"
591 | path: "/gopath"
592 | args:
593 | - -c
594 | - |
595 | .ci/test_with_coverage.sh \
596 | "TiDB" \
597 | tidb \
598 | tidbsql tidbexecutesql
599 |
600 | - id: "firebird"
601 | name: golang:1
602 | waitFor: ["compile-test-binary"]
603 | entrypoint: /bin/bash
604 | env:
605 | - "GOPATH=/gopath"
606 | - "FIREBIRD_DATABASE=$_FIREBIRD_DATABASE_NAME"
607 | - "FIREBIRD_HOST=$_FIREBIRD_HOST"
608 | - "FIREBIRD_PORT=$_FIREBIRD_PORT"
609 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
610 | secretEnv: ["CLIENT_ID", "FIREBIRD_USER", "FIREBIRD_PASS"]
611 | volumes:
612 | - name: "go"
613 | path: "/gopath"
614 | args:
615 | - -c
616 | - |
617 | .ci/test_with_coverage.sh \
618 | "Firebird" \
619 | firebird \
620 | firebirdsql firebirdexecutesql
621 |
622 | - id: "clickhouse"
623 | name: golang:1
624 | waitFor: ["compile-test-binary"]
625 | entrypoint: /bin/bash
626 | env:
627 | - "GOPATH=/gopath"
628 | - "CLICKHOUSE_DATABASE=$_CLICKHOUSE_DATABASE"
629 | - "CLICKHOUSE_PORT=$_CLICKHOUSE_PORT"
630 | - "CLICKHOUSE_PROTOCOL=$_CLICKHOUSE_PROTOCOL"
631 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
632 | secretEnv: ["CLICKHOUSE_HOST", "CLICKHOUSE_USER", "CLIENT_ID"]
633 | volumes:
634 | - name: "go"
635 | path: "/gopath"
636 | args:
637 | - -c
638 | - |
639 | .ci/test_with_coverage.sh \
640 | "ClickHouse" \
641 | clickhouse \
642 | clickhouse
643 |
644 | - id: "trino"
645 | name: golang:1
646 | waitFor: ["compile-test-binary"]
647 | entrypoint: /bin/bash
648 | env:
649 | - "GOPATH=/gopath"
650 | - "TRINO_HOST=$_TRINO_HOST"
651 | - "TRINO_PORT=$_TRINO_PORT"
652 | - "TRINO_CATALOG=$_TRINO_CATALOG"
653 | - "TRINO_SCHEMA=$_TRINO_SCHEMA"
654 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
655 | secretEnv: ["CLIENT_ID", "TRINO_USER"]
656 | volumes:
657 | - name: "go"
658 | path: "/gopath"
659 | args:
660 | - -c
661 | - |
662 | .ci/test_with_coverage.sh \
663 | "Trino" \
664 | trino \
665 | trinosql trinoexecutesql
666 |
667 | - id: "yugabytedb"
668 | name: golang:1
669 | waitFor: ["compile-test-binary"]
670 | entrypoint: /bin/bash
671 | env:
672 | - "GOPATH=/gopath"
673 | - "YUGABYTEDB_DATABASE=$_YUGABYTEDB_DATABASE"
674 | - "YUGABYTEDB_PORT=$_YUGABYTEDB_PORT"
675 | - "YUGABYTEDB_LOADBALANCE=$_YUGABYTEDB_LOADBALANCE"
676 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
677 | secretEnv:
678 | ["YUGABYTEDB_USER", "YUGABYTEDB_PASS", "YUGABYTEDB_HOST", "CLIENT_ID"]
679 | volumes:
680 | - name: "go"
681 | path: "/gopath"
682 | args:
683 | - -c
684 | - |
685 | ./yugabytedb.test -test.v
686 |
687 |
688 | - id: "cassandra"
689 | name: golang:1
690 | waitFor: ["compile-test-binary"]
691 | entrypoint: /bin/bash
692 | env:
693 | - "GOPATH=/gopath"
694 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
695 | secretEnv: ["CLIENT_ID", "CASSANDRA_USER", "CASSANDRA_PASS", "CASSANDRA_HOST"]
696 | volumes:
697 | - name: "go"
698 | path: "/gopath"
699 | args:
700 | - -c
701 | - |
702 | .ci/test_with_coverage.sh \
703 | "Cassandra" \
704 | cassandra \
705 | cassandra
706 |
707 | - id: "oracle"
708 | name: golang:1
709 | waitFor: ["compile-test-binary"]
710 | entrypoint: /bin/bash
711 | env:
712 | - "GOPATH=/gopath"
713 | - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
714 | - "ORACLE_SERVER_NAME=$_ORACLE_SERVER_NAME"
715 | secretEnv: ["CLIENT_ID", "ORACLE_USER", "ORACLE_PASS", "ORACLE_HOST"]
716 | volumes:
717 | - name: "go"
718 | path: "/gopath"
719 | args:
720 | - -c
721 | - |
722 | .ci/test_with_coverage.sh \
723 | "Oracle" \
724 | oracle \
725 | oracle
726 |
727 | - id: "serverless-spark"
728 | name: golang:1
729 | waitFor: ["compile-test-binary"]
730 | entrypoint: /bin/bash
731 | env:
732 | - "GOPATH=/gopath"
733 | - "SERVERLESS_SPARK_PROJECT=$PROJECT_ID"
734 | - "SERVERLESS_SPARK_LOCATION=$_REGION"
735 | secretEnv: ["CLIENT_ID"]
736 | volumes:
737 | - name: "go"
738 | path: "/gopath"
739 | args:
740 | - -c
741 | - |
742 | .ci/test_with_coverage.sh \
743 | "Serverless Spark" \
744 | serverlessspark
745 |
746 | availableSecrets:
747 | secretManager:
748 | - versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_user/versions/latest
749 | env: CLOUD_SQL_POSTGRES_USER
750 | - versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_pass/versions/latest
751 | env: CLOUD_SQL_POSTGRES_PASS
752 | - versionName: projects/$PROJECT_ID/secrets/alloydb_pg_user/versions/latest
753 | env: ALLOYDB_POSTGRES_USER
754 | - versionName: projects/$PROJECT_ID/secrets/alloydb_pg_pass/versions/latest
755 | env: ALLOYDB_POSTGRES_PASS
756 | - versionName: projects/$PROJECT_ID/secrets/alloydb_ai_nl_user/versions/latest
757 | env: ALLOYDB_AI_NL_USER
758 | - versionName: projects/$PROJECT_ID/secrets/alloydb_ai_nl_pass/versions/latest
759 | env: ALLOYDB_AI_NL_PASS
760 | - versionName: projects/$PROJECT_ID/secrets/postgres_user/versions/latest
761 | env: POSTGRES_USER
762 | - versionName: projects/$PROJECT_ID/secrets/postgres_pass/versions/latest
763 | env: POSTGRES_PASS
764 | - versionName: projects/$PROJECT_ID/secrets/client_id/versions/latest
765 | env: CLIENT_ID
766 | - versionName: projects/$PROJECT_ID/secrets/neo4j_user/versions/latest
767 | env: NEO4J_USER
768 | - versionName: projects/$PROJECT_ID/secrets/neo4j_pass/versions/latest
769 | env: NEO4J_PASS
770 | - versionName: projects/$PROJECT_ID/secrets/cloud_sql_mssql_user/versions/latest
771 | env: CLOUD_SQL_MSSQL_USER
772 | - versionName: projects/$PROJECT_ID/secrets/cloud_sql_mssql_pass/versions/latest
773 | env: CLOUD_SQL_MSSQL_PASS
774 | - versionName: projects/$PROJECT_ID/secrets/cloud_sql_mysql_user/versions/latest
775 | env: CLOUD_SQL_MYSQL_USER
776 | - versionName: projects/$PROJECT_ID/secrets/cloud_sql_mysql_pass/versions/latest
777 | env: CLOUD_SQL_MYSQL_PASS
778 | - versionName: projects/$PROJECT_ID/secrets/mysql_user/versions/latest
779 | env: MYSQL_USER
780 | - versionName: projects/$PROJECT_ID/secrets/mysql_pass/versions/latest
781 | env: MYSQL_PASS
782 | - versionName: projects/$PROJECT_ID/secrets/mssql_user/versions/latest
783 | env: MSSQL_USER
784 | - versionName: projects/$PROJECT_ID/secrets/mssql_pass/versions/latest
785 | env: MSSQL_PASS
786 | - versionName: projects/$PROJECT_ID/secrets/couchbase_connection/versions/latest
787 | env: COUCHBASE_CONNECTION
788 | - versionName: projects/$PROJECT_ID/secrets/couchbase_user/versions/latest
789 | env: COUCHBASE_USER
790 | - versionName: projects/$PROJECT_ID/secrets/couchbase_pass/versions/latest
791 | env: COUCHBASE_PASS
792 | - versionName: projects/$PROJECT_ID/secrets/memorystore_redis_address/versions/latest
793 | env: REDIS_ADDRESS
794 | - versionName: projects/$PROJECT_ID/secrets/memorystore_redis_pass/versions/latest
795 | env: REDIS_PASS
796 | - versionName: projects/$PROJECT_ID/secrets/memorystore_valkey_address/versions/latest
797 | env: VALKEY_ADDRESS
798 | - versionName: projects/$PROJECT_ID/secrets/looker_base_url/versions/latest
799 | env: LOOKER_BASE_URL
800 | - versionName: projects/$PROJECT_ID/secrets/looker_client_id/versions/latest
801 | env: LOOKER_CLIENT_ID
802 | - versionName: projects/$PROJECT_ID/secrets/looker_client_secret/versions/latest
803 | env: LOOKER_CLIENT_SECRET
804 | - versionName: projects/$PROJECT_ID/secrets/tidb_user/versions/latest
805 | env: TIDB_USER
806 | - versionName: projects/$PROJECT_ID/secrets/tidb_pass/versions/latest
807 | env: TIDB_PASS
808 | - versionName: projects/$PROJECT_ID/secrets/clickhouse_host/versions/latest
809 | env: CLICKHOUSE_HOST
810 | - versionName: projects/$PROJECT_ID/secrets/clickhouse_user/versions/latest
811 | env: CLICKHOUSE_USER
812 | - versionName: projects/$PROJECT_ID/secrets/firebird_user/versions/latest
813 | env: FIREBIRD_USER
814 | - versionName: projects/$PROJECT_ID/secrets/firebird_pass/versions/latest
815 | env: FIREBIRD_PASS
816 | - versionName: projects/$PROJECT_ID/secrets/trino_user/versions/latest
817 | env: TRINO_USER
818 | - versionName: projects/$PROJECT_ID/secrets/oceanbase_host/versions/latest
819 | env: OCEANBASE_HOST
820 | - versionName: projects/$PROJECT_ID/secrets/oceanbase_user/versions/latest
821 | env: OCEANBASE_USER
822 | - versionName: projects/$PROJECT_ID/secrets/oceanbase_pass/versions/latest
823 | env: OCEANBASE_PASSWORD
824 | - versionName: projects/$PROJECT_ID/secrets/yugabytedb_host/versions/latest
825 | env: YUGABYTEDB_HOST
826 | - versionName: projects/$PROJECT_ID/secrets/yugabytedb_user/versions/latest
827 | env: YUGABYTEDB_USER
828 | - versionName: projects/$PROJECT_ID/secrets/yugabytedb_pass/versions/latest
829 | env: YUGABYTEDB_PASS
830 | - versionName: projects/$PROJECT_ID/secrets/cassandra_user/versions/latest
831 | env: CASSANDRA_USER
832 | - versionName: projects/$PROJECT_ID/secrets/cassandra_pass/versions/latest
833 | env: CASSANDRA_PASS
834 | - versionName: projects/$PROJECT_ID/secrets/cassandra_host/versions/latest
835 | env: CASSANDRA_HOST
836 | - versionName: projects/$PROJECT_ID/secrets/oracle_user/versions/latest
837 | env: ORACLE_USER
838 | - versionName: projects/$PROJECT_ID/secrets/oracle_pass/versions/latest
839 | env: ORACLE_PASS
840 | - versionName: projects/$PROJECT_ID/secrets/oracle_host/versions/latest
841 | env: ORACLE_HOST
842 |
843 | options:
844 | logging: CLOUD_LOGGING_ONLY
845 | automapSubstitutions: true
846 | substitutionOption: "ALLOW_LOOSE"
847 | dynamicSubstitutions: true
848 | pool:
849 | name: projects/$PROJECT_ID/locations/us-central1/workerPools/integration-testing # Necessary for VPC network connection
850 |
851 | substitutions:
852 | _DATABASE_NAME: test_database
853 | _FIREBIRD_DATABASE_NAME: /firebird/test_database.fdb
854 | _REGION: "us-central1"
855 | _CLOUD_SQL_POSTGRES_INSTANCE: "cloud-sql-pg-testing"
856 | _ALLOYDB_POSTGRES_CLUSTER: "alloydb-pg-testing"
857 | _ALLOYDB_POSTGRES_INSTANCE: "alloydb-pg-testing-instance"
858 | _ALLOYDB_AI_NL_CLUSTER: "alloydb-ai-nl-testing"
859 | _ALLOYDB_AI_NL_INSTANCE: "alloydb-ai-nl-testing-instance"
860 | _BIGTABLE_INSTANCE: "bigtable-testing-instance"
861 | _POSTGRES_HOST: 127.0.0.1
862 | _POSTGRES_PORT: "5432"
863 | _SPANNER_INSTANCE: "spanner-testing"
864 | _NEO4J_DATABASE: "neo4j"
865 | _CLOUD_SQL_MSSQL_INSTANCE: "cloud-sql-mssql-testing"
866 | _CLOUD_SQL_MYSQL_INSTANCE: "cloud-sql-mysql-testing"
867 | _MYSQL_HOST: 127.0.0.1
868 | _MYSQL_PORT: "3306"
869 | _MSSQL_HOST: 127.0.0.1
870 | _MSSQL_PORT: "1433"
871 | _DGRAPHURL: "https://play.dgraph.io"
872 | _COUCHBASE_BUCKET: "couchbase-bucket"
873 | _COUCHBASE_SCOPE: "couchbase-scope"
874 | _LOOKER_LOCATION: "us"
875 | _LOOKER_PROJECT: "149671255749"
876 | _LOOKER_VERIFY_SSL: "true"
877 | _TIDB_HOST: 127.0.0.1
878 | _TIDB_PORT: "4000"
879 | _CLICKHOUSE_DATABASE: "default"
880 | _CLICKHOUSE_PORT: "8123"
881 | _CLICKHOUSE_PROTOCOL: "http"
882 | _FIREBIRD_HOST: 127.0.0.1
883 | _FIREBIRD_PORT: "3050"
884 | _TRINO_HOST: 127.0.0.1
885 | _TRINO_PORT: "8080"
886 | _TRINO_CATALOG: "memory"
887 | _TRINO_SCHEMA: "default"
888 | _OCEANBASE_PORT: "2883"
889 | _OCEANBASE_DATABASE: "oceanbase"
890 | _YUGABYTEDB_DATABASE: "yugabyte"
891 | _YUGABYTEDB_PORT: "5433"
892 | _YUGABYTEDB_LOADBALANCE: "false"
893 | _ORACLE_SERVER_NAME: "FREEPDB1"
```