This is page 42 of 59. Use http://codebase.md/googleapis/genai-toolbox?lines=true&page={x} to view the full context.
# Directory Structure
```
├── .ci
│ ├── continuous.release.cloudbuild.yaml
│ ├── generate_release_table.sh
│ ├── integration.cloudbuild.yaml
│ ├── quickstart_test
│ │ ├── go.integration.cloudbuild.yaml
│ │ ├── js.integration.cloudbuild.yaml
│ │ ├── py.integration.cloudbuild.yaml
│ │ ├── run_go_tests.sh
│ │ ├── run_js_tests.sh
│ │ ├── run_py_tests.sh
│ │ └── setup_hotels_sample.sql
│ ├── test_with_coverage.sh
│ └── versioned.release.cloudbuild.yaml
├── .github
│ ├── auto-label.yaml
│ ├── blunderbuss.yml
│ ├── CODEOWNERS
│ ├── header-checker-lint.yml
│ ├── ISSUE_TEMPLATE
│ │ ├── bug_report.yml
│ │ ├── config.yml
│ │ ├── feature_request.yml
│ │ └── question.yml
│ ├── label-sync.yml
│ ├── labels.yaml
│ ├── PULL_REQUEST_TEMPLATE.md
│ ├── release-please.yml
│ ├── renovate.json5
│ ├── sync-repo-settings.yaml
│ └── workflows
│ ├── cloud_build_failure_reporter.yml
│ ├── deploy_dev_docs.yaml
│ ├── deploy_previous_version_docs.yaml
│ ├── deploy_versioned_docs.yaml
│ ├── docs_deploy.yaml
│ ├── docs_preview_clean.yaml
│ ├── docs_preview_deploy.yaml
│ ├── lint.yaml
│ ├── schedule_reporter.yml
│ ├── sync-labels.yaml
│ └── tests.yaml
├── .gitignore
├── .gitmodules
├── .golangci.yaml
├── .hugo
│ ├── archetypes
│ │ └── default.md
│ ├── assets
│ │ ├── icons
│ │ │ └── logo.svg
│ │ └── scss
│ │ ├── _styles_project.scss
│ │ └── _variables_project.scss
│ ├── go.mod
│ ├── go.sum
│ ├── hugo.toml
│ ├── layouts
│ │ ├── _default
│ │ │ └── home.releases.releases
│ │ ├── index.llms-full.txt
│ │ ├── index.llms.txt
│ │ ├── partials
│ │ │ ├── hooks
│ │ │ │ └── head-end.html
│ │ │ ├── navbar-version-selector.html
│ │ │ ├── page-meta-links.html
│ │ │ └── td
│ │ │ └── render-heading.html
│ │ ├── robot.txt
│ │ └── shortcodes
│ │ ├── include.html
│ │ ├── ipynb.html
│ │ └── regionInclude.html
│ ├── package-lock.json
│ ├── package.json
│ └── static
│ ├── favicons
│ │ ├── android-chrome-192x192.png
│ │ ├── android-chrome-512x512.png
│ │ ├── apple-touch-icon.png
│ │ ├── favicon-16x16.png
│ │ ├── favicon-32x32.png
│ │ └── favicon.ico
│ └── js
│ └── w3.js
├── CHANGELOG.md
├── cmd
│ ├── options_test.go
│ ├── options.go
│ ├── root_test.go
│ ├── root.go
│ └── version.txt
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── DEVELOPER.md
├── Dockerfile
├── docs
│ └── en
│ ├── _index.md
│ ├── about
│ │ ├── _index.md
│ │ └── faq.md
│ ├── concepts
│ │ ├── _index.md
│ │ └── telemetry
│ │ ├── index.md
│ │ ├── telemetry_flow.png
│ │ └── telemetry_traces.png
│ ├── getting-started
│ │ ├── _index.md
│ │ ├── colab_quickstart.ipynb
│ │ ├── configure.md
│ │ ├── introduction
│ │ │ ├── _index.md
│ │ │ └── architecture.png
│ │ ├── local_quickstart_go.md
│ │ ├── local_quickstart_js.md
│ │ ├── local_quickstart.md
│ │ ├── mcp_quickstart
│ │ │ ├── _index.md
│ │ │ ├── inspector_tools.png
│ │ │ └── inspector.png
│ │ └── quickstart
│ │ ├── go
│ │ │ ├── adkgo
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── genAI
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── genkit
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── langchain
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── openAI
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ └── quickstart_test.go
│ │ ├── golden.txt
│ │ ├── js
│ │ │ ├── genAI
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── genkit
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── langchain
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── llamaindex
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ └── quickstart.test.js
│ │ ├── python
│ │ │ ├── __init__.py
│ │ │ ├── adk
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── core
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── langchain
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── llamaindex
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ └── quickstart_test.py
│ │ └── shared
│ │ ├── cloud_setup.md
│ │ ├── configure_toolbox.md
│ │ └── database_setup.md
│ ├── how-to
│ │ ├── _index.md
│ │ ├── connect_via_geminicli.md
│ │ ├── connect_via_mcp.md
│ │ ├── connect-ide
│ │ │ ├── _index.md
│ │ │ ├── alloydb_pg_admin_mcp.md
│ │ │ ├── alloydb_pg_mcp.md
│ │ │ ├── bigquery_mcp.md
│ │ │ ├── cloud_sql_mssql_admin_mcp.md
│ │ │ ├── cloud_sql_mssql_mcp.md
│ │ │ ├── cloud_sql_mysql_admin_mcp.md
│ │ │ ├── cloud_sql_mysql_mcp.md
│ │ │ ├── cloud_sql_pg_admin_mcp.md
│ │ │ ├── cloud_sql_pg_mcp.md
│ │ │ ├── firestore_mcp.md
│ │ │ ├── looker_mcp.md
│ │ │ ├── mssql_mcp.md
│ │ │ ├── mysql_mcp.md
│ │ │ ├── neo4j_mcp.md
│ │ │ ├── postgres_mcp.md
│ │ │ ├── spanner_mcp.md
│ │ │ └── sqlite_mcp.md
│ │ ├── deploy_docker.md
│ │ ├── deploy_gke.md
│ │ ├── deploy_toolbox.md
│ │ ├── export_telemetry.md
│ │ └── toolbox-ui
│ │ ├── edit-headers.gif
│ │ ├── edit-headers.png
│ │ ├── index.md
│ │ ├── optional-param-checked.png
│ │ ├── optional-param-unchecked.png
│ │ ├── run-tool.gif
│ │ ├── tools.png
│ │ └── toolsets.png
│ ├── reference
│ │ ├── _index.md
│ │ ├── cli.md
│ │ └── prebuilt-tools.md
│ ├── resources
│ │ ├── _index.md
│ │ ├── authServices
│ │ │ ├── _index.md
│ │ │ └── google.md
│ │ ├── sources
│ │ │ ├── _index.md
│ │ │ ├── alloydb-admin.md
│ │ │ ├── alloydb-pg.md
│ │ │ ├── bigquery.md
│ │ │ ├── bigtable.md
│ │ │ ├── cassandra.md
│ │ │ ├── clickhouse.md
│ │ │ ├── cloud-healthcare.md
│ │ │ ├── cloud-monitoring.md
│ │ │ ├── cloud-sql-admin.md
│ │ │ ├── cloud-sql-mssql.md
│ │ │ ├── cloud-sql-mysql.md
│ │ │ ├── cloud-sql-pg.md
│ │ │ ├── couchbase.md
│ │ │ ├── dataplex.md
│ │ │ ├── dgraph.md
│ │ │ ├── elasticsearch.md
│ │ │ ├── firebird.md
│ │ │ ├── firestore.md
│ │ │ ├── http.md
│ │ │ ├── looker.md
│ │ │ ├── mindsdb.md
│ │ │ ├── mongodb.md
│ │ │ ├── mssql.md
│ │ │ ├── mysql.md
│ │ │ ├── neo4j.md
│ │ │ ├── oceanbase.md
│ │ │ ├── oracle.md
│ │ │ ├── postgres.md
│ │ │ ├── redis.md
│ │ │ ├── serverless-spark.md
│ │ │ ├── singlestore.md
│ │ │ ├── spanner.md
│ │ │ ├── sqlite.md
│ │ │ ├── tidb.md
│ │ │ ├── trino.md
│ │ │ ├── valkey.md
│ │ │ └── yugabytedb.md
│ │ └── tools
│ │ ├── _index.md
│ │ ├── alloydb
│ │ │ ├── _index.md
│ │ │ ├── alloydb-create-cluster.md
│ │ │ ├── alloydb-create-instance.md
│ │ │ ├── alloydb-create-user.md
│ │ │ ├── alloydb-get-cluster.md
│ │ │ ├── alloydb-get-instance.md
│ │ │ ├── alloydb-get-user.md
│ │ │ ├── alloydb-list-clusters.md
│ │ │ ├── alloydb-list-instances.md
│ │ │ ├── alloydb-list-users.md
│ │ │ └── alloydb-wait-for-operation.md
│ │ ├── alloydbainl
│ │ │ ├── _index.md
│ │ │ └── alloydb-ai-nl.md
│ │ ├── bigquery
│ │ │ ├── _index.md
│ │ │ ├── bigquery-analyze-contribution.md
│ │ │ ├── bigquery-conversational-analytics.md
│ │ │ ├── bigquery-execute-sql.md
│ │ │ ├── bigquery-forecast.md
│ │ │ ├── bigquery-get-dataset-info.md
│ │ │ ├── bigquery-get-table-info.md
│ │ │ ├── bigquery-list-dataset-ids.md
│ │ │ ├── bigquery-list-table-ids.md
│ │ │ ├── bigquery-search-catalog.md
│ │ │ └── bigquery-sql.md
│ │ ├── bigtable
│ │ │ ├── _index.md
│ │ │ └── bigtable-sql.md
│ │ ├── cassandra
│ │ │ ├── _index.md
│ │ │ └── cassandra-cql.md
│ │ ├── clickhouse
│ │ │ ├── _index.md
│ │ │ ├── clickhouse-execute-sql.md
│ │ │ ├── clickhouse-list-databases.md
│ │ │ ├── clickhouse-list-tables.md
│ │ │ └── clickhouse-sql.md
│ │ ├── cloudhealthcare
│ │ │ ├── _index.md
│ │ │ ├── cloud-healthcare-fhir-fetch-page.md
│ │ │ ├── cloud-healthcare-fhir-patient-everything.md
│ │ │ ├── cloud-healthcare-fhir-patient-search.md
│ │ │ ├── cloud-healthcare-get-dataset.md
│ │ │ ├── cloud-healthcare-get-dicom-store-metrics.md
│ │ │ ├── cloud-healthcare-get-dicom-store.md
│ │ │ ├── cloud-healthcare-get-fhir-resource.md
│ │ │ ├── cloud-healthcare-get-fhir-store-metrics.md
│ │ │ ├── cloud-healthcare-get-fhir-store.md
│ │ │ ├── cloud-healthcare-list-dicom-stores.md
│ │ │ ├── cloud-healthcare-list-fhir-stores.md
│ │ │ ├── cloud-healthcare-retrieve-rendered-dicom-instance.md
│ │ │ ├── cloud-healthcare-search-dicom-instances.md
│ │ │ ├── cloud-healthcare-search-dicom-series.md
│ │ │ └── cloud-healthcare-search-dicom-studies.md
│ │ ├── cloudmonitoring
│ │ │ ├── _index.md
│ │ │ └── cloud-monitoring-query-prometheus.md
│ │ ├── cloudsql
│ │ │ ├── _index.md
│ │ │ ├── cloudsqlcreatedatabase.md
│ │ │ ├── cloudsqlcreateusers.md
│ │ │ ├── cloudsqlgetinstances.md
│ │ │ ├── cloudsqllistdatabases.md
│ │ │ ├── cloudsqllistinstances.md
│ │ │ ├── cloudsqlmssqlcreateinstance.md
│ │ │ ├── cloudsqlmysqlcreateinstance.md
│ │ │ ├── cloudsqlpgcreateinstances.md
│ │ │ └── cloudsqlwaitforoperation.md
│ │ ├── couchbase
│ │ │ ├── _index.md
│ │ │ └── couchbase-sql.md
│ │ ├── dataform
│ │ │ ├── _index.md
│ │ │ └── dataform-compile-local.md
│ │ ├── dataplex
│ │ │ ├── _index.md
│ │ │ ├── dataplex-lookup-entry.md
│ │ │ ├── dataplex-search-aspect-types.md
│ │ │ └── dataplex-search-entries.md
│ │ ├── dgraph
│ │ │ ├── _index.md
│ │ │ └── dgraph-dql.md
│ │ ├── elasticsearch
│ │ │ ├── _index.md
│ │ │ └── elasticsearch-esql.md
│ │ ├── firebird
│ │ │ ├── _index.md
│ │ │ ├── firebird-execute-sql.md
│ │ │ └── firebird-sql.md
│ │ ├── firestore
│ │ │ ├── _index.md
│ │ │ ├── firestore-add-documents.md
│ │ │ ├── firestore-delete-documents.md
│ │ │ ├── firestore-get-documents.md
│ │ │ ├── firestore-get-rules.md
│ │ │ ├── firestore-list-collections.md
│ │ │ ├── firestore-query-collection.md
│ │ │ ├── firestore-query.md
│ │ │ ├── firestore-update-document.md
│ │ │ └── firestore-validate-rules.md
│ │ ├── http
│ │ │ ├── _index.md
│ │ │ └── http.md
│ │ ├── looker
│ │ │ ├── _index.md
│ │ │ ├── looker-add-dashboard-element.md
│ │ │ ├── looker-conversational-analytics.md
│ │ │ ├── looker-create-project-file.md
│ │ │ ├── looker-delete-project-file.md
│ │ │ ├── looker-dev-mode.md
│ │ │ ├── looker-get-connection-databases.md
│ │ │ ├── looker-get-connection-schemas.md
│ │ │ ├── looker-get-connection-table-columns.md
│ │ │ ├── looker-get-connection-tables.md
│ │ │ ├── looker-get-connections.md
│ │ │ ├── looker-get-dashboards.md
│ │ │ ├── looker-get-dimensions.md
│ │ │ ├── looker-get-explores.md
│ │ │ ├── looker-get-filters.md
│ │ │ ├── looker-get-looks.md
│ │ │ ├── looker-get-measures.md
│ │ │ ├── looker-get-models.md
│ │ │ ├── looker-get-parameters.md
│ │ │ ├── looker-get-project-file.md
│ │ │ ├── looker-get-project-files.md
│ │ │ ├── looker-get-projects.md
│ │ │ ├── looker-health-analyze.md
│ │ │ ├── looker-health-pulse.md
│ │ │ ├── looker-health-vacuum.md
│ │ │ ├── looker-make-dashboard.md
│ │ │ ├── looker-make-look.md
│ │ │ ├── looker-query-sql.md
│ │ │ ├── looker-query-url.md
│ │ │ ├── looker-query.md
│ │ │ ├── looker-run-dashboard.md
│ │ │ ├── looker-run-look.md
│ │ │ └── looker-update-project-file.md
│ │ ├── mindsdb
│ │ │ ├── _index.md
│ │ │ ├── mindsdb-execute-sql.md
│ │ │ └── mindsdb-sql.md
│ │ ├── mongodb
│ │ │ ├── _index.md
│ │ │ ├── mongodb-aggregate.md
│ │ │ ├── mongodb-delete-many.md
│ │ │ ├── mongodb-delete-one.md
│ │ │ ├── mongodb-find-one.md
│ │ │ ├── mongodb-find.md
│ │ │ ├── mongodb-insert-many.md
│ │ │ ├── mongodb-insert-one.md
│ │ │ ├── mongodb-update-many.md
│ │ │ └── mongodb-update-one.md
│ │ ├── mssql
│ │ │ ├── _index.md
│ │ │ ├── mssql-execute-sql.md
│ │ │ ├── mssql-list-tables.md
│ │ │ └── mssql-sql.md
│ │ ├── mysql
│ │ │ ├── _index.md
│ │ │ ├── mysql-execute-sql.md
│ │ │ ├── mysql-list-active-queries.md
│ │ │ ├── mysql-list-table-fragmentation.md
│ │ │ ├── mysql-list-tables-missing-unique-indexes.md
│ │ │ ├── mysql-list-tables.md
│ │ │ └── mysql-sql.md
│ │ ├── neo4j
│ │ │ ├── _index.md
│ │ │ ├── neo4j-cypher.md
│ │ │ ├── neo4j-execute-cypher.md
│ │ │ └── neo4j-schema.md
│ │ ├── oceanbase
│ │ │ ├── _index.md
│ │ │ ├── oceanbase-execute-sql.md
│ │ │ └── oceanbase-sql.md
│ │ ├── oracle
│ │ │ ├── _index.md
│ │ │ ├── oracle-execute-sql.md
│ │ │ └── oracle-sql.md
│ │ ├── postgres
│ │ │ ├── _index.md
│ │ │ ├── postgres-execute-sql.md
│ │ │ ├── postgres-list-active-queries.md
│ │ │ ├── postgres-list-available-extensions.md
│ │ │ ├── postgres-list-installed-extensions.md
│ │ │ ├── postgres-list-schemas.md
│ │ │ ├── postgres-list-tables.md
│ │ │ ├── postgres-list-views.md
│ │ │ └── postgres-sql.md
│ │ ├── redis
│ │ │ ├── _index.md
│ │ │ └── redis.md
│ │ ├── serverless-spark
│ │ │ ├── _index.md
│ │ │ ├── serverless-spark-cancel-batch.md
│ │ │ ├── serverless-spark-get-batch.md
│ │ │ └── serverless-spark-list-batches.md
│ │ ├── singlestore
│ │ │ ├── _index.md
│ │ │ ├── singlestore-execute-sql.md
│ │ │ └── singlestore-sql.md
│ │ ├── spanner
│ │ │ ├── _index.md
│ │ │ ├── spanner-execute-sql.md
│ │ │ ├── spanner-list-tables.md
│ │ │ └── spanner-sql.md
│ │ ├── sqlite
│ │ │ ├── _index.md
│ │ │ ├── sqlite-execute-sql.md
│ │ │ └── sqlite-sql.md
│ │ ├── tidb
│ │ │ ├── _index.md
│ │ │ ├── tidb-execute-sql.md
│ │ │ └── tidb-sql.md
│ │ ├── trino
│ │ │ ├── _index.md
│ │ │ ├── trino-execute-sql.md
│ │ │ └── trino-sql.md
│ │ ├── utility
│ │ │ ├── _index.md
│ │ │ └── wait.md
│ │ ├── valkey
│ │ │ ├── _index.md
│ │ │ └── valkey.md
│ │ └── yuagbytedb
│ │ ├── _index.md
│ │ └── yugabytedb-sql.md
│ ├── samples
│ │ ├── _index.md
│ │ ├── alloydb
│ │ │ ├── _index.md
│ │ │ ├── ai-nl
│ │ │ │ ├── alloydb_ai_nl.ipynb
│ │ │ │ └── index.md
│ │ │ └── mcp_quickstart.md
│ │ ├── bigquery
│ │ │ ├── _index.md
│ │ │ ├── colab_quickstart_bigquery.ipynb
│ │ │ ├── local_quickstart.md
│ │ │ └── mcp_quickstart
│ │ │ ├── _index.md
│ │ │ ├── inspector_tools.png
│ │ │ └── inspector.png
│ │ └── looker
│ │ ├── _index.md
│ │ ├── looker_gemini_oauth
│ │ │ ├── _index.md
│ │ │ ├── authenticated.png
│ │ │ ├── authorize.png
│ │ │ └── registration.png
│ │ ├── looker_gemini.md
│ │ └── looker_mcp_inspector
│ │ ├── _index.md
│ │ ├── inspector_tools.png
│ │ └── inspector.png
│ └── sdks
│ ├── _index.md
│ ├── go-sdk.md
│ ├── js-sdk.md
│ └── python-sdk.md
├── gemini-extension.json
├── go.mod
├── go.sum
├── internal
│ ├── auth
│ │ ├── auth.go
│ │ └── google
│ │ └── google.go
│ ├── log
│ │ ├── handler.go
│ │ ├── log_test.go
│ │ ├── log.go
│ │ └── logger.go
│ ├── prebuiltconfigs
│ │ ├── prebuiltconfigs_test.go
│ │ ├── prebuiltconfigs.go
│ │ └── tools
│ │ ├── alloydb-postgres-admin.yaml
│ │ ├── alloydb-postgres-observability.yaml
│ │ ├── alloydb-postgres.yaml
│ │ ├── bigquery.yaml
│ │ ├── clickhouse.yaml
│ │ ├── cloud-healthcare.yaml
│ │ ├── cloud-sql-mssql-admin.yaml
│ │ ├── cloud-sql-mssql-observability.yaml
│ │ ├── cloud-sql-mssql.yaml
│ │ ├── cloud-sql-mysql-admin.yaml
│ │ ├── cloud-sql-mysql-observability.yaml
│ │ ├── cloud-sql-mysql.yaml
│ │ ├── cloud-sql-postgres-admin.yaml
│ │ ├── cloud-sql-postgres-observability.yaml
│ │ ├── cloud-sql-postgres.yaml
│ │ ├── dataplex.yaml
│ │ ├── elasticsearch.yaml
│ │ ├── firestore.yaml
│ │ ├── looker-conversational-analytics.yaml
│ │ ├── looker.yaml
│ │ ├── mindsdb.yaml
│ │ ├── mssql.yaml
│ │ ├── mysql.yaml
│ │ ├── neo4j.yaml
│ │ ├── oceanbase.yaml
│ │ ├── postgres.yaml
│ │ ├── serverless-spark.yaml
│ │ ├── singlestore.yaml
│ │ ├── spanner-postgres.yaml
│ │ ├── spanner.yaml
│ │ └── sqlite.yaml
│ ├── server
│ │ ├── api_test.go
│ │ ├── api.go
│ │ ├── common_test.go
│ │ ├── config.go
│ │ ├── mcp
│ │ │ ├── jsonrpc
│ │ │ │ ├── jsonrpc_test.go
│ │ │ │ └── jsonrpc.go
│ │ │ ├── mcp.go
│ │ │ ├── util
│ │ │ │ └── lifecycle.go
│ │ │ ├── v20241105
│ │ │ │ ├── method.go
│ │ │ │ └── types.go
│ │ │ ├── v20250326
│ │ │ │ ├── method.go
│ │ │ │ └── types.go
│ │ │ └── v20250618
│ │ │ ├── method.go
│ │ │ └── types.go
│ │ ├── mcp_test.go
│ │ ├── mcp.go
│ │ ├── server_test.go
│ │ ├── server.go
│ │ ├── static
│ │ │ ├── assets
│ │ │ │ └── mcptoolboxlogo.png
│ │ │ ├── css
│ │ │ │ └── style.css
│ │ │ ├── index.html
│ │ │ ├── js
│ │ │ │ ├── auth.js
│ │ │ │ ├── loadTools.js
│ │ │ │ ├── mainContent.js
│ │ │ │ ├── navbar.js
│ │ │ │ ├── runTool.js
│ │ │ │ ├── toolDisplay.js
│ │ │ │ ├── tools.js
│ │ │ │ └── toolsets.js
│ │ │ ├── tools.html
│ │ │ └── toolsets.html
│ │ ├── web_test.go
│ │ └── web.go
│ ├── sources
│ │ ├── alloydbadmin
│ │ │ ├── alloydbadmin_test.go
│ │ │ └── alloydbadmin.go
│ │ ├── alloydbpg
│ │ │ ├── alloydb_pg_test.go
│ │ │ └── alloydb_pg.go
│ │ ├── bigquery
│ │ │ ├── bigquery_test.go
│ │ │ ├── bigquery.go
│ │ │ └── cache.go
│ │ ├── bigtable
│ │ │ ├── bigtable_test.go
│ │ │ └── bigtable.go
│ │ ├── cassandra
│ │ │ ├── cassandra_test.go
│ │ │ └── cassandra.go
│ │ ├── clickhouse
│ │ │ ├── clickhouse_test.go
│ │ │ └── clickhouse.go
│ │ ├── cloudhealthcare
│ │ │ ├── cloud_healthcare_test.go
│ │ │ └── cloud_healthcare.go
│ │ ├── cloudmonitoring
│ │ │ ├── cloud_monitoring_test.go
│ │ │ └── cloud_monitoring.go
│ │ ├── cloudsqladmin
│ │ │ ├── cloud_sql_admin_test.go
│ │ │ └── cloud_sql_admin.go
│ │ ├── cloudsqlmssql
│ │ │ ├── cloud_sql_mssql_test.go
│ │ │ └── cloud_sql_mssql.go
│ │ ├── cloudsqlmysql
│ │ │ ├── cloud_sql_mysql_test.go
│ │ │ └── cloud_sql_mysql.go
│ │ ├── cloudsqlpg
│ │ │ ├── cloud_sql_pg_test.go
│ │ │ └── cloud_sql_pg.go
│ │ ├── couchbase
│ │ │ ├── couchbase_test.go
│ │ │ └── couchbase.go
│ │ ├── dataplex
│ │ │ ├── dataplex_test.go
│ │ │ └── dataplex.go
│ │ ├── dgraph
│ │ │ ├── dgraph_test.go
│ │ │ └── dgraph.go
│ │ ├── dialect.go
│ │ ├── elasticsearch
│ │ │ ├── elasticsearch_test.go
│ │ │ └── elasticsearch.go
│ │ ├── firebird
│ │ │ ├── firebird_test.go
│ │ │ └── firebird.go
│ │ ├── firestore
│ │ │ ├── firestore_test.go
│ │ │ └── firestore.go
│ │ ├── http
│ │ │ ├── http_test.go
│ │ │ └── http.go
│ │ ├── ip_type.go
│ │ ├── looker
│ │ │ ├── looker_test.go
│ │ │ └── looker.go
│ │ ├── mindsdb
│ │ │ ├── mindsdb_test.go
│ │ │ └── mindsdb.go
│ │ ├── mongodb
│ │ │ ├── mongodb_test.go
│ │ │ └── mongodb.go
│ │ ├── mssql
│ │ │ ├── mssql_test.go
│ │ │ └── mssql.go
│ │ ├── mysql
│ │ │ ├── mysql_test.go
│ │ │ └── mysql.go
│ │ ├── neo4j
│ │ │ ├── neo4j_test.go
│ │ │ └── neo4j.go
│ │ ├── oceanbase
│ │ │ ├── oceanbase_test.go
│ │ │ └── oceanbase.go
│ │ ├── oracle
│ │ │ └── oracle.go
│ │ ├── postgres
│ │ │ ├── postgres_test.go
│ │ │ └── postgres.go
│ │ ├── redis
│ │ │ ├── redis_test.go
│ │ │ └── redis.go
│ │ ├── serverlessspark
│ │ │ ├── serverlessspark_test.go
│ │ │ └── serverlessspark.go
│ │ ├── singlestore
│ │ │ ├── singlestore_test.go
│ │ │ └── singlestore.go
│ │ ├── sources.go
│ │ ├── spanner
│ │ │ ├── spanner_test.go
│ │ │ └── spanner.go
│ │ ├── sqlite
│ │ │ ├── sqlite_test.go
│ │ │ └── sqlite.go
│ │ ├── tidb
│ │ │ ├── tidb_test.go
│ │ │ └── tidb.go
│ │ ├── trino
│ │ │ ├── trino_test.go
│ │ │ └── trino.go
│ │ ├── util.go
│ │ ├── valkey
│ │ │ ├── valkey_test.go
│ │ │ └── valkey.go
│ │ └── yugabytedb
│ │ ├── yugabytedb_test.go
│ │ └── yugabytedb.go
│ ├── telemetry
│ │ ├── instrumentation.go
│ │ └── telemetry.go
│ ├── testutils
│ │ └── testutils.go
│ ├── tools
│ │ ├── alloydb
│ │ │ ├── alloydbcreatecluster
│ │ │ │ ├── alloydbcreatecluster_test.go
│ │ │ │ └── alloydbcreatecluster.go
│ │ │ ├── alloydbcreateinstance
│ │ │ │ ├── alloydbcreateinstance_test.go
│ │ │ │ └── alloydbcreateinstance.go
│ │ │ ├── alloydbcreateuser
│ │ │ │ ├── alloydbcreateuser_test.go
│ │ │ │ └── alloydbcreateuser.go
│ │ │ ├── alloydbgetcluster
│ │ │ │ ├── alloydbgetcluster_test.go
│ │ │ │ └── alloydbgetcluster.go
│ │ │ ├── alloydbgetinstance
│ │ │ │ ├── alloydbgetinstance_test.go
│ │ │ │ └── alloydbgetinstance.go
│ │ │ ├── alloydbgetuser
│ │ │ │ ├── alloydbgetuser_test.go
│ │ │ │ └── alloydbgetuser.go
│ │ │ ├── alloydblistclusters
│ │ │ │ ├── alloydblistclusters_test.go
│ │ │ │ └── alloydblistclusters.go
│ │ │ ├── alloydblistinstances
│ │ │ │ ├── alloydblistinstances_test.go
│ │ │ │ └── alloydblistinstances.go
│ │ │ ├── alloydblistusers
│ │ │ │ ├── alloydblistusers_test.go
│ │ │ │ └── alloydblistusers.go
│ │ │ └── alloydbwaitforoperation
│ │ │ ├── alloydbwaitforoperation_test.go
│ │ │ └── alloydbwaitforoperation.go
│ │ ├── alloydbainl
│ │ │ ├── alloydbainl_test.go
│ │ │ └── alloydbainl.go
│ │ ├── bigquery
│ │ │ ├── bigqueryanalyzecontribution
│ │ │ │ ├── bigqueryanalyzecontribution_test.go
│ │ │ │ └── bigqueryanalyzecontribution.go
│ │ │ ├── bigquerycommon
│ │ │ │ ├── table_name_parser_test.go
│ │ │ │ ├── table_name_parser.go
│ │ │ │ └── util.go
│ │ │ ├── bigqueryconversationalanalytics
│ │ │ │ ├── bigqueryconversationalanalytics_test.go
│ │ │ │ └── bigqueryconversationalanalytics.go
│ │ │ ├── bigqueryexecutesql
│ │ │ │ ├── bigqueryexecutesql_test.go
│ │ │ │ └── bigqueryexecutesql.go
│ │ │ ├── bigqueryforecast
│ │ │ │ ├── bigqueryforecast_test.go
│ │ │ │ └── bigqueryforecast.go
│ │ │ ├── bigquerygetdatasetinfo
│ │ │ │ ├── bigquerygetdatasetinfo_test.go
│ │ │ │ └── bigquerygetdatasetinfo.go
│ │ │ ├── bigquerygettableinfo
│ │ │ │ ├── bigquerygettableinfo_test.go
│ │ │ │ └── bigquerygettableinfo.go
│ │ │ ├── bigquerylistdatasetids
│ │ │ │ ├── bigquerylistdatasetids_test.go
│ │ │ │ └── bigquerylistdatasetids.go
│ │ │ ├── bigquerylisttableids
│ │ │ │ ├── bigquerylisttableids_test.go
│ │ │ │ └── bigquerylisttableids.go
│ │ │ ├── bigquerysearchcatalog
│ │ │ │ ├── bigquerysearchcatalog_test.go
│ │ │ │ └── bigquerysearchcatalog.go
│ │ │ └── bigquerysql
│ │ │ ├── bigquerysql_test.go
│ │ │ └── bigquerysql.go
│ │ ├── bigtable
│ │ │ ├── bigtable_test.go
│ │ │ └── bigtable.go
│ │ ├── cassandra
│ │ │ └── cassandracql
│ │ │ ├── cassandracql_test.go
│ │ │ └── cassandracql.go
│ │ ├── clickhouse
│ │ │ ├── clickhouseexecutesql
│ │ │ │ ├── clickhouseexecutesql_test.go
│ │ │ │ └── clickhouseexecutesql.go
│ │ │ ├── clickhouselistdatabases
│ │ │ │ ├── clickhouselistdatabases_test.go
│ │ │ │ └── clickhouselistdatabases.go
│ │ │ ├── clickhouselisttables
│ │ │ │ ├── clickhouselisttables_test.go
│ │ │ │ └── clickhouselisttables.go
│ │ │ └── clickhousesql
│ │ │ ├── clickhousesql_test.go
│ │ │ └── clickhousesql.go
│ │ ├── cloudhealthcare
│ │ │ ├── cloudhealthcarefhirfetchpage
│ │ │ │ ├── cloudhealthcarefhirfetchpage_test.go
│ │ │ │ └── cloudhealthcarefhirfetchpage.go
│ │ │ ├── cloudhealthcarefhirpatienteverything
│ │ │ │ ├── cloudhealthcarefhirpatienteverything_test.go
│ │ │ │ └── cloudhealthcarefhirpatienteverything.go
│ │ │ ├── cloudhealthcarefhirpatientsearch
│ │ │ │ ├── cloudhealthcarefhirpatientsearch_test.go
│ │ │ │ └── cloudhealthcarefhirpatientsearch.go
│ │ │ ├── cloudhealthcaregetdataset
│ │ │ │ ├── cloudhealthcaregetdataset_test.go
│ │ │ │ └── cloudhealthcaregetdataset.go
│ │ │ ├── cloudhealthcaregetdicomstore
│ │ │ │ ├── cloudhealthcaregetdicomstore_test.go
│ │ │ │ └── cloudhealthcaregetdicomstore.go
│ │ │ ├── cloudhealthcaregetdicomstoremetrics
│ │ │ │ ├── cloudhealthcaregetdicomstoremetrics_test.go
│ │ │ │ └── cloudhealthcaregetdicomstoremetrics.go
│ │ │ ├── cloudhealthcaregetfhirresource
│ │ │ │ ├── cloudhealthcaregetfhirresource_test.go
│ │ │ │ └── cloudhealthcaregetfhirresource.go
│ │ │ ├── cloudhealthcaregetfhirstore
│ │ │ │ ├── cloudhealthcaregetfhirstore_test.go
│ │ │ │ └── cloudhealthcaregetfhirstore.go
│ │ │ ├── cloudhealthcaregetfhirstoremetrics
│ │ │ │ ├── cloudhealthcaregetfhirstoremetrics_test.go
│ │ │ │ └── cloudhealthcaregetfhirstoremetrics.go
│ │ │ ├── cloudhealthcarelistdicomstores
│ │ │ │ ├── cloudhealthcarelistdicomstores_test.go
│ │ │ │ └── cloudhealthcarelistdicomstores.go
│ │ │ ├── cloudhealthcarelistfhirstores
│ │ │ │ ├── cloudhealthcarelistfhirstores_test.go
│ │ │ │ └── cloudhealthcarelistfhirstores.go
│ │ │ ├── cloudhealthcareretrieverendereddicominstance
│ │ │ │ ├── cloudhealthcareretrieverendereddicominstance_test.go
│ │ │ │ └── cloudhealthcareretrieverendereddicominstance.go
│ │ │ ├── cloudhealthcaresearchdicominstances
│ │ │ │ ├── cloudhealthcaresearchdicominstances_test.go
│ │ │ │ └── cloudhealthcaresearchdicominstances.go
│ │ │ ├── cloudhealthcaresearchdicomseries
│ │ │ │ ├── cloudhealthcaresearchdicomseries_test.go
│ │ │ │ └── cloudhealthcaresearchdicomseries.go
│ │ │ ├── cloudhealthcaresearchdicomstudies
│ │ │ │ ├── cloudhealthcaresearchdicomstudies_test.go
│ │ │ │ └── cloudhealthcaresearchdicomstudies.go
│ │ │ └── common
│ │ │ └── util.go
│ │ ├── cloudmonitoring
│ │ │ ├── cloudmonitoring_test.go
│ │ │ └── cloudmonitoring.go
│ │ ├── cloudsql
│ │ │ ├── cloudsqlcreatedatabase
│ │ │ │ ├── cloudsqlcreatedatabase_test.go
│ │ │ │ └── cloudsqlcreatedatabase.go
│ │ │ ├── cloudsqlcreateusers
│ │ │ │ ├── cloudsqlcreateusers_test.go
│ │ │ │ └── cloudsqlcreateusers.go
│ │ │ ├── cloudsqlgetinstances
│ │ │ │ ├── cloudsqlgetinstances_test.go
│ │ │ │ └── cloudsqlgetinstances.go
│ │ │ ├── cloudsqllistdatabases
│ │ │ │ ├── cloudsqllistdatabases_test.go
│ │ │ │ └── cloudsqllistdatabases.go
│ │ │ ├── cloudsqllistinstances
│ │ │ │ ├── cloudsqllistinstances_test.go
│ │ │ │ └── cloudsqllistinstances.go
│ │ │ └── cloudsqlwaitforoperation
│ │ │ ├── cloudsqlwaitforoperation_test.go
│ │ │ └── cloudsqlwaitforoperation.go
│ │ ├── cloudsqlmssql
│ │ │ └── cloudsqlmssqlcreateinstance
│ │ │ ├── cloudsqlmssqlcreateinstance_test.go
│ │ │ └── cloudsqlmssqlcreateinstance.go
│ │ ├── cloudsqlmysql
│ │ │ └── cloudsqlmysqlcreateinstance
│ │ │ ├── cloudsqlmysqlcreateinstance_test.go
│ │ │ └── cloudsqlmysqlcreateinstance.go
│ │ ├── cloudsqlpg
│ │ │ └── cloudsqlpgcreateinstances
│ │ │ ├── cloudsqlpgcreateinstances_test.go
│ │ │ └── cloudsqlpgcreateinstances.go
│ │ ├── common_test.go
│ │ ├── common.go
│ │ ├── couchbase
│ │ │ ├── couchbase_test.go
│ │ │ └── couchbase.go
│ │ ├── dataform
│ │ │ └── dataformcompilelocal
│ │ │ ├── dataformcompilelocal_test.go
│ │ │ └── dataformcompilelocal.go
│ │ ├── dataplex
│ │ │ ├── dataplexlookupentry
│ │ │ │ ├── dataplexlookupentry_test.go
│ │ │ │ └── dataplexlookupentry.go
│ │ │ ├── dataplexsearchaspecttypes
│ │ │ │ ├── dataplexsearchaspecttypes_test.go
│ │ │ │ └── dataplexsearchaspecttypes.go
│ │ │ └── dataplexsearchentries
│ │ │ ├── dataplexsearchentries_test.go
│ │ │ └── dataplexsearchentries.go
│ │ ├── dgraph
│ │ │ ├── dgraph_test.go
│ │ │ └── dgraph.go
│ │ ├── elasticsearch
│ │ │ └── elasticsearchesql
│ │ │ ├── elasticsearchesql_test.go
│ │ │ └── elasticsearchesql.go
│ │ ├── firebird
│ │ │ ├── firebirdexecutesql
│ │ │ │ ├── firebirdexecutesql_test.go
│ │ │ │ └── firebirdexecutesql.go
│ │ │ └── firebirdsql
│ │ │ ├── firebirdsql_test.go
│ │ │ └── firebirdsql.go
│ │ ├── firestore
│ │ │ ├── firestoreadddocuments
│ │ │ │ ├── firestoreadddocuments_test.go
│ │ │ │ └── firestoreadddocuments.go
│ │ │ ├── firestoredeletedocuments
│ │ │ │ ├── firestoredeletedocuments_test.go
│ │ │ │ └── firestoredeletedocuments.go
│ │ │ ├── firestoregetdocuments
│ │ │ │ ├── firestoregetdocuments_test.go
│ │ │ │ └── firestoregetdocuments.go
│ │ │ ├── firestoregetrules
│ │ │ │ ├── firestoregetrules_test.go
│ │ │ │ └── firestoregetrules.go
│ │ │ ├── firestorelistcollections
│ │ │ │ ├── firestorelistcollections_test.go
│ │ │ │ └── firestorelistcollections.go
│ │ │ ├── firestorequery
│ │ │ │ ├── firestorequery_test.go
│ │ │ │ └── firestorequery.go
│ │ │ ├── firestorequerycollection
│ │ │ │ ├── firestorequerycollection_test.go
│ │ │ │ └── firestorequerycollection.go
│ │ │ ├── firestoreupdatedocument
│ │ │ │ ├── firestoreupdatedocument_test.go
│ │ │ │ └── firestoreupdatedocument.go
│ │ │ ├── firestorevalidaterules
│ │ │ │ ├── firestorevalidaterules_test.go
│ │ │ │ └── firestorevalidaterules.go
│ │ │ └── util
│ │ │ ├── converter_test.go
│ │ │ ├── converter.go
│ │ │ ├── validator_test.go
│ │ │ └── validator.go
│ │ ├── http
│ │ │ ├── http_test.go
│ │ │ └── http.go
│ │ ├── http_method.go
│ │ ├── looker
│ │ │ ├── lookeradddashboardelement
│ │ │ │ ├── lookeradddashboardelement_test.go
│ │ │ │ └── lookeradddashboardelement.go
│ │ │ ├── lookercommon
│ │ │ │ ├── lookercommon_test.go
│ │ │ │ └── lookercommon.go
│ │ │ ├── lookerconversationalanalytics
│ │ │ │ ├── lookerconversationalanalytics_test.go
│ │ │ │ └── lookerconversationalanalytics.go
│ │ │ ├── lookercreateprojectfile
│ │ │ │ ├── lookercreateprojectfile_test.go
│ │ │ │ └── lookercreateprojectfile.go
│ │ │ ├── lookerdeleteprojectfile
│ │ │ │ ├── lookerdeleteprojectfile_test.go
│ │ │ │ └── lookerdeleteprojectfile.go
│ │ │ ├── lookerdevmode
│ │ │ │ ├── lookerdevmode_test.go
│ │ │ │ └── lookerdevmode.go
│ │ │ ├── lookergetconnectiondatabases
│ │ │ │ ├── lookergetconnectiondatabases_test.go
│ │ │ │ └── lookergetconnectiondatabases.go
│ │ │ ├── lookergetconnections
│ │ │ │ ├── lookergetconnections_test.go
│ │ │ │ └── lookergetconnections.go
│ │ │ ├── lookergetconnectionschemas
│ │ │ │ ├── lookergetconnectionschemas_test.go
│ │ │ │ └── lookergetconnectionschemas.go
│ │ │ ├── lookergetconnectiontablecolumns
│ │ │ │ ├── lookergetconnectiontablecolumns_test.go
│ │ │ │ └── lookergetconnectiontablecolumns.go
│ │ │ ├── lookergetconnectiontables
│ │ │ │ ├── lookergetconnectiontables_test.go
│ │ │ │ └── lookergetconnectiontables.go
│ │ │ ├── lookergetdashboards
│ │ │ │ ├── lookergetdashboards_test.go
│ │ │ │ └── lookergetdashboards.go
│ │ │ ├── lookergetdimensions
│ │ │ │ ├── lookergetdimensions_test.go
│ │ │ │ └── lookergetdimensions.go
│ │ │ ├── lookergetexplores
│ │ │ │ ├── lookergetexplores_test.go
│ │ │ │ └── lookergetexplores.go
│ │ │ ├── lookergetfilters
│ │ │ │ ├── lookergetfilters_test.go
│ │ │ │ └── lookergetfilters.go
│ │ │ ├── lookergetlooks
│ │ │ │ ├── lookergetlooks_test.go
│ │ │ │ └── lookergetlooks.go
│ │ │ ├── lookergetmeasures
│ │ │ │ ├── lookergetmeasures_test.go
│ │ │ │ └── lookergetmeasures.go
│ │ │ ├── lookergetmodels
│ │ │ │ ├── lookergetmodels_test.go
│ │ │ │ └── lookergetmodels.go
│ │ │ ├── lookergetparameters
│ │ │ │ ├── lookergetparameters_test.go
│ │ │ │ └── lookergetparameters.go
│ │ │ ├── lookergetprojectfile
│ │ │ │ ├── lookergetprojectfile_test.go
│ │ │ │ └── lookergetprojectfile.go
│ │ │ ├── lookergetprojectfiles
│ │ │ │ ├── lookergetprojectfiles_test.go
│ │ │ │ └── lookergetprojectfiles.go
│ │ │ ├── lookergetprojects
│ │ │ │ ├── lookergetprojects_test.go
│ │ │ │ └── lookergetprojects.go
│ │ │ ├── lookerhealthanalyze
│ │ │ │ ├── lookerhealthanalyze_test.go
│ │ │ │ └── lookerhealthanalyze.go
│ │ │ ├── lookerhealthpulse
│ │ │ │ ├── lookerhealthpulse_test.go
│ │ │ │ └── lookerhealthpulse.go
│ │ │ ├── lookerhealthvacuum
│ │ │ │ ├── lookerhealthvacuum_test.go
│ │ │ │ └── lookerhealthvacuum.go
│ │ │ ├── lookermakedashboard
│ │ │ │ ├── lookermakedashboard_test.go
│ │ │ │ └── lookermakedashboard.go
│ │ │ ├── lookermakelook
│ │ │ │ ├── lookermakelook_test.go
│ │ │ │ └── lookermakelook.go
│ │ │ ├── lookerquery
│ │ │ │ ├── lookerquery_test.go
│ │ │ │ └── lookerquery.go
│ │ │ ├── lookerquerysql
│ │ │ │ ├── lookerquerysql_test.go
│ │ │ │ └── lookerquerysql.go
│ │ │ ├── lookerqueryurl
│ │ │ │ ├── lookerqueryurl_test.go
│ │ │ │ └── lookerqueryurl.go
│ │ │ ├── lookerrundashboard
│ │ │ │ ├── lookerrundashboard_test.go
│ │ │ │ └── lookerrundashboard.go
│ │ │ ├── lookerrunlook
│ │ │ │ ├── lookerrunlook_test.go
│ │ │ │ └── lookerrunlook.go
│ │ │ └── lookerupdateprojectfile
│ │ │ ├── lookerupdateprojectfile_test.go
│ │ │ └── lookerupdateprojectfile.go
│ │ ├── mindsdb
│ │ │ ├── mindsdbexecutesql
│ │ │ │ ├── mindsdbexecutesql_test.go
│ │ │ │ └── mindsdbexecutesql.go
│ │ │ └── mindsdbsql
│ │ │ ├── mindsdbsql_test.go
│ │ │ └── mindsdbsql.go
│ │ ├── mongodb
│ │ │ ├── mongodbaggregate
│ │ │ │ ├── mongodbaggregate_test.go
│ │ │ │ └── mongodbaggregate.go
│ │ │ ├── mongodbdeletemany
│ │ │ │ ├── mongodbdeletemany_test.go
│ │ │ │ └── mongodbdeletemany.go
│ │ │ ├── mongodbdeleteone
│ │ │ │ ├── mongodbdeleteone_test.go
│ │ │ │ └── mongodbdeleteone.go
│ │ │ ├── mongodbfind
│ │ │ │ ├── mongodbfind_test.go
│ │ │ │ └── mongodbfind.go
│ │ │ ├── mongodbfindone
│ │ │ │ ├── mongodbfindone_test.go
│ │ │ │ └── mongodbfindone.go
│ │ │ ├── mongodbinsertmany
│ │ │ │ ├── mongodbinsertmany_test.go
│ │ │ │ └── mongodbinsertmany.go
│ │ │ ├── mongodbinsertone
│ │ │ │ ├── mongodbinsertone_test.go
│ │ │ │ └── mongodbinsertone.go
│ │ │ ├── mongodbupdatemany
│ │ │ │ ├── mongodbupdatemany_test.go
│ │ │ │ └── mongodbupdatemany.go
│ │ │ └── mongodbupdateone
│ │ │ ├── mongodbupdateone_test.go
│ │ │ └── mongodbupdateone.go
│ │ ├── mssql
│ │ │ ├── mssqlexecutesql
│ │ │ │ ├── mssqlexecutesql_test.go
│ │ │ │ └── mssqlexecutesql.go
│ │ │ ├── mssqllisttables
│ │ │ │ ├── mssqllisttables_test.go
│ │ │ │ └── mssqllisttables.go
│ │ │ └── mssqlsql
│ │ │ ├── mssqlsql_test.go
│ │ │ └── mssqlsql.go
│ │ ├── mysql
│ │ │ ├── mysqlcommon
│ │ │ │ └── mysqlcommon.go
│ │ │ ├── mysqlexecutesql
│ │ │ │ ├── mysqlexecutesql_test.go
│ │ │ │ └── mysqlexecutesql.go
│ │ │ ├── mysqllistactivequeries
│ │ │ │ ├── mysqllistactivequeries_test.go
│ │ │ │ └── mysqllistactivequeries.go
│ │ │ ├── mysqllisttablefragmentation
│ │ │ │ ├── mysqllisttablefragmentation_test.go
│ │ │ │ └── mysqllisttablefragmentation.go
│ │ │ ├── mysqllisttables
│ │ │ │ ├── mysqllisttables_test.go
│ │ │ │ └── mysqllisttables.go
│ │ │ ├── mysqllisttablesmissinguniqueindexes
│ │ │ │ ├── mysqllisttablesmissinguniqueindexes_test.go
│ │ │ │ └── mysqllisttablesmissinguniqueindexes.go
│ │ │ └── mysqlsql
│ │ │ ├── mysqlsql_test.go
│ │ │ └── mysqlsql.go
│ │ ├── neo4j
│ │ │ ├── neo4jcypher
│ │ │ │ ├── neo4jcypher_test.go
│ │ │ │ └── neo4jcypher.go
│ │ │ ├── neo4jexecutecypher
│ │ │ │ ├── classifier
│ │ │ │ │ ├── classifier_test.go
│ │ │ │ │ └── classifier.go
│ │ │ │ ├── neo4jexecutecypher_test.go
│ │ │ │ └── neo4jexecutecypher.go
│ │ │ └── neo4jschema
│ │ │ ├── cache
│ │ │ │ ├── cache_test.go
│ │ │ │ └── cache.go
│ │ │ ├── helpers
│ │ │ │ ├── helpers_test.go
│ │ │ │ └── helpers.go
│ │ │ ├── neo4jschema_test.go
│ │ │ ├── neo4jschema.go
│ │ │ └── types
│ │ │ └── types.go
│ │ ├── oceanbase
│ │ │ ├── oceanbaseexecutesql
│ │ │ │ ├── oceanbaseexecutesql_test.go
│ │ │ │ └── oceanbaseexecutesql.go
│ │ │ └── oceanbasesql
│ │ │ ├── oceanbasesql_test.go
│ │ │ └── oceanbasesql.go
│ │ ├── oracle
│ │ │ ├── oracleexecutesql
│ │ │ │ └── oracleexecutesql.go
│ │ │ └── oraclesql
│ │ │ └── oraclesql.go
│ │ ├── parameters_test.go
│ │ ├── parameters.go
│ │ ├── postgres
│ │ │ ├── postgresexecutesql
│ │ │ │ ├── postgresexecutesql_test.go
│ │ │ │ └── postgresexecutesql.go
│ │ │ ├── postgreslistactivequeries
│ │ │ │ ├── postgreslistactivequeries_test.go
│ │ │ │ └── postgreslistactivequeries.go
│ │ │ ├── postgreslistavailableextensions
│ │ │ │ ├── postgreslistavailableextensions_test.go
│ │ │ │ └── postgreslistavailableextensions.go
│ │ │ ├── postgreslistinstalledextensions
│ │ │ │ ├── postgreslistinstalledextensions_test.go
│ │ │ │ └── postgreslistinstalledextensions.go
│ │ │ ├── postgreslistschemas
│ │ │ │ ├── postgreslistschemas_test.go
│ │ │ │ └── postgreslistschemas.go
│ │ │ ├── postgreslisttables
│ │ │ │ ├── postgreslisttables_test.go
│ │ │ │ └── postgreslisttables.go
│ │ │ ├── postgreslistviews
│ │ │ │ ├── postgreslistviews_test.go
│ │ │ │ └── postgreslistviews.go
│ │ │ └── postgressql
│ │ │ ├── postgressql_test.go
│ │ │ └── postgressql.go
│ │ ├── redis
│ │ │ ├── redis_test.go
│ │ │ └── redis.go
│ │ ├── serverlessspark
│ │ │ ├── serverlesssparkcancelbatch
│ │ │ │ ├── serverlesssparkcancelbatch_test.go
│ │ │ │ └── serverlesssparkcancelbatch.go
│ │ │ ├── serverlesssparkgetbatch
│ │ │ │ ├── serverlesssparkgetbatch_test.go
│ │ │ │ └── serverlesssparkgetbatch.go
│ │ │ └── serverlesssparklistbatches
│ │ │ ├── serverlesssparklistbatches_test.go
│ │ │ └── serverlesssparklistbatches.go
│ │ ├── singlestore
│ │ │ ├── singlestoreexecutesql
│ │ │ │ ├── singlestoreexecutesql_test.go
│ │ │ │ └── singlestoreexecutesql.go
│ │ │ └── singlestoresql
│ │ │ ├── singlestoresql_test.go
│ │ │ └── singlestoresql.go
│ │ ├── spanner
│ │ │ ├── spannerexecutesql
│ │ │ │ ├── spannerexecutesql_test.go
│ │ │ │ └── spannerexecutesql.go
│ │ │ ├── spannerlisttables
│ │ │ │ ├── spannerlisttables_test.go
│ │ │ │ └── spannerlisttables.go
│ │ │ └── spannersql
│ │ │ ├── spanner_test.go
│ │ │ └── spannersql.go
│ │ ├── sqlite
│ │ │ ├── sqliteexecutesql
│ │ │ │ ├── sqliteexecutesql_test.go
│ │ │ │ └── sqliteexecutesql.go
│ │ │ └── sqlitesql
│ │ │ ├── sqlitesql_test.go
│ │ │ └── sqlitesql.go
│ │ ├── tidb
│ │ │ ├── tidbexecutesql
│ │ │ │ ├── tidbexecutesql_test.go
│ │ │ │ └── tidbexecutesql.go
│ │ │ └── tidbsql
│ │ │ ├── tidbsql_test.go
│ │ │ └── tidbsql.go
│ │ ├── tools_test.go
│ │ ├── tools.go
│ │ ├── toolsets.go
│ │ ├── trino
│ │ │ ├── trinoexecutesql
│ │ │ │ ├── trinoexecutesql_test.go
│ │ │ │ └── trinoexecutesql.go
│ │ │ └── trinosql
│ │ │ ├── trinosql_test.go
│ │ │ └── trinosql.go
│ │ ├── utility
│ │ │ └── wait
│ │ │ ├── wait_test.go
│ │ │ └── wait.go
│ │ ├── valkey
│ │ │ ├── valkey_test.go
│ │ │ └── valkey.go
│ │ └── yugabytedbsql
│ │ ├── yugabytedbsql_test.go
│ │ └── yugabytedbsql.go
│ └── util
│ ├── orderedmap
│ │ ├── orderedmap_test.go
│ │ └── orderedmap.go
│ └── util.go
├── LICENSE
├── logo.png
├── main.go
├── MCP-TOOLBOX-EXTENSION.md
├── README.md
└── tests
├── alloydb
│ ├── alloydb_integration_test.go
│ └── alloydb_wait_for_operation_test.go
├── alloydbainl
│ └── alloydb_ai_nl_integration_test.go
├── alloydbpg
│ └── alloydb_pg_integration_test.go
├── auth.go
├── bigquery
│ └── bigquery_integration_test.go
├── bigtable
│ └── bigtable_integration_test.go
├── cassandra
│ └── cassandra_integration_test.go
├── clickhouse
│ └── clickhouse_integration_test.go
├── cloudhealthcare
│ └── cloud_healthcare_integration_test.go
├── cloudmonitoring
│ └── cloud_monitoring_integration_test.go
├── cloudsql
│ ├── cloud_sql_create_database_test.go
│ ├── cloud_sql_create_users_test.go
│ ├── cloud_sql_get_instances_test.go
│ ├── cloud_sql_list_databases_test.go
│ ├── cloudsql_list_instances_test.go
│ └── cloudsql_wait_for_operation_test.go
├── cloudsqlmssql
│ ├── cloud_sql_mssql_create_instance_integration_test.go
│ └── cloud_sql_mssql_integration_test.go
├── cloudsqlmysql
│ ├── cloud_sql_mysql_create_instance_integration_test.go
│ └── cloud_sql_mysql_integration_test.go
├── cloudsqlpg
│ ├── cloud_sql_pg_create_instances_test.go
│ └── cloud_sql_pg_integration_test.go
├── common.go
├── couchbase
│ └── couchbase_integration_test.go
├── dataform
│ └── dataform_integration_test.go
├── dataplex
│ └── dataplex_integration_test.go
├── dgraph
│ └── dgraph_integration_test.go
├── elasticsearch
│ └── elasticsearch_integration_test.go
├── firebird
│ └── firebird_integration_test.go
├── firestore
│ └── firestore_integration_test.go
├── http
│ └── http_integration_test.go
├── looker
│ └── looker_integration_test.go
├── mindsdb
│ └── mindsdb_integration_test.go
├── mongodb
│ └── mongodb_integration_test.go
├── mssql
│ └── mssql_integration_test.go
├── mysql
│ └── mysql_integration_test.go
├── neo4j
│ └── neo4j_integration_test.go
├── oceanbase
│ └── oceanbase_integration_test.go
├── option.go
├── oracle
│ └── oracle_integration_test.go
├── postgres
│ └── postgres_integration_test.go
├── redis
│ └── redis_test.go
├── server.go
├── serverlessspark
│ └── serverless_spark_integration_test.go
├── singlestore
│ └── singlestore_integration_test.go
├── source.go
├── spanner
│ └── spanner_integration_test.go
├── sqlite
│ └── sqlite_integration_test.go
├── tidb
│ └── tidb_integration_test.go
├── tool.go
├── trino
│ └── trino_integration_test.go
├── utility
│ └── wait_integration_test.go
├── valkey
│ └── valkey_test.go
└── yugabytedb
└── yugabytedb_integration_test.go
```
# Files
--------------------------------------------------------------------------------
/tests/http/http_integration_test.go:
--------------------------------------------------------------------------------
```go
1 | // Copyright 2025 Google LLC
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package http
16 |
17 | import (
18 | "bytes"
19 | "context"
20 | "encoding/json"
21 | "fmt"
22 | "io"
23 | "net/http"
24 | "net/http/httptest"
25 | "reflect"
26 | "regexp"
27 | "strings"
28 | "testing"
29 | "time"
30 |
31 | "github.com/googleapis/genai-toolbox/internal/testutils"
32 | "github.com/googleapis/genai-toolbox/internal/tools"
33 | "github.com/googleapis/genai-toolbox/tests"
34 | )
35 |
36 | var (
37 | HttpSourceKind = "http"
38 | HttpToolKind = "http"
39 | )
40 |
41 | func getHTTPSourceConfig(t *testing.T) map[string]any {
42 | idToken, err := tests.GetGoogleIdToken(tests.ClientId)
43 | if err != nil {
44 | t.Fatalf("error getting ID token: %s", err)
45 | }
46 | idToken = "Bearer " + idToken
47 |
48 | return map[string]any{
49 | "kind": HttpSourceKind,
50 | "headers": map[string]string{"Authorization": idToken},
51 | }
52 | }
53 |
54 | // multiTool routes test-server requests to the handler for the requested path.
55 | func multiTool(w http.ResponseWriter, r *http.Request) {
56 | path := r.URL.Path
57 | path = strings.TrimPrefix(path, "/") // Remove leading slash
58 |
59 | switch path {
60 | case "tool0":
61 | handleTool0(w, r)
62 | case "tool1":
63 | handleTool1(w, r)
64 | case "tool1id":
65 | handleTool1Id(w, r)
66 | case "tool1name":
67 | handleTool1Name(w, r)
68 | case "tool2":
69 | handleTool2(w, r)
70 | case "tool3":
71 | handleTool3(w, r)
72 | case "toolQueryTest":
73 | handleQueryTest(w, r)
74 | default:
75 | http.NotFound(w, r) // Return 404 for unknown paths
76 | }
77 | }
78 |
79 | // handleQueryTest simply returns the raw query string it received so the test
80 | // can verify it's formatted correctly.
81 | func handleQueryTest(w http.ResponseWriter, r *http.Request) {
82 | // expect GET method
83 | if r.Method != http.MethodGet {
84 | errorMessage := fmt.Sprintf("expected GET method but got: %s", string(r.Method))
85 | http.Error(w, errorMessage, http.StatusBadRequest)
86 | return
87 | }
88 |
89 | w.WriteHeader(http.StatusOK)
90 | enc := json.NewEncoder(w)
91 | enc.SetEscapeHTML(false)
92 |
93 | err := enc.Encode(r.URL.RawQuery)
94 | if err != nil {
95 | http.Error(w, "Failed to write response", http.StatusInternalServerError)
96 | return
97 | }
98 | }
99 |
100 | // handleTool0 expects a POST request and returns a "hello world" JSON string.
101 | func handleTool0(w http.ResponseWriter, r *http.Request) {
102 | // expect POST method
103 | if r.Method != http.MethodPost {
104 | errorMessage := fmt.Sprintf("expected POST method but got: %s", string(r.Method))
105 | http.Error(w, errorMessage, http.StatusBadRequest)
106 | return
107 | }
108 | w.WriteHeader(http.StatusOK)
109 | response := "hello world"
110 | err := json.NewEncoder(w).Encode(response)
111 | if err != nil {
112 | http.Error(w, "Failed to encode JSON", http.StatusInternalServerError)
113 | return
114 | }
115 | }
116 |
117 | // handleTool1 expects a GET request with a JSON body and returns rows when the body's name is "Alice".
118 | func handleTool1(w http.ResponseWriter, r *http.Request) {
119 | // expect GET method
120 | if r.Method != http.MethodGet {
121 | errorMessage := fmt.Sprintf("expected GET method but got: %s", string(r.Method))
122 | http.Error(w, errorMessage, http.StatusBadRequest)
123 | return
124 | }
125 | // Parse request body
126 | var requestBody map[string]interface{}
127 | bodyBytes, readErr := io.ReadAll(r.Body)
128 | if readErr != nil {
129 | http.Error(w, "Bad Request: Failed to read request body", http.StatusBadRequest)
130 | return
131 | }
132 | defer r.Body.Close()
133 | err := json.Unmarshal(bodyBytes, &requestBody)
134 | if err != nil {
135 | errorMessage := fmt.Sprintf("Bad Request: Error unmarshalling request body: %s, Raw body: %s", err, string(bodyBytes))
136 | http.Error(w, errorMessage, http.StatusBadRequest)
137 | return
138 | }
139 |
140 | // Extract name
141 | name, ok := requestBody["name"].(string)
142 | if !ok || name == "" {
143 | http.Error(w, "Bad Request: Missing or invalid name", http.StatusBadRequest)
144 | return
145 | }
146 |
147 | if name == "Alice" {
148 | response := `[{"id":1,"name":"Alice"},{"id":3,"name":"Sid"}]`
149 | _, err := w.Write([]byte(response))
150 | if err != nil {
151 | http.Error(w, "Failed to write response", http.StatusInternalServerError)
152 | }
153 | return
154 | }
155 |
156 | http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
157 | }
158 |
159 | // handleTool1Id expects a GET request and returns a row when the "id" query parameter is "4".
160 | func handleTool1Id(w http.ResponseWriter, r *http.Request) {
161 | // expect GET method
162 | if r.Method != http.MethodGet {
163 | errorMessage := fmt.Sprintf("expected GET method but got: %s", string(r.Method))
164 | http.Error(w, errorMessage, http.StatusBadRequest)
165 | return
166 | }
167 |
168 | id := r.URL.Query().Get("id")
169 | if id == "4" {
170 | response := `[{"id":4,"name":null}]`
171 | _, err := w.Write([]byte(response))
172 | if err != nil {
173 | http.Error(w, "Failed to write response", http.StatusInternalServerError)
174 | }
175 | return
176 | }
177 | http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
178 | }
179 |
180 | // handleTool1Name expects a GET request and returns "null" when the "name" query parameter is absent.
181 | func handleTool1Name(w http.ResponseWriter, r *http.Request) {
182 | // expect GET method
183 | if r.Method != http.MethodGet {
184 | errorMessage := fmt.Sprintf("expected GET method but got: %s", string(r.Method))
185 | http.Error(w, errorMessage, http.StatusBadRequest)
186 | return
187 | }
188 |
189 | if !r.URL.Query().Has("name") {
190 | response := "null"
191 | _, err := w.Write([]byte(response))
192 | if err != nil {
193 | http.Error(w, "Failed to write response", http.StatusInternalServerError)
194 | }
195 | return
196 | }
197 | http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
198 | }
199 |
200 | // handleTool2 expects a GET request and returns a row when a non-empty "email" query parameter is present.
201 | func handleTool2(w http.ResponseWriter, r *http.Request) {
202 | // expect GET method
203 | if r.Method != http.MethodGet {
204 | errorMessage := fmt.Sprintf("expected GET method but got: %s", string(r.Method))
205 | http.Error(w, errorMessage, http.StatusBadRequest)
206 | return
207 | }
208 | email := r.URL.Query().Get("email")
209 | if email != "" {
210 | response := `[{"name":"Alice"}]`
211 | _, err := w.Write([]byte(response))
212 | if err != nil {
213 | http.Error(w, "Failed to write response", http.StatusInternalServerError)
214 | }
215 | return
216 | }
217 |
218 | http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
219 | }
220 |
221 | // handleTool3 expects a GET request and validates its headers, query parameters, and JSON body.
222 | func handleTool3(w http.ResponseWriter, r *http.Request) {
223 | // expect GET method
224 | if r.Method != http.MethodGet {
225 | errorMessage := fmt.Sprintf("expected GET method but got: %s", string(r.Method))
226 | http.Error(w, errorMessage, http.StatusBadRequest)
227 | return
228 | }
229 |
230 | // Check request headers
231 | expectedHeaders := map[string]string{
232 | "Content-Type": "application/json",
233 | "X-Custom-Header": "example",
234 | "X-Other-Header": "test",
235 | }
236 | for header, expectedValue := range expectedHeaders {
237 | if r.Header.Get(header) != expectedValue {
238 | errorMessage := fmt.Sprintf("Bad Request: Missing or incorrect header: %s", header)
239 | http.Error(w, errorMessage, http.StatusBadRequest)
240 | return
241 | }
242 | }
243 |
244 | // Check query parameters
245 | expectedQueryParams := map[string][]string{
246 | "id": []string{"2", "1", "3"},
247 | "country": []string{"US"},
248 | }
249 | query := r.URL.Query()
250 | for param, expectedValueSlice := range expectedQueryParams {
251 | values, ok := query[param]
252 | if ok {
253 | if !reflect.DeepEqual(expectedValueSlice, values) {
254 | errorMessage := fmt.Sprintf("Bad Request: Incorrect query parameter: %s, actual: %s", param, query[param])
255 | http.Error(w, errorMessage, http.StatusBadRequest)
256 | return
257 | }
258 | } else {
259 | errorMessage := fmt.Sprintf("Bad Request: Missing query parameter: %s, actual: %s", param, query[param])
260 | http.Error(w, errorMessage, http.StatusBadRequest)
261 | return
262 | }
263 | }
264 |
265 | // Parse request body
266 | var requestBody map[string]interface{}
267 | bodyBytes, readErr := io.ReadAll(r.Body)
268 | if readErr != nil {
269 | http.Error(w, "Bad Request: Failed to read request body", http.StatusBadRequest)
270 | return
271 | }
272 | defer r.Body.Close()
273 | err := json.Unmarshal(bodyBytes, &requestBody)
274 | if err != nil {
275 | errorMessage := fmt.Sprintf("Bad Request: Error unmarshalling request body: %s, Raw body: %s", err, string(bodyBytes))
276 | http.Error(w, errorMessage, http.StatusBadRequest)
277 | return
278 | }
279 |
280 | // Check request body
281 | expectedBody := map[string]interface{}{
282 | "place": "zoo",
283 | "animals": []any{"rabbit", "ostrich", "whale"},
284 | }
285 |
286 | if !reflect.DeepEqual(requestBody, expectedBody) {
287 | errorMessage := fmt.Sprintf("Bad Request: Incorrect request body. Expected: %v, Got: %v", expectedBody, requestBody)
288 | http.Error(w, errorMessage, http.StatusBadRequest)
289 | return
290 | }
291 |
292 | response := "hello world"
293 | err = json.NewEncoder(w).Encode(response)
294 | if err != nil {
295 | http.Error(w, "Failed to encode JSON", http.StatusInternalServerError)
296 | return
297 | }
298 | }
299 |
300 | func TestHttpToolEndpoints(t *testing.T) {
301 | // start a test server
302 | server := httptest.NewServer(http.HandlerFunc(multiTool))
303 | defer server.Close()
304 |
305 | sourceConfig := getHTTPSourceConfig(t)
306 | sourceConfig["baseUrl"] = server.URL
307 | ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
308 | defer cancel()
309 |
310 | var args []string
311 |
312 | toolsFile := getHTTPToolsConfig(sourceConfig, HttpToolKind)
313 | cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
314 | if err != nil {
315 | t.Fatalf("command initialization returned an error: %s", err)
316 | }
317 | defer cleanup()
318 |
319 | waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
320 | defer cancel()
321 | out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
322 | if err != nil {
323 | t.Logf("toolbox command logs: \n%s", out)
324 | t.Fatalf("toolbox didn't start successfully: %s", err)
325 | }
326 |
327 | // Run tests
328 | tests.RunToolGetTest(t)
329 | tests.RunToolInvokeTest(t, `"hello world"`, tests.DisableArrayTest())
330 | runAdvancedHTTPInvokeTest(t)
331 | runQueryParamInvokeTest(t)
332 | }
333 |
334 | // runQueryParamInvokeTest runs the tool invoke endpoint for the query param test tool
335 | func runQueryParamInvokeTest(t *testing.T) {
336 | invokeTcs := []struct {
337 | name string
338 | api string
339 | requestBody io.Reader
340 | want string
341 | isErr bool
342 | }{
343 | {
344 | name: "invoke query-param-tool (optional omitted)",
345 | api: "http://127.0.0.1:5000/api/tool/my-query-param-tool/invoke",
346 | requestBody: bytes.NewBuffer([]byte(`{"reqId": "test1"}`)),
347 | want: `"reqId=test1"`,
348 | },
349 | {
350 | name: "invoke query-param-tool (some optional nil)",
351 | api: "http://127.0.0.1:5000/api/tool/my-query-param-tool/invoke",
352 | requestBody: bytes.NewBuffer([]byte(`{"reqId": "test2", "page": "5", "filter": null}`)),
353 | want: `"page=5\u0026reqId=test2"`, // 'filter' omitted
354 | },
355 | {
356 | name: "invoke query-param-tool (some optional absent)",
357 | api: "http://127.0.0.1:5000/api/tool/my-query-param-tool/invoke",
358 | requestBody: bytes.NewBuffer([]byte(`{"reqId": "test2", "page": "5"}`)),
359 | want: `"page=5\u0026reqId=test2"`, // 'filter' omitted
360 | },
361 | {
362 | name: "invoke query-param-tool (required param nil)",
363 | api: "http://127.0.0.1:5000/api/tool/my-query-param-tool/invoke",
364 | requestBody: bytes.NewBuffer([]byte(`{"reqId": null, "page": "1"}`)),
365 | want: `"page=1\u0026reqId="`, // reqId becomes "",
366 | },
367 | }
368 | for _, tc := range invokeTcs {
369 | t.Run(tc.name, func(t *testing.T) {
370 | // Send Tool invocation request
371 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
372 | if err != nil {
373 | t.Fatalf("unable to create request: %s", err)
374 | }
375 | req.Header.Add("Content-type", "application/json")
376 |
377 | resp, err := http.DefaultClient.Do(req)
378 | if err != nil {
379 | t.Fatalf("unable to send request: %s", err)
380 | }
381 | defer resp.Body.Close()
382 |
383 | if resp.StatusCode != http.StatusOK {
384 | bodyBytes, _ := io.ReadAll(resp.Body)
385 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
386 | }
387 |
388 | // Check response body
389 | var body map[string]interface{}
390 | err = json.NewDecoder(resp.Body).Decode(&body)
391 | if err != nil {
392 | t.Fatalf("error parsing response body: %v", err)
393 | }
394 | got, ok := body["result"].(string)
395 | if !ok {
396 | bodyBytes, _ := json.Marshal(body)
397 | t.Fatalf("unable to find result in response body, got: %s", string(bodyBytes))
398 | }
399 |
400 | if got != tc.want {
401 | t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
402 | }
403 | })
404 | }
405 | }
406 |
407 | // runAdvancedHTTPInvokeTest runs the tool invoke endpoint for the advanced HTTP tool
408 | func runAdvancedHTTPInvokeTest(t *testing.T) {
409 | // Test HTTP tool invoke endpoint
410 | invokeTcs := []struct {
411 | name string
412 | api string
413 | requestHeader map[string]string
414 | requestBody io.Reader
415 | want string
416 | isErr bool
417 | }{
418 | {
419 | name: "invoke my-advanced-tool",
420 | api: "http://127.0.0.1:5000/api/tool/my-advanced-tool/invoke",
421 | requestHeader: map[string]string{},
422 | requestBody: bytes.NewBuffer([]byte(`{"animalArray": ["rabbit", "ostrich", "whale"], "id": 3, "path": "tool3", "country": "US", "X-Other-Header": "test"}`)),
423 | want: `"hello world"`,
424 | isErr: false,
425 | },
426 | {
427 | name: "invoke my-advanced-tool with wrong params",
428 | api: "http://127.0.0.1:5000/api/tool/my-advanced-tool/invoke",
429 | requestHeader: map[string]string{},
430 | requestBody: bytes.NewBuffer([]byte(`{"animalArray": ["rabbit", "ostrich", "whale"], "id": 4, "path": "tool3", "country": "US", "X-Other-Header": "test"}`)),
431 | isErr: true,
432 | },
433 | }
434 | for _, tc := range invokeTcs {
435 | t.Run(tc.name, func(t *testing.T) {
436 | // Send Tool invocation request
437 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
438 | if err != nil {
439 | t.Fatalf("unable to create request: %s", err)
440 | }
441 | req.Header.Add("Content-type", "application/json")
442 | for k, v := range tc.requestHeader {
443 | req.Header.Add(k, v)
444 | }
445 | resp, err := http.DefaultClient.Do(req)
446 | if err != nil {
447 | t.Fatalf("unable to send request: %s", err)
448 | }
449 | defer resp.Body.Close()
450 |
451 | if resp.StatusCode != http.StatusOK {
452 | if tc.isErr == true {
453 | return
454 | }
455 | bodyBytes, _ := io.ReadAll(resp.Body)
456 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
457 | }
458 |
459 | // Check response body
460 | var body map[string]interface{}
461 | err = json.NewDecoder(resp.Body).Decode(&body)
462 | if err != nil {
463 | t.Fatalf("error parsing response body")
464 | }
465 | got, ok := body["result"].(string)
466 | if !ok {
467 | t.Fatalf("unable to find result in response body")
468 | }
469 |
470 | if got != tc.want {
471 | t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
472 | }
473 | })
474 | }
475 | }
476 |
477 | // getHTTPToolsConfig returns a mock HTTP tool's config file
478 | func getHTTPToolsConfig(sourceConfig map[string]any, toolKind string) map[string]any {
479 | // Write config into a file and pass it to command
480 | otherSourceConfig := make(map[string]any)
481 | for k, v := range sourceConfig {
482 | otherSourceConfig[k] = v
483 | }
484 | otherSourceConfig["headers"] = map[string]string{"X-Custom-Header": "unexpected", "Content-Type": "application/json"}
485 | otherSourceConfig["queryParams"] = map[string]any{"id": 1, "name": "Sid"}
486 |
487 | toolsFile := map[string]any{
488 | "sources": map[string]any{
489 | "my-instance": sourceConfig,
490 | "other-instance": otherSourceConfig,
491 | },
492 | "authServices": map[string]any{
493 | "my-google-auth": map[string]any{
494 | "kind": "google",
495 | "clientId": tests.ClientId,
496 | },
497 | },
498 | "tools": map[string]any{
499 | "my-simple-tool": map[string]any{
500 | "kind": toolKind,
501 | "path": "/tool0",
502 | "method": "POST",
503 | "source": "my-instance",
504 | "requestBody": "{}",
505 | "description": "Simple tool to test end to end functionality.",
506 | },
507 | "my-tool": map[string]any{
508 | "kind": toolKind,
509 | "source": "my-instance",
510 | "method": "GET",
511 | "path": "/tool1",
512 | "description": "some description",
513 | "queryParams": []tools.Parameter{
514 | tools.NewIntParameter("id", "user ID")},
515 | "requestBody": `{
516 | "age": 36,
517 | "name": "{{.name}}"
518 | }
519 | `,
520 | "bodyParams": []tools.Parameter{tools.NewStringParameter("name", "user name")},
521 | "headers": map[string]string{"Content-Type": "application/json"},
522 | },
523 | "my-tool-by-id": map[string]any{
524 | "kind": toolKind,
525 | "source": "my-instance",
526 | "method": "GET",
527 | "path": "/tool1id",
528 | "description": "some description",
529 | "queryParams": []tools.Parameter{
530 | tools.NewIntParameter("id", "user ID")},
531 | "headers": map[string]string{"Content-Type": "application/json"},
532 | },
533 | "my-tool-by-name": map[string]any{
534 | "kind": toolKind,
535 | "source": "my-instance",
536 | "method": "GET",
537 | "path": "/tool1name",
538 | "description": "some description",
539 | "queryParams": []tools.Parameter{
540 | tools.NewStringParameterWithRequired("name", "user name", false)},
541 | "headers": map[string]string{"Content-Type": "application/json"},
542 | },
543 | "my-query-param-tool": map[string]any{
544 | "kind": toolKind,
545 | "source": "my-instance",
546 | "method": "GET",
547 | "path": "/toolQueryTest",
548 | "description": "Tool to test optional query parameters.",
549 | "queryParams": []tools.Parameter{
550 | tools.NewStringParameterWithRequired("reqId", "required ID", true),
551 | tools.NewStringParameterWithRequired("page", "optional page number", false),
552 | tools.NewStringParameterWithRequired("filter", "optional filter string", false),
553 | },
554 | },
555 | "my-auth-tool": map[string]any{
556 | "kind": toolKind,
557 | "source": "my-instance",
558 | "method": "GET",
559 | "path": "/tool2",
560 | "description": "some description",
561 | "requestBody": "{}",
562 | "queryParams": []tools.Parameter{
563 | tools.NewStringParameterWithAuth("email", "some description",
564 | []tools.ParamAuthService{{Name: "my-google-auth", Field: "email"}}),
565 | },
566 | },
567 | "my-auth-required-tool": map[string]any{
568 | "kind": toolKind,
569 | "source": "my-instance",
570 | "method": "POST",
571 | "path": "/tool0",
572 | "description": "some description",
573 | "requestBody": "{}",
574 | "authRequired": []string{"my-google-auth"},
575 | },
576 | "my-advanced-tool": map[string]any{
577 | "kind": toolKind,
578 | "source": "other-instance",
579 | "method": "get",
580 | "path": "/{{.path}}?id=2",
581 | "description": "some description",
582 | "headers": map[string]string{
583 | "X-Custom-Header": "example",
584 | },
585 | "pathParams": []tools.Parameter{
586 | &tools.StringParameter{
587 | CommonParameter: tools.CommonParameter{Name: "path", Type: "string", Desc: "path param"},
588 | },
589 | },
590 | "queryParams": []tools.Parameter{
591 | tools.NewIntParameter("id", "user ID"), tools.NewStringParameter("country", "country"),
592 | },
593 | "requestBody": `{
594 | "place": "zoo",
595 | "animals": {{json .animalArray }}
596 | }
597 | `,
598 | "bodyParams": []tools.Parameter{tools.NewArrayParameter("animalArray", "animals in the zoo", tools.NewStringParameter("animals", "desc"))},
599 | "headerParams": []tools.Parameter{tools.NewStringParameter("X-Other-Header", "custom header")},
600 | },
601 | },
602 | }
603 | return toolsFile
604 | }
605 |
```
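A note on the templated request bodies above: `my-advanced-tool` renders its `requestBody` through a Go template that uses a `json` helper (`{{json .animalArray}}`). A minimal sketch of how such a rendering could work, assuming the helper simply marshals the parameter value to JSON before substitution (the helper implementation here is an illustration, not the tool's actual code):

```go
package main

import (
	"encoding/json"
	"os"
	"text/template"
)

func main() {
	// Assumed helper: marshal a parameter value to JSON before substitution,
	// mirroring what `{{json .animalArray}}` in the requestBody suggests.
	funcs := template.FuncMap{
		"json": func(v any) (string, error) {
			b, err := json.Marshal(v)
			return string(b), err
		},
	}
	tmpl := template.Must(template.New("body").Funcs(funcs).Parse(
		`{"place": "zoo", "animals": {{json .animalArray}}}`))
	params := map[string]any{"animalArray": []string{"rabbit", "ostrich", "whale"}}
	// Prints: {"place": "zoo", "animals": ["rabbit","ostrich","whale"]}
	if err := tmpl.Execute(os.Stdout, params); err != nil {
		panic(err)
	}
}
```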
--------------------------------------------------------------------------------
/docs/en/resources/sources/dataplex.md:
--------------------------------------------------------------------------------
```markdown
1 | ---
2 | title: "Dataplex"
3 | type: docs
4 | weight: 1
5 | description: >
6 | Dataplex Universal Catalog is a unified, intelligent governance solution for data and AI assets in Google Cloud. Dataplex Universal Catalog powers AI, analytics, and business intelligence at scale.
7 | ---
8 |
9 | # Dataplex Source
10 |
11 | [Dataplex][dataplex-docs] Universal Catalog is a unified, intelligent governance
12 | solution for data and AI assets in Google Cloud. Dataplex Universal Catalog
13 | powers AI, analytics, and business intelligence at scale.
14 |
15 | At the heart of these governance capabilities is a catalog that contains a
16 | centralized inventory of the data assets in your organization. Dataplex
17 | Universal Catalog holds business, technical, and runtime metadata for all of
18 | your data. It helps you discover relationships and semantics in the metadata by
19 | applying artificial intelligence and machine learning.
20 |
21 | [dataplex-docs]: https://cloud.google.com/dataplex/docs
22 |
23 | ## Example
24 |
25 | ```yaml
26 | sources:
27 | my-dataplex-source:
28 | kind: "dataplex"
29 | project: "my-project-id"
30 | ```
31 |
32 | ## Sample System Prompt
33 |
34 | You can use the following system prompt as "Custom Instructions" in your client
35 | application.
36 |
37 | ```
38 | # Objective
39 | Your primary objective is to help discover, organize and manage metadata related to data assets.
40 |
41 | # Tone and Style
42 | 1. Adopt the persona of a senior subject matter expert
43 | 2. Your communication style must be:
44 | 1. Concise: Always favor brevity.
45 | 2. Direct: Avoid greetings (e.g., "Hi there!", "Certainly!"). Get straight to the point.
46 | Example (Incorrect): Hi there! I see that you are looking for...
47 | Example (Correct): This problem likely stems from...
48 | 3. Do not reiterate or summarize the question in the answer.
49 | 4. Crucially, always convey a tone of uncertainty and caution. Since you are interpreting metadata and have no way to externally verify your answers, never express complete confidence. Frame your responses as interpretations based solely on the provided metadata. Use a suggestive tone, not a prescriptive one:
50 | Example (Correct): "The entry describes..."
51 | Example (Correct): "According to the catalog,..."
52 | Example (Correct): "Based on the metadata,..."
53 | Example (Correct): "Based on the search results,..."
54 | 5. Do not make assumptions
55 |
56 | # Data Model
57 | ## Entries
58 | Entry represents a specific data asset. Entry acts as a metadata record for something that is managed by Catalog, such as:
59 |
60 | - A BigQuery table or dataset
61 | - A Cloud Storage bucket or folder
62 | - An on-premises SQL table
63 |
64 | ## Aspects
65 | While the Entry itself is a container, the rich descriptive information about the asset (e.g., schema, data types, business descriptions, classifications) is stored in associated components called Aspects. Aspects are created based on pre-defined blueprints known as Aspect Types.
66 |
67 | ## Aspect Types
68 | Aspect Type is a reusable template that defines the schema for a set of metadata fields. Think of an Aspect Type as a structure for the kind of metadata that is organized in the catalog within the Entry.
69 |
70 | Examples:
71 | - projects/dataplex-types/locations/global/aspectTypes/analytics-hub-exchange
72 | - projects/dataplex-types/locations/global/aspectTypes/analytics-hub
73 | - projects/dataplex-types/locations/global/aspectTypes/analytics-hub-listing
74 | - projects/dataplex-types/locations/global/aspectTypes/bigquery-connection
75 | - projects/dataplex-types/locations/global/aspectTypes/bigquery-data-policy
76 | - projects/dataplex-types/locations/global/aspectTypes/bigquery-dataset
77 | - projects/dataplex-types/locations/global/aspectTypes/bigquery-model
78 | - projects/dataplex-types/locations/global/aspectTypes/bigquery-policy
79 | - projects/dataplex-types/locations/global/aspectTypes/bigquery-routine
80 | - projects/dataplex-types/locations/global/aspectTypes/bigquery-row-access-policy
81 | - projects/dataplex-types/locations/global/aspectTypes/bigquery-table
82 | - projects/dataplex-types/locations/global/aspectTypes/bigquery-view
83 | - projects/dataplex-types/locations/global/aspectTypes/cloud-bigtable-instance
84 | - projects/dataplex-types/locations/global/aspectTypes/cloud-bigtable-table
85 | - projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-database
86 | - projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-instance
87 | - projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-table
88 | - projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-view
89 | - projects/dataplex-types/locations/global/aspectTypes/cloudsql-database
90 | - projects/dataplex-types/locations/global/aspectTypes/cloudsql-instance
91 | - projects/dataplex-types/locations/global/aspectTypes/cloudsql-schema
92 | - projects/dataplex-types/locations/global/aspectTypes/cloudsql-table
93 | - projects/dataplex-types/locations/global/aspectTypes/cloudsql-view
94 | - projects/dataplex-types/locations/global/aspectTypes/contacts
95 | - projects/dataplex-types/locations/global/aspectTypes/dataform-code-asset
96 | - projects/dataplex-types/locations/global/aspectTypes/dataform-repository
97 | - projects/dataplex-types/locations/global/aspectTypes/dataform-workspace
98 | - projects/dataplex-types/locations/global/aspectTypes/dataproc-metastore-database
99 | - projects/dataplex-types/locations/global/aspectTypes/dataproc-metastore-service
100 | - projects/dataplex-types/locations/global/aspectTypes/dataproc-metastore-table
101 | - projects/dataplex-types/locations/global/aspectTypes/data-product
102 | - projects/dataplex-types/locations/global/aspectTypes/data-quality-scorecard
103 | - projects/dataplex-types/locations/global/aspectTypes/external-connection
104 | - projects/dataplex-types/locations/global/aspectTypes/overview
105 | - projects/dataplex-types/locations/global/aspectTypes/pubsub-topic
106 | - projects/dataplex-types/locations/global/aspectTypes/schema
107 | - projects/dataplex-types/locations/global/aspectTypes/sensitive-data-protection-job-result
108 | - projects/dataplex-types/locations/global/aspectTypes/sensitive-data-protection-profile
109 | - projects/dataplex-types/locations/global/aspectTypes/sql-access
110 | - projects/dataplex-types/locations/global/aspectTypes/storage-bucket
111 | - projects/dataplex-types/locations/global/aspectTypes/storage-folder
112 | - projects/dataplex-types/locations/global/aspectTypes/storage
113 | - projects/dataplex-types/locations/global/aspectTypes/usage
114 |
115 | ## Entry Types
116 | Every Entry must conform to an Entry Type. The Entry Type acts as a template, defining the structure, required aspects, and constraints for Entries of that type.
117 |
118 | Examples:
119 | - projects/dataplex-types/locations/global/entryTypes/analytics-hub-exchange
120 | - projects/dataplex-types/locations/global/entryTypes/analytics-hub-listing
121 | - projects/dataplex-types/locations/global/entryTypes/bigquery-connection
122 | - projects/dataplex-types/locations/global/entryTypes/bigquery-data-policy
123 | - projects/dataplex-types/locations/global/entryTypes/bigquery-dataset
124 | - projects/dataplex-types/locations/global/entryTypes/bigquery-model
125 | - projects/dataplex-types/locations/global/entryTypes/bigquery-routine
126 | - projects/dataplex-types/locations/global/entryTypes/bigquery-row-access-policy
127 | - projects/dataplex-types/locations/global/entryTypes/bigquery-table
128 | - projects/dataplex-types/locations/global/entryTypes/bigquery-view
129 | - projects/dataplex-types/locations/global/entryTypes/cloud-bigtable-instance
130 | - projects/dataplex-types/locations/global/entryTypes/cloud-bigtable-table
131 | - projects/dataplex-types/locations/global/entryTypes/cloud-spanner-database
132 | - projects/dataplex-types/locations/global/entryTypes/cloud-spanner-instance
133 | - projects/dataplex-types/locations/global/entryTypes/cloud-spanner-table
134 | - projects/dataplex-types/locations/global/entryTypes/cloud-spanner-view
135 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-database
136 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-instance
137 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-table
138 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-view
139 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-database
140 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-instance
141 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-schema
142 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-table
143 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-view
144 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-database
145 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-instance
146 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-schema
147 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-table
148 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-view
149 | - projects/dataplex-types/locations/global/entryTypes/dataform-code-asset
150 | - projects/dataplex-types/locations/global/entryTypes/dataform-repository
151 | - projects/dataplex-types/locations/global/entryTypes/dataform-workspace
152 | - projects/dataplex-types/locations/global/entryTypes/dataproc-metastore-database
153 | - projects/dataplex-types/locations/global/entryTypes/dataproc-metastore-service
154 | - projects/dataplex-types/locations/global/entryTypes/dataproc-metastore-table
155 | - projects/dataplex-types/locations/global/entryTypes/pubsub-topic
156 | - projects/dataplex-types/locations/global/entryTypes/storage-bucket
157 | - projects/dataplex-types/locations/global/entryTypes/storage-folder
158 | - projects/dataplex-types/locations/global/entryTypes/vertexai-dataset
159 | - projects/dataplex-types/locations/global/entryTypes/vertexai-feature-group
160 | - projects/dataplex-types/locations/global/entryTypes/vertexai-feature-online-store
161 |
162 | ## Entry Groups
163 | Entries are organized within Entry Groups, which are logical groupings of Entries. An Entry Group acts as a namespace for its Entries.
164 |
165 | ## Entry Links
166 | Entries can be linked together using EntryLinks to represent relationships between data assets (e.g. foreign keys).
167 |
168 | # Tool instructions
169 | ## Tool: dataplex_search_entries
170 | ## General
171 | - Do not try to search within search results on your own.
172 | - Do not fetch multiple pages of results unless explicitly asked.
173 |
174 | ## Search syntax
175 |
176 | ### Simple search
177 | In its simplest form, a search query consists of a single predicate. Such a predicate can match several pieces of metadata:
178 |
179 | - A substring of a name, display name, or description of a resource
180 | - A substring of the type of a resource
181 | - A substring of a column name (or nested column name) in the schema of a resource
182 | - A substring of a project ID
183 | - A string from an overview description
184 |
185 | For example, the predicate foo matches the following resources:
186 | - Resource with the name foo.bar
187 | - Resource with the display name Foo Bar
188 | - Resource with the description This is the foo script
189 | - Resource with the exact type foo
190 | - Column foo_bar in the schema of a resource
191 | - Nested column foo_bar in the schema of a resource
192 | - Project prod-foo-bar
193 | - Resource with an overview containing the word foo
194 |
195 |
196 | ### Qualified predicates
197 | You can qualify a predicate by prefixing it with a key that restricts the matching to a specific piece of metadata:
198 | - An equal sign (=) restricts the search to an exact match.
199 | - A colon (:) after the key matches the predicate to either a substring or a token within the value in the search results.
200 |
201 | Tokenization splits the stream of text into a series of tokens, with each token usually corresponding to a single word. For example:
202 | - name:foo selects resources with names that contain the foo substring, like foo1 and barfoo.
203 | - description:foo selects resources with the foo token in the description, like bar and foo.
204 | - location=foo matches resources in a specified location with foo as the location name.
205 |
206 | The predicate keys type, system, location, and orgid support only the exact match (=) qualifier, not the substring qualifier (:). For example, type=foo or orgid=number.
207 |
208 | Search syntax supports the following qualifiers:
209 | - "name:x" - Matches x as a substring of the resource ID.
210 | - "displayname:x" - Match x as a substring of the resource display name.
211 | - "column:x" - Matches x as a substring of the column name (or nested column name) in the schema of the resource.
212 | - "description:x" - Matches x as a token in the resource description.
213 | - "label:bar" - Matches BigQuery resources that have a label (with some value) and the label key has bar as a substring.
214 | - "label=bar" - Matches BigQuery resources that have a label (with some value) and the label key equals bar as a string.
215 | - "label:bar:x" - Matches x as a substring in the value of a label with a key bar attached to a BigQuery resource.
216 | - "label=foo:bar" - Matches BigQuery resources where the key equals foo and the key value equals bar.
217 | - "label.foo=bar" - Matches BigQuery resources where the key equals foo and the key value equals bar.
218 | - "label.foo" - Matches BigQuery resources that have a label whose key equals foo as a string.
219 | - "type=TYPE" - Matches resources of a specific entry type or its type alias.
220 | - "projectid:bar" - Matches resources within Google Cloud projects that match bar as a substring in the ID.
221 | - "parent:x" - Matches x as a substring of the hierarchical path of a resource. It supports same syntax as `name` predicate.
222 | - "orgid=number" - Matches resources within a Google Cloud organization with the exact ID value of the number.
223 | - "system=SYSTEM" - Matches resources from a specified system. For example, system=bigquery matches BigQuery resources.
224 | - "location=LOCATION" - Matches resources in a specified location with an exact name. For example, location=us-central1 matches assets hosted in Iowa. BigQuery Omni assets support this qualifier by using the BigQuery Omni location name. For example, location=aws-us-east-1 matches BigQuery Omni assets in Northern Virginia.
225 | - "createtime" -
226 | Finds resources that were created within, before, or after a given date or time. For example "createtime:2019-01-01" matches resources created on 2019-01-01.
227 | - "updatetime" - Finds resources that were updated within, before, or after a given date or time. For example "updatetime>2019-01-01" matches resources updated after 2019-01-01.
228 |
229 | ### Aspect Search
230 | To search for entries based on their attached aspects, use the following query syntax.
231 |
232 | - "aspect:x" - Matches x as a substring of the full path to the aspect type of an aspect that is attached to the entry, in the format projectid.location.ASPECT_TYPE_ID
233 | - "aspect=x" - Matches x as the full path to the aspect type of an aspect that is attached to the entry, in the format projectid.location.ASPECT_TYPE_ID
234 | - "aspect:x{OPERATOR}value" - Searches for aspect field values. Matches x as a substring of the full path to the aspect
235 | type and field name of an aspect that is attached to the entry, in the format projectid.location.ASPECT_TYPE_ID.FIELD_NAME
236 |
237 | The list of supported {OPERATOR}s depends on the type of field in the aspect, as follows:
238 | - String: = (exact match) and : (substring)
239 | - All number types: =, :, <, >, <=, >=, =>, =<
240 | - Enum: =
241 | - Datetime: same as for numbers, but the values to compare are treated as datetimes instead of numbers
242 | - Boolean: =
243 |
244 | Only top-level fields of the aspect are searchable. For example, all of the following queries match entries where the value of the is-enrolled field in the employee-info aspect type is true. Other entries that match on the substring are also returned.
245 | - aspect:example-project.us-central1.employee-info.is-enrolled=true
246 | - aspect:example-project.us-central1.employee=true
247 | - aspect:employee=true
248 |
249 | Examples:
250 | You can use the following filters:
251 | - dataplex-types.global.bigquery-table.type={BIGLAKE_TABLE, BIGLAKE_OBJECT_TABLE, EXTERNAL_TABLE, TABLE}
252 | - dataplex-types.global.storage.type={STRUCTURED, UNSTRUCTURED}
253 |
254 | ### Logical operators
255 | A query can consist of several predicates with logical operators. If you don't specify an operator, logical AND is implied. For example, foo bar returns resources that match both predicate foo and predicate bar.
256 | Logical AND and logical OR are supported. For example, foo OR bar.
257 |
258 | You can negate a predicate with a - (hyphen) or NOT prefix. For example, -name:foo returns resources with names that don't match the predicate foo.
259 | Logical operators are case-sensitive. `OR` and `AND` are acceptable whereas `or` and `and` are not.
260 |
261 | ### Request
262 | 1. Always try to rewrite the prompt using search syntax.
263 |
264 | ### Response
265 | 1. If there are multiple search results found
266 | 1. Present the list of search results
267 | 2. Format the output as a nested ordered list, for example:
268 | Given
269 | ```
270 | {
271 | results: [
272 | {
273 | name: "projects/test-project/locations/us/entryGroups/@bigquery-aws-us-east-1/entries/users"
274 | entrySource: {
275 | displayName: "Users"
276 | description: "Table contains list of users."
277 | location: "aws-us-east-1"
278 | system: "BigQuery"
279 | }
280 | },
281 | {
282 | name: "projects/another_project/locations/us-central1/entryGroups/@bigquery/entries/top_customers"
283 | entrySource: {
284 | displayName: "Top customers",
285 | description: "Table contains list of best customers."
286 | location: "us-central1"
287 | system: "BigQuery"
288 | }
289 | },
290 | ]
291 | }
292 | ```
293 | Return output formatted as markdown nested list:
294 | ```
295 | * Users:
296 | - projectId: test-project
297 | - location: aws-us-east-1
298 | - description: Table contains list of users.
299 | * Top customers:
300 | - projectId: another_project
301 | - location: us-central1
302 | - description: Table contains list of best customers.
303 | ```
304 | 3. Ask the user to select one of the presented search results.
305 | 2. If there is only one search result found
306 | 1. Present the search result immediately.
307 | 3. If there are no search results found
308 | 1. Explain that no search results were found
309 | 2. Suggest providing a more specific search query.
310 |
311 | ## Tool: dataplex_lookup_entry
312 | ### Request
313 | 1. Always try to limit the size of the response by specifying the `aspect_types` parameter. Make sure to select view=CUSTOM when using the aspect_types parameter. If you do not know the name of the aspect type, use the `dataplex_search_aspect_types` tool.
314 | 2. If you do not know the name of the entry, use the `dataplex_search_entries` tool.
315 | ### Response
316 | 1. Unless asked for a specific aspect, respond with all aspects attached to the entry.
317 | ```
318 |
319 | ## Reference
320 |
321 | | **field** | **type** | **required** | **description** |
322 | |-----------|:--------:|:------------:|----------------------------------------------------------------------------------|
323 | | kind | string | true | Must be "dataplex". |
324 | | project | string | true | ID of the GCP project used for quota and billing purposes (e.g. "my-project-id").|
```
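To make the search syntax above concrete, here are a few illustrative queries that combine qualified predicates and logical operators (the label keys and name fragments are hypothetical):

```
system=bigquery type=TABLE name:orders          -> BigQuery tables whose resource ID contains "orders"
location=us-central1 -description:deprecated    -> entries in us-central1 without the "deprecated" token in the description
label.env=prod AND createtime>2024-01-01        -> BigQuery resources labeled env=prod that were created after 2024-01-01
```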
--------------------------------------------------------------------------------
/docs/en/getting-started/introduction/_index.md:
--------------------------------------------------------------------------------
```markdown
1 | ---
2 | title: "Introduction"
3 | type: docs
4 | weight: 1
5 | description: >
6 | An introduction to MCP Toolbox for Databases.
7 | ---
8 |
9 | MCP Toolbox for Databases is an open source MCP server for databases. It enables
10 | you to develop tools more easily, quickly, and securely by handling complexities
11 | such as connection pooling, authentication, and more.
12 |
13 | {{< notice note >}}
14 | This solution was originally named “Gen AI Toolbox for
15 | Databases” as its initial development predated MCP, but was renamed to align
16 | with recently added MCP compatibility.
17 | {{< /notice >}}
18 |
19 | ## Why Toolbox?
20 |
21 | Toolbox helps you build Gen AI tools that let your agents access data in your
22 | database. Toolbox provides:
23 |
24 | - **Simplified development**: Integrate tools into your agent in less than 10
25 | lines of code, reuse tools between multiple agents or frameworks, and deploy
26 | new versions of tools more easily.
27 | - **Better performance**: Best practices such as connection pooling,
28 | authentication, and more.
29 | - **Enhanced security**: Integrated auth for more secure access to your data.
30 | - **End-to-end observability**: Out of the box metrics and tracing with built-in
31 | support for OpenTelemetry.
32 |
33 | **⚡ Supercharge Your Workflow with an AI Database Assistant ⚡**
34 |
35 | Stop context-switching and let your AI assistant become a true co-developer. By
36 | [connecting your IDE to your databases with MCP Toolbox][connect-ide], you can
37 | delegate complex and time-consuming database tasks, allowing you to build faster
38 | and focus on what matters. This isn't just about code completion; it's about
39 | giving your AI the context it needs to handle the entire development lifecycle.
40 |
41 | Here’s how it will save you time:
42 |
43 | - **Query in Plain English**: Interact with your data using natural language
44 | right from your IDE. Ask complex questions like, *"How many orders were
45 | delivered in 2024, and what items were in them?"* without writing any SQL.
46 | - **Automate Database Management**: Simply describe your data needs, and let the
47 | AI assistant manage your database for you. It can handle generating queries,
48 | creating tables, adding indexes, and more.
49 | - **Generate Context-Aware Code**: Empower your AI assistant to generate
50 | application code and tests with a deep understanding of your real-time
51 | database schema. This accelerates the development cycle by ensuring the
52 | generated code is directly usable.
53 | - **Slash Development Overhead**: Radically reduce the time spent on manual
54 | setup and boilerplate. MCP Toolbox helps streamline lengthy database
55 | configurations, repetitive code, and error-prone schema migrations.
56 |
57 | Learn [how to connect your AI tools (IDEs) to Toolbox using MCP][connect-ide].
58 |
59 | [connect-ide]: ../../how-to/connect-ide/
60 |
61 | ## General Architecture
62 |
63 | Toolbox sits between your application's orchestration framework and your
64 | database, providing a control plane that is used to modify, distribute, or
65 | invoke tools. It simplifies the management of your tools by providing you with a
66 | centralized location to store and update tools, allowing you to share tools
67 | between agents and applications and update those tools without necessarily
68 | redeploying your application.
69 |
70 | 
71 |
72 | ## Getting Started
73 |
74 | ### Installing the server
75 |
76 | For the latest version, check the [releases page][releases] and use the
77 | following instructions for your OS and CPU architecture.
78 |
79 | [releases]: https://github.com/googleapis/genai-toolbox/releases
80 |
81 | <!-- {x-release-please-start-version} -->
82 | {{< tabpane text=true >}}
83 | {{% tab header="Binary" lang="en" %}}
84 | {{< tabpane text=true >}}
85 | {{% tab header="Linux (AMD64)" lang="en" %}}
86 | To install Toolbox as a binary on Linux (AMD64):
87 | ```sh
88 | # see releases page for other versions
89 | export VERSION=0.18.0
90 | curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
91 | chmod +x toolbox
92 | ```
93 | {{% /tab %}}
94 | {{% tab header="macOS (Apple Silicon)" lang="en" %}}
95 | To install Toolbox as a binary on macOS (Apple Silicon):
96 | ```sh
97 | # see releases page for other versions
98 | export VERSION=0.18.0
99 | curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
100 | chmod +x toolbox
101 | ```
102 | {{% /tab %}}
103 | {{% tab header="macOS (Intel)" lang="en" %}}
104 | To install Toolbox as a binary on macOS (Intel):
105 | ```sh
106 | # see releases page for other versions
107 | export VERSION=0.18.0
108 | curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
109 | chmod +x toolbox
110 | ```
111 | {{% /tab %}}
112 | {{% tab header="Windows (AMD64)" lang="en" %}}
113 | To install Toolbox as a binary on Windows (AMD64):
114 | ```powershell
115 | # see releases page for other versions
116 | $VERSION = "0.18.0"
117 | Invoke-WebRequest -Uri "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe" -OutFile "toolbox.exe"
118 | ```
119 | {{% /tab %}}
120 | {{< /tabpane >}}
121 | {{% /tab %}}
122 | {{% tab header="Container image" lang="en" %}}
123 | You can also install Toolbox as a container:
124 |
125 | ```sh
126 | # see releases page for other versions
127 | export VERSION=0.18.0
128 | docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
129 | ```
130 |
131 | {{% /tab %}}
132 | {{% tab header="Homebrew" lang="en" %}}
133 | To install Toolbox using Homebrew on macOS or Linux:
134 |
135 | ```sh
136 | brew install mcp-toolbox
137 | ```
138 |
139 | {{% /tab %}}
140 | {{% tab header="Compile from source" lang="en" %}}
141 |
142 | To install from source, ensure you have the latest version of
143 | [Go installed](https://go.dev/doc/install), and then run the following command:
144 |
145 | ```sh
146 | go install github.com/googleapis/[email protected]
147 | ```
148 |
149 | {{% /tab %}}
150 | {{< /tabpane >}}
151 | <!-- {x-release-please-end} -->
152 |
153 | ### Running the server
154 |
155 | [Configure](../configure.md) a `tools.yaml` to define your tools, and then
156 | execute `toolbox` to start the server:
157 |
158 | ```sh
159 | ./toolbox --tools-file "tools.yaml"
160 | ```
161 |
162 | {{< notice note >}}
163 | Toolbox enables dynamic reloading by default. To disable, use the
164 | `--disable-reload` flag.
165 | {{< /notice >}}
166 |
167 | #### Launching Toolbox UI
168 |
169 | To launch Toolbox's interactive UI, use the `--ui` flag. This allows you to test
170 | tools and toolsets with features such as authorized parameters. To learn more,
171 | visit [Toolbox UI](../../how-to/toolbox-ui/index.md).
172 |
173 | ```sh
174 | ./toolbox --ui
175 | ```
176 |
177 | #### Homebrew Users
178 |
179 | If you installed Toolbox using Homebrew, the `toolbox` binary is available in
180 | your system path. You can start the server with the same command:
181 |
182 | ```sh
183 | toolbox --tools-file "tools.yaml"
184 | ```
185 |
186 | You can use `toolbox help` for a full list of flags! To stop the server, send a
187 | terminate signal (`ctrl+c` on most platforms).
188 |
189 | For more detailed documentation on deploying to different environments, check
190 | out the resources in the [How-to section](../../how-to/).
191 |
192 | ### Integrating your application
193 |
194 | Once your server is up and running, you can load the tools into your
195 | application. See the list of Client SDKs below for using various frameworks:
196 |
197 | #### Python
198 |
199 | {{< tabpane text=true persist=header >}}
200 | {{% tab header="Core" lang="en" %}}
201 |
202 | Once you've installed the [Toolbox Core
203 | SDK](https://pypi.org/project/toolbox-core/), you can load
204 | tools:
205 |
206 | {{< highlight python >}}
207 | from toolbox_core import ToolboxClient
208 |
209 | # update the url to point to your server
210 |
211 | async with ToolboxClient("http://127.0.0.1:5000") as client:
212 |
213 | # these tools can be passed to your application!
214 | tools = await client.load_toolset("toolset_name")
215 | {{< /highlight >}}
216 |
217 | For more detailed instructions on using the Toolbox Core SDK, see the
218 | [project's
219 | README](https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/packages/toolbox-core/README.md).
220 |
221 | {{% /tab %}}
222 | {{% tab header="LangChain" lang="en" %}}
223 |
224 | Once you've installed the [Toolbox LangChain
225 | SDK](https://pypi.org/project/toolbox-langchain/), you can load
226 | tools:
227 |
228 | {{< highlight python >}}
229 | from toolbox_langchain import ToolboxClient
230 |
231 | # update the url to point to your server
232 |
233 | async with ToolboxClient("http://127.0.0.1:5000") as client:
234 |
235 | # these tools can be passed to your application!
236 | tools = client.load_toolset()
237 | {{< /highlight >}}
238 |
239 | For more detailed instructions on using the Toolbox LangChain SDK, see the
240 | [project's
241 | README](https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/packages/toolbox-langchain/README.md).
242 |
243 | {{% /tab %}}
244 | {{% tab header="Llamaindex" lang="en" %}}
245 |
246 | Once you've installed the [Toolbox Llamaindex
247 | SDK](https://github.com/googleapis/genai-toolbox-llamaindex-python), you can load
248 | tools:
249 |
250 | {{< highlight python >}}
251 | from toolbox_llamaindex import ToolboxClient
252 |
253 | # update the url to point to your server
254 |
255 | async with ToolboxClient("http://127.0.0.1:5000") as client:
256 |
257 | # these tools can be passed to your application
258 |
259 | tools = client.load_toolset()
260 | {{< /highlight >}}
261 |
262 | For more detailed instructions on using the Toolbox Llamaindex SDK, see the
263 | [project's
264 | README](https://github.com/googleapis/genai-toolbox-llamaindex-python/blob/main/README.md).
265 |
266 | {{% /tab %}}
267 | {{< /tabpane >}}
268 |
269 | #### JavaScript/TypeScript
270 |
271 | Once you've installed the [Toolbox Core
272 | SDK](https://www.npmjs.com/package/@toolbox-sdk/core), you can load
273 | tools:
274 |
275 | {{< tabpane text=true persist=header >}}
276 | {{% tab header="Core" lang="en" %}}
277 |
278 | {{< highlight javascript >}}
279 | import { ToolboxClient } from '@toolbox-sdk/core';
280 |
281 | // update the url to point to your server
282 | const URL = 'http://127.0.0.1:5000';
283 | let client = new ToolboxClient(URL);
284 |
285 | // these tools can be passed to your application!
286 | const toolboxTools = await client.loadToolset('toolsetName');
287 | {{< /highlight >}}
288 |
289 | {{% /tab %}}
290 | {{% tab header="LangChain/Langraph" lang="en" %}}
291 |
292 | {{< highlight javascript >}}
293 | import { ToolboxClient } from '@toolbox-sdk/core';
294 | import { tool } from '@langchain/core/tools';
295 | // update the url to point to your server
296 | const URL = 'http://127.0.0.1:5000';
297 | let client = new ToolboxClient(URL);
298 |
299 | // these tools can be passed to your application!
300 | const toolboxTools = await client.loadToolset('toolsetName');
301 |
302 | // Define the basics of the tool: name, description, schema and core logic
303 | const getTool = (toolboxTool) => tool(toolboxTool, {
304 | name: toolboxTool.getName(),
305 | description: toolboxTool.getDescription(),
306 | schema: toolboxTool.getParamSchema()
307 | });
308 |
309 | // Use these tools in your LangChain/LangGraph applications
310 | const tools = toolboxTools.map(getTool);
311 | {{< /highlight >}}
312 |
313 | {{% /tab %}}
314 | {{% tab header="Genkit" lang="en" %}}
315 |
316 | {{< highlight javascript >}}
317 | import { ToolboxClient } from '@toolbox-sdk/core';
318 | import { genkit } from 'genkit';
319 | import { googleAI } from '@genkit-ai/googleai';
320 | // Initialise genkit
321 | const ai = genkit({
322 | plugins: [
323 | googleAI({
324 | apiKey: process.env.GEMINI_API_KEY || process.env.GOOGLE_API_KEY
325 | })
326 | ],
327 | model: googleAI.model('gemini-2.0-flash'),
328 | });
329 |
330 | // update the url to point to your server
331 | const URL = 'http://127.0.0.1:5000';
332 | let client = new ToolboxClient(URL);
333 |
334 | // these tools can be passed to your application!
335 | const toolboxTools = await client.loadToolset('toolsetName');
336 |
337 | // Define the basics of the tool: name, description, schema and core logic
338 | const getTool = (toolboxTool) => ai.defineTool({
339 | name: toolboxTool.getName(),
340 | description: toolboxTool.getDescription(),
341 | inputSchema: toolboxTool.getParamSchema()
342 | }, toolboxTool)
343 |
344 | // Use these tools in your Genkit applications
345 | const tools = toolboxTools.map(getTool);
346 | {{< /highlight >}}
347 |
348 | {{% /tab %}}
349 | {{% tab header="LlamaIndex" lang="en" %}}
350 |
351 | {{< highlight javascript >}}
352 | import { ToolboxClient } from '@toolbox-sdk/core';
353 | import { tool } from "llamaindex";
354 |
355 | // update the url to point to your server
356 | const URL = 'http://127.0.0.1:5000';
357 | let client = new ToolboxClient(URL);
358 |
359 | // these tools can be passed to your application!
360 | const toolboxTools = await client.loadToolset('toolsetName');
361 |
362 | // Define the basics of the tool: name, description, schema and core logic
363 | const getTool = (toolboxTool) => tool({
364 | name: toolboxTool.getName(),
365 | description: toolboxTool.getDescription(),
366 | parameters: toolboxTool.getParamSchema(),
367 | execute: toolboxTool
368 | });
369 |
370 | // Use these tools in your LlamaIndex applications
371 | const tools = toolboxTools.map(getTool);
372 |
373 | {{< /highlight >}}
374 |
375 | {{% /tab %}}
376 | {{< /tabpane >}}
377 |
378 | For more detailed instructions on using the Toolbox Core SDK, see the
379 | [project's
380 | README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
381 |
382 | #### Go
383 |
384 | Once you've installed the [Toolbox Go
385 | SDK](https://pkg.go.dev/github.com/googleapis/mcp-toolbox-sdk-go/core), you can load
386 | tools:
387 |
388 | {{< tabpane text=true persist=header >}}
389 | {{% tab header="Core" lang="en" %}}
390 |
391 | {{< highlight go >}}
392 | package main
393 |
394 | import (
395 | "context"
396 | "log"
397 |
398 | "github.com/googleapis/mcp-toolbox-sdk-go/core"
399 | )
400 |
401 | func main() {
402 | // update the url to point to your server
403 | URL := "http://127.0.0.1:5000"
404 | ctx := context.Background()
405 |
406 | client, err := core.NewToolboxClient(URL)
407 | if err != nil {
408 | log.Fatalf("Failed to create Toolbox client: %v", err)
409 | }
410 |
411 | // Framework agnostic tools
412 | tools, err := client.LoadToolset("toolsetName", ctx)
413 | if err != nil {
414 | log.Fatalf("Failed to load tools: %v", err)
415 | }
416 | }
417 | {{< /highlight >}}
418 |
419 | {{% /tab %}}
420 | {{% tab header="LangChain Go" lang="en" %}}
421 |
422 | {{< highlight go >}}
423 | package main
424 |
425 | import (
426 | "context"
427 | "encoding/json"
428 | "log"
429 |
430 | "github.com/googleapis/mcp-toolbox-sdk-go/core"
431 | "github.com/tmc/langchaingo/llms"
432 | )
433 |
434 | func main() {
435 | // Make sure to add the error checks
436 | // update the url to point to your server
437 | URL := "http://127.0.0.1:5000"
438 | ctx := context.Background()
439 |
440 | client, err := core.NewToolboxClient(URL)
441 | if err != nil {
442 | log.Fatalf("Failed to create Toolbox client: %v", err)
443 | }
444 |
445 | // Framework agnostic tool
446 | tool, err := client.LoadTool("toolName", ctx)
447 | if err != nil {
448 | log.Fatalf("Failed to load tools: %v", err)
449 | }
450 |
451 | // Fetch the tool's input schema
452 | inputschema, err := tool.InputSchema()
453 | if err != nil {
454 | log.Fatalf("Failed to fetch inputSchema: %v", err)
455 | }
456 |
457 | var paramsSchema map[string]any
458 | _ = json.Unmarshal(inputschema, &paramsSchema)
459 |
460 | // Use this tool with LangChainGo
461 | langChainTool := llms.Tool{
462 | Type: "function",
463 | Function: &llms.FunctionDefinition{
464 | Name: tool.Name(),
465 | Description: tool.Description(),
466 | Parameters: paramsSchema,
467 | },
468 | }
469 | }
470 | {{< /highlight >}}
471 | For end-to-end samples on using the Toolbox Go SDK with LangChain Go, see the [project's
472 | samples](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main/core/samples)
473 |
474 | {{% /tab %}}
475 | {{% tab header="Genkit Go" lang="en" %}}
476 |
477 | {{< highlight go >}}
478 | package main
479 | import (
480 | "context"
481 | "encoding/json"
482 | "log"
483 |
484 | "github.com/firebase/genkit/go/ai"
485 | "github.com/firebase/genkit/go/genkit"
486 | "github.com/googleapis/mcp-toolbox-sdk-go/core"
487 | "github.com/googleapis/mcp-toolbox-sdk-go/tbgenkit"
488 | "github.com/invopop/jsonschema"
489 | )
490 |
491 | func main() {
492 | // Make sure to add the error checks
493 | // Update the url to point to your server
494 | URL := "http://127.0.0.1:5000"
495 | ctx := context.Background()
496 | g, err := genkit.Init(ctx)
497 |
498 | client, err := core.NewToolboxClient(URL)
499 | if err != nil {
500 | log.Fatalf("Failed to create Toolbox client: %v", err)
501 | }
502 |
503 | // Framework agnostic tool
504 | tool, err := client.LoadTool("toolName", ctx)
505 | if err != nil {
506 | log.Fatalf("Failed to load tools: %v", err)
507 | }
508 |
509 | // Convert the tool using the tbgenkit package
510 | // Use this tool with Genkit Go
511 | genkitTool, err := tbgenkit.ToGenkitTool(tool, g)
512 | if err != nil {
513 | log.Fatalf("Failed to convert tool: %v\n", err)
514 | }
515 | }
516 | {{< /highlight >}}
517 | For end-to-end samples on using the Toolbox Go SDK with Genkit Go, see the [project's
518 | samples](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main/tbgenkit/samples)
519 |
520 | {{% /tab %}}
521 | {{% tab header="Go GenAI" lang="en" %}}
522 |
523 | {{< highlight go >}}
524 | package main
525 |
526 | import (
527 | "context"
528 | "encoding/json"
529 | "log"
530 |
531 | "github.com/googleapis/mcp-toolbox-sdk-go/core"
532 | "google.golang.org/genai"
533 | )
534 |
535 | func main() {
536 | // Make sure to add the error checks
537 | // Update the url to point to your server
538 | URL := "http://127.0.0.1:5000"
539 | ctx := context.Background()
540 |
541 | client, err := core.NewToolboxClient(URL)
542 | if err != nil {
543 | log.Fatalf("Failed to create Toolbox client: %v", err)
544 | }
545 |
546 | // Framework agnostic tool
547 | tool, err := client.LoadTool("toolName", ctx)
548 | if err != nil {
549 | log.Fatalf("Failed to load tools: %v", err)
550 | }
551 |
552 | // Fetch the tool's input schema
553 | inputschema, err := tool.InputSchema()
554 | if err != nil {
555 | log.Fatalf("Failed to fetch inputSchema: %v", err)
556 | }
557 |
558 | var schema *genai.Schema
559 | _ = json.Unmarshal(inputschema, &schema)
560 |
561 | funcDeclaration := &genai.FunctionDeclaration{
562 | Name: tool.Name(),
563 | Description: tool.Description(),
564 | Parameters: schema,
565 | }
566 |
567 | // Use this tool with Go GenAI
568 | genAITool := &genai.Tool{
569 | FunctionDeclarations: []*genai.FunctionDeclaration{funcDeclaration},
570 | }
571 | }
572 | {{< /highlight >}}
573 | For end-to-end samples on using the Toolbox Go SDK with Go GenAI, see the [project's
574 | samples](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main/core/samples)
575 |
576 | {{% /tab %}}
577 |
578 | {{% tab header="OpenAI Go" lang="en" %}}
579 |
580 | {{< highlight go >}}
581 | package main
582 |
583 | import (
584 | "context"
585 | "encoding/json"
586 | "log"
587 |
588 | "github.com/googleapis/mcp-toolbox-sdk-go/core"
589 | openai "github.com/openai/openai-go"
590 | )
591 |
592 | func main() {
593 | // Make sure to add the error checks
594 | // Update the url to point to your server
595 | URL := "http://127.0.0.1:5000"
596 | ctx := context.Background()
597 |
598 | client, err := core.NewToolboxClient(URL)
599 | if err != nil {
600 | log.Fatalf("Failed to create Toolbox client: %v", err)
601 | }
602 |
603 | // Framework agnostic tool
604 | tool, err := client.LoadTool("toolName", ctx)
605 | if err != nil {
606 | log.Fatalf("Failed to load tools: %v", err)
607 | }
608 |
609 | // Fetch the tool's input schema
610 | inputschema, err := tool.InputSchema()
611 | if err != nil {
612 | log.Fatalf("Failed to fetch inputSchema: %v", err)
613 | }
614 |
615 | var paramsSchema openai.FunctionParameters
616 | _ = json.Unmarshal(inputschema, &paramsSchema)
617 |
618 | // Use this tool with OpenAI Go
619 | openAITool := openai.ChatCompletionToolParam{
620 | Function: openai.FunctionDefinitionParam{
621 | Name: tool.Name(),
622 | Description: openai.String(tool.Description()),
623 | Parameters: paramsSchema,
624 | },
625 | }
626 | }
627 | {{< /highlight >}}
628 | For end-to-end samples on using the Toolbox Go SDK with OpenAI Go, see the [project's
629 | samples](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main/core/samples)
630 |
631 | {{% /tab %}}
632 |
633 | {{% tab header="ADK Go" lang="en" %}}
634 |
635 | {{< highlight go >}}
636 | package main
637 |
638 | import (
639 | "context"
640 | "fmt"
641 | "github.com/googleapis/mcp-toolbox-sdk-go/tbadk"
642 | )
643 |
644 | func main() {
645 | // Make sure to add the error checks
646 | // Update the url to point to your server
647 | URL := "http://127.0.0.1:5000"
648 | ctx := context.Background()
649 | client, err := tbadk.NewToolboxClient(URL)
650 | if err != nil {
651 | log.Fatalln("Could not start Toolbox Client", err)
652 | }
653 |
654 | // Use this tool with ADK Go
655 | tool, err := client.LoadTool("toolName", ctx)
656 | if err != nil {
657 | log.Fatalln("Could not load Toolbox Tool", err)
658 | }
659 | _ = tool // referenced so the snippet compiles
660 | }
661 | {{< /highlight >}}
662 |
663 | For end-to-end samples on using the Toolbox Go SDK with ADK Go, see the [project's
664 | samples](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main/tbadk/samples)
665 |
666 | {{% /tab %}}
667 | {{< /tabpane >}}
668 |
669 | For more detailed instructions on using the Toolbox Go SDK, see the
670 | [project's
671 | README](https://github.com/googleapis/mcp-toolbox-sdk-go/blob/main/core/README.md).
672 |
```
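The Go snippets above all stop at loading a tool; a natural next step is invoking it. A short sketch, assuming the core SDK's `Invoke(ctx, params)` method as shown in the Go SDK README (the tool name and parameter values are placeholders):

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/googleapis/mcp-toolbox-sdk-go/core"
)

func main() {
	ctx := context.Background()

	client, err := core.NewToolboxClient("http://127.0.0.1:5000")
	if err != nil {
		log.Fatalf("Failed to create Toolbox client: %v", err)
	}

	tool, err := client.LoadTool("toolName", ctx)
	if err != nil {
		log.Fatalf("Failed to load tool: %v", err)
	}

	// Invoke the tool with its declared parameters (placeholder values).
	result, err := tool.Invoke(ctx, map[string]any{"name": "Alice"})
	if err != nil {
		log.Fatalf("Tool invocation failed: %v", err)
	}
	fmt.Println(result)
}
```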
--------------------------------------------------------------------------------
/internal/prebuiltconfigs/tools/alloydb-postgres-observability.yaml:
--------------------------------------------------------------------------------
```yaml
1 | # Copyright 2025 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | sources:
15 | cloud-monitoring-source:
16 | kind: cloud-monitoring
17 | tools:
18 | get_system_metrics:
19 | kind: cloud-monitoring-query-prometheus
20 | source: cloud-monitoring-source
21 | description: |
22 | Fetches system-level Cloud Monitoring data (timeseries metrics) for an AlloyDB cluster or instance.
23 | To use this tool, you must provide the Google Cloud `projectId` and a PromQL `query`.
24 |
25 | Generate the PromQL `query` for AlloyDB system metrics using the provided metrics and rules. Get labels like `cluster_id` and `instance_id` from the user's intent.
26 |
27 | Defaults:
28 | 1. Interval: Use a default interval of `5m` for `_over_time` aggregation functions unless a different window is specified by the user.
29 |
30 | PromQL Query Examples:
31 | 1. Basic Time Series: `avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance"}[5m])`
32 | 2. Top K: `topk(30, avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance"}[5m]))`
33 | 3. Mean: `avg(avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="my-instance","cluster_id"="my-cluster"}[5m]))`
34 | 4. Minimum: `min(min_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
35 | 5. Maximum: `max(max_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
36 | 6. Sum: `sum(avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
37 | 7. Count streams: `count(avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
38 | 8. Percentile with groupby on instanceid, clusterid: `quantile by ("instance_id","cluster_id")(0.99,avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","cluster_id"="my-cluster","instance_id"="my-instance"}[5m]))`
39 |
40 | Available Metrics List: metricname. description. monitored resource. labels
41 | 1. `alloydb.googleapis.com/instance/cpu/average_utilization`: The percentage of CPU being used on an instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
42 | 2. `alloydb.googleapis.com/instance/cpu/maximum_utilization`: Maximum CPU utilization across all currently serving nodes of the instance from 0 to 100. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
43 | 3. `alloydb.googleapis.com/cluster/storage/usage`: The total AlloyDB storage in bytes across the entire cluster. `alloydb.googleapis.com/Cluster`. `cluster_id`.
44 | 4. `alloydb.googleapis.com/instance/postgres/replication/replicas`: The number of read replicas connected to the primary instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `state`, `replica_instance_id`.
45 | 5. `alloydb.googleapis.com/instance/postgres/replication/maximum_lag`: The maximum replication time lag calculated across all serving read replicas of the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `replica_instance_id`.
46 | 6. `alloydb.googleapis.com/instance/memory/min_available_memory`: The minimum available memory across all currently serving nodes of the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
47 | 7. `alloydb.googleapis.com/instance/postgres/instances`: The number of nodes in the instance, along with their status, which can be either up or down. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `status`.
48 | 8. `alloydb.googleapis.com/database/postgresql/tuples`: Number of tuples (rows) by state per database in the instance. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`, `state`.
49 | 9. `alloydb.googleapis.com/database/postgresql/temp_bytes_written_for_top_databases`: The total amount of data (in bytes) written to temporary files by the queries per database for top 500 dbs. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
50 | 10. `alloydb.googleapis.com/database/postgresql/temp_files_written_for_top_databases`: The number of temporary files used for writing data per database while performing internal algorithms like join, sort etc for top 500 dbs. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
51 | 11. `alloydb.googleapis.com/database/postgresql/inserted_tuples_count_for_top_databases`: The total number of rows inserted per db for top 500 dbs as a result of the queries in the instance. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
52 | 12. `alloydb.googleapis.com/database/postgresql/updated_tuples_count_for_top_databases`: The total number of rows updated per db for top 500 dbs as a result of the queries in the instance. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
53 | 13. `alloydb.googleapis.com/database/postgresql/deleted_tuples_count_for_top_databases`: The total number of rows deleted per db for top 500 dbs as a result of the queries in the instance. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
54 | 14. `alloydb.googleapis.com/database/postgresql/backends_for_top_databases`: The current number of connections per database to the instance for top 500 dbs. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
55 | 15. `alloydb.googleapis.com/instance/postgresql/backends_by_state`: The current number of connections to the instance grouped by the state like idle, active, idle_in_transaction, idle_in_transaction_aborted, disabled, and fastpath_function_call. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `state`.
56 | 16. `alloydb.googleapis.com/instance/postgresql/backends_for_top_applications`: The current number of connections to the AlloyDB instance, grouped by applications for top 500 applications. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `application_name`.
57 | 17. `alloydb.googleapis.com/database/postgresql/new_connections_for_top_databases`: Total number of new connections added per database for top 500 databases to the instance. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
58 | 18. `alloydb.googleapis.com/database/postgresql/deadlock_count_for_top_databases`: Total number of deadlocks detected in the instance per database for top 500 dbs. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
59 | 19. `alloydb.googleapis.com/database/postgresql/statements_executed_count`: Total count of statements executed in the instance per database per operation_type. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`, `operation_type`.
60 | 20. `alloydb.googleapis.com/instance/postgresql/returned_tuples_count`: Number of rows scanned while processing the queries in the instance since the last sample. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
61 | 21. `alloydb.googleapis.com/instance/postgresql/fetched_tuples_count`: Number of rows fetched while processing the queries in the instance since the last sample. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
62 | 22. `alloydb.googleapis.com/instance/postgresql/updated_tuples_count`: Number of rows updated while processing the queries in the instance since the last sample. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
63 | 23. `alloydb.googleapis.com/instance/postgresql/inserted_tuples_count`: Number of rows inserted while processing the queries in the instance since the last sample. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
64 | 24. `alloydb.googleapis.com/instance/postgresql/deleted_tuples_count`: Number of rows deleted while processing the queries in the instance since the last sample. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
65 | 25. `alloydb.googleapis.com/instance/postgresql/written_tuples_count`: Number of rows written while processing the queries in the instance since the last sample. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
66 | 26. `alloydb.googleapis.com/instance/postgresql/deadlock_count`: Number of deadlocks detected in the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
67 | 27. `alloydb.googleapis.com/instance/postgresql/blks_read`: Number of blocks read by Postgres that were not in the buffer cache. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
68 | 28. `alloydb.googleapis.com/instance/postgresql/blks_hit`: Number of times Postgres found the requested block in the buffer cache. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
69 | 29. `alloydb.googleapis.com/instance/postgresql/temp_bytes_written_count`: The total amount of data (in bytes) written to temporary files by the queries while performing internal algorithms like join, sort etc. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
70 | 30. `alloydb.googleapis.com/instance/postgresql/temp_files_written_count`: The number of temporary files used for writing data in the instance while performing internal algorithms like join, sort etc. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
71 | 31. `alloydb.googleapis.com/instance/postgresql/new_connections_count`: The number of new connections added to the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
72 | 32. `alloydb.googleapis.com/instance/postgresql/wait_count`: Total number of times processes waited for each wait event in the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `wait_event_type`, `wait_event_name`.
73 | 33. `alloydb.googleapis.com/instance/postgresql/wait_time`: Total elapsed wait time for each wait event in the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `wait_event_type`, `wait_event_name`.
74 | 34. `alloydb.googleapis.com/instance/postgres/transaction_count`: The number of committed and rolled back transactions across all serving nodes of the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
75 |
76 | get_query_metrics:
77 | kind: cloud-monitoring-query-prometheus
78 | source: cloud-monitoring-source
79 | description: |
80 | Fetches query-level Cloud Monitoring data (timeseries metrics) for queries running in an AlloyDB instance.
81 | To use this tool, you must provide the Google Cloud `projectId` and a PromQL `query`.
82 |
83 | Generate the PromQL `query` for AlloyDB query metrics using the provided metrics and rules. Derive labels such as `cluster_id`, `instance_id`, and `query_hash` from the user's intent. If `query_hash` is provided, use the per-query metrics.
84 |
85 | Defaults:
86 | 1. Interval: Use a default interval of `5m` for `_over_time` aggregation functions unless a different window is specified by the user.
87 |
88 | PromQL Query Examples:
89 | 1. Basic Time Series: `avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance"}[5m])`
90 | 2. Top K: `topk(30, avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance"}[5m]))`
91 | 3. Mean: `avg(avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="my-instance","cluster_id"="my-cluster"}[5m]))`
92 | 4. Minimum: `min(min_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
93 | 5. Maximum: `max(max_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
94 | 6. Sum: `sum(avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
95 | 7. Count streams: `count(avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
96 | 8. Percentile with groupby on instanceid, clusterid: `quantile by ("instance_id","cluster_id")(0.99,avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","cluster_id"="my-cluster","instance_id"="my-instance"}[5m]))`
97 |
98 | Available Metrics List: each entry is formatted as metric name, description, monitored resource, labels. The aggregate metrics hold values aggregated across all query stats; use them when no query id is provided. For per-query metrics, do not fetch `querystring` unless the user asks for it specifically; aggregate on `query_hash` instead so the querystring is never fetched. Do not use the latency metrics for anything.
99 | 1. `alloydb.googleapis.com/database/postgresql/insights/aggregate/latencies`: Aggregated query latency distribution. `alloydb.googleapis.com/Database`. `user`, `client_addr`.
100 | 2. `alloydb.googleapis.com/database/postgresql/insights/aggregate/execution_time`: Accumulated aggregated query execution time since the last sample. `alloydb.googleapis.com/Database`. `user`, `client_addr`.
101 | 3. `alloydb.googleapis.com/database/postgresql/insights/aggregate/io_time`: Accumulated aggregated IO time since the last sample. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `io_type`.
102 | 4. `alloydb.googleapis.com/database/postgresql/insights/aggregate/lock_time`: Accumulated aggregated lock wait time since the last sample. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `lock_type`.
103 | 5. `alloydb.googleapis.com/database/postgresql/insights/aggregate/row_count`: Aggregated number of retrieved or affected rows since the last sample. `alloydb.googleapis.com/Database`. `user`, `client_addr`.
104 | 6. `alloydb.googleapis.com/database/postgresql/insights/aggregate/shared_blk_access_count`: Aggregated shared blocks accessed by statement execution. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `access_type`.
105 | 7. `alloydb.googleapis.com/database/postgresql/insights/perquery/latencies`: Per query latency distribution. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `querystring`, `query_hash`.
106 | 8. `alloydb.googleapis.com/database/postgresql/insights/perquery/execution_time`: Accumulated execution times per user per database per query. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `querystring`, `query_hash`.
107 | 9. `alloydb.googleapis.com/database/postgresql/insights/perquery/io_time`: Accumulated IO time since the last sample per query. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `io_type`, `querystring`, `query_hash`.
108 | 10. `alloydb.googleapis.com/database/postgresql/insights/perquery/lock_time`: Accumulated lock wait time since the last sample per query. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `lock_type`, `querystring`, `query_hash`.
109 | 11. `alloydb.googleapis.com/database/postgresql/insights/perquery/row_count`: The number of retrieved or affected rows since the last sample per query. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `querystring`, `query_hash`.
110 | 12. `alloydb.googleapis.com/database/postgresql/insights/perquery/shared_blk_access_count`: Shared blocks accessed by statement execution per query. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `access_type`, `querystring`, `query_hash`.
111 | 13. `alloydb.googleapis.com/database/postgresql/insights/pertag/latencies`: Query latency distribution. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `tag_hash`.
112 | 14. `alloydb.googleapis.com/database/postgresql/insights/pertag/execution_time`: Accumulated execution times since the last sample. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `tag_hash`.
113 | 15. `alloydb.googleapis.com/database/postgresql/insights/pertag/io_time`: Accumulated IO time since the last sample per tag. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `io_type`, `tag_hash`.
114 | 16. `alloydb.googleapis.com/database/postgresql/insights/pertag/lock_time`: Accumulated lock wait time since the last sample per tag. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `lock_type`, `tag_hash`.
115 | 17. `alloydb.googleapis.com/database/postgresql/insights/pertag/shared_blk_access_count`: Shared blocks accessed by statement execution per tag. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `access_type`, `tag_hash`.
116 | 18. `alloydb.googleapis.com/database/postgresql/insights/pertag/row_count`: The number of retrieved or affected rows since the last sample per tag. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `tag_hash`.
117 |
118 | toolsets:
119 | alloydb_postgres_cloud_monitoring_tools:
120 | - get_system_metrics
121 | - get_query_metrics
122 |
```
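The description above tells the model to aggregate per-query metrics on `query_hash` and to default `_over_time` windows to `5m`. A minimal Go sketch of that query-construction logic follows; `buildPerQueryPromQL` is a hypothetical helper written for illustration, not part of the toolbox:

```go
package main

import "fmt"

// buildPerQueryPromQL assembles a PromQL query following the rules in the
// tool description: per-query metrics are aggregated on query_hash (so the
// querystring label is never fetched) and _over_time functions default to
// a 5m window. This helper is illustrative only.
func buildPerQueryPromQL(metric, clusterID, instanceID, queryHash, window string) string {
	if window == "" {
		window = "5m" // default interval from the tool description
	}
	selector := fmt.Sprintf(
		`{"__name__"=%q,"monitored_resource"="alloydb.googleapis.com/Database","cluster_id"=%q,"instance_id"=%q,"query_hash"=%q}`,
		metric, clusterID, instanceID, queryHash,
	)
	return fmt.Sprintf(`sum by ("query_hash")(avg_over_time(%s[%s]))`, selector, window)
}

func main() {
	q := buildPerQueryPromQL(
		"alloydb.googleapis.com/database/postgresql/insights/perquery/execution_time",
		"my-cluster", "my-instance", "1234567890", "",
	)
	fmt.Println(q)
}
```

The printed string uses the same quoted-label selector style as the examples in the description and would be passed as the `query` argument of `get_query_metrics`.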
--------------------------------------------------------------------------------
/internal/tools/bigquery/bigquerycommon/table_name_parser_test.go:
--------------------------------------------------------------------------------
```go
1 | // Copyright 2025 Google LLC
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package bigquerycommon_test
16 |
17 | import (
18 | "sort"
19 | "strings"
20 | "testing"
21 |
22 | "github.com/google/go-cmp/cmp"
23 | "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerycommon"
24 | )
25 |
26 | func TestTableParser(t *testing.T) {
27 | testCases := []struct {
28 | name string
29 | sql string
30 | defaultProjectID string
31 | want []string
32 | wantErr bool
33 | wantErrMsg string
34 | }{
35 | {
36 | name: "single fully qualified table",
37 | sql: "SELECT * FROM `my-project.my_dataset.my_table`",
38 | defaultProjectID: "default-proj",
39 | want: []string{"my-project.my_dataset.my_table"},
40 | wantErr: false,
41 | },
42 | {
43 | name: "multiple statements with same table",
44 | sql: "select * from proj1.data1.tbl1 limit 1; select A.b from proj1.data1.tbl1 as A limit 1;",
45 | defaultProjectID: "default-proj",
46 | want: []string{"proj1.data1.tbl1"},
47 | wantErr: false,
48 | },
49 | {
50 | name: "multiple fully qualified tables",
51 | sql: "SELECT * FROM `proj1.data1`.`tbl1` JOIN proj2.`data2.tbl2` ON id",
52 | defaultProjectID: "default-proj",
53 | want: []string{"proj1.data1.tbl1", "proj2.data2.tbl2"},
54 | wantErr: false,
55 | },
56 | {
57 | name: "duplicate tables",
58 | sql: "SELECT * FROM `proj1.data1.tbl1` JOIN proj1.data1.tbl1 ON id",
59 | defaultProjectID: "default-proj",
60 | want: []string{"proj1.data1.tbl1"},
61 | wantErr: false,
62 | },
63 | {
64 | name: "partial table with default project",
65 | sql: "SELECT * FROM `my_dataset`.my_table",
66 | defaultProjectID: "default-proj",
67 | want: []string{"default-proj.my_dataset.my_table"},
68 | wantErr: false,
69 | },
70 | {
71 | name: "partial table without default project",
72 | sql: "SELECT * FROM `my_dataset.my_table`",
73 | defaultProjectID: "",
74 | want: nil,
75 | wantErr: true,
76 | },
77 | {
78 | name: "mixed fully qualified and partial tables",
79 | sql: "SELECT t1.*, t2.* FROM `proj1.data1.tbl1` AS t1 JOIN `data2.tbl2` AS t2 ON t1.id = t2.id",
80 | defaultProjectID: "default-proj",
81 | want: []string{"proj1.data1.tbl1", "default-proj.data2.tbl2"},
82 | wantErr: false,
83 | },
84 | {
85 | name: "no tables",
86 | sql: "SELECT 1+1",
87 | defaultProjectID: "default-proj",
88 | want: []string{},
89 | wantErr: false,
90 | },
91 | {
92 | name: "ignore single part identifiers (like CTEs)",
93 | sql: "WITH my_cte AS (SELECT 1) SELECT * FROM `my_cte`",
94 | defaultProjectID: "default-proj",
95 | want: []string{},
96 | wantErr: false,
97 | },
98 | {
99 | name: "complex CTE",
100 | sql: "WITH cte1 AS (SELECT * FROM `real.table.one`), cte2 AS (SELECT * FROM cte1) SELECT * FROM cte2 JOIN `real.table.two` ON true",
101 | defaultProjectID: "default-proj",
102 | want: []string{"real.table.one", "real.table.two"},
103 | wantErr: false,
104 | },
105 | {
106 | name: "nested subquery should be parsed",
107 | sql: "SELECT * FROM (SELECT a FROM (SELECT A.b FROM `real.table.nested` AS A))",
108 | defaultProjectID: "default-proj",
109 | want: []string{"real.table.nested"},
110 | wantErr: false,
111 | },
112 | {
113 | name: "from clause with unnest",
114 | sql: "SELECT event.name FROM `my-project.my_dataset.my_table` AS A, UNNEST(A.events) AS event",
115 | defaultProjectID: "default-proj",
116 | want: []string{"my-project.my_dataset.my_table"},
117 | wantErr: false,
118 | },
119 | {
120 | name: "ignore more than 3 parts",
121 | sql: "SELECT * FROM `proj.data.tbl.col`",
122 | defaultProjectID: "default-proj",
123 | want: []string{},
124 | wantErr: false,
125 | },
126 | {
127 | name: "complex query",
128 | sql: "SELECT name FROM (SELECT name FROM `proj1.data1.tbl1`) UNION ALL SELECT name FROM `data2.tbl2`",
129 | defaultProjectID: "default-proj",
130 | want: []string{"proj1.data1.tbl1", "default-proj.data2.tbl2"},
131 | wantErr: false,
132 | },
133 | {
134 | name: "empty sql",
135 | sql: "",
136 | defaultProjectID: "default-proj",
137 | want: []string{},
138 | wantErr: false,
139 | },
140 | {
141 | name: "with comments",
142 | sql: "SELECT * FROM `proj1.data1.tbl1`; -- comment `fake.table.one` \n SELECT * FROM `proj2.data2.tbl2`; # comment `fake.table.two`",
143 | defaultProjectID: "default-proj",
144 | want: []string{"proj1.data1.tbl1", "proj2.data2.tbl2"},
145 | wantErr: false,
146 | },
147 | {
148 | name: "multi-statement with semicolon",
149 | sql: "SELECT * FROM `proj1.data1.tbl1`; SELECT * FROM `proj2.data2.tbl2`",
150 | defaultProjectID: "default-proj",
151 | want: []string{"proj1.data1.tbl1", "proj2.data2.tbl2"},
152 | wantErr: false,
153 | },
154 | {
155 | name: "simple execute immediate",
156 | sql: "EXECUTE IMMEDIATE 'SELECT * FROM `exec.proj.tbl`'",
157 | defaultProjectID: "default-proj",
158 | want: nil,
159 | wantErr: true,
160 | wantErrMsg: "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place",
161 | },
162 | {
163 | name: "execute immediate with multiple spaces",
164 | sql: "EXECUTE IMMEDIATE 'SELECT 1'",
165 | defaultProjectID: "default-proj",
166 | want: nil,
167 | wantErr: true,
168 | wantErrMsg: "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place",
169 | },
170 | {
171 | name: "execute immediate with newline",
172 | sql: "EXECUTE\nIMMEDIATE 'SELECT 1'",
173 | defaultProjectID: "default-proj",
174 | want: nil,
175 | wantErr: true,
176 | wantErrMsg: "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place",
177 | },
178 | {
179 | name: "execute immediate with comment",
180 | sql: "EXECUTE -- some comment\n IMMEDIATE 'SELECT * FROM `exec.proj.tbl`'",
181 | defaultProjectID: "default-proj",
182 | want: nil,
183 | wantErr: true,
184 | wantErrMsg: "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place",
185 | },
186 | {
187 | name: "nested execute immediate",
188 | sql: "EXECUTE IMMEDIATE \"EXECUTE IMMEDIATE '''SELECT * FROM `nested.exec.tbl`'''\"",
189 | defaultProjectID: "default-proj",
190 | want: nil,
191 | wantErr: true,
192 | wantErrMsg: "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place",
193 | },
194 | {
195 | name: "begin execute immediate",
196 | sql: "BEGIN EXECUTE IMMEDIATE 'SELECT * FROM `exec.proj.tbl`'; END;",
197 | defaultProjectID: "default-proj",
198 | want: nil,
199 | wantErr: true,
200 | wantErrMsg: "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place",
201 | },
202 | {
203 | name: "table inside string literal should be ignored",
204 | sql: "SELECT * FROM `real.table.one` WHERE name = 'select * from `fake.table.two`'",
205 | defaultProjectID: "default-proj",
206 | want: []string{"real.table.one"},
207 | wantErr: false,
208 | },
209 | {
210 | name: "string with escaped single quote",
211 | sql: "SELECT 'this is a string with an escaped quote \\' and a fake table `fake.table.one`' FROM `real.table.two`",
212 | defaultProjectID: "default-proj",
213 | want: []string{"real.table.two"},
214 | wantErr: false,
215 | },
216 | {
217 | name: "string with escaped double quote",
218 | sql: `SELECT "this is a string with an escaped quote \" and a fake table ` + "`fake.table.one`" + `" FROM ` + "`real.table.two`",
219 | defaultProjectID: "default-proj",
220 | want: []string{"real.table.two"},
221 | wantErr: false,
222 | },
223 | {
224 | name: "multi-line comment",
225 | sql: "/* `fake.table.1` */ SELECT * FROM `real.table.2`",
226 | defaultProjectID: "default-proj",
227 | want: []string{"real.table.2"},
228 | wantErr: false,
229 | },
230 | {
231 | name: "raw string with backslash should be ignored",
232 | sql: "SELECT * FROM `real.table.one` WHERE name = r'a raw string with a \\ and a fake table `fake.table.two`'",
233 | defaultProjectID: "default-proj",
234 | want: []string{"real.table.one"},
235 | wantErr: false,
236 | },
237 | {
238 | name: "capital R raw string with quotes inside should be ignored",
239 | sql: `SELECT * FROM ` + "`real.table.one`" + ` WHERE name = R"""a raw string with a ' and a " and a \ and a fake table ` + "`fake.table.two`" + `"""`,
240 | defaultProjectID: "default-proj",
241 | want: []string{"real.table.one"},
242 | wantErr: false,
243 | },
244 | {
245 | name: "triple quoted raw string should be ignored",
246 | sql: "SELECT * FROM `real.table.one` WHERE name = r'''a raw string with a ' and a \" and a \\ and a fake table `fake.table.two`'''",
247 | defaultProjectID: "default-proj",
248 | want: []string{"real.table.one"},
249 | wantErr: false,
250 | },
251 | {
252 | name: "triple quoted capital R raw string should be ignored",
253 | sql: `SELECT * FROM ` + "`real.table.one`" + ` WHERE name = R"""a raw string with a ' and a " and a \ and a fake table ` + "`fake.table.two`" + `"""`,
254 | defaultProjectID: "default-proj",
255 | want: []string{"real.table.one"},
256 | wantErr: false,
257 | },
258 | {
259 | name: "unquoted fully qualified table",
260 | sql: "SELECT * FROM my-project.my_dataset.my_table",
261 | defaultProjectID: "default-proj",
262 | want: []string{"my-project.my_dataset.my_table"},
263 | wantErr: false,
264 | },
265 | {
266 | name: "unquoted partial table with default project",
267 | sql: "SELECT * FROM my_dataset.my_table",
268 | defaultProjectID: "default-proj",
269 | want: []string{"default-proj.my_dataset.my_table"},
270 | wantErr: false,
271 | },
272 | {
273 | name: "unquoted partial table without default project",
274 | sql: "SELECT * FROM my_dataset.my_table",
275 | defaultProjectID: "",
276 | want: nil,
277 | wantErr: true,
278 | },
279 | {
280 | name: "mixed quoting style 1",
281 | sql: "SELECT * FROM `my-project`.my_dataset.my_table",
282 | defaultProjectID: "default-proj",
283 | want: []string{"my-project.my_dataset.my_table"},
284 | wantErr: false,
285 | },
286 | {
287 | name: "mixed quoting style 2",
288 | sql: "SELECT * FROM `my-project`.`my_dataset`.my_table",
289 | defaultProjectID: "default-proj",
290 | want: []string{"my-project.my_dataset.my_table"},
291 | wantErr: false,
292 | },
293 | {
294 | name: "mixed quoting style 3",
295 | sql: "SELECT * FROM `my-project`.`my_dataset`.`my_table`",
296 | defaultProjectID: "default-proj",
297 | want: []string{"my-project.my_dataset.my_table"},
298 | wantErr: false,
299 | },
300 | {
301 | name: "mixed quoted and unquoted tables",
302 | sql: "SELECT * FROM `proj1.data1.tbl1` JOIN proj2.data2.tbl2 ON id",
303 | defaultProjectID: "default-proj",
304 | want: []string{"proj1.data1.tbl1", "proj2.data2.tbl2"},
305 | wantErr: false,
306 | },
307 | {
308 | name: "create table statement",
309 | sql: "CREATE TABLE `my-project.my_dataset.my_table` (x INT64)",
310 | defaultProjectID: "default-proj",
311 | want: []string{"my-project.my_dataset.my_table"},
312 | wantErr: false,
313 | },
314 | {
315 | name: "insert into statement",
316 | sql: "INSERT INTO `my-project.my_dataset.my_table` (x) VALUES (1)",
317 | defaultProjectID: "default-proj",
318 | want: []string{"my-project.my_dataset.my_table"},
319 | wantErr: false,
320 | },
321 | {
322 | name: "update statement",
323 | sql: "UPDATE `my-project.my_dataset.my_table` SET x = 2 WHERE true",
324 | defaultProjectID: "default-proj",
325 | want: []string{"my-project.my_dataset.my_table"},
326 | wantErr: false,
327 | },
328 | {
329 | name: "delete from statement",
330 | sql: "DELETE FROM `my-project.my_dataset.my_table` WHERE true",
331 | defaultProjectID: "default-proj",
332 | want: []string{"my-project.my_dataset.my_table"},
333 | wantErr: false,
334 | },
335 | {
336 | name: "merge into statement",
337 | sql: "MERGE `proj.data.target` T USING `proj.data.source` S ON T.id = S.id WHEN NOT MATCHED THEN INSERT ROW",
338 | defaultProjectID: "default-proj",
339 | want: []string{"proj.data.source", "proj.data.target"},
340 | wantErr: false,
341 | },
342 | {
343 | name: "create schema statement",
344 | sql: "CREATE SCHEMA `my-project.my_dataset`",
345 | defaultProjectID: "default-proj",
346 | want: nil,
347 | wantErr: true,
348 | wantErrMsg: "dataset-level operations like 'CREATE SCHEMA' are not allowed",
349 | },
350 | {
351 | name: "create dataset statement",
352 | sql: "CREATE DATASET `my-project.my_dataset`",
353 | defaultProjectID: "default-proj",
354 | want: nil,
355 | wantErr: true,
356 | wantErrMsg: "dataset-level operations like 'CREATE DATASET' are not allowed",
357 | },
358 | {
359 | name: "drop schema statement",
360 | sql: "DROP SCHEMA `my-project.my_dataset`",
361 | defaultProjectID: "default-proj",
362 | want: nil,
363 | wantErr: true,
364 | wantErrMsg: "dataset-level operations like 'DROP SCHEMA' are not allowed",
365 | },
366 | {
367 | name: "drop dataset statement",
368 | sql: "DROP DATASET `my-project.my_dataset`",
369 | defaultProjectID: "default-proj",
370 | want: nil,
371 | wantErr: true,
372 | wantErrMsg: "dataset-level operations like 'DROP DATASET' are not allowed",
373 | },
374 | {
375 | name: "alter schema statement",
376 | sql: "ALTER SCHEMA my_dataset SET OPTIONS(description='new description')",
377 | defaultProjectID: "default-proj",
378 | want: nil,
379 | wantErr: true,
380 | wantErrMsg: "dataset-level operations like 'ALTER SCHEMA' are not allowed",
381 | },
382 | {
383 | name: "alter dataset statement",
384 | sql: "ALTER DATASET my_dataset SET OPTIONS(description='new description')",
385 | defaultProjectID: "default-proj",
386 | want: nil,
387 | wantErr: true,
388 | wantErrMsg: "dataset-level operations like 'ALTER DATASET' are not allowed",
389 | },
390 | {
391 | name: "begin...end block",
392 | sql: "BEGIN CREATE TABLE `proj.data.tbl1` (x INT64); INSERT `proj.data.tbl2` (y) VALUES (1); END;",
393 | defaultProjectID: "default-proj",
394 | want: []string{"proj.data.tbl1", "proj.data.tbl2"},
395 | wantErr: false,
396 | },
397 | {
398 | name: "complex begin...end block with comments and different quoting",
399 | sql: `
400 | BEGIN
401 | -- Create a new table
402 | CREATE TABLE proj.data.tbl1 (x INT64);
403 | /* Insert some data from another table */
404 | INSERT INTO ` + "`proj.data.tbl2`" + ` (y) SELECT y FROM proj.data.source;
405 | END;`,
406 | defaultProjectID: "default-proj",
407 | want: []string{"proj.data.source", "proj.data.tbl1", "proj.data.tbl2"},
408 | wantErr: false,
409 | },
410 | {
411 | name: "call fully qualified procedure",
412 | sql: "CALL my-project.my_dataset.my_procedure()",
413 | defaultProjectID: "default-proj",
414 | want: nil,
415 | wantErr: true,
416 | wantErrMsg: "CALL is not allowed when dataset restrictions are in place",
417 | },
418 | {
419 | name: "call partially qualified procedure",
420 | sql: "CALL my_dataset.my_procedure()",
421 | defaultProjectID: "default-proj",
422 | want: nil,
423 | wantErr: true,
424 | wantErrMsg: "CALL is not allowed when dataset restrictions are in place",
425 | },
426 | {
427 | name: "call procedure in begin...end block",
428 | sql: "BEGIN CALL proj.data.proc1(); SELECT * FROM proj.data.tbl1; END;",
429 | defaultProjectID: "default-proj",
430 | want: nil,
431 | wantErr: true,
432 | wantErrMsg: "CALL is not allowed when dataset restrictions are in place",
433 | },
434 | {
435 | name: "call procedure with newline",
436 | sql: "CALL\nmy_dataset.my_procedure()",
437 | defaultProjectID: "default-proj",
438 | want: nil,
439 | wantErr: true,
440 | wantErrMsg: "CALL is not allowed when dataset restrictions are in place",
441 | },
442 | {
443 | name: "call procedure without default project should fail",
444 | sql: "CALL my_dataset.my_procedure()",
445 | defaultProjectID: "",
446 | want: nil,
447 | wantErr: true,
448 | wantErrMsg: "CALL is not allowed when dataset restrictions are in place",
449 | },
450 | {
451 | name: "create procedure statement",
452 | sql: "CREATE PROCEDURE my_dataset.my_procedure() BEGIN SELECT 1; END;",
453 | defaultProjectID: "default-proj",
454 | want: nil,
455 | wantErr: true,
456 | wantErrMsg: "unanalyzable statements like 'CREATE PROCEDURE' are not allowed",
457 | },
458 | {
459 | name: "create or replace procedure statement",
460 | sql: "CREATE\n OR \nREPLACE \nPROCEDURE my_dataset.my_procedure() BEGIN SELECT 1; END;",
461 | defaultProjectID: "default-proj",
462 | want: nil,
463 | wantErr: true,
464 | wantErrMsg: "unanalyzable statements like 'CREATE OR REPLACE PROCEDURE' are not allowed",
465 | },
466 | {
467 | name: "create function statement",
468 | sql: "CREATE FUNCTION my_dataset.my_function() RETURNS INT64 AS (1);",
469 | defaultProjectID: "default-proj",
470 | want: nil,
471 | wantErr: true,
472 | wantErrMsg: "unanalyzable statements like 'CREATE FUNCTION' are not allowed",
473 | },
474 | }
475 |
476 | for _, tc := range testCases {
477 | t.Run(tc.name, func(t *testing.T) {
478 | got, err := bigquerycommon.TableParser(tc.sql, tc.defaultProjectID)
479 | if (err != nil) != tc.wantErr {
480 | t.Errorf("TableParser() error = %v, wantErr %v", err, tc.wantErr)
481 | return
482 | }
483 | if tc.wantErr && tc.wantErrMsg != "" {
484 | if err == nil || !strings.Contains(err.Error(), tc.wantErrMsg) {
485 | t.Errorf("TableParser() error = %v, want err containing %q", err, tc.wantErrMsg)
486 | }
487 | }
488 | // Sort slices to ensure comparison is order-independent.
489 | sort.Strings(got)
490 | sort.Strings(tc.want)
491 | if diff := cmp.Diff(tc.want, got); diff != "" {
492 | t.Errorf("TableParser() mismatch (-want +got):\n%s", diff)
493 | }
494 | })
495 | }
496 | }
497 |
```
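These cases pin down the behavior of `bigquerycommon.TableParser`: it returns every table a statement references as `project.dataset.table`, qualifies partial names with the default project, and rejects `EXECUTE IMMEDIATE`, `CALL`, and dataset-level DDL outright because they cannot be checked against dataset restrictions. A minimal usage sketch, assuming only the signature the tests exercise (note that, as an `internal` package, it is importable only from within this module):

```go
package main

import (
	"fmt"
	"log"

	"github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerycommon"
)

func main() {
	sql := "SELECT * FROM `proj1.data1.tbl1` JOIN data2.tbl2 ON id"

	// Returns all referenced tables; the partial name data2.tbl2 is
	// qualified with the default project passed as the second argument.
	tables, err := bigquerycommon.TableParser(sql, "default-proj")
	if err != nil {
		// Unanalyzable statements (EXECUTE IMMEDIATE, CALL, CREATE
		// PROCEDURE, dataset-level DDL, ...) surface here as errors.
		log.Fatalf("cannot analyze statement: %v", err)
	}
	// e.g. [proj1.data1.tbl1 default-proj.data2.tbl2]; order is not
	// guaranteed, which is why the tests sort before comparing.
	fmt.Println(tables)
}
```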