This is page 59 of 76. Use http://codebase.md/googleapis/genai-toolbox?lines=true&page={x} to view the full context.
# Directory Structure
```
├── .ci
│ ├── continuous.release.cloudbuild.yaml
│ ├── generate_release_table.sh
│ ├── integration.cloudbuild.yaml
│ ├── quickstart_test
│ │ ├── go.integration.cloudbuild.yaml
│ │ ├── js.integration.cloudbuild.yaml
│ │ ├── py.integration.cloudbuild.yaml
│ │ ├── run_go_tests.sh
│ │ ├── run_js_tests.sh
│ │ ├── run_py_tests.sh
│ │ └── setup_hotels_sample.sql
│ ├── test_prompts_with_coverage.sh
│ ├── test_with_coverage.sh
│ └── versioned.release.cloudbuild.yaml
├── .gemini
│ └── config.yaml
├── .github
│ ├── auto-label.yaml
│ ├── blunderbuss.yml
│ ├── CODEOWNERS
│ ├── header-checker-lint.yml
│ ├── ISSUE_TEMPLATE
│ │ ├── bug_report.yml
│ │ ├── config.yml
│ │ ├── feature_request.yml
│ │ └── question.yml
│ ├── label-sync.yml
│ ├── labels.yaml
│ ├── PULL_REQUEST_TEMPLATE.md
│ ├── release-please.yml
│ ├── renovate.json5
│ ├── sync-repo-settings.yaml
│ ├── trusted-contribution.yml
│ └── workflows
│ ├── cloud_build_failure_reporter.yml
│ ├── deploy_dev_docs.yaml
│ ├── deploy_previous_version_docs.yaml
│ ├── deploy_versioned_docs.yaml
│ ├── docs_preview_clean.yaml
│ ├── docs_preview_deploy.yaml
│ ├── link_checker_workflow.yaml
│ ├── lint.yaml
│ ├── publish-mcp.yml
│ ├── schedule_reporter.yml
│ ├── sync-labels.yaml
│ └── tests.yaml
├── .gitignore
├── .gitmodules
├── .golangci.yaml
├── .hugo
│ ├── archetypes
│ │ └── default.md
│ ├── assets
│ │ ├── icons
│ │ │ └── logo.svg
│ │ └── scss
│ │ ├── _styles_project.scss
│ │ └── _variables_project.scss
│ ├── go.mod
│ ├── go.sum
│ ├── hugo.toml
│ ├── layouts
│ │ ├── _default
│ │ │ └── home.releases.releases
│ │ ├── index.llms-full.txt
│ │ ├── index.llms.txt
│ │ ├── partials
│ │ │ ├── hooks
│ │ │ │ └── head-end.html
│ │ │ ├── navbar-version-selector.html
│ │ │ ├── page-meta-links.html
│ │ │ └── td
│ │ │ └── render-heading.html
│ │ ├── robot.txt
│ │ └── shortcodes
│ │ ├── include.html
│ │ ├── ipynb.html
│ │ └── regionInclude.html
│ ├── package-lock.json
│ ├── package.json
│ └── static
│ ├── favicons
│ │ ├── android-chrome-192x192.png
│ │ ├── android-chrome-512x512.png
│ │ ├── apple-touch-icon.png
│ │ ├── favicon-16x16.png
│ │ ├── favicon-32x32.png
│ │ └── favicon.ico
│ └── js
│ └── w3.js
├── .lycheeignore
├── CHANGELOG.md
├── cmd
│ ├── options_test.go
│ ├── options.go
│ ├── root_test.go
│ ├── root.go
│ └── version.txt
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── DEVELOPER.md
├── Dockerfile
├── docs
│ ├── ALLOYDBADMIN_README.md
│ ├── ALLOYDBPG_README.md
│ ├── BIGQUERY_README.md
│ ├── CLOUDSQLMSSQL_README.md
│ ├── CLOUDSQLMSSQLADMIN_README.md
│ ├── CLOUDSQLMYSQL_README.md
│ ├── CLOUDSQLMYSQLADMIN_README.md
│ ├── CLOUDSQLPG_README.md
│ ├── CLOUDSQLPGADMIN_README.md
│ ├── DATAPLEX_README.md
│ ├── en
│ │ ├── _index.md
│ │ ├── about
│ │ │ ├── _index.md
│ │ │ └── faq.md
│ │ ├── blogs
│ │ │ └── _index.md
│ │ ├── concepts
│ │ │ ├── _index.md
│ │ │ └── telemetry
│ │ │ ├── index.md
│ │ │ ├── telemetry_flow.png
│ │ │ └── telemetry_traces.png
│ │ ├── getting-started
│ │ │ ├── _index.md
│ │ │ ├── colab_quickstart.ipynb
│ │ │ ├── configure.md
│ │ │ ├── introduction
│ │ │ │ ├── _index.md
│ │ │ │ └── architecture.png
│ │ │ ├── local_quickstart_go.md
│ │ │ ├── local_quickstart_js.md
│ │ │ ├── local_quickstart.md
│ │ │ ├── mcp_quickstart
│ │ │ │ ├── _index.md
│ │ │ │ ├── inspector_tools.png
│ │ │ │ └── inspector.png
│ │ │ ├── prompts_quickstart_gemini_cli.md
│ │ │ └── quickstart
│ │ │ ├── go
│ │ │ │ ├── adkgo
│ │ │ │ │ ├── go.mod
│ │ │ │ │ ├── go.sum
│ │ │ │ │ └── quickstart.go
│ │ │ │ ├── genAI
│ │ │ │ │ ├── go.mod
│ │ │ │ │ ├── go.sum
│ │ │ │ │ └── quickstart.go
│ │ │ │ ├── genkit
│ │ │ │ │ ├── go.mod
│ │ │ │ │ ├── go.sum
│ │ │ │ │ └── quickstart.go
│ │ │ │ ├── langchain
│ │ │ │ │ ├── go.mod
│ │ │ │ │ ├── go.sum
│ │ │ │ │ └── quickstart.go
│ │ │ │ ├── openAI
│ │ │ │ │ ├── go.mod
│ │ │ │ │ ├── go.sum
│ │ │ │ │ └── quickstart.go
│ │ │ │ └── quickstart_test.go
│ │ │ ├── golden.txt
│ │ │ ├── js
│ │ │ │ ├── adk
│ │ │ │ │ ├── package-lock.json
│ │ │ │ │ ├── package.json
│ │ │ │ │ └── quickstart.js
│ │ │ │ ├── genAI
│ │ │ │ │ ├── package-lock.json
│ │ │ │ │ ├── package.json
│ │ │ │ │ └── quickstart.js
│ │ │ │ ├── genkit
│ │ │ │ │ ├── package-lock.json
│ │ │ │ │ ├── package.json
│ │ │ │ │ └── quickstart.js
│ │ │ │ ├── langchain
│ │ │ │ │ ├── package-lock.json
│ │ │ │ │ ├── package.json
│ │ │ │ │ └── quickstart.js
│ │ │ │ ├── llamaindex
│ │ │ │ │ ├── package-lock.json
│ │ │ │ │ ├── package.json
│ │ │ │ │ └── quickstart.js
│ │ │ │ └── quickstart.test.js
│ │ │ ├── python
│ │ │ │ ├── __init__.py
│ │ │ │ ├── adk
│ │ │ │ │ ├── quickstart.py
│ │ │ │ │ └── requirements.txt
│ │ │ │ ├── core
│ │ │ │ │ ├── quickstart.py
│ │ │ │ │ └── requirements.txt
│ │ │ │ ├── langchain
│ │ │ │ │ ├── quickstart.py
│ │ │ │ │ └── requirements.txt
│ │ │ │ ├── llamaindex
│ │ │ │ │ ├── quickstart.py
│ │ │ │ │ └── requirements.txt
│ │ │ │ └── quickstart_test.py
│ │ │ └── shared
│ │ │ ├── cloud_setup.md
│ │ │ ├── configure_toolbox.md
│ │ │ └── database_setup.md
│ │ ├── how-to
│ │ │ ├── _index.md
│ │ │ ├── connect_via_geminicli.md
│ │ │ ├── connect_via_mcp.md
│ │ │ ├── connect-ide
│ │ │ │ ├── _index.md
│ │ │ │ ├── alloydb_pg_admin_mcp.md
│ │ │ │ ├── alloydb_pg_mcp.md
│ │ │ │ ├── bigquery_mcp.md
│ │ │ │ ├── cloud_sql_mssql_admin_mcp.md
│ │ │ │ ├── cloud_sql_mssql_mcp.md
│ │ │ │ ├── cloud_sql_mysql_admin_mcp.md
│ │ │ │ ├── cloud_sql_mysql_mcp.md
│ │ │ │ ├── cloud_sql_pg_admin_mcp.md
│ │ │ │ ├── cloud_sql_pg_mcp.md
│ │ │ │ ├── firestore_mcp.md
│ │ │ │ ├── looker_mcp.md
│ │ │ │ ├── mssql_mcp.md
│ │ │ │ ├── mysql_mcp.md
│ │ │ │ ├── neo4j_mcp.md
│ │ │ │ ├── postgres_mcp.md
│ │ │ │ ├── spanner_mcp.md
│ │ │ │ └── sqlite_mcp.md
│ │ │ ├── deploy_adk_agent.md
│ │ │ ├── deploy_docker.md
│ │ │ ├── deploy_gke.md
│ │ │ ├── deploy_toolbox.md
│ │ │ ├── export_telemetry.md
│ │ │ └── toolbox-ui
│ │ │ ├── edit-headers.gif
│ │ │ ├── edit-headers.png
│ │ │ ├── index.md
│ │ │ ├── optional-param-checked.png
│ │ │ ├── optional-param-unchecked.png
│ │ │ ├── run-tool.gif
│ │ │ ├── tools.png
│ │ │ └── toolsets.png
│ │ ├── reference
│ │ │ ├── _index.md
│ │ │ ├── cli.md
│ │ │ └── prebuilt-tools.md
│ │ ├── resources
│ │ │ ├── _index.md
│ │ │ ├── authServices
│ │ │ │ ├── _index.md
│ │ │ │ └── google.md
│ │ │ ├── embeddingModels
│ │ │ │ ├── _index.md
│ │ │ │ └── gemini.md
│ │ │ ├── prompts
│ │ │ │ ├── _index.md
│ │ │ │ └── custom
│ │ │ │ └── _index.md
│ │ │ ├── sources
│ │ │ │ ├── _index.md
│ │ │ │ ├── alloydb-admin.md
│ │ │ │ ├── alloydb-pg.md
│ │ │ │ ├── bigquery.md
│ │ │ │ ├── bigtable.md
│ │ │ │ ├── cassandra.md
│ │ │ │ ├── clickhouse.md
│ │ │ │ ├── cloud-gda.md
│ │ │ │ ├── cloud-healthcare.md
│ │ │ │ ├── cloud-monitoring.md
│ │ │ │ ├── cloud-sql-admin.md
│ │ │ │ ├── cloud-sql-mssql.md
│ │ │ │ ├── cloud-sql-mysql.md
│ │ │ │ ├── cloud-sql-pg.md
│ │ │ │ ├── couchbase.md
│ │ │ │ ├── dataplex.md
│ │ │ │ ├── dgraph.md
│ │ │ │ ├── elasticsearch.md
│ │ │ │ ├── firebird.md
│ │ │ │ ├── firestore.md
│ │ │ │ ├── http.md
│ │ │ │ ├── looker.md
│ │ │ │ ├── mariadb.md
│ │ │ │ ├── mindsdb.md
│ │ │ │ ├── mongodb.md
│ │ │ │ ├── mssql.md
│ │ │ │ ├── mysql.md
│ │ │ │ ├── neo4j.md
│ │ │ │ ├── oceanbase.md
│ │ │ │ ├── oracle.md
│ │ │ │ ├── postgres.md
│ │ │ │ ├── redis.md
│ │ │ │ ├── serverless-spark.md
│ │ │ │ ├── singlestore.md
│ │ │ │ ├── snowflake.md
│ │ │ │ ├── spanner.md
│ │ │ │ ├── sqlite.md
│ │ │ │ ├── tidb.md
│ │ │ │ ├── trino.md
│ │ │ │ ├── valkey.md
│ │ │ │ └── yugabytedb.md
│ │ │ └── tools
│ │ │ ├── _index.md
│ │ │ ├── alloydb
│ │ │ │ ├── _index.md
│ │ │ │ ├── alloydb-create-cluster.md
│ │ │ │ ├── alloydb-create-instance.md
│ │ │ │ ├── alloydb-create-user.md
│ │ │ │ ├── alloydb-get-cluster.md
│ │ │ │ ├── alloydb-get-instance.md
│ │ │ │ ├── alloydb-get-user.md
│ │ │ │ ├── alloydb-list-clusters.md
│ │ │ │ ├── alloydb-list-instances.md
│ │ │ │ ├── alloydb-list-users.md
│ │ │ │ └── alloydb-wait-for-operation.md
│ │ │ ├── alloydbainl
│ │ │ │ ├── _index.md
│ │ │ │ └── alloydb-ai-nl.md
│ │ │ ├── bigquery
│ │ │ │ ├── _index.md
│ │ │ │ ├── bigquery-analyze-contribution.md
│ │ │ │ ├── bigquery-conversational-analytics.md
│ │ │ │ ├── bigquery-execute-sql.md
│ │ │ │ ├── bigquery-forecast.md
│ │ │ │ ├── bigquery-get-dataset-info.md
│ │ │ │ ├── bigquery-get-table-info.md
│ │ │ │ ├── bigquery-list-dataset-ids.md
│ │ │ │ ├── bigquery-list-table-ids.md
│ │ │ │ ├── bigquery-search-catalog.md
│ │ │ │ └── bigquery-sql.md
│ │ │ ├── bigtable
│ │ │ │ ├── _index.md
│ │ │ │ └── bigtable-sql.md
│ │ │ ├── cassandra
│ │ │ │ ├── _index.md
│ │ │ │ └── cassandra-cql.md
│ │ │ ├── clickhouse
│ │ │ │ ├── _index.md
│ │ │ │ ├── clickhouse-execute-sql.md
│ │ │ │ ├── clickhouse-list-databases.md
│ │ │ │ ├── clickhouse-list-tables.md
│ │ │ │ └── clickhouse-sql.md
│ │ │ ├── cloudgda
│ │ │ │ ├── _index.md
│ │ │ │ └── cloud-gda-query.md
│ │ │ ├── cloudhealthcare
│ │ │ │ ├── _index.md
│ │ │ │ ├── cloud-healthcare-fhir-fetch-page.md
│ │ │ │ ├── cloud-healthcare-fhir-patient-everything.md
│ │ │ │ ├── cloud-healthcare-fhir-patient-search.md
│ │ │ │ ├── cloud-healthcare-get-dataset.md
│ │ │ │ ├── cloud-healthcare-get-dicom-store-metrics.md
│ │ │ │ ├── cloud-healthcare-get-dicom-store.md
│ │ │ │ ├── cloud-healthcare-get-fhir-resource.md
│ │ │ │ ├── cloud-healthcare-get-fhir-store-metrics.md
│ │ │ │ ├── cloud-healthcare-get-fhir-store.md
│ │ │ │ ├── cloud-healthcare-list-dicom-stores.md
│ │ │ │ ├── cloud-healthcare-list-fhir-stores.md
│ │ │ │ ├── cloud-healthcare-retrieve-rendered-dicom-instance.md
│ │ │ │ ├── cloud-healthcare-search-dicom-instances.md
│ │ │ │ ├── cloud-healthcare-search-dicom-series.md
│ │ │ │ └── cloud-healthcare-search-dicom-studies.md
│ │ │ ├── cloudmonitoring
│ │ │ │ ├── _index.md
│ │ │ │ └── cloud-monitoring-query-prometheus.md
│ │ │ ├── cloudsql
│ │ │ │ ├── _index.md
│ │ │ │ ├── cloudsqlcloneinstance.md
│ │ │ │ ├── cloudsqlcreatedatabase.md
│ │ │ │ ├── cloudsqlcreateusers.md
│ │ │ │ ├── cloudsqlgetinstances.md
│ │ │ │ ├── cloudsqllistdatabases.md
│ │ │ │ ├── cloudsqllistinstances.md
│ │ │ │ ├── cloudsqlmssqlcreateinstance.md
│ │ │ │ ├── cloudsqlmysqlcreateinstance.md
│ │ │ │ ├── cloudsqlpgcreateinstances.md
│ │ │ │ ├── cloudsqlpgupgradeprecheck.md
│ │ │ │ └── cloudsqlwaitforoperation.md
│ │ │ ├── couchbase
│ │ │ │ ├── _index.md
│ │ │ │ └── couchbase-sql.md
│ │ │ ├── dataform
│ │ │ │ ├── _index.md
│ │ │ │ └── dataform-compile-local.md
│ │ │ ├── dataplex
│ │ │ │ ├── _index.md
│ │ │ │ ├── dataplex-lookup-entry.md
│ │ │ │ ├── dataplex-search-aspect-types.md
│ │ │ │ └── dataplex-search-entries.md
│ │ │ ├── dgraph
│ │ │ │ ├── _index.md
│ │ │ │ └── dgraph-dql.md
│ │ │ ├── elasticsearch
│ │ │ │ ├── _index.md
│ │ │ │ └── elasticsearch-esql.md
│ │ │ ├── firebird
│ │ │ │ ├── _index.md
│ │ │ │ ├── firebird-execute-sql.md
│ │ │ │ └── firebird-sql.md
│ │ │ ├── firestore
│ │ │ │ ├── _index.md
│ │ │ │ ├── firestore-add-documents.md
│ │ │ │ ├── firestore-delete-documents.md
│ │ │ │ ├── firestore-get-documents.md
│ │ │ │ ├── firestore-get-rules.md
│ │ │ │ ├── firestore-list-collections.md
│ │ │ │ ├── firestore-query-collection.md
│ │ │ │ ├── firestore-query.md
│ │ │ │ ├── firestore-update-document.md
│ │ │ │ └── firestore-validate-rules.md
│ │ │ ├── http
│ │ │ │ ├── _index.md
│ │ │ │ └── http.md
│ │ │ ├── looker
│ │ │ │ ├── _index.md
│ │ │ │ ├── looker-add-dashboard-element.md
│ │ │ │ ├── looker-add-dashboard-filter.md
│ │ │ │ ├── looker-conversational-analytics.md
│ │ │ │ ├── looker-create-project-file.md
│ │ │ │ ├── looker-delete-project-file.md
│ │ │ │ ├── looker-dev-mode.md
│ │ │ │ ├── looker-generate-embed-url.md
│ │ │ │ ├── looker-get-connection-databases.md
│ │ │ │ ├── looker-get-connection-schemas.md
│ │ │ │ ├── looker-get-connection-table-columns.md
│ │ │ │ ├── looker-get-connection-tables.md
│ │ │ │ ├── looker-get-connections.md
│ │ │ │ ├── looker-get-dashboards.md
│ │ │ │ ├── looker-get-dimensions.md
│ │ │ │ ├── looker-get-explores.md
│ │ │ │ ├── looker-get-filters.md
│ │ │ │ ├── looker-get-looks.md
│ │ │ │ ├── looker-get-measures.md
│ │ │ │ ├── looker-get-models.md
│ │ │ │ ├── looker-get-parameters.md
│ │ │ │ ├── looker-get-project-file.md
│ │ │ │ ├── looker-get-project-files.md
│ │ │ │ ├── looker-get-projects.md
│ │ │ │ ├── looker-health-analyze.md
│ │ │ │ ├── looker-health-pulse.md
│ │ │ │ ├── looker-health-vacuum.md
│ │ │ │ ├── looker-make-dashboard.md
│ │ │ │ ├── looker-make-look.md
│ │ │ │ ├── looker-query-sql.md
│ │ │ │ ├── looker-query-url.md
│ │ │ │ ├── looker-query.md
│ │ │ │ ├── looker-run-dashboard.md
│ │ │ │ ├── looker-run-look.md
│ │ │ │ └── looker-update-project-file.md
│ │ │ ├── mindsdb
│ │ │ │ ├── _index.md
│ │ │ │ ├── mindsdb-execute-sql.md
│ │ │ │ └── mindsdb-sql.md
│ │ │ ├── mongodb
│ │ │ │ ├── _index.md
│ │ │ │ ├── mongodb-aggregate.md
│ │ │ │ ├── mongodb-delete-many.md
│ │ │ │ ├── mongodb-delete-one.md
│ │ │ │ ├── mongodb-find-one.md
│ │ │ │ ├── mongodb-find.md
│ │ │ │ ├── mongodb-insert-many.md
│ │ │ │ ├── mongodb-insert-one.md
│ │ │ │ ├── mongodb-update-many.md
│ │ │ │ └── mongodb-update-one.md
│ │ │ ├── mssql
│ │ │ │ ├── _index.md
│ │ │ │ ├── mssql-execute-sql.md
│ │ │ │ ├── mssql-list-tables.md
│ │ │ │ └── mssql-sql.md
│ │ │ ├── mysql
│ │ │ │ ├── _index.md
│ │ │ │ ├── mysql-execute-sql.md
│ │ │ │ ├── mysql-get-query-plan.md
│ │ │ │ ├── mysql-list-active-queries.md
│ │ │ │ ├── mysql-list-table-fragmentation.md
│ │ │ │ ├── mysql-list-tables-missing-unique-indexes.md
│ │ │ │ ├── mysql-list-tables.md
│ │ │ │ └── mysql-sql.md
│ │ │ ├── neo4j
│ │ │ │ ├── _index.md
│ │ │ │ ├── neo4j-cypher.md
│ │ │ │ ├── neo4j-execute-cypher.md
│ │ │ │ └── neo4j-schema.md
│ │ │ ├── oceanbase
│ │ │ │ ├── _index.md
│ │ │ │ ├── oceanbase-execute-sql.md
│ │ │ │ └── oceanbase-sql.md
│ │ │ ├── oracle
│ │ │ │ ├── _index.md
│ │ │ │ ├── oracle-execute-sql.md
│ │ │ │ └── oracle-sql.md
│ │ │ ├── postgres
│ │ │ │ ├── _index.md
│ │ │ │ ├── postgres-database-overview.md
│ │ │ │ ├── postgres-execute-sql.md
│ │ │ │ ├── postgres-get-column-cardinality.md
│ │ │ │ ├── postgres-list-active-queries.md
│ │ │ │ ├── postgres-list-available-extensions.md
│ │ │ │ ├── postgres-list-database-stats.md
│ │ │ │ ├── postgres-list-indexes.md
│ │ │ │ ├── postgres-list-installed-extensions.md
│ │ │ │ ├── postgres-list-locks.md
│ │ │ │ ├── postgres-list-pg-settings.md
│ │ │ │ ├── postgres-list-publication-tables.md
│ │ │ │ ├── postgres-list-query-stats.md
│ │ │ │ ├── postgres-list-roles.md
│ │ │ │ ├── postgres-list-schemas.md
│ │ │ │ ├── postgres-list-sequences.md
│ │ │ │ ├── postgres-list-stored-procedure.md
│ │ │ │ ├── postgres-list-table-stats.md
│ │ │ │ ├── postgres-list-tables.md
│ │ │ │ ├── postgres-list-tablespaces.md
│ │ │ │ ├── postgres-list-triggers.md
│ │ │ │ ├── postgres-list-views.md
│ │ │ │ ├── postgres-long-running-transactions.md
│ │ │ │ ├── postgres-replication-stats.md
│ │ │ │ └── postgres-sql.md
│ │ │ ├── redis
│ │ │ │ ├── _index.md
│ │ │ │ └── redis.md
│ │ │ ├── serverless-spark
│ │ │ │ ├── _index.md
│ │ │ │ ├── serverless-spark-cancel-batch.md
│ │ │ │ ├── serverless-spark-create-pyspark-batch.md
│ │ │ │ ├── serverless-spark-create-spark-batch.md
│ │ │ │ ├── serverless-spark-get-batch.md
│ │ │ │ └── serverless-spark-list-batches.md
│ │ │ ├── singlestore
│ │ │ │ ├── _index.md
│ │ │ │ ├── singlestore-execute-sql.md
│ │ │ │ └── singlestore-sql.md
│ │ │ ├── snowflake
│ │ │ │ ├── _index.md
│ │ │ │ ├── snowflake-execute-sql.md
│ │ │ │ └── snowflake-sql.md
│ │ │ ├── spanner
│ │ │ │ ├── _index.md
│ │ │ │ ├── spanner-execute-sql.md
│ │ │ │ ├── spanner-list-graphs.md
│ │ │ │ ├── spanner-list-tables.md
│ │ │ │ └── spanner-sql.md
│ │ │ ├── sqlite
│ │ │ │ ├── _index.md
│ │ │ │ ├── sqlite-execute-sql.md
│ │ │ │ └── sqlite-sql.md
│ │ │ ├── tidb
│ │ │ │ ├── _index.md
│ │ │ │ ├── tidb-execute-sql.md
│ │ │ │ └── tidb-sql.md
│ │ │ ├── trino
│ │ │ │ ├── _index.md
│ │ │ │ ├── trino-execute-sql.md
│ │ │ │ └── trino-sql.md
│ │ │ ├── utility
│ │ │ │ ├── _index.md
│ │ │ │ └── wait.md
│ │ │ ├── valkey
│ │ │ │ ├── _index.md
│ │ │ │ └── valkey.md
│ │ │ └── yugabytedb
│ │ │ ├── _index.md
│ │ │ └── yugabytedb-sql.md
│ │ ├── samples
│ │ │ ├── _index.md
│ │ │ ├── alloydb
│ │ │ │ ├── _index.md
│ │ │ │ ├── ai-nl
│ │ │ │ │ ├── alloydb_ai_nl.ipynb
│ │ │ │ │ └── index.md
│ │ │ │ └── mcp_quickstart.md
│ │ │ ├── bigquery
│ │ │ │ ├── _index.md
│ │ │ │ ├── colab_quickstart_bigquery.ipynb
│ │ │ │ ├── local_quickstart.md
│ │ │ │ └── mcp_quickstart
│ │ │ │ ├── _index.md
│ │ │ │ ├── inspector_tools.png
│ │ │ │ └── inspector.png
│ │ │ ├── looker
│ │ │ │ ├── _index.md
│ │ │ │ ├── looker_gemini_oauth
│ │ │ │ │ ├── _index.md
│ │ │ │ │ ├── authenticated.png
│ │ │ │ │ ├── authorize.png
│ │ │ │ │ └── registration.png
│ │ │ │ ├── looker_gemini.md
│ │ │ │ └── looker_mcp_inspector
│ │ │ │ ├── _index.md
│ │ │ │ ├── inspector_tools.png
│ │ │ │ └── inspector.png
│ │ │ └── snowflake
│ │ │ ├── _index.md
│ │ │ ├── runme.py
│ │ │ ├── snowflake-config.yaml
│ │ │ ├── snowflake-env.sh
│ │ │ └── test-snowflake.sh
│ │ └── sdks
│ │ ├── _index.md
│ │ ├── go-sdk.md
│ │ ├── js-sdk.md
│ │ └── python-sdk.md
│ ├── LOOKER_README.md
│ ├── SPANNER_README.md
│ └── TOOLBOX_README.md
├── gemini-extension.json
├── go.mod
├── go.sum
├── internal
│ ├── auth
│ │ ├── auth.go
│ │ └── google
│ │ └── google.go
│ ├── embeddingmodels
│ │ ├── embeddingmodels.go
│ │ └── gemini
│ │ ├── gemini_test.go
│ │ └── gemini.go
│ ├── log
│ │ ├── handler.go
│ │ ├── log_test.go
│ │ ├── log.go
│ │ └── logger.go
│ ├── prebuiltconfigs
│ │ ├── prebuiltconfigs_test.go
│ │ ├── prebuiltconfigs.go
│ │ └── tools
│ │ ├── alloydb-postgres-admin.yaml
│ │ ├── alloydb-postgres-observability.yaml
│ │ ├── alloydb-postgres.yaml
│ │ ├── bigquery.yaml
│ │ ├── clickhouse.yaml
│ │ ├── cloud-healthcare.yaml
│ │ ├── cloud-sql-mssql-admin.yaml
│ │ ├── cloud-sql-mssql-observability.yaml
│ │ ├── cloud-sql-mssql.yaml
│ │ ├── cloud-sql-mysql-admin.yaml
│ │ ├── cloud-sql-mysql-observability.yaml
│ │ ├── cloud-sql-mysql.yaml
│ │ ├── cloud-sql-postgres-admin.yaml
│ │ ├── cloud-sql-postgres-observability.yaml
│ │ ├── cloud-sql-postgres.yaml
│ │ ├── dataplex.yaml
│ │ ├── elasticsearch.yaml
│ │ ├── firestore.yaml
│ │ ├── looker-conversational-analytics.yaml
│ │ ├── looker.yaml
│ │ ├── mindsdb.yaml
│ │ ├── mssql.yaml
│ │ ├── mysql.yaml
│ │ ├── neo4j.yaml
│ │ ├── oceanbase.yaml
│ │ ├── postgres.yaml
│ │ ├── serverless-spark.yaml
│ │ ├── singlestore.yaml
│ │ ├── snowflake.yaml
│ │ ├── spanner-postgres.yaml
│ │ ├── spanner.yaml
│ │ └── sqlite.yaml
│ ├── prompts
│ │ ├── arguments_test.go
│ │ ├── arguments.go
│ │ ├── custom
│ │ │ ├── custom_test.go
│ │ │ └── custom.go
│ │ ├── messages_test.go
│ │ ├── messages.go
│ │ ├── prompts_test.go
│ │ ├── prompts.go
│ │ ├── promptsets_test.go
│ │ └── promptsets.go
│ ├── server
│ │ ├── api_test.go
│ │ ├── api.go
│ │ ├── common_test.go
│ │ ├── config.go
│ │ ├── mcp
│ │ │ ├── jsonrpc
│ │ │ │ ├── jsonrpc_test.go
│ │ │ │ └── jsonrpc.go
│ │ │ ├── mcp.go
│ │ │ ├── util
│ │ │ │ └── lifecycle.go
│ │ │ ├── v20241105
│ │ │ │ ├── method.go
│ │ │ │ └── types.go
│ │ │ ├── v20250326
│ │ │ │ ├── method.go
│ │ │ │ └── types.go
│ │ │ └── v20250618
│ │ │ ├── method.go
│ │ │ └── types.go
│ │ ├── mcp_test.go
│ │ ├── mcp.go
│ │ ├── resources
│ │ │ ├── resources_test.go
│ │ │ └── resources.go
│ │ ├── server_test.go
│ │ ├── server.go
│ │ ├── static
│ │ │ ├── assets
│ │ │ │ └── mcptoolboxlogo.png
│ │ │ ├── css
│ │ │ │ └── style.css
│ │ │ ├── index.html
│ │ │ ├── js
│ │ │ │ ├── auth.js
│ │ │ │ ├── loadTools.js
│ │ │ │ ├── mainContent.js
│ │ │ │ ├── navbar.js
│ │ │ │ ├── runTool.js
│ │ │ │ ├── toolDisplay.js
│ │ │ │ ├── tools.js
│ │ │ │ └── toolsets.js
│ │ │ ├── tools.html
│ │ │ └── toolsets.html
│ │ ├── web_test.go
│ │ └── web.go
│ ├── sources
│ │ ├── alloydbadmin
│ │ │ ├── alloydbadmin_test.go
│ │ │ └── alloydbadmin.go
│ │ ├── alloydbpg
│ │ │ ├── alloydb_pg_test.go
│ │ │ └── alloydb_pg.go
│ │ ├── bigquery
│ │ │ ├── bigquery_test.go
│ │ │ ├── bigquery.go
│ │ │ └── cache.go
│ │ ├── bigtable
│ │ │ ├── bigtable_test.go
│ │ │ └── bigtable.go
│ │ ├── cassandra
│ │ │ ├── cassandra_test.go
│ │ │ └── cassandra.go
│ │ ├── clickhouse
│ │ │ ├── clickhouse_test.go
│ │ │ └── clickhouse.go
│ │ ├── cloudgda
│ │ │ ├── cloud_gda_test.go
│ │ │ └── cloud_gda.go
│ │ ├── cloudhealthcare
│ │ │ ├── cloud_healthcare_test.go
│ │ │ └── cloud_healthcare.go
│ │ ├── cloudmonitoring
│ │ │ ├── cloud_monitoring_test.go
│ │ │ └── cloud_monitoring.go
│ │ ├── cloudsqladmin
│ │ │ ├── cloud_sql_admin_test.go
│ │ │ └── cloud_sql_admin.go
│ │ ├── cloudsqlmssql
│ │ │ ├── cloud_sql_mssql_test.go
│ │ │ └── cloud_sql_mssql.go
│ │ ├── cloudsqlmysql
│ │ │ ├── cloud_sql_mysql_test.go
│ │ │ └── cloud_sql_mysql.go
│ │ ├── cloudsqlpg
│ │ │ ├── cloud_sql_pg_test.go
│ │ │ └── cloud_sql_pg.go
│ │ ├── couchbase
│ │ │ ├── couchbase_test.go
│ │ │ └── couchbase.go
│ │ ├── dataplex
│ │ │ ├── dataplex_test.go
│ │ │ └── dataplex.go
│ │ ├── dgraph
│ │ │ ├── dgraph_test.go
│ │ │ └── dgraph.go
│ │ ├── dialect.go
│ │ ├── elasticsearch
│ │ │ ├── elasticsearch_test.go
│ │ │ └── elasticsearch.go
│ │ ├── firebird
│ │ │ ├── firebird_test.go
│ │ │ └── firebird.go
│ │ ├── firestore
│ │ │ ├── firestore_test.go
│ │ │ └── firestore.go
│ │ ├── http
│ │ │ ├── http_test.go
│ │ │ └── http.go
│ │ ├── ip_type.go
│ │ ├── looker
│ │ │ ├── looker_test.go
│ │ │ └── looker.go
│ │ ├── mindsdb
│ │ │ ├── mindsdb_test.go
│ │ │ └── mindsdb.go
│ │ ├── mongodb
│ │ │ ├── mongodb_test.go
│ │ │ └── mongodb.go
│ │ ├── mssql
│ │ │ ├── mssql_test.go
│ │ │ └── mssql.go
│ │ ├── mysql
│ │ │ ├── mysql_test.go
│ │ │ └── mysql.go
│ │ ├── neo4j
│ │ │ ├── neo4j_test.go
│ │ │ └── neo4j.go
│ │ ├── oceanbase
│ │ │ ├── oceanbase_test.go
│ │ │ └── oceanbase.go
│ │ ├── oracle
│ │ │ ├── oracle_test.go
│ │ │ └── oracle.go
│ │ ├── postgres
│ │ │ ├── postgres_test.go
│ │ │ └── postgres.go
│ │ ├── redis
│ │ │ ├── redis_test.go
│ │ │ └── redis.go
│ │ ├── serverlessspark
│ │ │ ├── serverlessspark_test.go
│ │ │ ├── serverlessspark.go
│ │ │ ├── url_test.go
│ │ │ └── url.go
│ │ ├── singlestore
│ │ │ ├── singlestore_test.go
│ │ │ └── singlestore.go
│ │ ├── snowflake
│ │ │ ├── snowflake_test.go
│ │ │ └── snowflake.go
│ │ ├── sources.go
│ │ ├── spanner
│ │ │ ├── spanner_test.go
│ │ │ └── spanner.go
│ │ ├── sqlite
│ │ │ ├── sqlite_test.go
│ │ │ └── sqlite.go
│ │ ├── tidb
│ │ │ ├── tidb_test.go
│ │ │ └── tidb.go
│ │ ├── trino
│ │ │ ├── trino_test.go
│ │ │ └── trino.go
│ │ ├── util.go
│ │ ├── valkey
│ │ │ ├── valkey_test.go
│ │ │ └── valkey.go
│ │ └── yugabytedb
│ │ ├── yugabytedb_test.go
│ │ └── yugabytedb.go
│ ├── telemetry
│ │ ├── instrumentation.go
│ │ └── telemetry.go
│ ├── testutils
│ │ └── testutils.go
│ ├── tools
│ │ ├── alloydb
│ │ │ ├── alloydbcreatecluster
│ │ │ │ ├── alloydbcreatecluster_test.go
│ │ │ │ └── alloydbcreatecluster.go
│ │ │ ├── alloydbcreateinstance
│ │ │ │ ├── alloydbcreateinstance_test.go
│ │ │ │ └── alloydbcreateinstance.go
│ │ │ ├── alloydbcreateuser
│ │ │ │ ├── alloydbcreateuser_test.go
│ │ │ │ └── alloydbcreateuser.go
│ │ │ ├── alloydbgetcluster
│ │ │ │ ├── alloydbgetcluster_test.go
│ │ │ │ └── alloydbgetcluster.go
│ │ │ ├── alloydbgetinstance
│ │ │ │ ├── alloydbgetinstance_test.go
│ │ │ │ └── alloydbgetinstance.go
│ │ │ ├── alloydbgetuser
│ │ │ │ ├── alloydbgetuser_test.go
│ │ │ │ └── alloydbgetuser.go
│ │ │ ├── alloydblistclusters
│ │ │ │ ├── alloydblistclusters_test.go
│ │ │ │ └── alloydblistclusters.go
│ │ │ ├── alloydblistinstances
│ │ │ │ ├── alloydblistinstances_test.go
│ │ │ │ └── alloydblistinstances.go
│ │ │ ├── alloydblistusers
│ │ │ │ ├── alloydblistusers_test.go
│ │ │ │ └── alloydblistusers.go
│ │ │ └── alloydbwaitforoperation
│ │ │ ├── alloydbwaitforoperation_test.go
│ │ │ └── alloydbwaitforoperation.go
│ │ ├── alloydbainl
│ │ │ ├── alloydbainl_test.go
│ │ │ └── alloydbainl.go
│ │ ├── bigquery
│ │ │ ├── bigqueryanalyzecontribution
│ │ │ │ ├── bigqueryanalyzecontribution_test.go
│ │ │ │ └── bigqueryanalyzecontribution.go
│ │ │ ├── bigquerycommon
│ │ │ │ ├── table_name_parser_test.go
│ │ │ │ ├── table_name_parser.go
│ │ │ │ └── util.go
│ │ │ ├── bigqueryconversationalanalytics
│ │ │ │ ├── bigqueryconversationalanalytics_test.go
│ │ │ │ └── bigqueryconversationalanalytics.go
│ │ │ ├── bigqueryexecutesql
│ │ │ │ ├── bigqueryexecutesql_test.go
│ │ │ │ └── bigqueryexecutesql.go
│ │ │ ├── bigqueryforecast
│ │ │ │ ├── bigqueryforecast_test.go
│ │ │ │ └── bigqueryforecast.go
│ │ │ ├── bigquerygetdatasetinfo
│ │ │ │ ├── bigquerygetdatasetinfo_test.go
│ │ │ │ └── bigquerygetdatasetinfo.go
│ │ │ ├── bigquerygettableinfo
│ │ │ │ ├── bigquerygettableinfo_test.go
│ │ │ │ └── bigquerygettableinfo.go
│ │ │ ├── bigquerylistdatasetids
│ │ │ │ ├── bigquerylistdatasetids_test.go
│ │ │ │ └── bigquerylistdatasetids.go
│ │ │ ├── bigquerylisttableids
│ │ │ │ ├── bigquerylisttableids_test.go
│ │ │ │ └── bigquerylisttableids.go
│ │ │ ├── bigquerysearchcatalog
│ │ │ │ ├── bigquerysearchcatalog_test.go
│ │ │ │ └── bigquerysearchcatalog.go
│ │ │ └── bigquerysql
│ │ │ ├── bigquerysql_test.go
│ │ │ └── bigquerysql.go
│ │ ├── bigtable
│ │ │ ├── bigtable_test.go
│ │ │ └── bigtable.go
│ │ ├── cassandra
│ │ │ └── cassandracql
│ │ │ ├── cassandracql_test.go
│ │ │ └── cassandracql.go
│ │ ├── clickhouse
│ │ │ ├── clickhouseexecutesql
│ │ │ │ ├── clickhouseexecutesql_test.go
│ │ │ │ └── clickhouseexecutesql.go
│ │ │ ├── clickhouselistdatabases
│ │ │ │ ├── clickhouselistdatabases_test.go
│ │ │ │ └── clickhouselistdatabases.go
│ │ │ ├── clickhouselisttables
│ │ │ │ ├── clickhouselisttables_test.go
│ │ │ │ └── clickhouselisttables.go
│ │ │ └── clickhousesql
│ │ │ ├── clickhousesql_test.go
│ │ │ └── clickhousesql.go
│ │ ├── cloudgda
│ │ │ ├── cloudgda_test.go
│ │ │ ├── cloudgda.go
│ │ │ └── types.go
│ │ ├── cloudhealthcare
│ │ │ ├── cloudhealthcarefhirfetchpage
│ │ │ │ ├── cloudhealthcarefhirfetchpage_test.go
│ │ │ │ └── cloudhealthcarefhirfetchpage.go
│ │ │ ├── cloudhealthcarefhirpatienteverything
│ │ │ │ ├── cloudhealthcarefhirpatienteverything_test.go
│ │ │ │ └── cloudhealthcarefhirpatienteverything.go
│ │ │ ├── cloudhealthcarefhirpatientsearch
│ │ │ │ ├── cloudhealthcarefhirpatientsearch_test.go
│ │ │ │ └── cloudhealthcarefhirpatientsearch.go
│ │ │ ├── cloudhealthcaregetdataset
│ │ │ │ ├── cloudhealthcaregetdataset_test.go
│ │ │ │ └── cloudhealthcaregetdataset.go
│ │ │ ├── cloudhealthcaregetdicomstore
│ │ │ │ ├── cloudhealthcaregetdicomstore_test.go
│ │ │ │ └── cloudhealthcaregetdicomstore.go
│ │ │ ├── cloudhealthcaregetdicomstoremetrics
│ │ │ │ ├── cloudhealthcaregetdicomstoremetrics_test.go
│ │ │ │ └── cloudhealthcaregetdicomstoremetrics.go
│ │ │ ├── cloudhealthcaregetfhirresource
│ │ │ │ ├── cloudhealthcaregetfhirresource_test.go
│ │ │ │ └── cloudhealthcaregetfhirresource.go
│ │ │ ├── cloudhealthcaregetfhirstore
│ │ │ │ ├── cloudhealthcaregetfhirstore_test.go
│ │ │ │ └── cloudhealthcaregetfhirstore.go
│ │ │ ├── cloudhealthcaregetfhirstoremetrics
│ │ │ │ ├── cloudhealthcaregetfhirstoremetrics_test.go
│ │ │ │ └── cloudhealthcaregetfhirstoremetrics.go
│ │ │ ├── cloudhealthcarelistdicomstores
│ │ │ │ ├── cloudhealthcarelistdicomstores_test.go
│ │ │ │ └── cloudhealthcarelistdicomstores.go
│ │ │ ├── cloudhealthcarelistfhirstores
│ │ │ │ ├── cloudhealthcarelistfhirstores_test.go
│ │ │ │ └── cloudhealthcarelistfhirstores.go
│ │ │ ├── cloudhealthcareretrieverendereddicominstance
│ │ │ │ ├── cloudhealthcareretrieverendereddicominstance_test.go
│ │ │ │ └── cloudhealthcareretrieverendereddicominstance.go
│ │ │ ├── cloudhealthcaresearchdicominstances
│ │ │ │ ├── cloudhealthcaresearchdicominstances_test.go
│ │ │ │ └── cloudhealthcaresearchdicominstances.go
│ │ │ ├── cloudhealthcaresearchdicomseries
│ │ │ │ ├── cloudhealthcaresearchdicomseries_test.go
│ │ │ │ └── cloudhealthcaresearchdicomseries.go
│ │ │ ├── cloudhealthcaresearchdicomstudies
│ │ │ │ ├── cloudhealthcaresearchdicomstudies_test.go
│ │ │ │ └── cloudhealthcaresearchdicomstudies.go
│ │ │ └── common
│ │ │ └── util.go
│ │ ├── cloudmonitoring
│ │ │ ├── cloudmonitoring_test.go
│ │ │ └── cloudmonitoring.go
│ │ ├── cloudsql
│ │ │ ├── cloudsqlcloneinstance
│ │ │ │ ├── cloudsqlcloneinstance_test.go
│ │ │ │ └── cloudsqlcloneinstance.go
│ │ │ ├── cloudsqlcreatedatabase
│ │ │ │ ├── cloudsqlcreatedatabase_test.go
│ │ │ │ └── cloudsqlcreatedatabase.go
│ │ │ ├── cloudsqlcreateusers
│ │ │ │ ├── cloudsqlcreateusers_test.go
│ │ │ │ └── cloudsqlcreateusers.go
│ │ │ ├── cloudsqlgetinstances
│ │ │ │ ├── cloudsqlgetinstances_test.go
│ │ │ │ └── cloudsqlgetinstances.go
│ │ │ ├── cloudsqllistdatabases
│ │ │ │ ├── cloudsqllistdatabases_test.go
│ │ │ │ └── cloudsqllistdatabases.go
│ │ │ ├── cloudsqllistinstances
│ │ │ │ ├── cloudsqllistinstances_test.go
│ │ │ │ └── cloudsqllistinstances.go
│ │ │ └── cloudsqlwaitforoperation
│ │ │ ├── cloudsqlwaitforoperation_test.go
│ │ │ └── cloudsqlwaitforoperation.go
│ │ ├── cloudsqlmssql
│ │ │ └── cloudsqlmssqlcreateinstance
│ │ │ ├── cloudsqlmssqlcreateinstance_test.go
│ │ │ └── cloudsqlmssqlcreateinstance.go
│ │ ├── cloudsqlmysql
│ │ │ └── cloudsqlmysqlcreateinstance
│ │ │ ├── cloudsqlmysqlcreateinstance_test.go
│ │ │ └── cloudsqlmysqlcreateinstance.go
│ │ ├── cloudsqlpg
│ │ │ ├── cloudsqlpgcreateinstances
│ │ │ │ ├── cloudsqlpgcreateinstances_test.go
│ │ │ │ └── cloudsqlpgcreateinstances.go
│ │ │ └── cloudsqlpgupgradeprecheck
│ │ │ ├── cloudsqlpgupgradeprecheck_test.go
│ │ │ └── cloudsqlpgupgradeprecheck.go
│ │ ├── couchbase
│ │ │ ├── couchbase_test.go
│ │ │ └── couchbase.go
│ │ ├── dataform
│ │ │ └── dataformcompilelocal
│ │ │ ├── dataformcompilelocal_test.go
│ │ │ └── dataformcompilelocal.go
│ │ ├── dataplex
│ │ │ ├── dataplexlookupentry
│ │ │ │ ├── dataplexlookupentry_test.go
│ │ │ │ └── dataplexlookupentry.go
│ │ │ ├── dataplexsearchaspecttypes
│ │ │ │ ├── dataplexsearchaspecttypes_test.go
│ │ │ │ └── dataplexsearchaspecttypes.go
│ │ │ └── dataplexsearchentries
│ │ │ ├── dataplexsearchentries_test.go
│ │ │ └── dataplexsearchentries.go
│ │ ├── dgraph
│ │ │ ├── dgraph_test.go
│ │ │ └── dgraph.go
│ │ ├── elasticsearch
│ │ │ └── elasticsearchesql
│ │ │ ├── elasticsearchesql_test.go
│ │ │ └── elasticsearchesql.go
│ │ ├── firebird
│ │ │ ├── firebirdexecutesql
│ │ │ │ ├── firebirdexecutesql_test.go
│ │ │ │ └── firebirdexecutesql.go
│ │ │ └── firebirdsql
│ │ │ ├── firebirdsql_test.go
│ │ │ └── firebirdsql.go
│ │ ├── firestore
│ │ │ ├── firestoreadddocuments
│ │ │ │ ├── firestoreadddocuments_test.go
│ │ │ │ └── firestoreadddocuments.go
│ │ │ ├── firestoredeletedocuments
│ │ │ │ ├── firestoredeletedocuments_test.go
│ │ │ │ └── firestoredeletedocuments.go
│ │ │ ├── firestoregetdocuments
│ │ │ │ ├── firestoregetdocuments_test.go
│ │ │ │ └── firestoregetdocuments.go
│ │ │ ├── firestoregetrules
│ │ │ │ ├── firestoregetrules_test.go
│ │ │ │ └── firestoregetrules.go
│ │ │ ├── firestorelistcollections
│ │ │ │ ├── firestorelistcollections_test.go
│ │ │ │ └── firestorelistcollections.go
│ │ │ ├── firestorequery
│ │ │ │ ├── firestorequery_test.go
│ │ │ │ └── firestorequery.go
│ │ │ ├── firestorequerycollection
│ │ │ │ ├── firestorequerycollection_test.go
│ │ │ │ └── firestorequerycollection.go
│ │ │ ├── firestoreupdatedocument
│ │ │ │ ├── firestoreupdatedocument_test.go
│ │ │ │ └── firestoreupdatedocument.go
│ │ │ ├── firestorevalidaterules
│ │ │ │ ├── firestorevalidaterules_test.go
│ │ │ │ └── firestorevalidaterules.go
│ │ │ └── util
│ │ │ ├── converter_test.go
│ │ │ ├── converter.go
│ │ │ ├── validator_test.go
│ │ │ └── validator.go
│ │ ├── http
│ │ │ ├── http_test.go
│ │ │ └── http.go
│ │ ├── http_method.go
│ │ ├── looker
│ │ │ ├── lookeradddashboardelement
│ │ │ │ ├── lookeradddashboardelement_test.go
│ │ │ │ └── lookeradddashboardelement.go
│ │ │ ├── lookeradddashboardfilter
│ │ │ │ ├── lookeradddashboardfilter_test.go
│ │ │ │ └── lookeradddashboardfilter.go
│ │ │ ├── lookercommon
│ │ │ │ ├── lookercommon_test.go
│ │ │ │ └── lookercommon.go
│ │ │ ├── lookerconversationalanalytics
│ │ │ │ ├── lookerconversationalanalytics_test.go
│ │ │ │ └── lookerconversationalanalytics.go
│ │ │ ├── lookercreateprojectfile
│ │ │ │ ├── lookercreateprojectfile_test.go
│ │ │ │ └── lookercreateprojectfile.go
│ │ │ ├── lookerdeleteprojectfile
│ │ │ │ ├── lookerdeleteprojectfile_test.go
│ │ │ │ └── lookerdeleteprojectfile.go
│ │ │ ├── lookerdevmode
│ │ │ │ ├── lookerdevmode_test.go
│ │ │ │ └── lookerdevmode.go
│ │ │ ├── lookergenerateembedurl
│ │ │ │ ├── lookergenerateembedurl_test.go
│ │ │ │ └── lookergenerateembedurl.go
│ │ │ ├── lookergetconnectiondatabases
│ │ │ │ ├── lookergetconnectiondatabases_test.go
│ │ │ │ └── lookergetconnectiondatabases.go
│ │ │ ├── lookergetconnections
│ │ │ │ ├── lookergetconnections_test.go
│ │ │ │ └── lookergetconnections.go
│ │ │ ├── lookergetconnectionschemas
│ │ │ │ ├── lookergetconnectionschemas_test.go
│ │ │ │ └── lookergetconnectionschemas.go
│ │ │ ├── lookergetconnectiontablecolumns
│ │ │ │ ├── lookergetconnectiontablecolumns_test.go
│ │ │ │ └── lookergetconnectiontablecolumns.go
│ │ │ ├── lookergetconnectiontables
│ │ │ │ ├── lookergetconnectiontables_test.go
│ │ │ │ └── lookergetconnectiontables.go
│ │ │ ├── lookergetdashboards
│ │ │ │ ├── lookergetdashboards_test.go
│ │ │ │ └── lookergetdashboards.go
│ │ │ ├── lookergetdimensions
│ │ │ │ ├── lookergetdimensions_test.go
│ │ │ │ └── lookergetdimensions.go
│ │ │ ├── lookergetexplores
│ │ │ │ ├── lookergetexplores_test.go
│ │ │ │ └── lookergetexplores.go
│ │ │ ├── lookergetfilters
│ │ │ │ ├── lookergetfilters_test.go
│ │ │ │ └── lookergetfilters.go
│ │ │ ├── lookergetlooks
│ │ │ │ ├── lookergetlooks_test.go
│ │ │ │ └── lookergetlooks.go
│ │ │ ├── lookergetmeasures
│ │ │ │ ├── lookergetmeasures_test.go
│ │ │ │ └── lookergetmeasures.go
│ │ │ ├── lookergetmodels
│ │ │ │ ├── lookergetmodels_test.go
│ │ │ │ └── lookergetmodels.go
│ │ │ ├── lookergetparameters
│ │ │ │ ├── lookergetparameters_test.go
│ │ │ │ └── lookergetparameters.go
│ │ │ ├── lookergetprojectfile
│ │ │ │ ├── lookergetprojectfile_test.go
│ │ │ │ └── lookergetprojectfile.go
│ │ │ ├── lookergetprojectfiles
│ │ │ │ ├── lookergetprojectfiles_test.go
│ │ │ │ └── lookergetprojectfiles.go
│ │ │ ├── lookergetprojects
│ │ │ │ ├── lookergetprojects_test.go
│ │ │ │ └── lookergetprojects.go
│ │ │ ├── lookerhealthanalyze
│ │ │ │ ├── lookerhealthanalyze_test.go
│ │ │ │ └── lookerhealthanalyze.go
│ │ │ ├── lookerhealthpulse
│ │ │ │ ├── lookerhealthpulse_test.go
│ │ │ │ └── lookerhealthpulse.go
│ │ │ ├── lookerhealthvacuum
│ │ │ │ ├── lookerhealthvacuum_test.go
│ │ │ │ └── lookerhealthvacuum.go
│ │ │ ├── lookermakedashboard
│ │ │ │ ├── lookermakedashboard_test.go
│ │ │ │ └── lookermakedashboard.go
│ │ │ ├── lookermakelook
│ │ │ │ ├── lookermakelook_test.go
│ │ │ │ └── lookermakelook.go
│ │ │ ├── lookerquery
│ │ │ │ ├── lookerquery_test.go
│ │ │ │ └── lookerquery.go
│ │ │ ├── lookerquerysql
│ │ │ │ ├── lookerquerysql_test.go
│ │ │ │ └── lookerquerysql.go
│ │ │ ├── lookerqueryurl
│ │ │ │ ├── lookerqueryurl_test.go
│ │ │ │ └── lookerqueryurl.go
│ │ │ ├── lookerrundashboard
│ │ │ │ ├── lookerrundashboard_test.go
│ │ │ │ └── lookerrundashboard.go
│ │ │ ├── lookerrunlook
│ │ │ │ ├── lookerrunlook_test.go
│ │ │ │ └── lookerrunlook.go
│ │ │ └── lookerupdateprojectfile
│ │ │ ├── lookerupdateprojectfile_test.go
│ │ │ └── lookerupdateprojectfile.go
│ │ ├── mindsdb
│ │ │ ├── mindsdbexecutesql
│ │ │ │ ├── mindsdbexecutesql_test.go
│ │ │ │ └── mindsdbexecutesql.go
│ │ │ └── mindsdbsql
│ │ │ ├── mindsdbsql_test.go
│ │ │ └── mindsdbsql.go
│ │ ├── mongodb
│ │ │ ├── mongodbaggregate
│ │ │ │ ├── mongodbaggregate_test.go
│ │ │ │ └── mongodbaggregate.go
│ │ │ ├── mongodbdeletemany
│ │ │ │ ├── mongodbdeletemany_test.go
│ │ │ │ └── mongodbdeletemany.go
│ │ │ ├── mongodbdeleteone
│ │ │ │ ├── mongodbdeleteone_test.go
│ │ │ │ └── mongodbdeleteone.go
│ │ │ ├── mongodbfind
│ │ │ │ ├── mongodbfind_test.go
│ │ │ │ └── mongodbfind.go
│ │ │ ├── mongodbfindone
│ │ │ │ ├── mongodbfindone_test.go
│ │ │ │ └── mongodbfindone.go
│ │ │ ├── mongodbinsertmany
│ │ │ │ ├── mongodbinsertmany_test.go
│ │ │ │ └── mongodbinsertmany.go
│ │ │ ├── mongodbinsertone
│ │ │ │ ├── mongodbinsertone_test.go
│ │ │ │ └── mongodbinsertone.go
│ │ │ ├── mongodbupdatemany
│ │ │ │ ├── mongodbupdatemany_test.go
│ │ │ │ └── mongodbupdatemany.go
│ │ │ └── mongodbupdateone
│ │ │ ├── mongodbupdateone_test.go
│ │ │ └── mongodbupdateone.go
│ │ ├── mssql
│ │ │ ├── mssqlexecutesql
│ │ │ │ ├── mssqlexecutesql_test.go
│ │ │ │ └── mssqlexecutesql.go
│ │ │ ├── mssqllisttables
│ │ │ │ ├── mssqllisttables_test.go
│ │ │ │ └── mssqllisttables.go
│ │ │ └── mssqlsql
│ │ │ ├── mssqlsql_test.go
│ │ │ └── mssqlsql.go
│ │ ├── mysql
│ │ │ ├── mysqlcommon
│ │ │ │ └── mysqlcommon.go
│ │ │ ├── mysqlexecutesql
│ │ │ │ ├── mysqlexecutesql_test.go
│ │ │ │ └── mysqlexecutesql.go
│ │ │ ├── mysqlgetqueryplan
│ │ │ │ ├── mysqlgetqueryplan_test.go
│ │ │ │ └── mysqlgetqueryplan.go
│ │ │ ├── mysqllistactivequeries
│ │ │ │ ├── mysqllistactivequeries_test.go
│ │ │ │ └── mysqllistactivequeries.go
│ │ │ ├── mysqllisttablefragmentation
│ │ │ │ ├── mysqllisttablefragmentation_test.go
│ │ │ │ └── mysqllisttablefragmentation.go
│ │ │ ├── mysqllisttables
│ │ │ │ ├── mysqllisttables_test.go
│ │ │ │ └── mysqllisttables.go
│ │ │ ├── mysqllisttablesmissinguniqueindexes
│ │ │ │ ├── mysqllisttablesmissinguniqueindexes_test.go
│ │ │ │ └── mysqllisttablesmissinguniqueindexes.go
│ │ │ └── mysqlsql
│ │ │ ├── mysqlsql_test.go
│ │ │ └── mysqlsql.go
│ │ ├── neo4j
│ │ │ ├── neo4jcypher
│ │ │ │ ├── neo4jcypher_test.go
│ │ │ │ └── neo4jcypher.go
│ │ │ ├── neo4jexecutecypher
│ │ │ │ ├── classifier
│ │ │ │ │ ├── classifier_test.go
│ │ │ │ │ └── classifier.go
│ │ │ │ ├── neo4jexecutecypher_test.go
│ │ │ │ └── neo4jexecutecypher.go
│ │ │ └── neo4jschema
│ │ │ ├── cache
│ │ │ │ ├── cache_test.go
│ │ │ │ └── cache.go
│ │ │ ├── helpers
│ │ │ │ ├── helpers_test.go
│ │ │ │ └── helpers.go
│ │ │ ├── neo4jschema_test.go
│ │ │ ├── neo4jschema.go
│ │ │ └── types
│ │ │ └── types.go
│ │ ├── oceanbase
│ │ │ ├── oceanbaseexecutesql
│ │ │ │ ├── oceanbaseexecutesql_test.go
│ │ │ │ └── oceanbaseexecutesql.go
│ │ │ └── oceanbasesql
│ │ │ ├── oceanbasesql_test.go
│ │ │ └── oceanbasesql.go
│ │ ├── oracle
│ │ │ ├── oracleexecutesql
│ │ │ │ ├── oracleexecutesql_test.go
│ │ │ │ └── oracleexecutesql.go
│ │ │ └── oraclesql
│ │ │ ├── oraclesql_test.go
│ │ │ └── oraclesql.go
│ │ ├── postgres
│ │ │ ├── postgresdatabaseoverview
│ │ │ │ ├── postgresdatabaseoverview_test.go
│ │ │ │ └── postgresdatabaseoverview.go
│ │ │ ├── postgresexecutesql
│ │ │ │ ├── postgresexecutesql_test.go
│ │ │ │ └── postgresexecutesql.go
│ │ │ ├── postgresgetcolumncardinality
│ │ │ │ ├── postgresgetcolumncardinality_test.go
│ │ │ │ └── postgresgetcolumncardinality.go
│ │ │ ├── postgreslistactivequeries
│ │ │ │ ├── postgreslistactivequeries_test.go
│ │ │ │ └── postgreslistactivequeries.go
│ │ │ ├── postgreslistavailableextensions
│ │ │ │ ├── postgreslistavailableextensions_test.go
│ │ │ │ └── postgreslistavailableextensions.go
│ │ │ ├── postgreslistdatabasestats
│ │ │ │ ├── postgreslistdatabasestats_test.go
│ │ │ │ └── postgreslistdatabasestats.go
│ │ │ ├── postgreslistindexes
│ │ │ │ ├── postgreslistindexes_test.go
│ │ │ │ └── postgreslistindexes.go
│ │ │ ├── postgreslistinstalledextensions
│ │ │ │ ├── postgreslistinstalledextensions_test.go
│ │ │ │ └── postgreslistinstalledextensions.go
│ │ │ ├── postgreslistlocks
│ │ │ │ ├── postgreslistlocks_test.go
│ │ │ │ └── postgreslistlocks.go
│ │ │ ├── postgreslistpgsettings
│ │ │ │ ├── postgreslistpgsettings_test.go
│ │ │ │ └── postgreslistpgsettings.go
│ │ │ ├── postgreslistpublicationtables
│ │ │ │ ├── postgreslistpublicationtables_test.go
│ │ │ │ └── postgreslistpublicationtables.go
│ │ │ ├── postgreslistquerystats
│ │ │ │ ├── postgreslistquerystats_test.go
│ │ │ │ └── postgreslistquerystats.go
│ │ │ ├── postgreslistroles
│ │ │ │ ├── postgreslistroles_test.go
│ │ │ │ └── postgreslistroles.go
│ │ │ ├── postgreslistschemas
│ │ │ │ ├── postgreslistschemas_test.go
│ │ │ │ └── postgreslistschemas.go
│ │ │ ├── postgreslistsequences
│ │ │ │ ├── postgreslistsequences_test.go
│ │ │ │ └── postgreslistsequences.go
│ │ │ ├── postgresliststoredprocedure
│ │ │ │ ├── postgresliststoredprocedure_test.go
│ │ │ │ └── postgresliststoredprocedure.go
│ │ │ ├── postgreslisttables
│ │ │ │ ├── postgreslisttables_test.go
│ │ │ │ └── postgreslisttables.go
│ │ │ ├── postgreslisttablespaces
│ │ │ │ ├── postgreslisttablespaces_test.go
│ │ │ │ └── postgreslisttablespaces.go
│ │ │ ├── postgreslisttablestats
│ │ │ │ ├── postgreslisttablestats_test.go
│ │ │ │ └── postgreslisttablestats.go
│ │ │ ├── postgreslisttriggers
│ │ │ │ ├── postgreslisttriggers_test.go
│ │ │ │ └── postgreslisttriggers.go
│ │ │ ├── postgreslistviews
│ │ │ │ ├── postgreslistviews_test.go
│ │ │ │ └── postgreslistviews.go
│ │ │ ├── postgreslongrunningtransactions
│ │ │ │ ├── postgreslongrunningtransactions_test.go
│ │ │ │ └── postgreslongrunningtransactions.go
│ │ │ ├── postgresreplicationstats
│ │ │ │ ├── postgresreplicationstats_test.go
│ │ │ │ └── postgresreplicationstats.go
│ │ │ └── postgressql
│ │ │ ├── postgressql_test.go
│ │ │ └── postgressql.go
│ │ ├── redis
│ │ │ ├── redis_test.go
│ │ │ └── redis.go
│ │ ├── serverlessspark
│ │ │ ├── createbatch
│ │ │ │ ├── config.go
│ │ │ │ └── tool.go
│ │ │ ├── serverlesssparkcancelbatch
│ │ │ │ ├── serverlesssparkcancelbatch_test.go
│ │ │ │ └── serverlesssparkcancelbatch.go
│ │ │ ├── serverlesssparkcreatepysparkbatch
│ │ │ │ ├── serverlesssparkcreatepysparkbatch_test.go
│ │ │ │ └── serverlesssparkcreatepysparkbatch.go
│ │ │ ├── serverlesssparkcreatesparkbatch
│ │ │ │ ├── serverlesssparkcreatesparkbatch_test.go
│ │ │ │ └── serverlesssparkcreatesparkbatch.go
│ │ │ ├── serverlesssparkgetbatch
│ │ │ │ ├── serverlesssparkgetbatch_test.go
│ │ │ │ └── serverlesssparkgetbatch.go
│ │ │ ├── serverlesssparklistbatches
│ │ │ │ ├── serverlesssparklistbatches_test.go
│ │ │ │ └── serverlesssparklistbatches.go
│ │ │ └── testutils
│ │ │ └── testutils.go
│ │ ├── singlestore
│ │ │ ├── singlestoreexecutesql
│ │ │ │ ├── singlestoreexecutesql_test.go
│ │ │ │ └── singlestoreexecutesql.go
│ │ │ └── singlestoresql
│ │ │ ├── singlestoresql_test.go
│ │ │ └── singlestoresql.go
│ │ ├── snowflake
│ │ │ ├── snowflakeexecutesql
│ │ │ │ ├── snowflakeexecutesql_test.go
│ │ │ │ └── snowflakeexecutesql.go
│ │ │ └── snowflakesql
│ │ │ ├── snowflakesql_test.go
│ │ │ └── snowflakesql.go
│ │ ├── spanner
│ │ │ ├── spannerexecutesql
│ │ │ │ ├── spannerexecutesql_test.go
│ │ │ │ └── spannerexecutesql.go
│ │ │ ├── spannerlistgraphs
│ │ │ │ ├── spannerlistgraphs_test.go
│ │ │ │ └── spannerlistgraphs.go
│ │ │ ├── spannerlisttables
│ │ │ │ ├── spannerlisttables_test.go
│ │ │ │ └── spannerlisttables.go
│ │ │ └── spannersql
│ │ │ ├── spanner_test.go
│ │ │ └── spannersql.go
│ │ ├── sqlite
│ │ │ ├── sqliteexecutesql
│ │ │ │ ├── sqliteexecutesql_test.go
│ │ │ │ └── sqliteexecutesql.go
│ │ │ └── sqlitesql
│ │ │ ├── sqlitesql_test.go
│ │ │ └── sqlitesql.go
│ │ ├── tidb
│ │ │ ├── tidbexecutesql
│ │ │ │ ├── tidbexecutesql_test.go
│ │ │ │ └── tidbexecutesql.go
│ │ │ └── tidbsql
│ │ │ ├── tidbsql_test.go
│ │ │ └── tidbsql.go
│ │ ├── tools_test.go
│ │ ├── tools.go
│ │ ├── toolsets.go
│ │ ├── trino
│ │ │ ├── trinoexecutesql
│ │ │ │ ├── trinoexecutesql_test.go
│ │ │ │ └── trinoexecutesql.go
│ │ │ └── trinosql
│ │ │ ├── trinosql_test.go
│ │ │ └── trinosql.go
│ │ ├── utility
│ │ │ └── wait
│ │ │ ├── wait_test.go
│ │ │ └── wait.go
│ │ ├── valkey
│ │ │ ├── valkey_test.go
│ │ │ └── valkey.go
│ │ └── yugabytedbsql
│ │ ├── yugabytedbsql_test.go
│ │ └── yugabytedbsql.go
│ └── util
│ ├── orderedmap
│ │ ├── orderedmap_test.go
│ │ └── orderedmap.go
│ ├── parameters
│ │ ├── common_test.go
│ │ ├── common.go
│ │ ├── parameters_test.go
│ │ └── parameters.go
│ └── util.go
├── LICENSE
├── logo.png
├── main.go
├── MCP-TOOLBOX-EXTENSION.md
├── README.md
├── server.json
└── tests
├── alloydb
│ ├── alloydb_integration_test.go
│ └── alloydb_wait_for_operation_test.go
├── alloydbainl
│ └── alloydb_ai_nl_integration_test.go
├── alloydbpg
│ └── alloydb_pg_integration_test.go
├── auth.go
├── bigquery
│ └── bigquery_integration_test.go
├── bigtable
│ └── bigtable_integration_test.go
├── cassandra
│ └── cassandra_integration_test.go
├── clickhouse
│ └── clickhouse_integration_test.go
├── cloudgda
│ └── cloud_gda_integration_test.go
├── cloudhealthcare
│ └── cloud_healthcare_integration_test.go
├── cloudmonitoring
│ └── cloud_monitoring_integration_test.go
├── cloudsql
│ ├── cloud_sql_clone_instance_test.go
│ ├── cloud_sql_create_database_test.go
│ ├── cloud_sql_create_users_test.go
│ ├── cloud_sql_get_instances_test.go
│ ├── cloud_sql_list_databases_test.go
│ ├── cloudsql_list_instances_test.go
│ └── cloudsql_wait_for_operation_test.go
├── cloudsqlmssql
│ ├── cloud_sql_mssql_create_instance_integration_test.go
│ └── cloud_sql_mssql_integration_test.go
├── cloudsqlmysql
│ ├── cloud_sql_mysql_create_instance_integration_test.go
│ └── cloud_sql_mysql_integration_test.go
├── cloudsqlpg
│ ├── cloud_sql_pg_create_instances_test.go
│ ├── cloud_sql_pg_integration_test.go
│ └── cloud_sql_pg_upgrade_precheck_test.go
├── common.go
├── couchbase
│ └── couchbase_integration_test.go
├── dataform
│ └── dataform_integration_test.go
├── dataplex
│ └── dataplex_integration_test.go
├── dgraph
│ └── dgraph_integration_test.go
├── elasticsearch
│ └── elasticsearch_integration_test.go
├── firebird
│ └── firebird_integration_test.go
├── firestore
│ └── firestore_integration_test.go
├── http
│ └── http_integration_test.go
├── looker
│ └── looker_integration_test.go
├── mariadb
│ └── mariadb_integration_test.go
├── mindsdb
│ └── mindsdb_integration_test.go
├── mongodb
│ └── mongodb_integration_test.go
├── mssql
│ └── mssql_integration_test.go
├── mysql
│ └── mysql_integration_test.go
├── neo4j
│ └── neo4j_integration_test.go
├── oceanbase
│ └── oceanbase_integration_test.go
├── option.go
├── oracle
│ └── oracle_integration_test.go
├── postgres
│ └── postgres_integration_test.go
├── prompts
│ └── custom
│ └── prompts_integration_test.go
├── redis
│ └── redis_test.go
├── server.go
├── serverlessspark
│ └── serverless_spark_integration_test.go
├── singlestore
│ └── singlestore_integration_test.go
├── snowflake
│ └── snowflake_integration_test.go
├── source.go
├── spanner
│ └── spanner_integration_test.go
├── sqlite
│ └── sqlite_integration_test.go
├── tidb
│ └── tidb_integration_test.go
├── tool.go
├── trino
│ └── trino_integration_test.go
├── utility
│ └── wait_integration_test.go
├── valkey
│ └── valkey_test.go
└── yugabytedb
└── yugabytedb_integration_test.go
```
# Files
--------------------------------------------------------------------------------
/tests/dataplex/dataplex_integration_test.go:
--------------------------------------------------------------------------------
```go
1 | // Copyright 2025 Google LLC
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package dataplex
16 |
17 | import (
18 | "bytes"
19 | "context"
20 | "encoding/json"
21 | "fmt"
22 | "io"
23 | "net/http"
24 | "os"
25 | "regexp"
26 | "strings"
27 | "testing"
28 | "time"
29 |
30 | bigqueryapi "cloud.google.com/go/bigquery"
31 | dataplex "cloud.google.com/go/dataplex/apiv1"
32 | dataplexpb "cloud.google.com/go/dataplex/apiv1/dataplexpb"
33 | "github.com/google/uuid"
34 | "github.com/googleapis/genai-toolbox/internal/testutils"
35 | "github.com/googleapis/genai-toolbox/tests"
36 | "golang.org/x/oauth2/google"
37 | "google.golang.org/api/googleapi"
38 | "google.golang.org/api/iterator"
39 | "google.golang.org/api/option"
40 | )
41 |
// Test configuration for the Dataplex integration tests. The source and tool
// kind strings must match the kinds registered by the toolbox server, and
// DataplexProject is read from the environment; getDataplexVars fails the
// test when it is unset.
var (
	DataplexSourceKind                = "dataplex"
	DataplexSearchEntriesToolKind     = "dataplex-search-entries"
	DataplexLookupEntryToolKind       = "dataplex-lookup-entry"
	DataplexSearchAspectTypesToolKind = "dataplex-search-aspect-types"
	DataplexProject                   = os.Getenv("DATAPLEX_PROJECT")
)
49 |
50 | func getDataplexVars(t *testing.T) map[string]any {
51 | switch "" {
52 | case DataplexProject:
53 | t.Fatal("'DATAPLEX_PROJECT' not set")
54 | }
55 | return map[string]any{
56 | "kind": DataplexSourceKind,
57 | "project": DataplexProject,
58 | }
59 | }
60 |
61 | // Copied over from bigquery.go
62 | func initBigQueryConnection(ctx context.Context, project string) (*bigqueryapi.Client, error) {
63 | cred, err := google.FindDefaultCredentials(ctx, bigqueryapi.Scope)
64 | if err != nil {
65 | return nil, fmt.Errorf("failed to find default Google Cloud credentials with scope %q: %w", bigqueryapi.Scope, err)
66 | }
67 |
68 | client, err := bigqueryapi.NewClient(ctx, project, option.WithCredentials(cred))
69 | if err != nil {
70 | return nil, fmt.Errorf("failed to create BigQuery client for project %q: %w", project, err)
71 | }
72 | return client, nil
73 | }
74 |
75 | func initDataplexConnection(ctx context.Context) (*dataplex.CatalogClient, error) {
76 | cred, err := google.FindDefaultCredentials(ctx)
77 | if err != nil {
78 | return nil, fmt.Errorf("failed to find default Google Cloud credentials: %w", err)
79 | }
80 |
81 | client, err := dataplex.NewCatalogClient(ctx, option.WithCredentials(cred))
82 | if err != nil {
83 | return nil, fmt.Errorf("failed to create Dataplex client %w", err)
84 | }
85 | return client, nil
86 | }
87 |
88 | func TestDataplexToolEndpoints(t *testing.T) {
89 | sourceConfig := getDataplexVars(t)
90 | ctx, cancel := context.WithTimeout(context.Background(), 3*time.Minute)
91 | defer cancel()
92 |
93 | var args []string
94 |
95 | bigqueryClient, err := initBigQueryConnection(ctx, DataplexProject)
96 | if err != nil {
97 | t.Fatalf("unable to create Cloud SQL connection pool: %s", err)
98 | }
99 |
100 | dataplexClient, err := initDataplexConnection(ctx)
101 | if err != nil {
102 | t.Fatalf("unable to create Dataplex connection: %s", err)
103 | }
104 |
105 | // create resources with UUID
106 | datasetName := fmt.Sprintf("temp_toolbox_test_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
107 | tableName := fmt.Sprintf("param_table_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
108 | aspectTypeId := fmt.Sprintf("param-aspect-type-%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
109 |
110 | teardownTable1 := setupBigQueryTable(t, ctx, bigqueryClient, datasetName, tableName)
111 | teardownAspectType1 := setupDataplexThirdPartyAspectType(t, ctx, dataplexClient, aspectTypeId)
112 | time.Sleep(2 * time.Minute) // wait for table and aspect type to be ingested
113 | defer teardownTable1(t)
114 | defer teardownAspectType1(t)
115 |
116 | toolsFile := getDataplexToolsConfig(sourceConfig)
117 |
118 | cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
119 | if err != nil {
120 | t.Fatalf("command initialization returned an error: %s", err)
121 | }
122 | defer cleanup()
123 |
124 | waitCtx, cancel := context.WithTimeout(ctx, 3*time.Minute)
125 | defer cancel()
126 | out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
127 | if err != nil {
128 | t.Logf("toolbox command logs: \n%s", out)
129 | t.Fatalf("toolbox didn't start successfully: %s", err)
130 | }
131 |
132 | runDataplexToolGetTest(t)
133 | runDataplexSearchEntriesToolInvokeTest(t, tableName, datasetName)
134 | runDataplexLookupEntryToolInvokeTest(t, tableName, datasetName)
135 | runDataplexSearchAspectTypesToolInvokeTest(t, aspectTypeId)
136 | }
137 |
// setupBigQueryTable creates the given dataset (if it does not already exist)
// and an empty table inside it, and returns a teardown function that drops
// the table and then deletes the dataset only if no tables remain in it.
func setupBigQueryTable(t *testing.T, ctx context.Context, client *bigqueryapi.Client, datasetName string, tableName string) func(*testing.T) {
	// Create dataset
	dataset := client.Dataset(datasetName)
	_, err := dataset.Metadata(ctx)

	if err != nil {
		// Only a 404 (dataset not found) is acceptable here; any other
		// error while probing for existence is fatal.
		apiErr, ok := err.(*googleapi.Error)
		if !ok || apiErr.Code != 404 {
			t.Fatalf("Failed to check dataset %q existence: %v", datasetName, err)
		}
		metadataToCreate := &bigqueryapi.DatasetMetadata{Name: datasetName}
		if err := dataset.Create(ctx, metadataToCreate); err != nil {
			t.Fatalf("Failed to create dataset %q: %v", datasetName, err)
		}
	}

	// Create table
	tab := client.Dataset(datasetName).Table(tableName)
	meta := &bigqueryapi.TableMetadata{}
	if err := tab.Create(ctx, meta); err != nil {
		t.Fatalf("Create table job for %s failed: %v", tableName, err)
	}

	return func(t *testing.T) {
		// tear down table via a DROP TABLE query job; each step uses
		// t.Errorf (not Fatalf) so cleanup is best-effort.
		dropSQL := fmt.Sprintf("drop table %s.%s", datasetName, tableName)
		dropJob, err := client.Query(dropSQL).Run(ctx)
		if err != nil {
			t.Errorf("Failed to start drop table job for %s: %v", tableName, err)
			return
		}
		dropStatus, err := dropJob.Wait(ctx)
		if err != nil {
			t.Errorf("Failed to wait for drop table job for %s: %v", tableName, err)
			return
		}
		if err := dropStatus.Err(); err != nil {
			t.Errorf("Error dropping table %s: %v", tableName, err)
		}

		// tear down dataset: delete it only when the first Tables() iteration
		// reports iterator.Done (i.e. the dataset is empty), so datasets
		// shared with other concurrently-created tables are left alone.
		datasetToTeardown := client.Dataset(datasetName)
		tablesIterator := datasetToTeardown.Tables(ctx)
		_, err = tablesIterator.Next()

		if err == iterator.Done {
			if err := datasetToTeardown.Delete(ctx); err != nil {
				t.Errorf("Failed to delete dataset %s: %v", datasetName, err)
			}
		} else if err != nil {
			t.Errorf("Failed to list tables in dataset %s to check emptiness: %v.", datasetName, err)
		}
	}
}
192 |
193 | func setupDataplexThirdPartyAspectType(t *testing.T, ctx context.Context, client *dataplex.CatalogClient, aspectTypeId string) func(*testing.T) {
194 | parent := fmt.Sprintf("projects/%s/locations/us", DataplexProject)
195 | createAspectTypeReq := &dataplexpb.CreateAspectTypeRequest{
196 | Parent: parent,
197 | AspectTypeId: aspectTypeId,
198 | AspectType: &dataplexpb.AspectType{
199 | Name: fmt.Sprintf("%s/aspectTypes/%s", parent, aspectTypeId),
200 | MetadataTemplate: &dataplexpb.AspectType_MetadataTemplate{
201 | Name: "UserSchema",
202 | Type: "record",
203 | },
204 | },
205 | }
206 | _, err := client.CreateAspectType(ctx, createAspectTypeReq)
207 | if err != nil {
208 | t.Fatalf("Failed to create aspect type %s: %v", aspectTypeId, err)
209 | }
210 |
211 | return func(t *testing.T) {
212 | // tear down aspect type
213 | deleteAspectTypeReq := &dataplexpb.DeleteAspectTypeRequest{
214 | Name: fmt.Sprintf("%s/aspectTypes/%s", parent, aspectTypeId),
215 | }
216 | if _, err := client.DeleteAspectType(ctx, deleteAspectTypeReq); err != nil {
217 | t.Errorf("Failed to delete aspect type %s: %v", aspectTypeId, err)
218 | }
219 | }
220 | }
221 |
222 | func getDataplexToolsConfig(sourceConfig map[string]any) map[string]any {
223 | // Write config into a file and pass it to command
224 | toolsFile := map[string]any{
225 | "sources": map[string]any{
226 | "my-dataplex-instance": sourceConfig,
227 | },
228 | "authServices": map[string]any{
229 | "my-google-auth": map[string]any{
230 | "kind": "google",
231 | "clientId": tests.ClientId,
232 | },
233 | },
234 | "tools": map[string]any{
235 | "my-dataplex-search-entries-tool": map[string]any{
236 | "kind": DataplexSearchEntriesToolKind,
237 | "source": "my-dataplex-instance",
238 | "description": "Simple dataplex search entries tool to test end to end functionality.",
239 | },
240 | "my-auth-dataplex-search-entries-tool": map[string]any{
241 | "kind": DataplexSearchEntriesToolKind,
242 | "source": "my-dataplex-instance",
243 | "description": "Simple dataplex search entries tool to test end to end functionality.",
244 | "authRequired": []string{"my-google-auth"},
245 | },
246 | "my-dataplex-lookup-entry-tool": map[string]any{
247 | "kind": DataplexLookupEntryToolKind,
248 | "source": "my-dataplex-instance",
249 | "description": "Simple dataplex lookup entry tool to test end to end functionality.",
250 | },
251 | "my-auth-dataplex-lookup-entry-tool": map[string]any{
252 | "kind": DataplexLookupEntryToolKind,
253 | "source": "my-dataplex-instance",
254 | "description": "Simple dataplex lookup entry tool to test end to end functionality.",
255 | "authRequired": []string{"my-google-auth"},
256 | },
257 | "my-dataplex-search-aspect-types-tool": map[string]any{
258 | "kind": DataplexSearchAspectTypesToolKind,
259 | "source": "my-dataplex-instance",
260 | "description": "Simple dataplex search aspect types tool to test end to end functionality.",
261 | },
262 | "my-auth-dataplex-search-aspect-types-tool": map[string]any{
263 | "kind": DataplexSearchAspectTypesToolKind,
264 | "source": "my-dataplex-instance",
265 | "description": "Simple dataplex search aspect types tool to test end to end functionality.",
266 | "authRequired": []string{"my-google-auth"},
267 | },
268 | },
269 | }
270 |
271 | return toolsFile
272 | }
273 |
// runDataplexToolGetTest fetches each tool's manifest from the running
// toolbox server (assumed listening on 127.0.0.1:5000) and verifies that
// every expected parameter name is declared for that tool.
func runDataplexToolGetTest(t *testing.T) {
	testCases := []struct {
		name           string
		toolName       string
		expectedParams []string
	}{
		{
			name:           "get my-dataplex-search-entries-tool",
			toolName:       "my-dataplex-search-entries-tool",
			expectedParams: []string{"pageSize", "query", "orderBy"},
		},
		{
			name:           "get my-dataplex-lookup-entry-tool",
			toolName:       "my-dataplex-lookup-entry-tool",
			expectedParams: []string{"name", "view", "aspectTypes", "entry"},
		},
		{
			name:           "get my-dataplex-search-aspect-types-tool",
			toolName:       "my-dataplex-search-aspect-types-tool",
			expectedParams: []string{"pageSize", "query", "orderBy"},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			resp, err := http.Get(fmt.Sprintf("http://127.0.0.1:5000/api/tool/%s/", tc.toolName))
			if err != nil {
				t.Fatalf("error when sending a request: %s", err)
			}
			defer resp.Body.Close()
			if resp.StatusCode != 200 {
				t.Fatalf("response status code is not 200")
			}
			var body map[string]interface{}
			err = json.NewDecoder(resp.Body).Decode(&body)
			if err != nil {
				t.Fatalf("error parsing response body")
			}
			got, ok := body["tools"]
			if !ok {
				t.Fatalf("unable to find tools in response body")
			}

			// Walk the untyped JSON: tools -> <toolName> -> parameters.
			toolsMap, ok := got.(map[string]interface{})
			if !ok {
				t.Fatalf("expected 'tools' to be a map, got %T", got)
			}
			tool, ok := toolsMap[tc.toolName].(map[string]interface{})
			if !ok {
				t.Fatalf("expected tool %q to be a map, got %T", tc.toolName, toolsMap[tc.toolName])
			}
			params, ok := tool["parameters"].([]interface{})
			if !ok {
				t.Fatalf("expected 'parameters' to be a slice, got %T", tool["parameters"])
			}
			// Collect declared parameter names into a set for lookup;
			// entries without a string "name" are silently skipped.
			paramSet := make(map[string]struct{})
			for _, param := range params {
				paramMap, ok := param.(map[string]interface{})
				if ok {
					if name, ok := paramMap["name"].(string); ok {
						paramSet[name] = struct{}{}
					}
				}
			}
			// Report all missing parameters at once instead of failing on the first.
			var missing []string
			for _, want := range tc.expectedParams {
				if _, found := paramSet[want]; !found {
					missing = append(missing, want)
				}
			}
			if len(missing) > 0 {
				t.Fatalf("missing parameters for tool %q: %v", tc.toolName, missing)
			}
		})
	}
}
350 |
351 | func runDataplexSearchEntriesToolInvokeTest(t *testing.T, tableName string, datasetName string) {
352 | idToken, err := tests.GetGoogleIdToken(tests.ClientId)
353 | if err != nil {
354 | t.Fatalf("error getting Google ID token: %s", err)
355 | }
356 |
357 | testCases := []struct {
358 | name string
359 | api string
360 | requestHeader map[string]string
361 | requestBody io.Reader
362 | wantStatusCode int
363 | expectResult bool
364 | wantContentKey string
365 | }{
366 | {
367 | name: "Success - Entry Found",
368 | api: "http://127.0.0.1:5000/api/tool/my-dataplex-search-entries-tool/invoke",
369 | requestHeader: map[string]string{},
370 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"query\":\"displayname=%s system=bigquery parent:%s\"}", tableName, datasetName))),
371 | wantStatusCode: 200,
372 | expectResult: true,
373 | wantContentKey: "dataplex_entry",
374 | },
375 | {
376 | name: "Success with Authorization - Entry Found",
377 | api: "http://127.0.0.1:5000/api/tool/my-auth-dataplex-search-entries-tool/invoke",
378 | requestHeader: map[string]string{"my-google-auth_token": idToken},
379 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"query\":\"displayname=%s system=bigquery parent:%s\"}", tableName, datasetName))),
380 | wantStatusCode: 200,
381 | expectResult: true,
382 | wantContentKey: "dataplex_entry",
383 | },
384 | {
385 | name: "Failure - Invalid Authorization Token",
386 | api: "http://127.0.0.1:5000/api/tool/my-auth-dataplex-search-entries-tool/invoke",
387 | requestHeader: map[string]string{"my-google-auth_token": "invalid_token"},
388 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"query\":\"displayname=%s system=bigquery parent:%s\"}", tableName, datasetName))),
389 | wantStatusCode: 401,
390 | expectResult: false,
391 | wantContentKey: "dataplex_entry",
392 | },
393 | {
394 | name: "Failure - Without Authorization Token",
395 | api: "http://127.0.0.1:5000/api/tool/my-auth-dataplex-search-entries-tool/invoke",
396 | requestHeader: map[string]string{},
397 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"query\":\"displayname=%s system=bigquery parent:%s\"}", tableName, datasetName))),
398 | wantStatusCode: 401,
399 | expectResult: false,
400 | wantContentKey: "dataplex_entry",
401 | },
402 | {
403 | name: "Failure - Entry Not Found",
404 | api: "http://127.0.0.1:5000/api/tool/my-dataplex-search-entries-tool/invoke",
405 | requestHeader: map[string]string{},
406 | requestBody: bytes.NewBuffer([]byte(`{"query":"displayname=\"\" system=bigquery parent:\"\""}`)),
407 | wantStatusCode: 200,
408 | expectResult: false,
409 | wantContentKey: "",
410 | },
411 | }
412 |
413 | for _, tc := range testCases {
414 | t.Run(tc.name, func(t *testing.T) {
415 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
416 | if err != nil {
417 | t.Fatalf("unable to create request: %s", err)
418 | }
419 | req.Header.Add("Content-type", "application/json")
420 | for k, v := range tc.requestHeader {
421 | req.Header.Add(k, v)
422 | }
423 | resp, err := http.DefaultClient.Do(req)
424 | if err != nil {
425 | t.Fatalf("unable to send request: %s", err)
426 | }
427 | defer resp.Body.Close()
428 | if resp.StatusCode != tc.wantStatusCode {
429 | t.Fatalf("response status code is not %d. It is %d", tc.wantStatusCode, resp.StatusCode)
430 | bodyBytes, _ := io.ReadAll(resp.Body)
431 | t.Fatalf("Response body: %s", string(bodyBytes))
432 | }
433 | var result map[string]interface{}
434 | if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
435 | t.Fatalf("error parsing response body: %s", err)
436 | }
437 | resultStr, ok := result["result"].(string)
438 | if !ok {
439 | if result["result"] == nil && !tc.expectResult {
440 | return
441 | }
442 | t.Fatalf("expected 'result' field to be a string, got %T", result["result"])
443 | }
444 | if !tc.expectResult && (resultStr == "" || resultStr == "[]") {
445 | return
446 | }
447 | var entries []interface{}
448 | if err := json.Unmarshal([]byte(resultStr), &entries); err != nil {
449 | t.Fatalf("error unmarshalling result string: %v", err)
450 | }
451 |
452 | if tc.expectResult {
453 | if len(entries) != 1 {
454 | t.Fatalf("expected exactly one entry, but got %d", len(entries))
455 | }
456 | entry, ok := entries[0].(map[string]interface{})
457 | if !ok {
458 | t.Fatalf("expected first entry to be a map, got %T", entries[0])
459 | }
460 | if _, ok := entry[tc.wantContentKey]; !ok {
461 | t.Fatalf("expected entry to have key '%s', but it was not found in %v", tc.wantContentKey, entry)
462 | }
463 | } else {
464 | if len(entries) != 0 {
465 | t.Fatalf("expected 0 entries, but got %d", len(entries))
466 | }
467 | }
468 | })
469 | }
470 | }
471 |
472 | func runDataplexLookupEntryToolInvokeTest(t *testing.T, tableName string, datasetName string) { // exercises the dataplex lookup-entry tool against a local Toolbox server on 127.0.0.1:5000
473 | idToken, err := tests.GetGoogleIdToken(tests.ClientId)
474 | if err != nil {
475 | t.Fatalf("error getting Google ID token: %s", err)
476 | }
477 |
478 | testCases := []struct {
479 | name string
480 | wantStatusCode int
481 | api string
482 | requestHeader map[string]string
483 | requestBody io.Reader
484 | expectResult bool
485 | wantContentKey string
486 | dontWantContentKey string
487 | aspectCheck bool
488 | reqBodyMap map[string]any // NOTE(review): this field is never read by the loop below — consider removing
489 | }{
490 | {
491 | name: "Success - Entry Found",
492 | api: "http://127.0.0.1:5000/api/tool/my-dataplex-lookup-entry-tool/invoke",
493 | requestHeader: map[string]string{},
494 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"name\":\"projects/%s/locations/us\", \"entry\":\"projects/%s/locations/us/entryGroups/@bigquery/entries/bigquery.googleapis.com/projects/%s/datasets/%s\"}", DataplexProject, DataplexProject, DataplexProject, datasetName))),
495 | wantStatusCode: 200,
496 | expectResult: true,
497 | wantContentKey: "name",
498 | },
499 | {
500 | name: "Success - Entry Found with Authorization",
501 | api: "http://127.0.0.1:5000/api/tool/my-auth-dataplex-lookup-entry-tool/invoke",
502 | requestHeader: map[string]string{"my-google-auth_token": idToken},
503 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"name\":\"projects/%s/locations/us\", \"entry\":\"projects/%s/locations/us/entryGroups/@bigquery/entries/bigquery.googleapis.com/projects/%s/datasets/%s\"}", DataplexProject, DataplexProject, DataplexProject, datasetName))),
504 | wantStatusCode: 200,
505 | expectResult: true,
506 | wantContentKey: "name",
507 | },
508 | {
509 | name: "Failure - Invalid Authorization Token",
510 | api: "http://127.0.0.1:5000/api/tool/my-auth-dataplex-lookup-entry-tool/invoke",
511 | requestHeader: map[string]string{"my-google-auth_token": "invalid_token"},
512 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"name\":\"projects/%s/locations/us\", \"entry\":\"projects/%s/locations/us/entryGroups/@bigquery/entries/bigquery.googleapis.com/projects/%s/datasets/%s\"}", DataplexProject, DataplexProject, DataplexProject, datasetName))),
513 | wantStatusCode: 401,
514 | expectResult: false,
515 | wantContentKey: "name",
516 | },
517 | {
518 | name: "Failure - Without Authorization Token",
519 | api: "http://127.0.0.1:5000/api/tool/my-auth-dataplex-lookup-entry-tool/invoke",
520 | requestHeader: map[string]string{},
521 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"name\":\"projects/%s/locations/us\", \"entry\":\"projects/%s/locations/us/entryGroups/@bigquery/entries/bigquery.googleapis.com/projects/%s/datasets/%s\"}", DataplexProject, DataplexProject, DataplexProject, datasetName))),
522 | wantStatusCode: 401,
523 | expectResult: false,
524 | wantContentKey: "name",
525 | },
526 | {
527 | name: "Failure - Entry Not Found or Permission Denied",
528 | api: "http://127.0.0.1:5000/api/tool/my-dataplex-lookup-entry-tool/invoke",
529 | requestHeader: map[string]string{},
530 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"name\":\"projects/%s/locations/us\", \"entry\":\"projects/%s/locations/us/entryGroups/@bigquery/entries/bigquery.googleapis.com/projects/%s/datasets/%s\"}", DataplexProject, DataplexProject, DataplexProject, "non-existent-dataset"))),
531 | wantStatusCode: 400,
532 | expectResult: false,
533 | },
534 | {
535 | name: "Success - Entry Found with Basic View",
536 | api: "http://127.0.0.1:5000/api/tool/my-dataplex-lookup-entry-tool/invoke",
537 | requestHeader: map[string]string{},
538 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"name\":\"projects/%s/locations/us\", \"entry\":\"projects/%s/locations/us/entryGroups/@bigquery/entries/bigquery.googleapis.com/projects/%s/datasets/%s/tables/%s\", \"view\": %d}", DataplexProject, DataplexProject, DataplexProject, datasetName, tableName, 1))),
539 | wantStatusCode: 200,
540 | expectResult: true,
541 | wantContentKey: "name",
542 | dontWantContentKey: "aspects",
543 | },
544 | {
545 | name: "Failure - Entry with Custom View without Aspect Types",
546 | api: "http://127.0.0.1:5000/api/tool/my-dataplex-lookup-entry-tool/invoke",
547 | requestHeader: map[string]string{},
548 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"name\":\"projects/%s/locations/us\", \"entry\":\"projects/%s/locations/us/entryGroups/@bigquery/entries/bigquery.googleapis.com/projects/%s/datasets/%s/tables/%s\", \"view\": %d}", DataplexProject, DataplexProject, DataplexProject, datasetName, tableName, 3))),
549 | wantStatusCode: 400,
550 | expectResult: false,
551 | },
552 | {
553 | name: "Success - Entry Found with only Schema Aspect",
554 | api: "http://127.0.0.1:5000/api/tool/my-dataplex-lookup-entry-tool/invoke",
555 | requestHeader: map[string]string{},
556 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"name\":\"projects/%s/locations/us\", \"entry\":\"projects/%s/locations/us/entryGroups/@bigquery/entries/bigquery.googleapis.com/projects/%s/datasets/%s/tables/%s\", \"aspectTypes\":[\"projects/dataplex-types/locations/global/aspectTypes/schema\"], \"view\": %d}", DataplexProject, DataplexProject, DataplexProject, datasetName, tableName, 3))),
557 | wantStatusCode: 200,
558 | expectResult: true,
559 | wantContentKey: "aspects",
560 | aspectCheck: true,
561 | },
562 | }
563 |
564 | for _, tc := range testCases {
565 | t.Run(tc.name, func(t *testing.T) {
566 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
567 | if err != nil {
568 | t.Fatalf("unable to create request: %s", err)
569 | }
570 | req.Header.Add("Content-type", "application/json")
571 | for k, v := range tc.requestHeader {
572 | req.Header.Add(k, v)
573 | }
574 | resp, err := http.DefaultClient.Do(req)
575 | if err != nil {
576 | t.Fatalf("unable to send request: %s", err)
577 | }
578 | defer resp.Body.Close()
579 |
580 | if resp.StatusCode != tc.wantStatusCode {
581 | bodyBytes, _ := io.ReadAll(resp.Body)
582 | t.Fatalf("Response status code got %d, want %d\nResponse body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
583 | }
584 |
585 | var result map[string]interface{}
586 | if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
587 | t.Fatalf("Error parsing response body: %v", err)
588 | }
589 |
590 | if tc.expectResult {
591 | resultStr, ok := result["result"].(string)
592 | if !ok {
593 | t.Fatalf("Expected 'result' field to be a string on success, got %T", result["result"])
594 | }
595 | if resultStr == "" || resultStr == "{}" || resultStr == "null" {
596 | t.Fatal("Expected an entry, but got empty result")
597 | }
598 |
599 | var entry map[string]interface{}
600 | if err := json.Unmarshal([]byte(resultStr), &entry); err != nil {
601 | t.Fatalf("Error unmarshalling result string into entry map: %v", err)
602 | }
603 |
604 | if _, ok := entry[tc.wantContentKey]; !ok {
605 | t.Fatalf("Expected entry to have key '%s', but it was not found in %v", tc.wantContentKey, entry)
606 | }
607 |
608 | if _, ok := entry[tc.dontWantContentKey]; ok && tc.dontWantContentKey != "" { // only enforce when a forbidden key was actually specified
609 | t.Fatalf("Expected entry to not have key '%s', but it was found in %v", tc.dontWantContentKey, entry)
610 | }
611 |
612 | if tc.aspectCheck {
613 | // Verify that exactly the one requested aspect type came back.
614 | aspects, ok := entry["aspects"].(map[string]interface{})
615 | if !ok {
616 | t.Fatalf("Expected 'aspects' to be a map, got %T", entry["aspects"])
617 | }
618 | if len(aspects) != 1 {
619 | t.Fatalf("Expected exactly one aspect, but got %d", len(aspects))
620 | }
621 | }
622 | } else { // Handle expected error response
623 | _, ok := result["error"]
624 | if !ok {
625 | t.Fatalf("Expected 'error' field in response, got %v", result)
626 | }
627 | }
628 | })
629 | }
630 | }
631 |
632 | func runDataplexSearchAspectTypesToolInvokeTest(t *testing.T, aspectTypeId string) { // exercises the dataplex search-aspect-types tool against a local Toolbox server on 127.0.0.1:5000
633 | idToken, err := tests.GetGoogleIdToken(tests.ClientId)
634 | if err != nil {
635 | t.Fatalf("error getting Google ID token: %s", err)
636 | }
637 |
638 | testCases := []struct {
639 | name string
640 | api string
641 | requestHeader map[string]string
642 | requestBody io.Reader
643 | wantStatusCode int
644 | expectResult bool
645 | wantContentKey string
646 | }{
647 | {
648 | name: "Success - Aspect Type Found",
649 | api: "http://127.0.0.1:5000/api/tool/my-dataplex-search-aspect-types-tool/invoke",
650 | requestHeader: map[string]string{},
651 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"query\":\"name:%s_aspectType\"}", aspectTypeId))),
652 | wantStatusCode: 200,
653 | expectResult: true,
654 | wantContentKey: "metadata_template",
655 | },
656 | {
657 | name: "Success - Aspect Type Found with Authorization",
658 | api: "http://127.0.0.1:5000/api/tool/my-auth-dataplex-search-aspect-types-tool/invoke",
659 | requestHeader: map[string]string{"my-google-auth_token": idToken},
660 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"query\":\"name:%s_aspectType\"}", aspectTypeId))),
661 | wantStatusCode: 200,
662 | expectResult: true,
663 | wantContentKey: "metadata_template",
664 | },
665 | {
666 | name: "Failure - Aspect Type Not Found",
667 | api: "http://127.0.0.1:5000/api/tool/my-dataplex-search-aspect-types-tool/invoke",
668 | requestHeader: map[string]string{},
669 | requestBody: bytes.NewBuffer([]byte(`"{\"query\":\"name:_aspectType\"}"`)), // deliberately a quoted JSON string, not an object, to trigger the 400 below
670 | wantStatusCode: 400,
671 | expectResult: false,
672 | },
673 | {
674 | name: "Failure - Invalid Authorization Token",
675 | api: "http://127.0.0.1:5000/api/tool/my-auth-dataplex-search-aspect-types-tool/invoke",
676 | requestHeader: map[string]string{"my-google-auth_token": "invalid_token"},
677 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"query\":\"name:%s_aspectType\"}", aspectTypeId))),
678 | wantStatusCode: 401,
679 | expectResult: false,
680 | },
681 | {
682 | name: "Failure - No Authorization Token",
683 | api: "http://127.0.0.1:5000/api/tool/my-auth-dataplex-search-aspect-types-tool/invoke",
684 | requestHeader: map[string]string{},
685 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"query\":\"name:%s_aspectType\"}", aspectTypeId))),
686 | wantStatusCode: 401,
687 | expectResult: false,
688 | },
689 | }
690 |
691 | for _, tc := range testCases {
692 | t.Run(tc.name, func(t *testing.T) {
693 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
694 | if err != nil {
695 | t.Fatalf("unable to create request: %s", err)
696 | }
697 | req.Header.Add("Content-type", "application/json")
698 | for k, v := range tc.requestHeader {
699 | req.Header.Add(k, v)
700 | }
701 | resp, err := http.DefaultClient.Do(req)
702 | if err != nil {
703 | t.Fatalf("unable to send request: %s", err)
704 | }
705 | defer resp.Body.Close()
706 | if resp.StatusCode != tc.wantStatusCode {
707 | t.Fatalf("response status code is not %d. It is %d", tc.wantStatusCode, resp.StatusCode)
708 | }
709 | var result map[string]interface{}
710 | if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
711 | t.Fatalf("error parsing response body: %s", err)
712 | }
713 | resultStr, ok := result["result"].(string)
714 | if !ok {
715 | if result["result"] == nil && !tc.expectResult { // error responses may legitimately omit "result"
716 | return
717 | }
718 | t.Fatalf("expected 'result' field to be a string, got %T", result["result"])
719 | }
720 | if !tc.expectResult && (resultStr == "" || resultStr == "[]") { // empty result set is the expected failure shape
721 | return
722 | }
723 | var entries []interface{}
724 | if err := json.Unmarshal([]byte(resultStr), &entries); err != nil {
725 | t.Fatalf("error unmarshalling result string: %v", err)
726 | }
727 |
728 | if tc.expectResult {
729 | if len(entries) != 1 {
730 | t.Fatalf("expected exactly one entry, but got %d", len(entries))
731 | }
732 | entry, ok := entries[0].(map[string]interface{})
733 | if !ok {
734 | t.Fatalf("expected entry to be a map, got %T", entries[0])
735 | }
736 | if _, ok := entry[tc.wantContentKey]; !ok {
737 | t.Fatalf("expected entry to have key '%s', but it was not found in %v", tc.wantContentKey, entry)
738 | }
739 | } else {
740 | if len(entries) != 0 {
741 | t.Fatalf("expected 0 entries, but got %d", len(entries))
742 | }
743 | }
744 | })
745 | }
746 | }
747 |
```
--------------------------------------------------------------------------------
/internal/sources/bigquery/bigquery.go:
--------------------------------------------------------------------------------
```go
1 | // Copyright 2025 Google LLC
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package bigquery
16 |
17 | import (
18 | "context"
19 | "fmt"
20 | "math/big"
21 | "net/http"
22 | "reflect"
23 | "strings"
24 | "sync"
25 | "time"
26 |
27 | bigqueryapi "cloud.google.com/go/bigquery"
28 | dataplexapi "cloud.google.com/go/dataplex/apiv1"
29 | "github.com/goccy/go-yaml"
30 | "github.com/googleapis/genai-toolbox/internal/sources"
31 | "github.com/googleapis/genai-toolbox/internal/tools"
32 | "github.com/googleapis/genai-toolbox/internal/util"
33 | "github.com/googleapis/genai-toolbox/internal/util/orderedmap"
34 | "go.opentelemetry.io/otel/trace"
35 | "golang.org/x/oauth2"
36 | "golang.org/x/oauth2/google"
37 | bigqueryrestapi "google.golang.org/api/bigquery/v2"
38 | "google.golang.org/api/googleapi"
39 | "google.golang.org/api/impersonate"
40 | "google.golang.org/api/iterator"
41 | "google.golang.org/api/option"
42 | )
43 |
44 | const SourceKind string = "bigquery" // registry identifier for this source type
45 |
46 | // CloudPlatformScope is a broad scope for Google Cloud Platform services.
47 | const CloudPlatformScope = "https://www.googleapis.com/auth/cloud-platform"
48 |
49 | const (
50 | // No write operations are allowed.
51 | WriteModeBlocked string = "blocked"
52 | // Only protected write operations are allowed in a BigQuery session.
53 | WriteModeProtected string = "protected"
54 | // All write operations are allowed.
55 | WriteModeAllowed string = "allowed"
56 | )
57 |
58 | // Compile-time check that Config satisfies the sources.SourceConfig interface.
59 | var _ sources.SourceConfig = Config{}
60 |
61 | type BigqueryClientCreator func(tokenString string, wantRestService bool) (*bigqueryapi.Client, *bigqueryrestapi.Service, error) // builds per-token BigQuery clients when client OAuth is enabled
62 |
63 | type BigQuerySessionProvider func(ctx context.Context) (*Session, error) // returns the shared protected-mode session; nil outside protected mode
64 |
65 | type DataplexClientCreator func(tokenString string) (*dataplexapi.CatalogClient, error) // builds per-token Dataplex catalog clients
66 |
67 | func init() { // register the "bigquery" source kind at package load time
68 | if registered := sources.Register(SourceKind, newConfig); !registered {
69 | panic(fmt.Sprintf("source kind %q already registered", SourceKind))
70 | }
71 | }
72 |
73 | func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources.SourceConfig, error) { // decode a YAML node into a named BigQuery source config
74 | cfg := Config{Name: name}
75 | if decodeErr := decoder.DecodeContext(ctx, &cfg); decodeErr != nil {
76 | return nil, decodeErr
77 | }
78 | return cfg, nil
79 | }
80 |
81 | type Config struct { // YAML-backed settings for a "bigquery" source
82 | // BigQuery configs
83 | Name string `yaml:"name" validate:"required"`
84 | Kind string `yaml:"kind" validate:"required"`
85 | Project string `yaml:"project" validate:"required"`
86 | Location string `yaml:"location"`
87 | WriteMode string `yaml:"writeMode"` // "blocked", "protected", or "allowed"; defaults to allowed in Initialize
88 | AllowedDatasets StringOrStringSlice `yaml:"allowedDatasets"` // optional allow-list; entries are "dataset" or "project.dataset"
89 | UseClientOAuth bool `yaml:"useClientOAuth"` // create clients per end-user OAuth token instead of ADC
90 | ImpersonateServiceAccount string `yaml:"impersonateServiceAccount"` // incompatible with useClientOAuth (checked in Initialize)
91 | Scopes StringOrStringSlice `yaml:"scopes"`
92 | MaxQueryResultRows int `yaml:"maxQueryResultRows"` // row cap for query results; defaults to 50 in Initialize
93 | }
94 |
95 | // StringOrStringSlice is a custom type that can unmarshal both a single string
96 | // (which it splits by comma) and a sequence of strings into a string slice.
97 | type StringOrStringSlice []string
98 |
99 | // UnmarshalYAML implements the yaml.Unmarshaler interface.
100 | func (s *StringOrStringSlice) UnmarshalYAML(unmarshal func(any) error) error {
101 | var v any
102 | if err := unmarshal(&v); err != nil {
103 | return err
104 | }
105 | switch val := v.(type) {
106 | case string:
107 | *s = strings.Split(val, ",") // NOTE(review): items are not whitespace-trimmed after the split — confirm intended
108 | return nil
109 | case []any:
110 | for _, item := range val {
111 | if str, ok := item.(string); ok {
112 | *s = append(*s, str) // appends to any existing contents rather than replacing them
113 | } else {
114 | return fmt.Errorf("element in sequence is not a string: %v", item)
115 | }
116 | }
117 | return nil
118 | }
119 | return fmt.Errorf("cannot unmarshal %T into StringOrStringSlice", v)
120 | }
121 |
122 | // SourceConfigKind reports the kind identifier ("bigquery") for this source config.
123 | func (r Config) SourceConfigKind() string {
124 | return SourceKind
125 | }
126 | func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) { // apply defaults, validate mode combinations, build clients, and verify the dataset allow-list
127 | if r.WriteMode == "" {
128 | r.WriteMode = WriteModeAllowed // default write mode
129 | }
130 |
131 | if r.MaxQueryResultRows == 0 {
132 | r.MaxQueryResultRows = 50 // default row cap
133 | }
134 |
135 | if r.WriteMode == WriteModeProtected && r.UseClientOAuth {
136 | // Protected mode only allows write operations to the session's temporary datasets.
137 | // When using client OAuth, a new session is created every
138 | // time a BigQuery tool is invoked, so no session data can
139 | // be preserved as protected mode requires.
140 | return nil, fmt.Errorf("writeMode 'protected' cannot be used with useClientOAuth 'true'")
141 | }
142 |
143 | if r.UseClientOAuth && r.ImpersonateServiceAccount != "" {
144 | return nil, fmt.Errorf("useClientOAuth cannot be used with impersonateServiceAccount")
145 | }
146 |
147 | var client *bigqueryapi.Client
148 | var restService *bigqueryrestapi.Service
149 | var tokenSource oauth2.TokenSource
150 | var clientCreator BigqueryClientCreator
151 | var err error
152 |
153 | s := &Source{ // fields Client/RestService/TokenSource/ClientCreator start nil here and are filled in below
154 | Config: r,
155 | Client: client,
156 | RestService: restService,
157 | TokenSource: tokenSource,
158 | MaxQueryResultRows: r.MaxQueryResultRows,
159 | ClientCreator: clientCreator,
160 | }
161 |
162 | if r.UseClientOAuth {
163 | // use client OAuth
164 | baseClientCreator, err := newBigQueryClientCreator(ctx, tracer, r.Project, r.Location, r.Name)
165 | if err != nil {
166 | return nil, fmt.Errorf("error constructing client creator: %w", err)
167 | }
168 | setupClientCaching(s, baseClientCreator)
169 |
170 | } else {
171 | // Initializes a BigQuery Google SQL source
172 | client, restService, tokenSource, err = initBigQueryConnection(ctx, tracer, r.Name, r.Project, r.Location, r.ImpersonateServiceAccount, r.Scopes)
173 | if err != nil {
174 | return nil, fmt.Errorf("error creating client from ADC: %w", err)
175 | }
176 | s.Client = client
177 | s.RestService = restService
178 | s.TokenSource = tokenSource
179 | }
180 |
181 | allowedDatasets := make(map[string]struct{})
182 | // Get full id of allowed datasets and verify they exist.
183 | if len(r.AllowedDatasets) > 0 {
184 | for _, allowed := range r.AllowedDatasets {
185 | var projectID, datasetID, allowedFullID string
186 | if strings.Contains(allowed, ".") {
187 | parts := strings.Split(allowed, ".")
188 | if len(parts) != 2 {
189 | return nil, fmt.Errorf("invalid allowedDataset format: %q, expected 'project.dataset' or 'dataset'", allowed)
190 | }
191 | projectID = parts[0]
192 | datasetID = parts[1]
193 | allowedFullID = allowed
194 | } else {
195 | projectID = r.Project // bare dataset names are qualified with the configured project
196 | datasetID = allowed
197 | allowedFullID = fmt.Sprintf("%s.%s", projectID, datasetID)
198 | }
199 |
200 | if s.Client != nil { // existence is only verified in ADC mode; with client OAuth s.Client is nil and the check is skipped
201 | dataset := s.Client.DatasetInProject(projectID, datasetID)
202 | _, err := dataset.Metadata(ctx)
203 | if err != nil {
204 | if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == http.StatusNotFound {
205 | return nil, fmt.Errorf("allowedDataset '%s' not found in project '%s'", datasetID, projectID)
206 | }
207 | return nil, fmt.Errorf("failed to verify allowedDataset '%s' in project '%s': %w", datasetID, projectID, err)
208 | }
209 | }
210 | allowedDatasets[allowedFullID] = struct{}{}
211 | }
212 | }
213 |
214 | s.AllowedDatasets = allowedDatasets
215 | s.SessionProvider = s.newBigQuerySessionProvider()
216 |
217 | if r.WriteMode != WriteModeAllowed && r.WriteMode != WriteModeBlocked && r.WriteMode != WriteModeProtected { // NOTE(review): this validation runs after clients are built; invalid values still fail initialization
218 | return nil, fmt.Errorf("invalid writeMode %q: must be one of %q, %q, or %q", r.WriteMode, WriteModeAllowed, WriteModeProtected, WriteModeBlocked)
219 | }
220 | s.makeDataplexCatalogClient = s.lazyInitDataplexClient(ctx, tracer)
221 | return s, nil
222 | }
223 |
224 | // setupClientCaching initializes caches and wraps the base client creator with caching logic.
225 | func setupClientCaching(s *Source, baseCreator BigqueryClientCreator) {
226 | // Define eviction handlers
227 | onBqEvict := func(key string, value interface{}) { // close evicted BigQuery clients to release their connections
228 | if client, ok := value.(*bigqueryapi.Client); ok && client != nil {
229 | client.Close()
230 | }
231 | }
232 | onDataplexEvict := func(key string, value interface{}) {
233 | if client, ok := value.(*dataplexapi.CatalogClient); ok && client != nil {
234 | client.Close()
235 | }
236 | }
237 |
238 | // Initialize caches
239 | s.bqClientCache = NewCache(onBqEvict)
240 | s.bqRestCache = NewCache(nil) // REST services hold no closable resources here, so no eviction handler
241 | s.dataplexCache = NewCache(onDataplexEvict)
242 |
243 | // Create the caching wrapper for the client creator
244 | s.ClientCreator = func(tokenString string, wantRestService bool) (*bigqueryapi.Client, *bigqueryrestapi.Service, error) { // cache key is the raw OAuth token string
245 | // Check cache
246 | bqClientVal, bqFound := s.bqClientCache.Get(tokenString)
247 |
248 | if wantRestService {
249 | restServiceVal, restFound := s.bqRestCache.Get(tokenString)
250 | if bqFound && restFound {
251 | // Cache hit for both
252 | return bqClientVal.(*bigqueryapi.Client), restServiceVal.(*bigqueryrestapi.Service), nil
253 | }
254 | } else {
255 | if bqFound {
256 | return bqClientVal.(*bigqueryapi.Client), nil, nil
257 | }
258 | }
259 |
260 | // Cache miss - call the client creator
261 | client, restService, err := baseCreator(tokenString, wantRestService) // NOTE(review): concurrent misses for the same token may each create a client; last Set wins — confirm acceptable
262 | if err != nil {
263 | return nil, nil, err
264 | }
265 |
266 | // Set in cache
267 | s.bqClientCache.Set(tokenString, client)
268 | if wantRestService && restService != nil {
269 | s.bqRestCache.Set(tokenString, restService)
270 | }
271 |
272 | return client, restService, nil
273 | }
274 | }
275 |
276 | var _ sources.Source = &Source{} // compile-time check that *Source satisfies sources.Source
277 |
278 | type Source struct { // a configured BigQuery source plus its live clients and caches
279 | Config
280 | Client *bigqueryapi.Client // nil when useClientOAuth is enabled
281 | RestService *bigqueryrestapi.Service
282 | TokenSource oauth2.TokenSource
283 | MaxQueryResultRows int
284 | ClientCreator BigqueryClientCreator // set by setupClientCaching in OAuth mode
285 | AllowedDatasets map[string]struct{} // keys are "project.dataset"; empty map means unrestricted
286 | sessionMutex sync.Mutex // serializes protected-mode session creation/validation
287 | makeDataplexCatalogClient func() (*dataplexapi.CatalogClient, DataplexClientCreator, error)
288 | SessionProvider BigQuerySessionProvider
289 | Session *Session // current protected-mode session, guarded by sessionMutex
290 |
291 | // Caches for OAuth clients
292 | bqClientCache *Cache
293 | bqRestCache *Cache
294 | dataplexCache *Cache
295 | }
296 |
297 | type Session struct { // metadata for a protected-mode BigQuery session
298 | ID string
299 | ProjectID string
300 | DatasetID string // the session's temporary dataset
301 | CreationTime time.Time
302 | LastUsed time.Time
303 | }
304 |
305 | func (s *S ource) SourceKind() string {
306 | // Returns BigQuery Google SQL source kind
307 | return SourceKind
308 | }
309 |
310 | func (s *Source) ToConfig() sources.SourceConfig { // exposes the originating config
311 | return s.Config
312 | }
313 |
314 | func (s *Source) BigQueryClient() *bigqueryapi.Client { // nil in client-OAuth mode
315 | return s.Client
316 | }
317 |
318 | func (s *Source) BigQueryRestService() *bigqueryrestapi.Service {
319 | return s.RestService
320 | }
321 |
322 | func (s *Source) BigQueryWriteMode() string { // one of the WriteMode* constants
323 | return s.WriteMode
324 | }
325 |
326 | func (s *Source) BigQuerySession() BigQuerySessionProvider {
327 | return s.SessionProvider
328 | }
329 |
330 | func (s *Source) newBigQuerySessionProvider() BigQuerySessionProvider { // returns a provider that lazily creates/refreshes the shared protected-mode session
331 | return func(ctx context.Context) (*Session, error) {
332 | if s.WriteMode != WriteModeProtected {
333 | return nil, nil
334 | }
335 |
336 | s.sessionMutex.Lock()
337 | defer s.sessionMutex.Unlock()
338 |
339 | logger, err := util.LoggerFromContext(ctx)
340 | if err != nil {
341 | return nil, fmt.Errorf("failed to get logger from context: %w", err)
342 | }
343 |
344 | if s.Session != nil {
345 | // Absolute 7-day lifetime check.
346 | const sessionMaxLifetime = 7 * 24 * time.Hour
347 | // This assumes a single task will not exceed 30 minutes, preventing it from failing mid-execution.
348 | const refreshThreshold = 30 * time.Minute
349 | if time.Since(s.Session.CreationTime) > (sessionMaxLifetime - refreshThreshold) {
350 | logger.DebugContext(ctx, "Session is approaching its 7-day maximum lifetime. Creating a new one.")
351 | } else {
352 | job := &bigqueryrestapi.Job{ // dry-run probe to check the existing session is still alive
353 | Configuration: &bigqueryrestapi.JobConfiguration{
354 | DryRun: true,
355 | Query: &bigqueryrestapi.JobConfigurationQuery{
356 | Query: "SELECT 1",
357 | UseLegacySql: new(bool),
358 | ConnectionProperties: []*bigqueryrestapi.ConnectionProperty{{Key: "session_id", Value: s.Session.ID}},
359 | },
360 | },
361 | }
362 | _, err := s.RestService.Jobs.Insert(s.Project, job).Do()
363 | if err == nil {
364 | s.Session.LastUsed = time.Now()
365 | return s.Session, nil
366 | }
367 | logger.DebugContext(ctx, "Session validation failed (likely expired), creating a new one.", "error", err)
368 | }
369 | }
370 |
371 | // Create a new session if one doesn't exist, it has passed its 7-day lifetime,
372 | // or it failed the validation dry run.
373 |
374 | creationTime := time.Now()
375 | job := &bigqueryrestapi.Job{
376 | JobReference: &bigqueryrestapi.JobReference{
377 | ProjectId: s.Project,
378 | Location: s.Location,
379 | },
380 | Configuration: &bigqueryrestapi.JobConfiguration{
381 | DryRun: true,
382 | Query: &bigqueryrestapi.JobConfigurationQuery{
383 | Query: "SELECT 1",
384 | CreateSession: true,
385 | },
386 | },
387 | }
388 |
389 | createdJob, err := s.RestService.Jobs.Insert(s.Project, job).Do()
390 | if err != nil {
391 | return nil, fmt.Errorf("failed to create new session: %w", err)
392 | }
393 |
394 | var sessionID, sessionDatasetID, projectID string
395 | if createdJob.Statistics != nil && createdJob.Statistics.SessionInfo != nil { // fixed: guard Statistics (the struct actually dereferenced) — the old check of Status did not prevent a nil-pointer panic
396 | sessionID = createdJob.Statistics.SessionInfo.SessionId
397 | } else {
398 | return nil, fmt.Errorf("failed to get session ID from new session job")
399 | }
400 |
401 | if createdJob.Configuration != nil && createdJob.Configuration.Query != nil && createdJob.Configuration.Query.DestinationTable != nil {
402 | sessionDatasetID = createdJob.Configuration.Query.DestinationTable.DatasetId
403 | projectID = createdJob.Configuration.Query.DestinationTable.ProjectId
404 | } else {
405 | return nil, fmt.Errorf("failed to get session dataset ID from new session job")
406 | }
407 |
408 | s.Session = &Session{
409 | ID: sessionID,
410 | ProjectID: projectID,
411 | DatasetID: sessionDatasetID,
412 | CreationTime: creationTime,
413 | LastUsed: creationTime,
414 | }
415 | return s.Session, nil
416 | }
417 | }
418 |
419 | func (s *Source) UseClientAuthorization() bool { // true when clients are built per end-user OAuth token
420 | return s.UseClientOAuth
421 | }
422 |
423 | func (s *Source) BigQueryProject() string {
424 | return s.Project
425 | }
426 |
427 | func (s *Source) BigQueryLocation() string {
428 | return s.Location
429 | }
430 |
431 | func (s *Source) BigQueryTokenSource() oauth2.TokenSource { // nil in client-OAuth mode
432 | return s.TokenSource
433 | }
434 |
435 | // BigQueryTokenSourceWithScope returns a token source for the given scopes,
436 | func (s *Source) BigQueryTokenSourceWithScope(ctx context.Context, scopes []string) (oauth2.TokenSource, error) {
437 | if len(scopes) == 0 { // fallback chain: explicit argument > configured Scopes > cloud-platform
438 | scopes = s.Scopes
439 | if len(scopes) == 0 {
440 | scopes = []string{CloudPlatformScope}
441 | }
442 | }
443 |
444 | if s.ImpersonateServiceAccount != "" {
445 | // Create impersonated credentials token source with the requested scopes
446 | ts, err := impersonate.CredentialsTokenSource(ctx, impersonate.CredentialsConfig{
447 | TargetPrincipal: s.ImpersonateServiceAccount,
448 | Scopes: scopes,
449 | })
450 | if err != nil {
451 | return nil, fmt.Errorf("failed to create impersonated credentials for %q with scopes %v: %w", s.ImpersonateServiceAccount, scopes, err)
452 | }
453 | return ts, nil
454 | }
455 | return google.DefaultTokenSource(ctx, scopes...)
456 | }
456 |
457 | func (s *Source) GetMaxQueryResultRows() int { // row cap applied by RunSQL; <= 0 means no cap
458 | return s.MaxQueryResultRows
459 | }
460 |
461 | func (s *Source) BigQueryClientCreator() BigqueryClientCreator { // nil outside client-OAuth mode
462 | return s.ClientCreator
463 | }
464 |
465 | func (s *Source) BigQueryAllowedDatasets() []string { // nil means no restriction is configured
466 | if len(s.AllowedDatasets) == 0 {
467 | return nil
468 | }
469 | names := make([]string, 0, len(s.AllowedDatasets))
470 | for name := range s.AllowedDatasets {
471 | names = append(names, name)
472 | }
473 | return names
474 | }
475 |
476 | // IsDatasetAllowed reports whether the given project.dataset may be accessed under the configured allow-list.
477 | func (s *Source) IsDatasetAllowed(projectID, datasetID string) bool {
478 | // An empty allow-list means access is unrestricted.
479 | if len(s.AllowedDatasets) == 0 {
480 | return true
481 | }
482 |
483 | key := projectID + "." + datasetID
484 | _, allowed := s.AllowedDatasets[key]
485 | return allowed
486 | }
487 |
488 | func (s *Source) MakeDataplexCatalogClient() func() (*dataplexapi.CatalogClient, DataplexClientCreator, error) { // returns the lazy, once-initialized Dataplex client factory
489 | return s.makeDataplexCatalogClient
490 | }
491 |
492 | // lazyInitDataplexClient defers Dataplex client construction until first use.
493 | func (s *Source) lazyInitDataplexClient(ctx context.Context, tracer trace.Tracer) func() (*dataplexapi.CatalogClient, DataplexClientCreator, error) {
494 | var once sync.Once // guards the one-time initialization across callers
495 | var client *dataplexapi.CatalogClient
496 | var clientCreator DataplexClientCreator
497 | var err error
498 |
499 | return func() (*dataplexapi.CatalogClient, DataplexClientCreator, error) {
500 | once.Do(func() { // note: a failed init is never retried — err is cached for all subsequent calls
501 | c, cc, e := initDataplexConnection(ctx, tracer, s.Name, s.Project, s.UseClientOAuth, s.ImpersonateServiceAccount, s.Scopes)
502 | if e != nil {
503 | err = fmt.Errorf("failed to initialize dataplex client: %w", e)
504 | return
505 | }
506 | client = c
507 |
508 | // If using OAuth, wrap the provided client creator (cc) with caching logic
509 | if s.UseClientOAuth && cc != nil {
510 | clientCreator = func(tokenString string) (*dataplexapi.CatalogClient, error) {
511 | // Check cache
512 | if val, found := s.dataplexCache.Get(tokenString); found {
513 | return val.(*dataplexapi.CatalogClient), nil
514 | }
515 |
516 | // Cache miss - call client creator
517 | dpClient, err := cc(tokenString)
518 | if err != nil {
519 | return nil, err
520 | }
521 |
522 | // Set in cache
523 | s.dataplexCache.Set(tokenString, dpClient)
524 | return dpClient, nil
525 | }
526 | } else {
527 | // Not using OAuth or no creator was returned
528 | clientCreator = cc
529 | }
530 | })
531 | return client, clientCreator, err
532 | }
533 | }
533 |
534 | // RetrieveClientAndService returns the BigQuery client and REST service to use for a request.
535 | func (s *Source) RetrieveClientAndService(accessToken tools.AccessToken) (*bigqueryapi.Client, *bigqueryrestapi.Service, error) {
536 | bqClient := s.BigQueryClient()
537 | restService := s.BigQueryRestService()
538 |
539 | // Initialize new client if using user OAuth token
540 | if s.UseClientAuthorization() {
541 | tokenStr, err := accessToken.ParseBearerToken()
542 | if err != nil {
543 | return nil, nil, fmt.Errorf("error parsing access token: %w", err)
544 | }
545 | bqClient, restService, err = s.BigQueryClientCreator()(tokenStr, true) // cached per token by setupClientCaching
546 | if err != nil {
547 | return nil, nil, fmt.Errorf("error creating client from OAuth access token: %w", err)
548 | }
549 | }
550 | return bqClient, restService, nil
551 | }
551 |
552 | // RunSQL executes a statement and returns rows (capped at MaxQueryResultRows) or a status string.
553 | func (s *Source) RunSQL(ctx context.Context, bqClient *bigqueryapi.Client, statement, statementType string, params []bigqueryapi.QueryParameter, connProps []*bigqueryapi.ConnectionProperty) (any, error) {
554 | query := bqClient.Query(statement)
555 | query.Location = bqClient.Location
556 | if params != nil {
557 | query.Parameters = params
558 | }
559 | if connProps != nil {
560 | query.ConnectionProperties = connProps // e.g. session_id for protected-mode sessions
561 | }
562 |
563 | // This block handles SELECT statements, which return a row set.
564 | // We iterate through the results, convert each row into a map of
565 | // column names to values, and return the collection of rows.
566 | job, err := query.Run(ctx)
567 | if err != nil {
568 | return nil, fmt.Errorf("unable to execute query: %w", err)
569 | }
570 | it, err := job.Read(ctx)
571 | if err != nil {
572 | return nil, fmt.Errorf("unable to read query results: %w", err)
573 | }
574 |
575 | var out []any
576 | for s.MaxQueryResultRows <= 0 || len(out) < s.MaxQueryResultRows { // MaxQueryResultRows <= 0 means no cap
577 | var val []bigqueryapi.Value
578 | err = it.Next(&val)
579 | if err == iterator.Done {
580 | break
581 | }
582 | if err != nil {
583 | return nil, fmt.Errorf("unable to iterate through query results: %w", err)
584 | }
585 | schema := it.Schema
586 | row := orderedmap.Row{} // preserves column order in the output
587 | for i, field := range schema {
588 | row.Add(field.Name, NormalizeValue(val[i]))
589 | }
590 | out = append(out, row)
591 | }
592 | // If the query returned any rows, return them directly.
593 | if len(out) > 0 {
594 | return out, nil
595 | }
596 |
597 | // This handles the standard case for a SELECT query that successfully
598 | // executes but returns zero rows.
599 | if statementType == "SELECT" {
600 | return "The query returned 0 rows.", nil
601 | }
602 | // This is the fallback for a successful query that doesn't return content.
603 | // In most cases, this will be for DML/DDL statements like INSERT, UPDATE, CREATE, etc.
604 | // However, it is also possible that this was a query that was expected to return rows
605 | // but returned none, a case that we cannot distinguish here.
606 | return "Query executed successfully and returned no content.", nil
607 | }
607 |
608 | // NormalizeValue converts BigQuery specific types to standard JSON-compatible types.
609 | // Specifically, it handles *big.Rat (used for NUMERIC/BIGNUMERIC) by converting
610 | // them to decimal strings with up to 38 digits of precision, trimming trailing zeros.
611 | // It recursively handles slices (arrays) and maps (structs) using reflection.
612 | func NormalizeValue(v any) any {
613 | if v == nil {
614 | return nil
615 | }
616 |
617 | // Handle *big.Rat specifically.
618 | if rat, ok := v.(*big.Rat); ok {
619 | // Convert big.Rat to a decimal string.
620 | // Use a precision of 38 digits (enough for BIGNUMERIC and NUMERIC)
621 | // and trim trailing zeros to match BigQuery's behavior.
622 | s := rat.FloatString(38)
623 | if strings.Contains(s, ".") {
624 | s = strings.TrimRight(s, "0")
625 | s = strings.TrimRight(s, ".")
626 | }
627 | return s
628 | }
629 |
630 | // Use reflection for slices and maps to handle various underlying types.
631 | rv := reflect.ValueOf(v)
632 | switch rv.Kind() {
633 | case reflect.Slice, reflect.Array:
634 | // Preserve []byte as is, so json.Marshal encodes it as Base64 string (BigQuery BYTES behavior).
635 | if rv.Type().Elem().Kind() == reflect.Uint8 {
636 | return v
637 | }
638 | newSlice := make([]any, rv.Len())
639 | for i := 0; i < rv.Len(); i++ {
640 | newSlice[i] = NormalizeValue(rv.Index(i).Interface())
641 | }
642 | return newSlice
643 | case reflect.Map:
644 | // Ensure keys are strings to produce a JSON-compatible map.
645 | if rv.Type().Key().Kind() != reflect.String {
646 | return v
647 | }
648 | newMap := make(map[string]any, rv.Len())
649 | iter := rv.MapRange()
650 | for iter.Next() {
651 | newMap[iter.Key().String()] = NormalizeValue(iter.Value().Interface())
652 | }
653 | return newMap
654 | }
655 |
656 | return v
657 | }
658 |
659 | func initBigQueryConnection(
660 | ctx context.Context,
661 | tracer trace.Tracer,
662 | name string,
663 | project string,
664 | location string,
665 | impersonateServiceAccount string,
666 | scopes []string,
667 | ) (*bigqueryapi.Client, *bigqueryrestapi.Service, oauth2.TokenSource, error) {
668 | ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
669 | defer span.End()
670 |
671 | userAgent, err := util.UserAgentFromContext(ctx)
672 | if err != nil {
673 | return nil, nil, nil, err
674 | }
675 |
676 | var tokenSource oauth2.TokenSource
677 | var opts []option.ClientOption
678 |
679 | var credScopes []string
680 | if len(scopes) > 0 {
681 | credScopes = scopes
682 | } else if impersonateServiceAccount != "" {
683 | credScopes = []string{CloudPlatformScope}
684 | } else {
685 | credScopes = []string{bigqueryapi.Scope}
686 | }
687 |
688 | if impersonateServiceAccount != "" {
689 | // Create impersonated credentials token source
690 | // This broader scope is needed for tools like conversational analytics
691 | cloudPlatformTokenSource, err := impersonate.CredentialsTokenSource(ctx, impersonate.CredentialsConfig{
692 | TargetPrincipal: impersonateServiceAccount,
693 | Scopes: credScopes,
694 | })
695 | if err != nil {
696 | return nil, nil, nil, fmt.Errorf("failed to create impersonated credentials for %q: %w", impersonateServiceAccount, err)
697 | }
698 | tokenSource = cloudPlatformTokenSource
699 | opts = []option.ClientOption{
700 | option.WithUserAgent(userAgent),
701 | option.WithTokenSource(cloudPlatformTokenSource),
702 | }
703 | } else {
704 | // Use default credentials
705 | cred, err := google.FindDefaultCredentials(ctx, credScopes...)
706 | if err != nil {
707 | return nil, nil, nil, fmt.Errorf("failed to find default Google Cloud credentials with scopes %v: %w", credScopes, err)
708 | }
709 | tokenSource = cred.TokenSource
710 | opts = []option.ClientOption{
711 | option.WithUserAgent(userAgent),
712 | option.WithCredentials(cred),
713 | }
714 | }
715 |
716 | // Initialize the high-level BigQuery client
717 | client, err := bigqueryapi.NewClient(ctx, project, opts...)
718 | if err != nil {
719 | return nil, nil, nil, fmt.Errorf("failed to create BigQuery client for project %q: %w", project, err)
720 | }
721 | client.Location = location
722 |
723 | // Initialize the low-level BigQuery REST service using the same credentials
724 | restService, err := bigqueryrestapi.NewService(ctx, opts...)
725 | if err != nil {
726 | return nil, nil, nil, fmt.Errorf("failed to create BigQuery v2 service: %w", err)
727 | }
728 |
729 | return client, restService, tokenSource, nil
730 | }
731 |
732 | // initBigQueryConnectionWithOAuthToken initialize a BigQuery client with an
733 | // OAuth access token.
734 | func initBigQueryConnectionWithOAuthToken(
735 | ctx context.Context,
736 | tracer trace.Tracer,
737 | project string,
738 | location string,
739 | name string,
740 | userAgent string,
741 | tokenString string,
742 | wantRestService bool,
743 | ) (*bigqueryapi.Client, *bigqueryrestapi.Service, error) {
744 | ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
745 | defer span.End()
746 | // Construct token source
747 | token := &oauth2.Token{
748 | AccessToken: string(tokenString),
749 | }
750 | ts := oauth2.StaticTokenSource(token)
751 |
752 | // Initialize the BigQuery client with tokenSource
753 | client, err := bigqueryapi.NewClient(ctx, project, option.WithUserAgent(userAgent), option.WithTokenSource(ts))
754 | if err != nil {
755 | return nil, nil, fmt.Errorf("failed to create BigQuery client for project %q: %w", project, err)
756 | }
757 | client.Location = location
758 |
759 | if wantRestService {
760 | // Initialize the low-level BigQuery REST service using the same credentials
761 | restService, err := bigqueryrestapi.NewService(ctx, option.WithUserAgent(userAgent), option.WithTokenSource(ts))
762 | if err != nil {
763 | return nil, nil, fmt.Errorf("failed to create BigQuery v2 service: %w", err)
764 | }
765 | return client, restService, nil
766 | }
767 |
768 | return client, nil, nil
769 | }
770 |
771 | // newBigQueryClientCreator sets the project parameters for the init helper
772 | // function. The returned function takes in an OAuth access token and uses it to
773 | // create a BQ client.
774 | func newBigQueryClientCreator(
775 | ctx context.Context,
776 | tracer trace.Tracer,
777 | project string,
778 | location string,
779 | name string,
780 | ) (func(string, bool) (*bigqueryapi.Client, *bigqueryrestapi.Service, error), error) {
781 | userAgent, err := util.UserAgentFromContext(ctx)
782 | if err != nil {
783 | return nil, err
784 | }
785 |
786 | return func(tokenString string, wantRestService bool) (*bigqueryapi.Client, *bigqueryrestapi.Service, error) {
787 | return initBigQueryConnectionWithOAuthToken(ctx, tracer, project, location, name, userAgent, tokenString, wantRestService)
788 | }, nil
789 | }
790 |
791 | func initDataplexConnection(
792 | ctx context.Context,
793 | tracer trace.Tracer,
794 | name string,
795 | project string,
796 | useClientOAuth bool,
797 | impersonateServiceAccount string,
798 | scopes []string,
799 | ) (*dataplexapi.CatalogClient, DataplexClientCreator, error) {
800 | var client *dataplexapi.CatalogClient
801 | var clientCreator DataplexClientCreator
802 | var err error
803 |
804 | ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
805 | defer span.End()
806 |
807 | userAgent, err := util.UserAgentFromContext(ctx)
808 | if err != nil {
809 | return nil, nil, err
810 | }
811 |
812 | if useClientOAuth {
813 | clientCreator = newDataplexClientCreator(ctx, project, userAgent)
814 | } else {
815 | var opts []option.ClientOption
816 |
817 | credScopes := scopes
818 | if len(credScopes) == 0 {
819 | credScopes = []string{CloudPlatformScope}
820 | }
821 |
822 | if impersonateServiceAccount != "" {
823 | // Create impersonated credentials token source
824 | ts, err := impersonate.CredentialsTokenSource(ctx, impersonate.CredentialsConfig{
825 | TargetPrincipal: impersonateServiceAccount,
826 | Scopes: credScopes,
827 | })
828 | if err != nil {
829 | return nil, nil, fmt.Errorf("failed to create impersonated credentials for %q: %w", impersonateServiceAccount, err)
830 | }
831 | opts = []option.ClientOption{
832 | option.WithUserAgent(userAgent),
833 | option.WithTokenSource(ts),
834 | }
835 | } else {
836 | // Use default credentials
837 | cred, err := google.FindDefaultCredentials(ctx, credScopes...)
838 | if err != nil {
839 | return nil, nil, fmt.Errorf("failed to find default Google Cloud credentials: %w", err)
840 | }
841 | opts = []option.ClientOption{
842 | option.WithUserAgent(userAgent),
843 | option.WithCredentials(cred),
844 | }
845 | }
846 |
847 | client, err = dataplexapi.NewCatalogClient(ctx, opts...)
848 | if err != nil {
849 | return nil, nil, fmt.Errorf("failed to create Dataplex client for project %q: %w", project, err)
850 | }
851 | }
852 |
853 | return client, clientCreator, nil
854 | }
855 |
856 | func initDataplexConnectionWithOAuthToken(
857 | ctx context.Context,
858 | project string,
859 | userAgent string,
860 | tokenString string,
861 | ) (*dataplexapi.CatalogClient, error) {
862 | // Construct token source
863 | token := &oauth2.Token{
864 | AccessToken: string(tokenString),
865 | }
866 | ts := oauth2.StaticTokenSource(token)
867 |
868 | client, err := dataplexapi.NewCatalogClient(ctx, option.WithUserAgent(userAgent), option.WithTokenSource(ts))
869 | if err != nil {
870 | return nil, fmt.Errorf("failed to create Dataplex client for project %q: %w", project, err)
871 | }
872 | return client, nil
873 | }
874 |
875 | func newDataplexClientCreator(
876 | ctx context.Context,
877 | project string,
878 | userAgent string,
879 | ) func(string) (*dataplexapi.CatalogClient, error) {
880 | return func(tokenString string) (*dataplexapi.CatalogClient, error) {
881 | return initDataplexConnectionWithOAuthToken(ctx, project, userAgent, tokenString)
882 | }
883 | }
884 |
```