This is page 59 of 59. Use http://codebase.md/googleapis/genai-toolbox?lines=true&page={x} to view the full context.
# Directory Structure
```
├── .ci
│ ├── continuous.release.cloudbuild.yaml
│ ├── generate_release_table.sh
│ ├── integration.cloudbuild.yaml
│ ├── quickstart_test
│ │ ├── go.integration.cloudbuild.yaml
│ │ ├── js.integration.cloudbuild.yaml
│ │ ├── py.integration.cloudbuild.yaml
│ │ ├── run_go_tests.sh
│ │ ├── run_js_tests.sh
│ │ ├── run_py_tests.sh
│ │ └── setup_hotels_sample.sql
│ ├── test_with_coverage.sh
│ └── versioned.release.cloudbuild.yaml
├── .github
│ ├── auto-label.yaml
│ ├── blunderbuss.yml
│ ├── CODEOWNERS
│ ├── header-checker-lint.yml
│ ├── ISSUE_TEMPLATE
│ │ ├── bug_report.yml
│ │ ├── config.yml
│ │ ├── feature_request.yml
│ │ └── question.yml
│ ├── label-sync.yml
│ ├── labels.yaml
│ ├── PULL_REQUEST_TEMPLATE.md
│ ├── release-please.yml
│ ├── renovate.json5
│ ├── sync-repo-settings.yaml
│ └── workflows
│ ├── cloud_build_failure_reporter.yml
│ ├── deploy_dev_docs.yaml
│ ├── deploy_previous_version_docs.yaml
│ ├── deploy_versioned_docs.yaml
│ ├── docs_deploy.yaml
│ ├── docs_preview_clean.yaml
│ ├── docs_preview_deploy.yaml
│ ├── lint.yaml
│ ├── schedule_reporter.yml
│ ├── sync-labels.yaml
│ └── tests.yaml
├── .gitignore
├── .gitmodules
├── .golangci.yaml
├── .hugo
│ ├── archetypes
│ │ └── default.md
│ ├── assets
│ │ ├── icons
│ │ │ └── logo.svg
│ │ └── scss
│ │ ├── _styles_project.scss
│ │ └── _variables_project.scss
│ ├── go.mod
│ ├── go.sum
│ ├── hugo.toml
│ ├── layouts
│ │ ├── _default
│ │ │ └── home.releases.releases
│ │ ├── index.llms-full.txt
│ │ ├── index.llms.txt
│ │ ├── partials
│ │ │ ├── hooks
│ │ │ │ └── head-end.html
│ │ │ ├── navbar-version-selector.html
│ │ │ ├── page-meta-links.html
│ │ │ └── td
│ │ │ └── render-heading.html
│ │ ├── robot.txt
│ │ └── shortcodes
│ │ ├── include.html
│ │ ├── ipynb.html
│ │ └── regionInclude.html
│ ├── package-lock.json
│ ├── package.json
│ └── static
│ ├── favicons
│ │ ├── android-chrome-192x192.png
│ │ ├── android-chrome-512x512.png
│ │ ├── apple-touch-icon.png
│ │ ├── favicon-16x16.png
│ │ ├── favicon-32x32.png
│ │ └── favicon.ico
│ └── js
│ └── w3.js
├── CHANGELOG.md
├── cmd
│ ├── options_test.go
│ ├── options.go
│ ├── root_test.go
│ ├── root.go
│ └── version.txt
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── DEVELOPER.md
├── Dockerfile
├── docs
│ └── en
│ ├── _index.md
│ ├── about
│ │ ├── _index.md
│ │ └── faq.md
│ ├── concepts
│ │ ├── _index.md
│ │ └── telemetry
│ │ ├── index.md
│ │ ├── telemetry_flow.png
│ │ └── telemetry_traces.png
│ ├── getting-started
│ │ ├── _index.md
│ │ ├── colab_quickstart.ipynb
│ │ ├── configure.md
│ │ ├── introduction
│ │ │ ├── _index.md
│ │ │ └── architecture.png
│ │ ├── local_quickstart_go.md
│ │ ├── local_quickstart_js.md
│ │ ├── local_quickstart.md
│ │ ├── mcp_quickstart
│ │ │ ├── _index.md
│ │ │ ├── inspector_tools.png
│ │ │ └── inspector.png
│ │ └── quickstart
│ │ ├── go
│ │ │ ├── adkgo
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── genAI
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── genkit
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── langchain
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── openAI
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ └── quickstart_test.go
│ │ ├── golden.txt
│ │ ├── js
│ │ │ ├── genAI
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── genkit
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── langchain
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── llamaindex
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ └── quickstart.test.js
│ │ ├── python
│ │ │ ├── __init__.py
│ │ │ ├── adk
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── core
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── langchain
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── llamaindex
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ └── quickstart_test.py
│ │ └── shared
│ │ ├── cloud_setup.md
│ │ ├── configure_toolbox.md
│ │ └── database_setup.md
│ ├── how-to
│ │ ├── _index.md
│ │ ├── connect_via_geminicli.md
│ │ ├── connect_via_mcp.md
│ │ ├── connect-ide
│ │ │ ├── _index.md
│ │ │ ├── alloydb_pg_admin_mcp.md
│ │ │ ├── alloydb_pg_mcp.md
│ │ │ ├── bigquery_mcp.md
│ │ │ ├── cloud_sql_mssql_admin_mcp.md
│ │ │ ├── cloud_sql_mssql_mcp.md
│ │ │ ├── cloud_sql_mysql_admin_mcp.md
│ │ │ ├── cloud_sql_mysql_mcp.md
│ │ │ ├── cloud_sql_pg_admin_mcp.md
│ │ │ ├── cloud_sql_pg_mcp.md
│ │ │ ├── firestore_mcp.md
│ │ │ ├── looker_mcp.md
│ │ │ ├── mssql_mcp.md
│ │ │ ├── mysql_mcp.md
│ │ │ ├── neo4j_mcp.md
│ │ │ ├── postgres_mcp.md
│ │ │ ├── spanner_mcp.md
│ │ │ └── sqlite_mcp.md
│ │ ├── deploy_docker.md
│ │ ├── deploy_gke.md
│ │ ├── deploy_toolbox.md
│ │ ├── export_telemetry.md
│ │ └── toolbox-ui
│ │ ├── edit-headers.gif
│ │ ├── edit-headers.png
│ │ ├── index.md
│ │ ├── optional-param-checked.png
│ │ ├── optional-param-unchecked.png
│ │ ├── run-tool.gif
│ │ ├── tools.png
│ │ └── toolsets.png
│ ├── reference
│ │ ├── _index.md
│ │ ├── cli.md
│ │ └── prebuilt-tools.md
│ ├── resources
│ │ ├── _index.md
│ │ ├── authServices
│ │ │ ├── _index.md
│ │ │ └── google.md
│ │ ├── sources
│ │ │ ├── _index.md
│ │ │ ├── alloydb-admin.md
│ │ │ ├── alloydb-pg.md
│ │ │ ├── bigquery.md
│ │ │ ├── bigtable.md
│ │ │ ├── cassandra.md
│ │ │ ├── clickhouse.md
│ │ │ ├── cloud-healthcare.md
│ │ │ ├── cloud-monitoring.md
│ │ │ ├── cloud-sql-admin.md
│ │ │ ├── cloud-sql-mssql.md
│ │ │ ├── cloud-sql-mysql.md
│ │ │ ├── cloud-sql-pg.md
│ │ │ ├── couchbase.md
│ │ │ ├── dataplex.md
│ │ │ ├── dgraph.md
│ │ │ ├── elasticsearch.md
│ │ │ ├── firebird.md
│ │ │ ├── firestore.md
│ │ │ ├── http.md
│ │ │ ├── looker.md
│ │ │ ├── mindsdb.md
│ │ │ ├── mongodb.md
│ │ │ ├── mssql.md
│ │ │ ├── mysql.md
│ │ │ ├── neo4j.md
│ │ │ ├── oceanbase.md
│ │ │ ├── oracle.md
│ │ │ ├── postgres.md
│ │ │ ├── redis.md
│ │ │ ├── serverless-spark.md
│ │ │ ├── singlestore.md
│ │ │ ├── spanner.md
│ │ │ ├── sqlite.md
│ │ │ ├── tidb.md
│ │ │ ├── trino.md
│ │ │ ├── valkey.md
│ │ │ └── yugabytedb.md
│ │ └── tools
│ │ ├── _index.md
│ │ ├── alloydb
│ │ │ ├── _index.md
│ │ │ ├── alloydb-create-cluster.md
│ │ │ ├── alloydb-create-instance.md
│ │ │ ├── alloydb-create-user.md
│ │ │ ├── alloydb-get-cluster.md
│ │ │ ├── alloydb-get-instance.md
│ │ │ ├── alloydb-get-user.md
│ │ │ ├── alloydb-list-clusters.md
│ │ │ ├── alloydb-list-instances.md
│ │ │ ├── alloydb-list-users.md
│ │ │ └── alloydb-wait-for-operation.md
│ │ ├── alloydbainl
│ │ │ ├── _index.md
│ │ │ └── alloydb-ai-nl.md
│ │ ├── bigquery
│ │ │ ├── _index.md
│ │ │ ├── bigquery-analyze-contribution.md
│ │ │ ├── bigquery-conversational-analytics.md
│ │ │ ├── bigquery-execute-sql.md
│ │ │ ├── bigquery-forecast.md
│ │ │ ├── bigquery-get-dataset-info.md
│ │ │ ├── bigquery-get-table-info.md
│ │ │ ├── bigquery-list-dataset-ids.md
│ │ │ ├── bigquery-list-table-ids.md
│ │ │ ├── bigquery-search-catalog.md
│ │ │ └── bigquery-sql.md
│ │ ├── bigtable
│ │ │ ├── _index.md
│ │ │ └── bigtable-sql.md
│ │ ├── cassandra
│ │ │ ├── _index.md
│ │ │ └── cassandra-cql.md
│ │ ├── clickhouse
│ │ │ ├── _index.md
│ │ │ ├── clickhouse-execute-sql.md
│ │ │ ├── clickhouse-list-databases.md
│ │ │ ├── clickhouse-list-tables.md
│ │ │ └── clickhouse-sql.md
│ │ ├── cloudhealthcare
│ │ │ ├── _index.md
│ │ │ ├── cloud-healthcare-fhir-fetch-page.md
│ │ │ ├── cloud-healthcare-fhir-patient-everything.md
│ │ │ ├── cloud-healthcare-fhir-patient-search.md
│ │ │ ├── cloud-healthcare-get-dataset.md
│ │ │ ├── cloud-healthcare-get-dicom-store-metrics.md
│ │ │ ├── cloud-healthcare-get-dicom-store.md
│ │ │ ├── cloud-healthcare-get-fhir-resource.md
│ │ │ ├── cloud-healthcare-get-fhir-store-metrics.md
│ │ │ ├── cloud-healthcare-get-fhir-store.md
│ │ │ ├── cloud-healthcare-list-dicom-stores.md
│ │ │ ├── cloud-healthcare-list-fhir-stores.md
│ │ │ ├── cloud-healthcare-retrieve-rendered-dicom-instance.md
│ │ │ ├── cloud-healthcare-search-dicom-instances.md
│ │ │ ├── cloud-healthcare-search-dicom-series.md
│ │ │ └── cloud-healthcare-search-dicom-studies.md
│ │ ├── cloudmonitoring
│ │ │ ├── _index.md
│ │ │ └── cloud-monitoring-query-prometheus.md
│ │ ├── cloudsql
│ │ │ ├── _index.md
│ │ │ ├── cloudsqlcreatedatabase.md
│ │ │ ├── cloudsqlcreateusers.md
│ │ │ ├── cloudsqlgetinstances.md
│ │ │ ├── cloudsqllistdatabases.md
│ │ │ ├── cloudsqllistinstances.md
│ │ │ ├── cloudsqlmssqlcreateinstance.md
│ │ │ ├── cloudsqlmysqlcreateinstance.md
│ │ │ ├── cloudsqlpgcreateinstances.md
│ │ │ └── cloudsqlwaitforoperation.md
│ │ ├── couchbase
│ │ │ ├── _index.md
│ │ │ └── couchbase-sql.md
│ │ ├── dataform
│ │ │ ├── _index.md
│ │ │ └── dataform-compile-local.md
│ │ ├── dataplex
│ │ │ ├── _index.md
│ │ │ ├── dataplex-lookup-entry.md
│ │ │ ├── dataplex-search-aspect-types.md
│ │ │ └── dataplex-search-entries.md
│ │ ├── dgraph
│ │ │ ├── _index.md
│ │ │ └── dgraph-dql.md
│ │ ├── elasticsearch
│ │ │ ├── _index.md
│ │ │ └── elasticsearch-esql.md
│ │ ├── firebird
│ │ │ ├── _index.md
│ │ │ ├── firebird-execute-sql.md
│ │ │ └── firebird-sql.md
│ │ ├── firestore
│ │ │ ├── _index.md
│ │ │ ├── firestore-add-documents.md
│ │ │ ├── firestore-delete-documents.md
│ │ │ ├── firestore-get-documents.md
│ │ │ ├── firestore-get-rules.md
│ │ │ ├── firestore-list-collections.md
│ │ │ ├── firestore-query-collection.md
│ │ │ ├── firestore-query.md
│ │ │ ├── firestore-update-document.md
│ │ │ └── firestore-validate-rules.md
│ │ ├── http
│ │ │ ├── _index.md
│ │ │ └── http.md
│ │ ├── looker
│ │ │ ├── _index.md
│ │ │ ├── looker-add-dashboard-element.md
│ │ │ ├── looker-conversational-analytics.md
│ │ │ ├── looker-create-project-file.md
│ │ │ ├── looker-delete-project-file.md
│ │ │ ├── looker-dev-mode.md
│ │ │ ├── looker-get-connection-databases.md
│ │ │ ├── looker-get-connection-schemas.md
│ │ │ ├── looker-get-connection-table-columns.md
│ │ │ ├── looker-get-connection-tables.md
│ │ │ ├── looker-get-connections.md
│ │ │ ├── looker-get-dashboards.md
│ │ │ ├── looker-get-dimensions.md
│ │ │ ├── looker-get-explores.md
│ │ │ ├── looker-get-filters.md
│ │ │ ├── looker-get-looks.md
│ │ │ ├── looker-get-measures.md
│ │ │ ├── looker-get-models.md
│ │ │ ├── looker-get-parameters.md
│ │ │ ├── looker-get-project-file.md
│ │ │ ├── looker-get-project-files.md
│ │ │ ├── looker-get-projects.md
│ │ │ ├── looker-health-analyze.md
│ │ │ ├── looker-health-pulse.md
│ │ │ ├── looker-health-vacuum.md
│ │ │ ├── looker-make-dashboard.md
│ │ │ ├── looker-make-look.md
│ │ │ ├── looker-query-sql.md
│ │ │ ├── looker-query-url.md
│ │ │ ├── looker-query.md
│ │ │ ├── looker-run-dashboard.md
│ │ │ ├── looker-run-look.md
│ │ │ └── looker-update-project-file.md
│ │ ├── mindsdb
│ │ │ ├── _index.md
│ │ │ ├── mindsdb-execute-sql.md
│ │ │ └── mindsdb-sql.md
│ │ ├── mongodb
│ │ │ ├── _index.md
│ │ │ ├── mongodb-aggregate.md
│ │ │ ├── mongodb-delete-many.md
│ │ │ ├── mongodb-delete-one.md
│ │ │ ├── mongodb-find-one.md
│ │ │ ├── mongodb-find.md
│ │ │ ├── mongodb-insert-many.md
│ │ │ ├── mongodb-insert-one.md
│ │ │ ├── mongodb-update-many.md
│ │ │ └── mongodb-update-one.md
│ │ ├── mssql
│ │ │ ├── _index.md
│ │ │ ├── mssql-execute-sql.md
│ │ │ ├── mssql-list-tables.md
│ │ │ └── mssql-sql.md
│ │ ├── mysql
│ │ │ ├── _index.md
│ │ │ ├── mysql-execute-sql.md
│ │ │ ├── mysql-list-active-queries.md
│ │ │ ├── mysql-list-table-fragmentation.md
│ │ │ ├── mysql-list-tables-missing-unique-indexes.md
│ │ │ ├── mysql-list-tables.md
│ │ │ └── mysql-sql.md
│ │ ├── neo4j
│ │ │ ├── _index.md
│ │ │ ├── neo4j-cypher.md
│ │ │ ├── neo4j-execute-cypher.md
│ │ │ └── neo4j-schema.md
│ │ ├── oceanbase
│ │ │ ├── _index.md
│ │ │ ├── oceanbase-execute-sql.md
│ │ │ └── oceanbase-sql.md
│ │ ├── oracle
│ │ │ ├── _index.md
│ │ │ ├── oracle-execute-sql.md
│ │ │ └── oracle-sql.md
│ │ ├── postgres
│ │ │ ├── _index.md
│ │ │ ├── postgres-execute-sql.md
│ │ │ ├── postgres-list-active-queries.md
│ │ │ ├── postgres-list-available-extensions.md
│ │ │ ├── postgres-list-installed-extensions.md
│ │ │ ├── postgres-list-schemas.md
│ │ │ ├── postgres-list-tables.md
│ │ │ ├── postgres-list-views.md
│ │ │ └── postgres-sql.md
│ │ ├── redis
│ │ │ ├── _index.md
│ │ │ └── redis.md
│ │ ├── serverless-spark
│ │ │ ├── _index.md
│ │ │ ├── serverless-spark-cancel-batch.md
│ │ │ ├── serverless-spark-get-batch.md
│ │ │ └── serverless-spark-list-batches.md
│ │ ├── singlestore
│ │ │ ├── _index.md
│ │ │ ├── singlestore-execute-sql.md
│ │ │ └── singlestore-sql.md
│ │ ├── spanner
│ │ │ ├── _index.md
│ │ │ ├── spanner-execute-sql.md
│ │ │ ├── spanner-list-tables.md
│ │ │ └── spanner-sql.md
│ │ ├── sqlite
│ │ │ ├── _index.md
│ │ │ ├── sqlite-execute-sql.md
│ │ │ └── sqlite-sql.md
│ │ ├── tidb
│ │ │ ├── _index.md
│ │ │ ├── tidb-execute-sql.md
│ │ │ └── tidb-sql.md
│ │ ├── trino
│ │ │ ├── _index.md
│ │ │ ├── trino-execute-sql.md
│ │ │ └── trino-sql.md
│ │ ├── utility
│ │ │ ├── _index.md
│ │ │ └── wait.md
│ │ ├── valkey
│ │ │ ├── _index.md
│ │ │ └── valkey.md
│ │ └── yugabytedb
│ │ ├── _index.md
│ │ └── yugabytedb-sql.md
│ ├── samples
│ │ ├── _index.md
│ │ ├── alloydb
│ │ │ ├── _index.md
│ │ │ ├── ai-nl
│ │ │ │ ├── alloydb_ai_nl.ipynb
│ │ │ │ └── index.md
│ │ │ └── mcp_quickstart.md
│ │ ├── bigquery
│ │ │ ├── _index.md
│ │ │ ├── colab_quickstart_bigquery.ipynb
│ │ │ ├── local_quickstart.md
│ │ │ └── mcp_quickstart
│ │ │ ├── _index.md
│ │ │ ├── inspector_tools.png
│ │ │ └── inspector.png
│ │ └── looker
│ │ ├── _index.md
│ │ ├── looker_gemini_oauth
│ │ │ ├── _index.md
│ │ │ ├── authenticated.png
│ │ │ ├── authorize.png
│ │ │ └── registration.png
│ │ ├── looker_gemini.md
│ │ └── looker_mcp_inspector
│ │ ├── _index.md
│ │ ├── inspector_tools.png
│ │ └── inspector.png
│ └── sdks
│ ├── _index.md
│ ├── go-sdk.md
│ ├── js-sdk.md
│ └── python-sdk.md
├── gemini-extension.json
├── go.mod
├── go.sum
├── internal
│ ├── auth
│ │ ├── auth.go
│ │ └── google
│ │ └── google.go
│ ├── log
│ │ ├── handler.go
│ │ ├── log_test.go
│ │ ├── log.go
│ │ └── logger.go
│ ├── prebuiltconfigs
│ │ ├── prebuiltconfigs_test.go
│ │ ├── prebuiltconfigs.go
│ │ └── tools
│ │ ├── alloydb-postgres-admin.yaml
│ │ ├── alloydb-postgres-observability.yaml
│ │ ├── alloydb-postgres.yaml
│ │ ├── bigquery.yaml
│ │ ├── clickhouse.yaml
│ │ ├── cloud-healthcare.yaml
│ │ ├── cloud-sql-mssql-admin.yaml
│ │ ├── cloud-sql-mssql-observability.yaml
│ │ ├── cloud-sql-mssql.yaml
│ │ ├── cloud-sql-mysql-admin.yaml
│ │ ├── cloud-sql-mysql-observability.yaml
│ │ ├── cloud-sql-mysql.yaml
│ │ ├── cloud-sql-postgres-admin.yaml
│ │ ├── cloud-sql-postgres-observability.yaml
│ │ ├── cloud-sql-postgres.yaml
│ │ ├── dataplex.yaml
│ │ ├── elasticsearch.yaml
│ │ ├── firestore.yaml
│ │ ├── looker-conversational-analytics.yaml
│ │ ├── looker.yaml
│ │ ├── mindsdb.yaml
│ │ ├── mssql.yaml
│ │ ├── mysql.yaml
│ │ ├── neo4j.yaml
│ │ ├── oceanbase.yaml
│ │ ├── postgres.yaml
│ │ ├── serverless-spark.yaml
│ │ ├── singlestore.yaml
│ │ ├── spanner-postgres.yaml
│ │ ├── spanner.yaml
│ │ └── sqlite.yaml
│ ├── server
│ │ ├── api_test.go
│ │ ├── api.go
│ │ ├── common_test.go
│ │ ├── config.go
│ │ ├── mcp
│ │ │ ├── jsonrpc
│ │ │ │ ├── jsonrpc_test.go
│ │ │ │ └── jsonrpc.go
│ │ │ ├── mcp.go
│ │ │ ├── util
│ │ │ │ └── lifecycle.go
│ │ │ ├── v20241105
│ │ │ │ ├── method.go
│ │ │ │ └── types.go
│ │ │ ├── v20250326
│ │ │ │ ├── method.go
│ │ │ │ └── types.go
│ │ │ └── v20250618
│ │ │ ├── method.go
│ │ │ └── types.go
│ │ ├── mcp_test.go
│ │ ├── mcp.go
│ │ ├── server_test.go
│ │ ├── server.go
│ │ ├── static
│ │ │ ├── assets
│ │ │ │ └── mcptoolboxlogo.png
│ │ │ ├── css
│ │ │ │ └── style.css
│ │ │ ├── index.html
│ │ │ ├── js
│ │ │ │ ├── auth.js
│ │ │ │ ├── loadTools.js
│ │ │ │ ├── mainContent.js
│ │ │ │ ├── navbar.js
│ │ │ │ ├── runTool.js
│ │ │ │ ├── toolDisplay.js
│ │ │ │ ├── tools.js
│ │ │ │ └── toolsets.js
│ │ │ ├── tools.html
│ │ │ └── toolsets.html
│ │ ├── web_test.go
│ │ └── web.go
│ ├── sources
│ │ ├── alloydbadmin
│ │ │ ├── alloydbadmin_test.go
│ │ │ └── alloydbadmin.go
│ │ ├── alloydbpg
│ │ │ ├── alloydb_pg_test.go
│ │ │ └── alloydb_pg.go
│ │ ├── bigquery
│ │ │ ├── bigquery_test.go
│ │ │ ├── bigquery.go
│ │ │ └── cache.go
│ │ ├── bigtable
│ │ │ ├── bigtable_test.go
│ │ │ └── bigtable.go
│ │ ├── cassandra
│ │ │ ├── cassandra_test.go
│ │ │ └── cassandra.go
│ │ ├── clickhouse
│ │ │ ├── clickhouse_test.go
│ │ │ └── clickhouse.go
│ │ ├── cloudhealthcare
│ │ │ ├── cloud_healthcare_test.go
│ │ │ └── cloud_healthcare.go
│ │ ├── cloudmonitoring
│ │ │ ├── cloud_monitoring_test.go
│ │ │ └── cloud_monitoring.go
│ │ ├── cloudsqladmin
│ │ │ ├── cloud_sql_admin_test.go
│ │ │ └── cloud_sql_admin.go
│ │ ├── cloudsqlmssql
│ │ │ ├── cloud_sql_mssql_test.go
│ │ │ └── cloud_sql_mssql.go
│ │ ├── cloudsqlmysql
│ │ │ ├── cloud_sql_mysql_test.go
│ │ │ └── cloud_sql_mysql.go
│ │ ├── cloudsqlpg
│ │ │ ├── cloud_sql_pg_test.go
│ │ │ └── cloud_sql_pg.go
│ │ ├── couchbase
│ │ │ ├── couchbase_test.go
│ │ │ └── couchbase.go
│ │ ├── dataplex
│ │ │ ├── dataplex_test.go
│ │ │ └── dataplex.go
│ │ ├── dgraph
│ │ │ ├── dgraph_test.go
│ │ │ └── dgraph.go
│ │ ├── dialect.go
│ │ ├── elasticsearch
│ │ │ ├── elasticsearch_test.go
│ │ │ └── elasticsearch.go
│ │ ├── firebird
│ │ │ ├── firebird_test.go
│ │ │ └── firebird.go
│ │ ├── firestore
│ │ │ ├── firestore_test.go
│ │ │ └── firestore.go
│ │ ├── http
│ │ │ ├── http_test.go
│ │ │ └── http.go
│ │ ├── ip_type.go
│ │ ├── looker
│ │ │ ├── looker_test.go
│ │ │ └── looker.go
│ │ ├── mindsdb
│ │ │ ├── mindsdb_test.go
│ │ │ └── mindsdb.go
│ │ ├── mongodb
│ │ │ ├── mongodb_test.go
│ │ │ └── mongodb.go
│ │ ├── mssql
│ │ │ ├── mssql_test.go
│ │ │ └── mssql.go
│ │ ├── mysql
│ │ │ ├── mysql_test.go
│ │ │ └── mysql.go
│ │ ├── neo4j
│ │ │ ├── neo4j_test.go
│ │ │ └── neo4j.go
│ │ ├── oceanbase
│ │ │ ├── oceanbase_test.go
│ │ │ └── oceanbase.go
│ │ ├── oracle
│ │ │ └── oracle.go
│ │ ├── postgres
│ │ │ ├── postgres_test.go
│ │ │ └── postgres.go
│ │ ├── redis
│ │ │ ├── redis_test.go
│ │ │ └── redis.go
│ │ ├── serverlessspark
│ │ │ ├── serverlessspark_test.go
│ │ │ └── serverlessspark.go
│ │ ├── singlestore
│ │ │ ├── singlestore_test.go
│ │ │ └── singlestore.go
│ │ ├── sources.go
│ │ ├── spanner
│ │ │ ├── spanner_test.go
│ │ │ └── spanner.go
│ │ ├── sqlite
│ │ │ ├── sqlite_test.go
│ │ │ └── sqlite.go
│ │ ├── tidb
│ │ │ ├── tidb_test.go
│ │ │ └── tidb.go
│ │ ├── trino
│ │ │ ├── trino_test.go
│ │ │ └── trino.go
│ │ ├── util.go
│ │ ├── valkey
│ │ │ ├── valkey_test.go
│ │ │ └── valkey.go
│ │ └── yugabytedb
│ │ ├── yugabytedb_test.go
│ │ └── yugabytedb.go
│ ├── telemetry
│ │ ├── instrumentation.go
│ │ └── telemetry.go
│ ├── testutils
│ │ └── testutils.go
│ ├── tools
│ │ ├── alloydb
│ │ │ ├── alloydbcreatecluster
│ │ │ │ ├── alloydbcreatecluster_test.go
│ │ │ │ └── alloydbcreatecluster.go
│ │ │ ├── alloydbcreateinstance
│ │ │ │ ├── alloydbcreateinstance_test.go
│ │ │ │ └── alloydbcreateinstance.go
│ │ │ ├── alloydbcreateuser
│ │ │ │ ├── alloydbcreateuser_test.go
│ │ │ │ └── alloydbcreateuser.go
│ │ │ ├── alloydbgetcluster
│ │ │ │ ├── alloydbgetcluster_test.go
│ │ │ │ └── alloydbgetcluster.go
│ │ │ ├── alloydbgetinstance
│ │ │ │ ├── alloydbgetinstance_test.go
│ │ │ │ └── alloydbgetinstance.go
│ │ │ ├── alloydbgetuser
│ │ │ │ ├── alloydbgetuser_test.go
│ │ │ │ └── alloydbgetuser.go
│ │ │ ├── alloydblistclusters
│ │ │ │ ├── alloydblistclusters_test.go
│ │ │ │ └── alloydblistclusters.go
│ │ │ ├── alloydblistinstances
│ │ │ │ ├── alloydblistinstances_test.go
│ │ │ │ └── alloydblistinstances.go
│ │ │ ├── alloydblistusers
│ │ │ │ ├── alloydblistusers_test.go
│ │ │ │ └── alloydblistusers.go
│ │ │ └── alloydbwaitforoperation
│ │ │ ├── alloydbwaitforoperation_test.go
│ │ │ └── alloydbwaitforoperation.go
│ │ ├── alloydbainl
│ │ │ ├── alloydbainl_test.go
│ │ │ └── alloydbainl.go
│ │ ├── bigquery
│ │ │ ├── bigqueryanalyzecontribution
│ │ │ │ ├── bigqueryanalyzecontribution_test.go
│ │ │ │ └── bigqueryanalyzecontribution.go
│ │ │ ├── bigquerycommon
│ │ │ │ ├── table_name_parser_test.go
│ │ │ │ ├── table_name_parser.go
│ │ │ │ └── util.go
│ │ │ ├── bigqueryconversationalanalytics
│ │ │ │ ├── bigqueryconversationalanalytics_test.go
│ │ │ │ └── bigqueryconversationalanalytics.go
│ │ │ ├── bigqueryexecutesql
│ │ │ │ ├── bigqueryexecutesql_test.go
│ │ │ │ └── bigqueryexecutesql.go
│ │ │ ├── bigqueryforecast
│ │ │ │ ├── bigqueryforecast_test.go
│ │ │ │ └── bigqueryforecast.go
│ │ │ ├── bigquerygetdatasetinfo
│ │ │ │ ├── bigquerygetdatasetinfo_test.go
│ │ │ │ └── bigquerygetdatasetinfo.go
│ │ │ ├── bigquerygettableinfo
│ │ │ │ ├── bigquerygettableinfo_test.go
│ │ │ │ └── bigquerygettableinfo.go
│ │ │ ├── bigquerylistdatasetids
│ │ │ │ ├── bigquerylistdatasetids_test.go
│ │ │ │ └── bigquerylistdatasetids.go
│ │ │ ├── bigquerylisttableids
│ │ │ │ ├── bigquerylisttableids_test.go
│ │ │ │ └── bigquerylisttableids.go
│ │ │ ├── bigquerysearchcatalog
│ │ │ │ ├── bigquerysearchcatalog_test.go
│ │ │ │ └── bigquerysearchcatalog.go
│ │ │ └── bigquerysql
│ │ │ ├── bigquerysql_test.go
│ │ │ └── bigquerysql.go
│ │ ├── bigtable
│ │ │ ├── bigtable_test.go
│ │ │ └── bigtable.go
│ │ ├── cassandra
│ │ │ └── cassandracql
│ │ │ ├── cassandracql_test.go
│ │ │ └── cassandracql.go
│ │ ├── clickhouse
│ │ │ ├── clickhouseexecutesql
│ │ │ │ ├── clickhouseexecutesql_test.go
│ │ │ │ └── clickhouseexecutesql.go
│ │ │ ├── clickhouselistdatabases
│ │ │ │ ├── clickhouselistdatabases_test.go
│ │ │ │ └── clickhouselistdatabases.go
│ │ │ ├── clickhouselisttables
│ │ │ │ ├── clickhouselisttables_test.go
│ │ │ │ └── clickhouselisttables.go
│ │ │ └── clickhousesql
│ │ │ ├── clickhousesql_test.go
│ │ │ └── clickhousesql.go
│ │ ├── cloudhealthcare
│ │ │ ├── cloudhealthcarefhirfetchpage
│ │ │ │ ├── cloudhealthcarefhirfetchpage_test.go
│ │ │ │ └── cloudhealthcarefhirfetchpage.go
│ │ │ ├── cloudhealthcarefhirpatienteverything
│ │ │ │ ├── cloudhealthcarefhirpatienteverything_test.go
│ │ │ │ └── cloudhealthcarefhirpatienteverything.go
│ │ │ ├── cloudhealthcarefhirpatientsearch
│ │ │ │ ├── cloudhealthcarefhirpatientsearch_test.go
│ │ │ │ └── cloudhealthcarefhirpatientsearch.go
│ │ │ ├── cloudhealthcaregetdataset
│ │ │ │ ├── cloudhealthcaregetdataset_test.go
│ │ │ │ └── cloudhealthcaregetdataset.go
│ │ │ ├── cloudhealthcaregetdicomstore
│ │ │ │ ├── cloudhealthcaregetdicomstore_test.go
│ │ │ │ └── cloudhealthcaregetdicomstore.go
│ │ │ ├── cloudhealthcaregetdicomstoremetrics
│ │ │ │ ├── cloudhealthcaregetdicomstoremetrics_test.go
│ │ │ │ └── cloudhealthcaregetdicomstoremetrics.go
│ │ │ ├── cloudhealthcaregetfhirresource
│ │ │ │ ├── cloudhealthcaregetfhirresource_test.go
│ │ │ │ └── cloudhealthcaregetfhirresource.go
│ │ │ ├── cloudhealthcaregetfhirstore
│ │ │ │ ├── cloudhealthcaregetfhirstore_test.go
│ │ │ │ └── cloudhealthcaregetfhirstore.go
│ │ │ ├── cloudhealthcaregetfhirstoremetrics
│ │ │ │ ├── cloudhealthcaregetfhirstoremetrics_test.go
│ │ │ │ └── cloudhealthcaregetfhirstoremetrics.go
│ │ │ ├── cloudhealthcarelistdicomstores
│ │ │ │ ├── cloudhealthcarelistdicomstores_test.go
│ │ │ │ └── cloudhealthcarelistdicomstores.go
│ │ │ ├── cloudhealthcarelistfhirstores
│ │ │ │ ├── cloudhealthcarelistfhirstores_test.go
│ │ │ │ └── cloudhealthcarelistfhirstores.go
│ │ │ ├── cloudhealthcareretrieverendereddicominstance
│ │ │ │ ├── cloudhealthcareretrieverendereddicominstance_test.go
│ │ │ │ └── cloudhealthcareretrieverendereddicominstance.go
│ │ │ ├── cloudhealthcaresearchdicominstances
│ │ │ │ ├── cloudhealthcaresearchdicominstances_test.go
│ │ │ │ └── cloudhealthcaresearchdicominstances.go
│ │ │ ├── cloudhealthcaresearchdicomseries
│ │ │ │ ├── cloudhealthcaresearchdicomseries_test.go
│ │ │ │ └── cloudhealthcaresearchdicomseries.go
│ │ │ ├── cloudhealthcaresearchdicomstudies
│ │ │ │ ├── cloudhealthcaresearchdicomstudies_test.go
│ │ │ │ └── cloudhealthcaresearchdicomstudies.go
│ │ │ └── common
│ │ │ └── util.go
│ │ ├── cloudmonitoring
│ │ │ ├── cloudmonitoring_test.go
│ │ │ └── cloudmonitoring.go
│ │ ├── cloudsql
│ │ │ ├── cloudsqlcreatedatabase
│ │ │ │ ├── cloudsqlcreatedatabase_test.go
│ │ │ │ └── cloudsqlcreatedatabase.go
│ │ │ ├── cloudsqlcreateusers
│ │ │ │ ├── cloudsqlcreateusers_test.go
│ │ │ │ └── cloudsqlcreateusers.go
│ │ │ ├── cloudsqlgetinstances
│ │ │ │ ├── cloudsqlgetinstances_test.go
│ │ │ │ └── cloudsqlgetinstances.go
│ │ │ ├── cloudsqllistdatabases
│ │ │ │ ├── cloudsqllistdatabases_test.go
│ │ │ │ └── cloudsqllistdatabases.go
│ │ │ ├── cloudsqllistinstances
│ │ │ │ ├── cloudsqllistinstances_test.go
│ │ │ │ └── cloudsqllistinstances.go
│ │ │ └── cloudsqlwaitforoperation
│ │ │ ├── cloudsqlwaitforoperation_test.go
│ │ │ └── cloudsqlwaitforoperation.go
│ │ ├── cloudsqlmssql
│ │ │ └── cloudsqlmssqlcreateinstance
│ │ │ ├── cloudsqlmssqlcreateinstance_test.go
│ │ │ └── cloudsqlmssqlcreateinstance.go
│ │ ├── cloudsqlmysql
│ │ │ └── cloudsqlmysqlcreateinstance
│ │ │ ├── cloudsqlmysqlcreateinstance_test.go
│ │ │ └── cloudsqlmysqlcreateinstance.go
│ │ ├── cloudsqlpg
│ │ │ └── cloudsqlpgcreateinstances
│ │ │ ├── cloudsqlpgcreateinstances_test.go
│ │ │ └── cloudsqlpgcreateinstances.go
│ │ ├── common_test.go
│ │ ├── common.go
│ │ ├── couchbase
│ │ │ ├── couchbase_test.go
│ │ │ └── couchbase.go
│ │ ├── dataform
│ │ │ └── dataformcompilelocal
│ │ │ ├── dataformcompilelocal_test.go
│ │ │ └── dataformcompilelocal.go
│ │ ├── dataplex
│ │ │ ├── dataplexlookupentry
│ │ │ │ ├── dataplexlookupentry_test.go
│ │ │ │ └── dataplexlookupentry.go
│ │ │ ├── dataplexsearchaspecttypes
│ │ │ │ ├── dataplexsearchaspecttypes_test.go
│ │ │ │ └── dataplexsearchaspecttypes.go
│ │ │ └── dataplexsearchentries
│ │ │ ├── dataplexsearchentries_test.go
│ │ │ └── dataplexsearchentries.go
│ │ ├── dgraph
│ │ │ ├── dgraph_test.go
│ │ │ └── dgraph.go
│ │ ├── elasticsearch
│ │ │ └── elasticsearchesql
│ │ │ ├── elasticsearchesql_test.go
│ │ │ └── elasticsearchesql.go
│ │ ├── firebird
│ │ │ ├── firebirdexecutesql
│ │ │ │ ├── firebirdexecutesql_test.go
│ │ │ │ └── firebirdexecutesql.go
│ │ │ └── firebirdsql
│ │ │ ├── firebirdsql_test.go
│ │ │ └── firebirdsql.go
│ │ ├── firestore
│ │ │ ├── firestoreadddocuments
│ │ │ │ ├── firestoreadddocuments_test.go
│ │ │ │ └── firestoreadddocuments.go
│ │ │ ├── firestoredeletedocuments
│ │ │ │ ├── firestoredeletedocuments_test.go
│ │ │ │ └── firestoredeletedocuments.go
│ │ │ ├── firestoregetdocuments
│ │ │ │ ├── firestoregetdocuments_test.go
│ │ │ │ └── firestoregetdocuments.go
│ │ │ ├── firestoregetrules
│ │ │ │ ├── firestoregetrules_test.go
│ │ │ │ └── firestoregetrules.go
│ │ │ ├── firestorelistcollections
│ │ │ │ ├── firestorelistcollections_test.go
│ │ │ │ └── firestorelistcollections.go
│ │ │ ├── firestorequery
│ │ │ │ ├── firestorequery_test.go
│ │ │ │ └── firestorequery.go
│ │ │ ├── firestorequerycollection
│ │ │ │ ├── firestorequerycollection_test.go
│ │ │ │ └── firestorequerycollection.go
│ │ │ ├── firestoreupdatedocument
│ │ │ │ ├── firestoreupdatedocument_test.go
│ │ │ │ └── firestoreupdatedocument.go
│ │ │ ├── firestorevalidaterules
│ │ │ │ ├── firestorevalidaterules_test.go
│ │ │ │ └── firestorevalidaterules.go
│ │ │ └── util
│ │ │ ├── converter_test.go
│ │ │ ├── converter.go
│ │ │ ├── validator_test.go
│ │ │ └── validator.go
│ │ ├── http
│ │ │ ├── http_test.go
│ │ │ └── http.go
│ │ ├── http_method.go
│ │ ├── looker
│ │ │ ├── lookeradddashboardelement
│ │ │ │ ├── lookeradddashboardelement_test.go
│ │ │ │ └── lookeradddashboardelement.go
│ │ │ ├── lookercommon
│ │ │ │ ├── lookercommon_test.go
│ │ │ │ └── lookercommon.go
│ │ │ ├── lookerconversationalanalytics
│ │ │ │ ├── lookerconversationalanalytics_test.go
│ │ │ │ └── lookerconversationalanalytics.go
│ │ │ ├── lookercreateprojectfile
│ │ │ │ ├── lookercreateprojectfile_test.go
│ │ │ │ └── lookercreateprojectfile.go
│ │ │ ├── lookerdeleteprojectfile
│ │ │ │ ├── lookerdeleteprojectfile_test.go
│ │ │ │ └── lookerdeleteprojectfile.go
│ │ │ ├── lookerdevmode
│ │ │ │ ├── lookerdevmode_test.go
│ │ │ │ └── lookerdevmode.go
│ │ │ ├── lookergetconnectiondatabases
│ │ │ │ ├── lookergetconnectiondatabases_test.go
│ │ │ │ └── lookergetconnectiondatabases.go
│ │ │ ├── lookergetconnections
│ │ │ │ ├── lookergetconnections_test.go
│ │ │ │ └── lookergetconnections.go
│ │ │ ├── lookergetconnectionschemas
│ │ │ │ ├── lookergetconnectionschemas_test.go
│ │ │ │ └── lookergetconnectionschemas.go
│ │ │ ├── lookergetconnectiontablecolumns
│ │ │ │ ├── lookergetconnectiontablecolumns_test.go
│ │ │ │ └── lookergetconnectiontablecolumns.go
│ │ │ ├── lookergetconnectiontables
│ │ │ │ ├── lookergetconnectiontables_test.go
│ │ │ │ └── lookergetconnectiontables.go
│ │ │ ├── lookergetdashboards
│ │ │ │ ├── lookergetdashboards_test.go
│ │ │ │ └── lookergetdashboards.go
│ │ │ ├── lookergetdimensions
│ │ │ │ ├── lookergetdimensions_test.go
│ │ │ │ └── lookergetdimensions.go
│ │ │ ├── lookergetexplores
│ │ │ │ ├── lookergetexplores_test.go
│ │ │ │ └── lookergetexplores.go
│ │ │ ├── lookergetfilters
│ │ │ │ ├── lookergetfilters_test.go
│ │ │ │ └── lookergetfilters.go
│ │ │ ├── lookergetlooks
│ │ │ │ ├── lookergetlooks_test.go
│ │ │ │ └── lookergetlooks.go
│ │ │ ├── lookergetmeasures
│ │ │ │ ├── lookergetmeasures_test.go
│ │ │ │ └── lookergetmeasures.go
│ │ │ ├── lookergetmodels
│ │ │ │ ├── lookergetmodels_test.go
│ │ │ │ └── lookergetmodels.go
│ │ │ ├── lookergetparameters
│ │ │ │ ├── lookergetparameters_test.go
│ │ │ │ └── lookergetparameters.go
│ │ │ ├── lookergetprojectfile
│ │ │ │ ├── lookergetprojectfile_test.go
│ │ │ │ └── lookergetprojectfile.go
│ │ │ ├── lookergetprojectfiles
│ │ │ │ ├── lookergetprojectfiles_test.go
│ │ │ │ └── lookergetprojectfiles.go
│ │ │ ├── lookergetprojects
│ │ │ │ ├── lookergetprojects_test.go
│ │ │ │ └── lookergetprojects.go
│ │ │ ├── lookerhealthanalyze
│ │ │ │ ├── lookerhealthanalyze_test.go
│ │ │ │ └── lookerhealthanalyze.go
│ │ │ ├── lookerhealthpulse
│ │ │ │ ├── lookerhealthpulse_test.go
│ │ │ │ └── lookerhealthpulse.go
│ │ │ ├── lookerhealthvacuum
│ │ │ │ ├── lookerhealthvacuum_test.go
│ │ │ │ └── lookerhealthvacuum.go
│ │ │ ├── lookermakedashboard
│ │ │ │ ├── lookermakedashboard_test.go
│ │ │ │ └── lookermakedashboard.go
│ │ │ ├── lookermakelook
│ │ │ │ ├── lookermakelook_test.go
│ │ │ │ └── lookermakelook.go
│ │ │ ├── lookerquery
│ │ │ │ ├── lookerquery_test.go
│ │ │ │ └── lookerquery.go
│ │ │ ├── lookerquerysql
│ │ │ │ ├── lookerquerysql_test.go
│ │ │ │ └── lookerquerysql.go
│ │ │ ├── lookerqueryurl
│ │ │ │ ├── lookerqueryurl_test.go
│ │ │ │ └── lookerqueryurl.go
│ │ │ ├── lookerrundashboard
│ │ │ │ ├── lookerrundashboard_test.go
│ │ │ │ └── lookerrundashboard.go
│ │ │ ├── lookerrunlook
│ │ │ │ ├── lookerrunlook_test.go
│ │ │ │ └── lookerrunlook.go
│ │ │ └── lookerupdateprojectfile
│ │ │ ├── lookerupdateprojectfile_test.go
│ │ │ └── lookerupdateprojectfile.go
│ │ ├── mindsdb
│ │ │ ├── mindsdbexecutesql
│ │ │ │ ├── mindsdbexecutesql_test.go
│ │ │ │ └── mindsdbexecutesql.go
│ │ │ └── mindsdbsql
│ │ │ ├── mindsdbsql_test.go
│ │ │ └── mindsdbsql.go
│ │ ├── mongodb
│ │ │ ├── mongodbaggregate
│ │ │ │ ├── mongodbaggregate_test.go
│ │ │ │ └── mongodbaggregate.go
│ │ │ ├── mongodbdeletemany
│ │ │ │ ├── mongodbdeletemany_test.go
│ │ │ │ └── mongodbdeletemany.go
│ │ │ ├── mongodbdeleteone
│ │ │ │ ├── mongodbdeleteone_test.go
│ │ │ │ └── mongodbdeleteone.go
│ │ │ ├── mongodbfind
│ │ │ │ ├── mongodbfind_test.go
│ │ │ │ └── mongodbfind.go
│ │ │ ├── mongodbfindone
│ │ │ │ ├── mongodbfindone_test.go
│ │ │ │ └── mongodbfindone.go
│ │ │ ├── mongodbinsertmany
│ │ │ │ ├── mongodbinsertmany_test.go
│ │ │ │ └── mongodbinsertmany.go
│ │ │ ├── mongodbinsertone
│ │ │ │ ├── mongodbinsertone_test.go
│ │ │ │ └── mongodbinsertone.go
│ │ │ ├── mongodbupdatemany
│ │ │ │ ├── mongodbupdatemany_test.go
│ │ │ │ └── mongodbupdatemany.go
│ │ │ └── mongodbupdateone
│ │ │ ├── mongodbupdateone_test.go
│ │ │ └── mongodbupdateone.go
│ │ ├── mssql
│ │ │ ├── mssqlexecutesql
│ │ │ │ ├── mssqlexecutesql_test.go
│ │ │ │ └── mssqlexecutesql.go
│ │ │ ├── mssqllisttables
│ │ │ │ ├── mssqllisttables_test.go
│ │ │ │ └── mssqllisttables.go
│ │ │ └── mssqlsql
│ │ │ ├── mssqlsql_test.go
│ │ │ └── mssqlsql.go
│ │ ├── mysql
│ │ │ ├── mysqlcommon
│ │ │ │ └── mysqlcommon.go
│ │ │ ├── mysqlexecutesql
│ │ │ │ ├── mysqlexecutesql_test.go
│ │ │ │ └── mysqlexecutesql.go
│ │ │ ├── mysqllistactivequeries
│ │ │ │ ├── mysqllistactivequeries_test.go
│ │ │ │ └── mysqllistactivequeries.go
│ │ │ ├── mysqllisttablefragmentation
│ │ │ │ ├── mysqllisttablefragmentation_test.go
│ │ │ │ └── mysqllisttablefragmentation.go
│ │ │ ├── mysqllisttables
│ │ │ │ ├── mysqllisttables_test.go
│ │ │ │ └── mysqllisttables.go
│ │ │ ├── mysqllisttablesmissinguniqueindexes
│ │ │ │ ├── mysqllisttablesmissinguniqueindexes_test.go
│ │ │ │ └── mysqllisttablesmissinguniqueindexes.go
│ │ │ └── mysqlsql
│ │ │ ├── mysqlsql_test.go
│ │ │ └── mysqlsql.go
│ │ ├── neo4j
│ │ │ ├── neo4jcypher
│ │ │ │ ├── neo4jcypher_test.go
│ │ │ │ └── neo4jcypher.go
│ │ │ ├── neo4jexecutecypher
│ │ │ │ ├── classifier
│ │ │ │ │ ├── classifier_test.go
│ │ │ │ │ └── classifier.go
│ │ │ │ ├── neo4jexecutecypher_test.go
│ │ │ │ └── neo4jexecutecypher.go
│ │ │ └── neo4jschema
│ │ │ ├── cache
│ │ │ │ ├── cache_test.go
│ │ │ │ └── cache.go
│ │ │ ├── helpers
│ │ │ │ ├── helpers_test.go
│ │ │ │ └── helpers.go
│ │ │ ├── neo4jschema_test.go
│ │ │ ├── neo4jschema.go
│ │ │ └── types
│ │ │ └── types.go
│ │ ├── oceanbase
│ │ │ ├── oceanbaseexecutesql
│ │ │ │ ├── oceanbaseexecutesql_test.go
│ │ │ │ └── oceanbaseexecutesql.go
│ │ │ └── oceanbasesql
│ │ │ ├── oceanbasesql_test.go
│ │ │ └── oceanbasesql.go
│ │ ├── oracle
│ │ │ ├── oracleexecutesql
│ │ │ │ └── oracleexecutesql.go
│ │ │ └── oraclesql
│ │ │ └── oraclesql.go
│ │ ├── parameters_test.go
│ │ ├── parameters.go
│ │ ├── postgres
│ │ │ ├── postgresexecutesql
│ │ │ │ ├── postgresexecutesql_test.go
│ │ │ │ └── postgresexecutesql.go
│ │ │ ├── postgreslistactivequeries
│ │ │ │ ├── postgreslistactivequeries_test.go
│ │ │ │ └── postgreslistactivequeries.go
│ │ │ ├── postgreslistavailableextensions
│ │ │ │ ├── postgreslistavailableextensions_test.go
│ │ │ │ └── postgreslistavailableextensions.go
│ │ │ ├── postgreslistinstalledextensions
│ │ │ │ ├── postgreslistinstalledextensions_test.go
│ │ │ │ └── postgreslistinstalledextensions.go
│ │ │ ├── postgreslistschemas
│ │ │ │ ├── postgreslistschemas_test.go
│ │ │ │ └── postgreslistschemas.go
│ │ │ ├── postgreslisttables
│ │ │ │ ├── postgreslisttables_test.go
│ │ │ │ └── postgreslisttables.go
│ │ │ ├── postgreslistviews
│ │ │ │ ├── postgreslistviews_test.go
│ │ │ │ └── postgreslistviews.go
│ │ │ └── postgressql
│ │ │ ├── postgressql_test.go
│ │ │ └── postgressql.go
│ │ ├── redis
│ │ │ ├── redis_test.go
│ │ │ └── redis.go
│ │ ├── serverlessspark
│ │ │ ├── serverlesssparkcancelbatch
│ │ │ │ ├── serverlesssparkcancelbatch_test.go
│ │ │ │ └── serverlesssparkcancelbatch.go
│ │ │ ├── serverlesssparkgetbatch
│ │ │ │ ├── serverlesssparkgetbatch_test.go
│ │ │ │ └── serverlesssparkgetbatch.go
│ │ │ └── serverlesssparklistbatches
│ │ │ ├── serverlesssparklistbatches_test.go
│ │ │ └── serverlesssparklistbatches.go
│ │ ├── singlestore
│ │ │ ├── singlestoreexecutesql
│ │ │ │ ├── singlestoreexecutesql_test.go
│ │ │ │ └── singlestoreexecutesql.go
│ │ │ └── singlestoresql
│ │ │ ├── singlestoresql_test.go
│ │ │ └── singlestoresql.go
│ │ ├── spanner
│ │ │ ├── spannerexecutesql
│ │ │ │ ├── spannerexecutesql_test.go
│ │ │ │ └── spannerexecutesql.go
│ │ │ ├── spannerlisttables
│ │ │ │ ├── spannerlisttables_test.go
│ │ │ │ └── spannerlisttables.go
│ │ │ └── spannersql
│ │ │ ├── spanner_test.go
│ │ │ └── spannersql.go
│ │ ├── sqlite
│ │ │ ├── sqliteexecutesql
│ │ │ │ ├── sqliteexecutesql_test.go
│ │ │ │ └── sqliteexecutesql.go
│ │ │ └── sqlitesql
│ │ │ ├── sqlitesql_test.go
│ │ │ └── sqlitesql.go
│ │ ├── tidb
│ │ │ ├── tidbexecutesql
│ │ │ │ ├── tidbexecutesql_test.go
│ │ │ │ └── tidbexecutesql.go
│ │ │ └── tidbsql
│ │ │ ├── tidbsql_test.go
│ │ │ └── tidbsql.go
│ │ ├── tools_test.go
│ │ ├── tools.go
│ │ ├── toolsets.go
│ │ ├── trino
│ │ │ ├── trinoexecutesql
│ │ │ │ ├── trinoexecutesql_test.go
│ │ │ │ └── trinoexecutesql.go
│ │ │ └── trinosql
│ │ │ ├── trinosql_test.go
│ │ │ └── trinosql.go
│ │ ├── utility
│ │ │ └── wait
│ │ │ ├── wait_test.go
│ │ │ └── wait.go
│ │ ├── valkey
│ │ │ ├── valkey_test.go
│ │ │ └── valkey.go
│ │ └── yugabytedbsql
│ │ ├── yugabytedbsql_test.go
│ │ └── yugabytedbsql.go
│ └── util
│ ├── orderedmap
│ │ ├── orderedmap_test.go
│ │ └── orderedmap.go
│ └── util.go
├── LICENSE
├── logo.png
├── main.go
├── MCP-TOOLBOX-EXTENSION.md
├── README.md
└── tests
├── alloydb
│ ├── alloydb_integration_test.go
│ └── alloydb_wait_for_operation_test.go
├── alloydbainl
│ └── alloydb_ai_nl_integration_test.go
├── alloydbpg
│ └── alloydb_pg_integration_test.go
├── auth.go
├── bigquery
│ └── bigquery_integration_test.go
├── bigtable
│ └── bigtable_integration_test.go
├── cassandra
│ └── cassandra_integration_test.go
├── clickhouse
│ └── clickhouse_integration_test.go
├── cloudhealthcare
│ └── cloud_healthcare_integration_test.go
├── cloudmonitoring
│ └── cloud_monitoring_integration_test.go
├── cloudsql
│ ├── cloud_sql_create_database_test.go
│ ├── cloud_sql_create_users_test.go
│ ├── cloud_sql_get_instances_test.go
│ ├── cloud_sql_list_databases_test.go
│ ├── cloudsql_list_instances_test.go
│ └── cloudsql_wait_for_operation_test.go
├── cloudsqlmssql
│ ├── cloud_sql_mssql_create_instance_integration_test.go
│ └── cloud_sql_mssql_integration_test.go
├── cloudsqlmysql
│ ├── cloud_sql_mysql_create_instance_integration_test.go
│ └── cloud_sql_mysql_integration_test.go
├── cloudsqlpg
│ ├── cloud_sql_pg_create_instances_test.go
│ └── cloud_sql_pg_integration_test.go
├── common.go
├── couchbase
│ └── couchbase_integration_test.go
├── dataform
│ └── dataform_integration_test.go
├── dataplex
│ └── dataplex_integration_test.go
├── dgraph
│ └── dgraph_integration_test.go
├── elasticsearch
│ └── elasticsearch_integration_test.go
├── firebird
│ └── firebird_integration_test.go
├── firestore
│ └── firestore_integration_test.go
├── http
│ └── http_integration_test.go
├── looker
│ └── looker_integration_test.go
├── mindsdb
│ └── mindsdb_integration_test.go
├── mongodb
│ └── mongodb_integration_test.go
├── mssql
│ └── mssql_integration_test.go
├── mysql
│ └── mysql_integration_test.go
├── neo4j
│ └── neo4j_integration_test.go
├── oceanbase
│ └── oceanbase_integration_test.go
├── option.go
├── oracle
│ └── oracle_integration_test.go
├── postgres
│ └── postgres_integration_test.go
├── redis
│ └── redis_test.go
├── server.go
├── serverlessspark
│ └── serverless_spark_integration_test.go
├── singlestore
│ └── singlestore_integration_test.go
├── source.go
├── spanner
│ └── spanner_integration_test.go
├── sqlite
│ └── sqlite_integration_test.go
├── tidb
│ └── tidb_integration_test.go
├── tool.go
├── trino
│ └── trino_integration_test.go
├── utility
│ └── wait_integration_test.go
├── valkey
│ └── valkey_test.go
└── yugabytedb
└── yugabytedb_integration_test.go
```
# Files
--------------------------------------------------------------------------------
/tests/bigquery/bigquery_integration_test.go:
--------------------------------------------------------------------------------
```go
1 | // Copyright 2025 Google LLC
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package bigquery
16 |
17 | import (
18 | "bytes"
19 | "context"
20 | "encoding/json"
21 | "fmt"
22 | "io"
23 | "net/http"
24 | "os"
25 | "reflect"
26 | "regexp"
27 | "sort"
28 | "strings"
29 | "testing"
30 | "time"
31 |
32 | bigqueryapi "cloud.google.com/go/bigquery"
33 | "github.com/google/uuid"
34 | "github.com/googleapis/genai-toolbox/internal/sources"
35 | "github.com/googleapis/genai-toolbox/internal/testutils"
36 | "github.com/googleapis/genai-toolbox/tests"
37 | "golang.org/x/oauth2/google"
38 | "google.golang.org/api/googleapi"
39 | "google.golang.org/api/iterator"
40 | "google.golang.org/api/option"
41 | )
42 |
43 | var (
44 | BigquerySourceKind = "bigquery"
45 | BigqueryToolKind = "bigquery-sql"
46 | BigqueryProject = os.Getenv("BIGQUERY_PROJECT")
47 | )
48 |
49 | func getBigQueryVars(t *testing.T) map[string]any {
50 | switch "" {
51 | case BigqueryProject:
52 | t.Fatal("'BIGQUERY_PROJECT' not set")
53 | }
54 |
55 | return map[string]any{
56 | "kind": BigquerySourceKind,
57 | "project": BigqueryProject,
58 | }
59 | }
60 |
61 | // Copied over from bigquery.go
62 | func initBigQueryConnection(project string) (*bigqueryapi.Client, error) {
63 | ctx := context.Background()
64 | cred, err := google.FindDefaultCredentials(ctx, bigqueryapi.Scope)
65 | if err != nil {
66 | return nil, fmt.Errorf("failed to find default Google Cloud credentials with scope %q: %w", bigqueryapi.Scope, err)
67 | }
68 |
69 | client, err := bigqueryapi.NewClient(ctx, project, option.WithCredentials(cred))
70 | if err != nil {
71 | return nil, fmt.Errorf("failed to create BigQuery client for project %q: %w", project, err)
72 | }
73 | return client, nil
74 | }
75 |
76 | func TestBigQueryToolEndpoints(t *testing.T) {
77 | sourceConfig := getBigQueryVars(t)
78 | ctx, cancel := context.WithTimeout(context.Background(), 7*time.Minute)
79 | defer cancel()
80 |
81 | var args []string
82 |
83 | client, err := initBigQueryConnection(BigqueryProject)
84 | if err != nil {
85 | t.Fatalf("unable to create Cloud SQL connection pool: %s", err)
86 | }
87 |
88 | // create table name with UUID
89 | datasetName := fmt.Sprintf("temp_toolbox_test_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
90 | tableName := fmt.Sprintf("param_table_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
91 | tableNameParam := fmt.Sprintf("`%s.%s.%s`",
92 | BigqueryProject,
93 | datasetName,
94 | tableName,
95 | )
96 | tableNameAuth := fmt.Sprintf("`%s.%s.auth_table_%s`",
97 | BigqueryProject,
98 | datasetName,
99 | strings.ReplaceAll(uuid.New().String(), "-", ""),
100 | )
101 | tableNameTemplateParam := fmt.Sprintf("`%s.%s.template_param_table_%s`",
102 | BigqueryProject,
103 | datasetName,
104 | strings.ReplaceAll(uuid.New().String(), "-", ""),
105 | )
106 | tableNameDataType := fmt.Sprintf("`%s.%s.datatype_table_%s`",
107 | BigqueryProject,
108 | datasetName,
109 | strings.ReplaceAll(uuid.New().String(), "-", ""),
110 | )
111 | tableNameForecast := fmt.Sprintf("`%s.%s.forecast_table_%s`",
112 | BigqueryProject,
113 | datasetName,
114 | strings.ReplaceAll(uuid.New().String(), "-", ""),
115 | )
116 |
117 | tableNameAnalyzeContribution := fmt.Sprintf("`%s.%s.analyze_contribution_table_%s`",
118 | BigqueryProject,
119 | datasetName,
120 | strings.ReplaceAll(uuid.New().String(), "-", ""),
121 | )
122 |
123 | // set up data for param tool
124 | createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := getBigQueryParamToolInfo(tableNameParam)
125 | teardownTable1 := setupBigQueryTable(t, ctx, client, createParamTableStmt, insertParamTableStmt, datasetName, tableNameParam, paramTestParams)
126 | defer teardownTable1(t)
127 |
128 | // set up data for auth tool
129 | createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := getBigQueryAuthToolInfo(tableNameAuth)
130 | teardownTable2 := setupBigQueryTable(t, ctx, client, createAuthTableStmt, insertAuthTableStmt, datasetName, tableNameAuth, authTestParams)
131 | defer teardownTable2(t)
132 |
133 | // set up data for data type test tool
134 | createDataTypeTableStmt, insertDataTypeTableStmt, dataTypeToolStmt, arrayDataTypeToolStmt, dataTypeTestParams := getBigQueryDataTypeTestInfo(tableNameDataType)
135 | teardownTable3 := setupBigQueryTable(t, ctx, client, createDataTypeTableStmt, insertDataTypeTableStmt, datasetName, tableNameDataType, dataTypeTestParams)
136 | defer teardownTable3(t)
137 |
138 | // set up data for forecast tool
139 | createForecastTableStmt, insertForecastTableStmt, forecastTestParams := getBigQueryForecastToolInfo(tableNameForecast)
140 | teardownTable4 := setupBigQueryTable(t, ctx, client, createForecastTableStmt, insertForecastTableStmt, datasetName, tableNameForecast, forecastTestParams)
141 | defer teardownTable4(t)
142 |
143 | // set up data for analyze contribution tool
144 | createAnalyzeContributionTableStmt, insertAnalyzeContributionTableStmt, analyzeContributionTestParams := getBigQueryAnalyzeContributionToolInfo(tableNameAnalyzeContribution)
145 | teardownTable5 := setupBigQueryTable(t, ctx, client, createAnalyzeContributionTableStmt, insertAnalyzeContributionTableStmt, datasetName, tableNameAnalyzeContribution, analyzeContributionTestParams)
146 | defer teardownTable5(t)
147 |
148 | // Write config into a file and pass it to command
149 | toolsFile := tests.GetToolsConfig(sourceConfig, BigqueryToolKind, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, authToolStmt)
150 | toolsFile = addClientAuthSourceConfig(t, toolsFile)
151 | toolsFile = addBigQuerySqlToolConfig(t, toolsFile, dataTypeToolStmt, arrayDataTypeToolStmt)
152 | toolsFile = addBigQueryPrebuiltToolsConfig(t, toolsFile)
153 | tmplSelectCombined, tmplSelectFilterCombined := getBigQueryTmplToolStatement()
154 | toolsFile = tests.AddTemplateParamConfig(t, toolsFile, BigqueryToolKind, tmplSelectCombined, tmplSelectFilterCombined, "")
155 |
156 | cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
157 | if err != nil {
158 | t.Fatalf("command initialization returned an error: %s", err)
159 | }
160 | defer cleanup()
161 |
162 | waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
163 | defer cancel()
164 | out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
165 | if err != nil {
166 | t.Logf("toolbox command logs: \n%s", out)
167 | t.Fatalf("toolbox didn't start successfully: %s", err)
168 | }
169 |
170 | // Get configs for tests
171 | select1Want := "[{\"f0_\":1}]"
172 | invokeParamWant := "[{\"id\":1,\"name\":\"Alice\"},{\"id\":3,\"name\":\"Sid\"}]"
173 | datasetInfoWant := "\"Location\":\"US\",\"DefaultTableExpiration\":0,\"Labels\":null,\"Access\":"
174 | tableInfoWant := "{\"Name\":\"\",\"Location\":\"US\",\"Description\":\"\",\"Schema\":[{\"Name\":\"id\""
175 | ddlWant := `"Query executed successfully and returned no content."`
176 | dataInsightsWant := `(?s)Schema Resolved.*Retrieval Query.*SQL Generated.*Answer`
177 | // Partial message; the full error message is too long.
178 | mcpMyFailToolWant := `{"jsonrpc":"2.0","id":"invoke-fail-tool","result":{"content":[{"type":"text","text":"query validation failed: failed to insert dry run job: googleapi: Error 400: Syntax error: Unexpected identifier \"SELEC\" at [1:1]`
179 | mcpSelect1Want := `{"jsonrpc":"2.0","id":"invoke my-auth-required-tool","result":{"content":[{"type":"text","text":"{\"f0_\":1}"}]}}`
180 | createColArray := `["id INT64", "name STRING", "age INT64"]`
181 | selectEmptyWant := `"The query returned 0 rows."`
182 |
183 | // Run tests
184 | tests.RunToolGetTest(t)
185 | tests.RunToolInvokeTest(t, select1Want, tests.DisableOptionalNullParamTest(), tests.EnableClientAuthTest())
186 | tests.RunMCPToolCallMethod(t, mcpMyFailToolWant, mcpSelect1Want, tests.EnableMcpClientAuthTest())
187 | tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam,
188 | tests.WithCreateColArray(createColArray),
189 | tests.WithDdlWant(ddlWant),
190 | tests.WithSelectEmptyWant(selectEmptyWant),
191 | tests.WithInsert1Want(ddlWant),
192 | )
193 |
194 | runBigQueryExecuteSqlToolInvokeTest(t, select1Want, invokeParamWant, tableNameParam, ddlWant)
195 | runBigQueryExecuteSqlToolInvokeDryRunTest(t, datasetName)
196 | runBigQueryForecastToolInvokeTest(t, tableNameForecast)
197 | runBigQueryAnalyzeContributionToolInvokeTest(t, tableNameAnalyzeContribution)
198 | runBigQueryDataTypeTests(t)
199 | runBigQueryListDatasetToolInvokeTest(t, datasetName)
200 | runBigQueryGetDatasetInfoToolInvokeTest(t, datasetName, datasetInfoWant)
201 | runBigQueryListTableIdsToolInvokeTest(t, datasetName, tableName)
202 | runBigQueryGetTableInfoToolInvokeTest(t, datasetName, tableName, tableInfoWant)
203 | runBigQueryConversationalAnalyticsInvokeTest(t, datasetName, tableName, dataInsightsWant)
204 | runBigQuerySearchCatalogToolInvokeTest(t, datasetName, tableName)
205 | }
206 |
207 | func TestBigQueryToolWithDatasetRestriction(t *testing.T) {
208 | ctx, cancel := context.WithTimeout(context.Background(), 4*time.Minute)
209 | defer cancel()
210 |
211 | client, err := initBigQueryConnection(BigqueryProject)
212 | if err != nil {
213 | t.Fatalf("unable to create BigQuery client: %s", err)
214 | }
215 |
216 | // Create two datasets, one allowed, one not.
217 | baseName := strings.ReplaceAll(uuid.New().String(), "-", "")
218 | allowedDatasetName1 := fmt.Sprintf("allowed_dataset_1_%s", baseName)
219 | allowedDatasetName2 := fmt.Sprintf("allowed_dataset_2_%s", baseName)
220 | disallowedDatasetName := fmt.Sprintf("disallowed_dataset_%s", baseName)
221 | allowedTableName1 := "allowed_table_1"
222 | allowedTableName2 := "allowed_table_2"
223 | disallowedTableName := "disallowed_table"
224 | allowedForecastTableName1 := "allowed_forecast_table_1"
225 | allowedForecastTableName2 := "allowed_forecast_table_2"
226 | disallowedForecastTableName := "disallowed_forecast_table"
227 |
228 | allowedAnalyzeContributionTableName1 := "allowed_analyze_contribution_table_1"
229 | allowedAnalyzeContributionTableName2 := "allowed_analyze_contribution_table_2"
230 | disallowedAnalyzeContributionTableName := "disallowed_analyze_contribution_table"
231 | // Setup allowed table
232 | allowedTableNameParam1 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName1, allowedTableName1)
233 | createAllowedTableStmt1 := fmt.Sprintf("CREATE TABLE %s (id INT64)", allowedTableNameParam1)
234 | teardownAllowed1 := setupBigQueryTable(t, ctx, client, createAllowedTableStmt1, "", allowedDatasetName1, allowedTableNameParam1, nil)
235 | defer teardownAllowed1(t)
236 |
237 | allowedTableNameParam2 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName2, allowedTableName2)
238 | createAllowedTableStmt2 := fmt.Sprintf("CREATE TABLE %s (id INT64)", allowedTableNameParam2)
239 | teardownAllowed2 := setupBigQueryTable(t, ctx, client, createAllowedTableStmt2, "", allowedDatasetName2, allowedTableNameParam2, nil)
240 | defer teardownAllowed2(t)
241 |
242 | // Setup allowed forecast table
243 | allowedForecastTableFullName1 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName1, allowedForecastTableName1)
244 | createForecastStmt1, insertForecastStmt1, forecastParams1 := getBigQueryForecastToolInfo(allowedForecastTableFullName1)
245 | teardownAllowedForecast1 := setupBigQueryTable(t, ctx, client, createForecastStmt1, insertForecastStmt1, allowedDatasetName1, allowedForecastTableFullName1, forecastParams1)
246 | defer teardownAllowedForecast1(t)
247 |
248 | allowedForecastTableFullName2 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName2, allowedForecastTableName2)
249 | createForecastStmt2, insertForecastStmt2, forecastParams2 := getBigQueryForecastToolInfo(allowedForecastTableFullName2)
250 | teardownAllowedForecast2 := setupBigQueryTable(t, ctx, client, createForecastStmt2, insertForecastStmt2, allowedDatasetName2, allowedForecastTableFullName2, forecastParams2)
251 | defer teardownAllowedForecast2(t)
252 |
253 | // Setup disallowed table
254 | disallowedTableNameParam := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, disallowedDatasetName, disallowedTableName)
255 | createDisallowedTableStmt := fmt.Sprintf("CREATE TABLE %s (id INT64)", disallowedTableNameParam)
256 | teardownDisallowed := setupBigQueryTable(t, ctx, client, createDisallowedTableStmt, "", disallowedDatasetName, disallowedTableNameParam, nil)
257 | defer teardownDisallowed(t)
258 |
259 | // Setup disallowed forecast table
260 | disallowedForecastTableFullName := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, disallowedDatasetName, disallowedForecastTableName)
261 | createDisallowedForecastStmt, insertDisallowedForecastStmt, disallowedForecastParams := getBigQueryForecastToolInfo(disallowedForecastTableFullName)
262 | teardownDisallowedForecast := setupBigQueryTable(t, ctx, client, createDisallowedForecastStmt, insertDisallowedForecastStmt, disallowedDatasetName, disallowedForecastTableFullName, disallowedForecastParams)
263 | defer teardownDisallowedForecast(t)
264 |
265 | // Setup allowed analyze contribution table
266 | allowedAnalyzeContributionTableFullName1 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName1, allowedAnalyzeContributionTableName1)
267 | createAnalyzeContributionStmt1, insertAnalyzeContributionStmt1, analyzeContributionParams1 := getBigQueryAnalyzeContributionToolInfo(allowedAnalyzeContributionTableFullName1)
268 | teardownAllowedAnalyzeContribution1 := setupBigQueryTable(t, ctx, client, createAnalyzeContributionStmt1, insertAnalyzeContributionStmt1, allowedDatasetName1, allowedAnalyzeContributionTableFullName1, analyzeContributionParams1)
269 | defer teardownAllowedAnalyzeContribution1(t)
270 |
271 | allowedAnalyzeContributionTableFullName2 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName2, allowedAnalyzeContributionTableName2)
272 | createAnalyzeContributionStmt2, insertAnalyzeContributionStmt2, analyzeContributionParams2 := getBigQueryAnalyzeContributionToolInfo(allowedAnalyzeContributionTableFullName2)
273 | teardownAllowedAnalyzeContribution2 := setupBigQueryTable(t, ctx, client, createAnalyzeContributionStmt2, insertAnalyzeContributionStmt2, allowedDatasetName2, allowedAnalyzeContributionTableFullName2, analyzeContributionParams2)
274 | defer teardownAllowedAnalyzeContribution2(t)
275 |
276 | // Setup disallowed analyze contribution table
277 | disallowedAnalyzeContributionTableFullName := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, disallowedDatasetName, disallowedAnalyzeContributionTableName)
278 | createDisallowedAnalyzeContributionStmt, insertDisallowedAnalyzeContributionStmt, disallowedAnalyzeContributionParams := getBigQueryAnalyzeContributionToolInfo(disallowedAnalyzeContributionTableFullName)
279 | teardownDisallowedAnalyzeContribution := setupBigQueryTable(t, ctx, client, createDisallowedAnalyzeContributionStmt, insertDisallowedAnalyzeContributionStmt, disallowedDatasetName, disallowedAnalyzeContributionTableFullName, disallowedAnalyzeContributionParams)
280 | defer teardownDisallowedAnalyzeContribution(t)
281 |
282 | // Configure source with dataset restriction.
283 | sourceConfig := getBigQueryVars(t)
284 | sourceConfig["allowedDatasets"] = []string{allowedDatasetName1, allowedDatasetName2}
285 |
286 | // Configure tool
287 | toolsConfig := map[string]any{
288 | "list-dataset-ids-restricted": map[string]any{
289 | "kind": "bigquery-list-dataset-ids",
290 | "source": "my-instance",
291 | "description": "Tool to list dataset ids",
292 | },
293 | "list-table-ids-restricted": map[string]any{
294 | "kind": "bigquery-list-table-ids",
295 | "source": "my-instance",
296 | "description": "Tool to list table within a dataset",
297 | },
298 | "get-dataset-info-restricted": map[string]any{
299 | "kind": "bigquery-get-dataset-info",
300 | "source": "my-instance",
301 | "description": "Tool to get dataset info",
302 | },
303 | "get-table-info-restricted": map[string]any{
304 | "kind": "bigquery-get-table-info",
305 | "source": "my-instance",
306 | "description": "Tool to get table info",
307 | },
308 | "execute-sql-restricted": map[string]any{
309 | "kind": "bigquery-execute-sql",
310 | "source": "my-instance",
311 | "description": "Tool to execute SQL",
312 | },
313 | "conversational-analytics-restricted": map[string]any{
314 | "kind": "bigquery-conversational-analytics",
315 | "source": "my-instance",
316 | "description": "Tool to ask BigQuery conversational analytics",
317 | },
318 | "forecast-restricted": map[string]any{
319 | "kind": "bigquery-forecast",
320 | "source": "my-instance",
321 | "description": "Tool to forecast",
322 | },
323 | "analyze-contribution-restricted": map[string]any{
324 | "kind": "bigquery-analyze-contribution",
325 | "source": "my-instance",
326 | "description": "Tool to analyze contribution",
327 | },
328 | }
329 |
330 | // Create config file
331 | config := map[string]any{
332 | "sources": map[string]any{
333 | "my-instance": sourceConfig,
334 | },
335 | "tools": toolsConfig,
336 | }
337 |
338 | // Start server
339 | cmd, cleanup, err := tests.StartCmd(ctx, config)
340 | if err != nil {
341 | t.Fatalf("command initialization returned an error: %s", err)
342 | }
343 | defer cleanup()
344 |
345 | waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
346 | defer cancel()
347 | out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
348 | if err != nil {
349 | t.Logf("toolbox command logs: \n%s", out)
350 | t.Fatalf("toolbox didn't start successfully: %s", err)
351 | }
352 |
353 | // Run tests
354 | runListDatasetIdsWithRestriction(t, allowedDatasetName1, allowedDatasetName2)
355 | runListTableIdsWithRestriction(t, allowedDatasetName1, disallowedDatasetName, allowedTableName1, allowedForecastTableName1, allowedAnalyzeContributionTableName1)
356 | runListTableIdsWithRestriction(t, allowedDatasetName2, disallowedDatasetName, allowedTableName2, allowedForecastTableName2, allowedAnalyzeContributionTableName2)
357 | runGetDatasetInfoWithRestriction(t, allowedDatasetName1, disallowedDatasetName)
358 | runGetDatasetInfoWithRestriction(t, allowedDatasetName2, disallowedDatasetName)
359 | runGetTableInfoWithRestriction(t, allowedDatasetName1, disallowedDatasetName, allowedTableName1, disallowedTableName)
360 | runGetTableInfoWithRestriction(t, allowedDatasetName2, disallowedDatasetName, allowedTableName2, disallowedTableName)
361 | runExecuteSqlWithRestriction(t, allowedTableNameParam1, disallowedTableNameParam)
362 | runExecuteSqlWithRestriction(t, allowedTableNameParam2, disallowedTableNameParam)
363 | runConversationalAnalyticsWithRestriction(t, allowedDatasetName1, disallowedDatasetName, allowedTableName1, disallowedTableName)
364 | runConversationalAnalyticsWithRestriction(t, allowedDatasetName2, disallowedDatasetName, allowedTableName2, disallowedTableName)
365 | runForecastWithRestriction(t, allowedForecastTableFullName1, disallowedForecastTableFullName)
366 | runForecastWithRestriction(t, allowedForecastTableFullName2, disallowedForecastTableFullName)
367 | runAnalyzeContributionWithRestriction(t, allowedAnalyzeContributionTableFullName1, disallowedAnalyzeContributionTableFullName)
368 | runAnalyzeContributionWithRestriction(t, allowedAnalyzeContributionTableFullName2, disallowedAnalyzeContributionTableFullName)
369 | }
370 |
371 | func TestBigQueryWriteModeAllowed(t *testing.T) {
372 | sourceConfig := getBigQueryVars(t)
373 | sourceConfig["writeMode"] = "allowed"
374 |
375 | ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
376 | defer cancel()
377 |
378 | datasetName := fmt.Sprintf("temp_toolbox_test_allowed_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
379 |
380 | client, err := initBigQueryConnection(BigqueryProject)
381 | if err != nil {
382 | t.Fatalf("unable to create BigQuery connection: %s", err)
383 | }
384 |
385 | dataset := client.Dataset(datasetName)
386 | if err := dataset.Create(ctx, &bigqueryapi.DatasetMetadata{Name: datasetName}); err != nil {
387 | t.Fatalf("Failed to create dataset %q: %v", datasetName, err)
388 | }
389 | defer func() {
390 | if err := dataset.DeleteWithContents(ctx); err != nil {
391 | t.Logf("failed to cleanup dataset %s: %v", datasetName, err)
392 | }
393 | }()
394 |
395 | toolsFile := map[string]any{
396 | "sources": map[string]any{
397 | "my-instance": sourceConfig,
398 | },
399 | "tools": map[string]any{
400 | "my-exec-sql-tool": map[string]any{
401 | "kind": "bigquery-execute-sql",
402 | "source": "my-instance",
403 | "description": "Tool to execute sql",
404 | },
405 | },
406 | }
407 |
408 | cmd, cleanup, err := tests.StartCmd(ctx, toolsFile)
409 | if err != nil {
410 | t.Fatalf("command initialization returned an error: %s", err)
411 | }
412 | defer cleanup()
413 |
414 | waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
415 | defer cancel()
416 | out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
417 | if err != nil {
418 | t.Logf("toolbox command logs: \n%s", out)
419 | t.Fatalf("toolbox didn't start successfully: %s", err)
420 | }
421 |
422 | runBigQueryWriteModeAllowedTest(t, datasetName)
423 | }
424 |
425 | func TestBigQueryWriteModeBlocked(t *testing.T) {
426 | sourceConfig := getBigQueryVars(t)
427 | sourceConfig["writeMode"] = "blocked"
428 |
429 | ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
430 | defer cancel()
431 |
432 | datasetName := fmt.Sprintf("temp_toolbox_test_blocked_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
433 | tableName := fmt.Sprintf("param_table_blocked_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
434 | tableNameParam := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, datasetName, tableName)
435 |
436 | client, err := initBigQueryConnection(BigqueryProject)
437 | if err != nil {
438 | t.Fatalf("unable to create BigQuery connection: %s", err)
439 | }
440 | createParamTableStmt, insertParamTableStmt, _, _, _, _, paramTestParams := getBigQueryParamToolInfo(tableNameParam)
441 | teardownTable := setupBigQueryTable(t, ctx, client, createParamTableStmt, insertParamTableStmt, datasetName, tableNameParam, paramTestParams)
442 | defer teardownTable(t)
443 |
444 | toolsFile := map[string]any{
445 | "sources": map[string]any{"my-instance": sourceConfig},
446 | "tools": map[string]any{
447 | "my-exec-sql-tool": map[string]any{"kind": "bigquery-execute-sql", "source": "my-instance", "description": "Tool to execute sql"},
448 | },
449 | }
450 |
451 | cmd, cleanup, err := tests.StartCmd(ctx, toolsFile)
452 | if err != nil {
453 | t.Fatalf("command initialization returned an error: %s", err)
454 | }
455 | defer cleanup()
456 |
457 | waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
458 | defer cancel()
459 | out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
460 | if err != nil {
461 | t.Logf("toolbox command logs: \n%s", out)
462 | t.Fatalf("toolbox didn't start successfully: %s", err)
463 | }
464 |
465 | runBigQueryWriteModeBlockedTest(t, tableNameParam, datasetName)
466 | }
467 |
// TestBigQueryWriteModeProtected verifies writeMode "protected": write
// statements are only permitted against the session's anonymous dataset
// (TEMP tables), while writes to permanent datasets must be rejected.
func TestBigQueryWriteModeProtected(t *testing.T) {
	sourceConfig := getBigQueryVars(t)
	sourceConfig["writeMode"] = "protected"

	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	defer cancel()

	// A permanent dataset that write statements should NOT be able to touch
	// in protected mode. Deleted (with contents) when the test finishes.
	permanentDatasetName := fmt.Sprintf("perm_dataset_protected_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
	client, err := initBigQueryConnection(BigqueryProject)
	if err != nil {
		t.Fatalf("unable to create BigQuery connection: %s", err)
	}
	dataset := client.Dataset(permanentDatasetName)
	if err := dataset.Create(ctx, &bigqueryapi.DatasetMetadata{Name: permanentDatasetName}); err != nil {
		t.Fatalf("Failed to create dataset %q: %v", permanentDatasetName, err)
	}
	defer func() {
		if err := dataset.DeleteWithContents(ctx); err != nil {
			t.Logf("failed to cleanup dataset %s: %v", permanentDatasetName, err)
		}
	}()

	// Tool config: an execute-sql tool plus sql/forecast/analyze-contribution
	// tools that read from a session TEMP table created during the test run.
	toolsFile := map[string]any{
		"sources": map[string]any{"my-instance": sourceConfig},
		"tools": map[string]any{
			"my-exec-sql-tool": map[string]any{"kind": "bigquery-execute-sql", "source": "my-instance", "description": "Tool to execute sql"},
			"my-sql-tool-protected": map[string]any{
				"kind":        "bigquery-sql",
				"source":      "my-instance",
				"description": "Tool to query from the session",
				"statement":   "SELECT * FROM my_shared_temp_table",
			},
			"my-forecast-tool-protected": map[string]any{
				"kind":        "bigquery-forecast",
				"source":      "my-instance",
				"description": "Tool to forecast from session temp table",
			},
			"my-analyze-contribution-tool-protected": map[string]any{
				"kind":        "bigquery-analyze-contribution",
				"source":      "my-instance",
				"description": "Tool to analyze contribution from session temp table",
			},
		},
	}

	cmd, cleanup, err := tests.StartCmd(ctx, toolsFile)
	if err != nil {
		t.Fatalf("command initialization returned an error: %s", err)
	}
	defer cleanup()

	// Give the toolbox server up to 10s to come up before invoking tools.
	waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
	if err != nil {
		t.Logf("toolbox command logs: \n%s", out)
		t.Fatalf("toolbox didn't start successfully: %s", err)
	}

	runBigQueryWriteModeProtectedTest(t, permanentDatasetName)
}
529 |
530 | // getBigQueryParamToolInfo returns statements and param for my-tool for bigquery kind
531 | func getBigQueryParamToolInfo(tableName string) (string, string, string, string, string, string, []bigqueryapi.QueryParameter) {
532 | createStatement := fmt.Sprintf(`
533 | CREATE TABLE IF NOT EXISTS %s (id INT64, name STRING);`, tableName)
534 | insertStatement := fmt.Sprintf(`
535 | INSERT INTO %s (id, name) VALUES (?, ?), (?, ?), (?, ?), (?, NULL);`, tableName)
536 | toolStatement := fmt.Sprintf(`SELECT * FROM %s WHERE id = ? OR name = ? ORDER BY id;`, tableName)
537 | idToolStatement := fmt.Sprintf(`SELECT * FROM %s WHERE id = ? ORDER BY id;`, tableName)
538 | nameToolStatement := fmt.Sprintf(`SELECT * FROM %s WHERE name = ? ORDER BY id;`, tableName)
539 | arrayToolStatememt := fmt.Sprintf(`SELECT * FROM %s WHERE id IN UNNEST(@idArray) AND name IN UNNEST(@nameArray) ORDER BY id;`, tableName)
540 | params := []bigqueryapi.QueryParameter{
541 | {Value: int64(1)}, {Value: "Alice"},
542 | {Value: int64(2)}, {Value: "Jane"},
543 | {Value: int64(3)}, {Value: "Sid"},
544 | {Value: int64(4)},
545 | }
546 | return createStatement, insertStatement, toolStatement, idToolStatement, nameToolStatement, arrayToolStatememt, params
547 | }
548 |
549 | // getBigQueryAuthToolInfo returns statements and param of my-auth-tool for bigquery kind
550 | func getBigQueryAuthToolInfo(tableName string) (string, string, string, []bigqueryapi.QueryParameter) {
551 | createStatement := fmt.Sprintf(`
552 | CREATE TABLE IF NOT EXISTS %s (id INT64, name STRING, email STRING)`, tableName)
553 | insertStatement := fmt.Sprintf(`
554 | INSERT INTO %s (id, name, email) VALUES (?, ?, ?), (?, ?, ?)`, tableName)
555 | toolStatement := fmt.Sprintf(`
556 | SELECT name FROM %s WHERE email = ?`, tableName)
557 | params := []bigqueryapi.QueryParameter{
558 | {Value: int64(1)}, {Value: "Alice"}, {Value: tests.ServiceAccountEmail},
559 | {Value: int64(2)}, {Value: "Jane"}, {Value: "[email protected]"},
560 | }
561 | return createStatement, insertStatement, toolStatement, params
562 | }
563 |
564 | // getBigQueryDataTypeTestInfo returns statements and params for data type tests.
565 | func getBigQueryDataTypeTestInfo(tableName string) (string, string, string, string, []bigqueryapi.QueryParameter) {
566 | createStatement := fmt.Sprintf(`
567 | CREATE TABLE IF NOT EXISTS %s (id INT64, int_val INT64, string_val STRING, float_val FLOAT64, bool_val BOOL);`, tableName)
568 | insertStatement := fmt.Sprintf(`
569 | INSERT INTO %s (id, int_val, string_val, float_val, bool_val) VALUES (?, ?, ?, ?, ?), (?, ?, ?, ?, ?), (?, ?, ?, ?, ?);`, tableName)
570 | toolStatement := fmt.Sprintf(`SELECT * FROM %s WHERE int_val = ? AND string_val = ? AND float_val = ? AND bool_val = ?;`, tableName)
571 | arrayToolStatement := fmt.Sprintf(`SELECT * FROM %s WHERE int_val IN UNNEST(@int_array) AND string_val IN UNNEST(@string_array) AND float_val IN UNNEST(@float_array) AND bool_val IN UNNEST(@bool_array) ORDER BY id;`, tableName)
572 | params := []bigqueryapi.QueryParameter{
573 | {Value: int64(1)}, {Value: int64(123)}, {Value: "hello"}, {Value: 3.14}, {Value: true},
574 | {Value: int64(2)}, {Value: int64(-456)}, {Value: "world"}, {Value: -0.55}, {Value: false},
575 | {Value: int64(3)}, {Value: int64(789)}, {Value: "test"}, {Value: 100.1}, {Value: true},
576 | }
577 | return createStatement, insertStatement, toolStatement, arrayToolStatement, params
578 | }
579 |
580 | // getBigQueryForecastToolInfo returns statements and params for the forecast tool.
581 | func getBigQueryForecastToolInfo(tableName string) (string, string, []bigqueryapi.QueryParameter) {
582 | createStatement := fmt.Sprintf(`
583 | CREATE TABLE IF NOT EXISTS %s (ts TIMESTAMP, data FLOAT64, id STRING);`, tableName)
584 | insertStatement := fmt.Sprintf(`
585 | INSERT INTO %s (ts, data, id) VALUES
586 | (?, ?, ?), (?, ?, ?), (?, ?, ?),
587 | (?, ?, ?), (?, ?, ?), (?, ?, ?);`, tableName)
588 | params := []bigqueryapi.QueryParameter{
589 | {Value: "2025-01-01T00:00:00Z"}, {Value: 10.0}, {Value: "a"},
590 | {Value: "2025-01-01T01:00:00Z"}, {Value: 11.0}, {Value: "a"},
591 | {Value: "2025-01-01T02:00:00Z"}, {Value: 12.0}, {Value: "a"},
592 | {Value: "2025-01-01T00:00:00Z"}, {Value: 20.0}, {Value: "b"},
593 | {Value: "2025-01-01T01:00:00Z"}, {Value: 21.0}, {Value: "b"},
594 | {Value: "2025-01-01T02:00:00Z"}, {Value: 22.0}, {Value: "b"},
595 | }
596 | return createStatement, insertStatement, params
597 | }
598 |
599 | // getBigQueryAnalyzeContributionToolInfo returns statements and params for the analyze-contribution tool.
600 | func getBigQueryAnalyzeContributionToolInfo(tableName string) (string, string, []bigqueryapi.QueryParameter) {
601 | createStatement := fmt.Sprintf(`
602 | CREATE TABLE IF NOT EXISTS %s (dim1 STRING, dim2 STRING, is_test BOOL, metric FLOAT64);`, tableName)
603 | insertStatement := fmt.Sprintf(`
604 | INSERT INTO %s (dim1, dim2, is_test, metric) VALUES
605 | (?, ?, ?, ?), (?, ?, ?, ?), (?, ?, ?, ?), (?, ?, ?, ?);`, tableName)
606 | params := []bigqueryapi.QueryParameter{
607 | {Value: "a"}, {Value: "x"}, {Value: true}, {Value: 100.0},
608 | {Value: "a"}, {Value: "x"}, {Value: false}, {Value: 110.0},
609 | {Value: "a"}, {Value: "y"}, {Value: true}, {Value: 120.0},
610 | {Value: "a"}, {Value: "y"}, {Value: false}, {Value: 100.0},
611 | {Value: "b"}, {Value: "x"}, {Value: true}, {Value: 40.0},
612 | {Value: "b"}, {Value: "x"}, {Value: false}, {Value: 100.0},
613 | {Value: "b"}, {Value: "y"}, {Value: true}, {Value: 60.0},
614 | {Value: "b"}, {Value: "y"}, {Value: false}, {Value: 60.0},
615 | }
616 | return createStatement, insertStatement, params
617 | }
618 |
// getBigQueryTmplToolStatement returns the template-parameter statements used
// by the bigquery template tool test cases: one filtering on id, one on a
// templated column name.
func getBigQueryTmplToolStatement() (string, string) {
	const (
		selectByID     = "SELECT * FROM {{.tableName}} WHERE id = ? ORDER BY id"
		selectByColumn = "SELECT * FROM {{.tableName}} WHERE {{.columnFilter}} = ? ORDER BY id"
	)
	return selectByID, selectByColumn
}
625 |
626 | func setupBigQueryTable(t *testing.T, ctx context.Context, client *bigqueryapi.Client, createStatement, insertStatement, datasetName string, tableName string, params []bigqueryapi.QueryParameter) func(*testing.T) {
627 | // Create dataset
628 | dataset := client.Dataset(datasetName)
629 | _, err := dataset.Metadata(ctx)
630 |
631 | if err != nil {
632 | apiErr, ok := err.(*googleapi.Error)
633 | if !ok || apiErr.Code != 404 {
634 | t.Fatalf("Failed to check dataset %q existence: %v", datasetName, err)
635 | }
636 | metadataToCreate := &bigqueryapi.DatasetMetadata{Name: datasetName}
637 | if err := dataset.Create(ctx, metadataToCreate); err != nil {
638 | t.Fatalf("Failed to create dataset %q: %v", datasetName, err)
639 | }
640 | }
641 |
642 | // Create table
643 | createJob, err := client.Query(createStatement).Run(ctx)
644 |
645 | if err != nil {
646 | t.Fatalf("Failed to start create table job for %s: %v", tableName, err)
647 | }
648 | createStatus, err := createJob.Wait(ctx)
649 | if err != nil {
650 | t.Fatalf("Failed to wait for create table job for %s: %v", tableName, err)
651 | }
652 | if err := createStatus.Err(); err != nil {
653 | t.Fatalf("Create table job for %s failed: %v", tableName, err)
654 | }
655 |
656 | if len(params) > 0 {
657 | // Insert test data
658 | insertQuery := client.Query(insertStatement)
659 | insertQuery.Parameters = params
660 | insertJob, err := insertQuery.Run(ctx)
661 | if err != nil {
662 | t.Fatalf("Failed to start insert job for %s: %v", tableName, err)
663 | }
664 | insertStatus, err := insertJob.Wait(ctx)
665 | if err != nil {
666 | t.Fatalf("Failed to wait for insert job for %s: %v", tableName, err)
667 | }
668 | if err := insertStatus.Err(); err != nil {
669 | t.Fatalf("Insert job for %s failed: %v", tableName, err)
670 | }
671 | }
672 |
673 | return func(t *testing.T) {
674 | // tear down table
675 | dropSQL := fmt.Sprintf("drop table %s", tableName)
676 | dropJob, err := client.Query(dropSQL).Run(ctx)
677 | if err != nil {
678 | t.Errorf("Failed to start drop table job for %s: %v", tableName, err)
679 | return
680 | }
681 | dropStatus, err := dropJob.Wait(ctx)
682 | if err != nil {
683 | t.Errorf("Failed to wait for drop table job for %s: %v", tableName, err)
684 | return
685 | }
686 | if err := dropStatus.Err(); err != nil {
687 | t.Errorf("Error dropping table %s: %v", tableName, err)
688 | }
689 |
690 | // tear down dataset
691 | datasetToTeardown := client.Dataset(datasetName)
692 | tablesIterator := datasetToTeardown.Tables(ctx)
693 | _, err = tablesIterator.Next()
694 |
695 | if err == iterator.Done {
696 | if err := datasetToTeardown.Delete(ctx); err != nil {
697 | t.Errorf("Failed to delete dataset %s: %v", datasetName, err)
698 | }
699 | } else if err != nil {
700 | t.Errorf("Failed to list tables in dataset %s to check emptiness: %v.", datasetName, err)
701 | }
702 | }
703 | }
704 |
// addBigQueryPrebuiltToolsConfig registers the prebuilt BigQuery tools into
// config["tools"] and returns the config. Each tool kind is registered in
// three flavors sharing one definition:
//
//	my-<base>:             plain tool on source "my-instance"
//	my-auth-<base>:        same, gated behind the "my-google-auth" auth service
//	my-client-auth-<base>: same, on the client-OAuth source "my-client-auth-source"
//
// Fails the test if config has no "tools" map.
func addBigQueryPrebuiltToolsConfig(t *testing.T, config map[string]any) map[string]any {
	tools, ok := config["tools"].(map[string]any)
	if !ok {
		t.Fatalf("unable to get tools from config")
	}
	// One row per tool kind; desc is the plain variant's description and
	// authDesc the description shared by the auth / client-auth variants
	// (for several kinds the two are identical).
	for _, tc := range []struct {
		base     string // tool-name suffix, e.g. "exec-sql-tool"
		kind     string
		desc     string
		authDesc string
	}{
		{"exec-sql-tool", "bigquery-execute-sql", "Tool to execute sql", "Tool to execute sql"},
		{"forecast-tool", "bigquery-forecast", "Tool to forecast time series data.", "Tool to forecast time series data with auth."},
		{"analyze-contribution-tool", "bigquery-analyze-contribution", "Tool to analyze contribution.", "Tool to analyze contribution with auth."},
		{"list-dataset-ids-tool", "bigquery-list-dataset-ids", "Tool to list dataset", "Tool to list dataset"},
		{"get-dataset-info-tool", "bigquery-get-dataset-info", "Tool to show dataset metadata", "Tool to show dataset metadata"},
		{"list-table-ids-tool", "bigquery-list-table-ids", "Tool to list table within a dataset", "Tool to list table within a dataset"},
		{"get-table-info-tool", "bigquery-get-table-info", "Tool to show dataset metadata", "Tool to show dataset metadata"},
		{"conversational-analytics-tool", "bigquery-conversational-analytics", "Tool to ask BigQuery conversational analytics", "Tool to ask BigQuery conversational analytics"},
		// NOTE: fixes the earlier "BiqQuery" typo in these descriptions.
		{"search-catalog-tool", "bigquery-search-catalog", "Tool to search the BigQuery catalog", "Tool to search the BigQuery catalog"},
	} {
		tools["my-"+tc.base] = map[string]any{
			"kind":        tc.kind,
			"source":      "my-instance",
			"description": tc.desc,
		}
		tools["my-auth-"+tc.base] = map[string]any{
			"kind":        tc.kind,
			"source":      "my-instance",
			"description": tc.authDesc,
			"authRequired": []string{
				"my-google-auth",
			},
		}
		tools["my-client-auth-"+tc.base] = map[string]any{
			"kind":        tc.kind,
			"source":      "my-client-auth-source",
			"description": tc.authDesc,
		}
	}
	config["tools"] = tools
	return config
}
875 |
876 | func addClientAuthSourceConfig(t *testing.T, config map[string]any) map[string]any {
877 | sources, ok := config["sources"].(map[string]any)
878 | if !ok {
879 | t.Fatalf("unable to get sources from config")
880 | }
881 | sources["my-client-auth-source"] = map[string]any{
882 | "kind": BigquerySourceKind,
883 | "project": BigqueryProject,
884 | "useClientOAuth": true,
885 | }
886 | config["sources"] = sources
887 | return config
888 | }
889 |
// addBigQuerySqlToolConfig registers the bigquery-sql data-type tools (one
// scalar, one array) plus a minimal client-OAuth tool into config["tools"]
// and returns the config. Fails the test if config has no "tools" map.
func addBigQuerySqlToolConfig(t *testing.T, config map[string]any, toolStatement, arrayToolStatement string) map[string]any {
	tools, ok := config["tools"].(map[string]any)
	if !ok {
		t.Fatalf("unable to get tools from config")
	}
	// scalarParam builds one scalar tool-parameter declaration.
	scalarParam := func(name, typ, desc string) map[string]any {
		return map[string]any{"name": name, "type": typ, "description": desc}
	}
	// arrayParam builds one array tool-parameter declaration whose items are
	// of itemType.
	arrayParam := func(name, itemType, desc string) map[string]any {
		return map[string]any{
			"name":        name,
			"type":        "array",
			"description": desc,
			"items":       map[string]any{"name": "item", "type": itemType, "description": "desc"},
		}
	}
	tools["my-scalar-datatype-tool"] = map[string]any{
		"kind":        "bigquery-sql",
		"source":      "my-instance",
		"description": "Tool to test various scalar data types.",
		"statement":   toolStatement,
		"parameters": []any{
			scalarParam("int_val", "integer", "an integer value"),
			scalarParam("string_val", "string", "a string value"),
			scalarParam("float_val", "float", "a float value"),
			scalarParam("bool_val", "boolean", "a boolean value"),
		},
	}
	tools["my-array-datatype-tool"] = map[string]any{
		"kind":        "bigquery-sql",
		"source":      "my-instance",
		"description": "Tool to test various array data types.",
		"statement":   arrayToolStatement,
		"parameters": []any{
			arrayParam("int_array", "integer", "an array of integer values"),
			arrayParam("string_array", "string", "an array of string values"),
			arrayParam("float_array", "float", "an array of float values"),
			arrayParam("bool_array", "boolean", "an array of boolean values"),
		},
	}
	tools["my-client-auth-tool"] = map[string]any{
		"kind":        "bigquery-sql",
		"source":      "my-client-auth-source",
		"description": "Tool to test client authorization.",
		"statement":   "SELECT 1",
	}
	config["tools"] = tools
	return config
}
928 |
// runBigQueryExecuteSqlToolInvokeTest exercises the execute-sql tool's HTTP
// invoke endpoint across its three flavors (plain, Google-auth, client-OAuth):
// missing request bodies, SELECT/DDL/DML statements, and valid / invalid /
// missing auth tokens. Error cases only check for a non-200 status; success
// cases also compare the JSON "result" string against the expected value.
func runBigQueryExecuteSqlToolInvokeTest(t *testing.T, select1Want, invokeParamWant, tableNameParam, ddlWant string) {
	// Get ID token
	idToken, err := tests.GetGoogleIdToken(tests.ClientId)
	if err != nil {
		t.Fatalf("error getting Google ID token: %s", err)
	}

	// Get access token
	accessToken, err := sources.GetIAMAccessToken(t.Context())
	if err != nil {
		t.Fatalf("error getting access token from ADC: %s", err)
	}
	accessToken = "Bearer " + accessToken

	// Test tool invoke endpoint
	invokeTcs := []struct {
		name          string
		api           string
		requestHeader map[string]string
		requestBody   io.Reader
		want          string
		isErr         bool
	}{
		{
			name:          "invoke my-exec-sql-tool without body",
			api:           "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(`{}`)),
			isErr:         true,
		},
		{
			name:          "invoke my-exec-sql-tool",
			api:           "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)),
			want:          select1Want,
			isErr:         false,
		},
		{
			// isErr depends on the source's write mode, so this case only
			// requires a non-200 response when DDL is rejected.
			name:          "invoke my-exec-sql-tool create table",
			api:           "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(`{"sql":"CREATE TABLE t (id SERIAL PRIMARY KEY, name TEXT)"}`)),
			want:          ddlWant,
			isErr:         true,
		},
		{
			name:          "invoke my-exec-sql-tool with data present in table",
			api:           "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(fmt.Sprintf("{\"sql\":\"SELECT id, name FROM %s WHERE id = 3 OR name = 'Alice' ORDER BY id\"}", tableNameParam))),
			want:          invokeParamWant,
			isErr:         false,
		},
		{
			name:          "invoke my-exec-sql-tool with no matching rows",
			api:           "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(fmt.Sprintf("{\"sql\":\"SELECT * FROM %s WHERE id = 999\"}", tableNameParam))),
			want:          `"The query returned 0 rows."`,
			isErr:         false,
		},
		{
			name:          "invoke my-exec-sql-tool drop table",
			api:           "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(`{"sql":"DROP TABLE t"}`)),
			want:          ddlWant,
			isErr:         true,
		},
		{
			name:          "invoke my-exec-sql-tool insert entry",
			api:           "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(fmt.Sprintf("{\"sql\":\"INSERT INTO %s (id, name) VALUES (4, 'test_name')\"}", tableNameParam))),
			want:          ddlWant,
			isErr:         false,
		},
		{
			// NOTE(review): duplicate subtest name with the first case above;
			// Go's t.Run deduplicates by appending "#01".
			name:          "invoke my-exec-sql-tool without body",
			api:           "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(`{}`)),
			isErr:         true,
		},
		{
			name:          "Invoke my-auth-exec-sql-tool with auth token",
			api:           "http://127.0.0.1:5000/api/tool/my-auth-exec-sql-tool/invoke",
			requestHeader: map[string]string{"my-google-auth_token": idToken},
			requestBody:   bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)),
			isErr:         false,
			want:          select1Want,
		},
		{
			name:          "Invoke my-auth-exec-sql-tool with invalid auth token",
			api:           "http://127.0.0.1:5000/api/tool/my-auth-exec-sql-tool/invoke",
			requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"},
			requestBody:   bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)),
			isErr:         true,
		},
		{
			name:          "Invoke my-auth-exec-sql-tool without auth token",
			api:           "http://127.0.0.1:5000/api/tool/my-auth-exec-sql-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)),
			isErr:         true,
		},
		{
			name:          "Invoke my-client-auth-exec-sql-tool with auth token",
			api:           "http://127.0.0.1:5000/api/tool/my-client-auth-exec-sql-tool/invoke",
			requestHeader: map[string]string{"Authorization": accessToken},
			requestBody:   bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)),
			want:          "[{\"f0_\":1}]",
			isErr:         false,
		},
		{
			name:          "Invoke my-client-auth-exec-sql-tool without auth token",
			api:           "http://127.0.0.1:5000/api/tool/my-client-auth-exec-sql-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)),
			isErr:         true,
		},
		{

			name:          "Invoke my-client-auth-exec-sql-tool with invalid auth token",
			api:           "http://127.0.0.1:5000/api/tool/my-client-auth-exec-sql-tool/invoke",
			requestHeader: map[string]string{"Authorization": "Bearer invalid-token"},
			requestBody:   bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)),
			isErr:         true,
		},
	}
	for _, tc := range invokeTcs {
		t.Run(tc.name, func(t *testing.T) {
			// Send Tool invocation request
			req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")
			for k, v := range tc.requestHeader {
				req.Header.Add(k, v)
			}
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()

			// Any non-200 status is acceptable for error cases; otherwise fail
			// with the body for debugging.
			if resp.StatusCode != http.StatusOK {
				if tc.isErr {
					return
				}
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
			}

			// Check response body
			var body map[string]interface{}
			err = json.NewDecoder(resp.Body).Decode(&body)
			if err != nil {
				t.Fatalf("error parsing response body")
			}

			// The tool's output is serialized as a string under "result".
			got, ok := body["result"].(string)
			if !ok {
				t.Fatalf("unable to find result in response body")
			}

			if got != tc.want {
				t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
			}
		})
	}
}
1103 |
// runInvokeRequest sends a POST request with a JSON body to the given API
// endpoint and returns the response together with its decoded JSON body.
// The response body is re-wrapped in a fresh in-memory reader so callers can
// read it again (e.g. for logging on failure); callers are responsible for
// closing resp.Body. Transport, read, or decode failures fail the test.
func runInvokeRequest(t *testing.T, api, body string, headers map[string]string) (*http.Response, map[string]interface{}) {
	t.Helper()
	req, err := http.NewRequest(http.MethodPost, api, bytes.NewBufferString(body))
	if err != nil {
		t.Fatalf("failed to create request: %v", err)
	}
	req.Header.Set("Content-Type", "application/json")
	for k, v := range headers {
		req.Header.Add(k, v)
	}

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		t.Fatalf("failed to send request: %v", err)
	}

	var result map[string]interface{}
	// Read the body fully, then replace resp.Body with a buffered copy so the
	// caller can re-read it (for logging on failure).
	bodyBytes, err := io.ReadAll(resp.Body)
	if err != nil {
		t.Fatalf("failed to read response body: %v", err)
	}
	resp.Body.Close() // Close original body
	resp.Body = io.NopCloser(bytes.NewBuffer(bodyBytes)) // Replace with a new reader

	if err := json.Unmarshal(bodyBytes, &result); err != nil {
		t.Logf("Failed to decode response body: %s", string(bodyBytes))
		t.Fatalf("failed to decode response: %v", err)
	}
	return resp, result
}
1136 |
1137 | func runBigQueryWriteModeAllowedTest(t *testing.T, datasetName string) {
1138 | t.Run("CREATE TABLE should succeed", func(t *testing.T) {
1139 | sql := fmt.Sprintf("CREATE TABLE %s.new_table (x INT64)", datasetName)
1140 | body := fmt.Sprintf(`{"sql": "%s"}`, sql)
1141 | resp, result := runInvokeRequest(t, "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke", body, nil)
1142 | defer resp.Body.Close()
1143 |
1144 | if resp.StatusCode != http.StatusOK {
1145 | bodyBytes, _ := io.ReadAll(resp.Body)
1146 | t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, http.StatusOK, string(bodyBytes))
1147 | }
1148 |
1149 | resStr, ok := result["result"].(string)
1150 | if !ok {
1151 | t.Fatalf("expected 'result' field in response, got %v", result)
1152 | }
1153 | if resStr != `"Query executed successfully and returned no content."` {
1154 | t.Errorf("unexpected result: got %q, want %q", resStr, `"Query executed successfully and returned no content."`)
1155 | }
1156 | })
1157 | }
1158 |
1159 | func runBigQueryWriteModeBlockedTest(t *testing.T, tableNameParam, datasetName string) {
1160 | testCases := []struct {
1161 | name string
1162 | sql string
1163 | wantStatusCode int
1164 | wantInError string
1165 | wantResult string
1166 | }{
1167 | {"SELECT statement should succeed", fmt.Sprintf("SELECT id, name FROM %s WHERE id = 1", tableNameParam), http.StatusOK, "", `[{"id":1,"name":"Alice"}]`},
1168 | {"INSERT statement should fail", fmt.Sprintf("INSERT INTO %s (id, name) VALUES (10, 'test')", tableNameParam), http.StatusBadRequest, "write mode is 'blocked', only SELECT statements are allowed", ""},
1169 | {"CREATE TABLE statement should fail", fmt.Sprintf("CREATE TABLE %s.new_table (x INT64)", datasetName), http.StatusBadRequest, "write mode is 'blocked', only SELECT statements are allowed", ""},
1170 | }
1171 |
1172 | for _, tc := range testCases {
1173 | t.Run(tc.name, func(t *testing.T) {
1174 | body := fmt.Sprintf(`{"sql": "%s"}`, tc.sql)
1175 | resp, result := runInvokeRequest(t, "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke", body, nil)
1176 | defer resp.Body.Close()
1177 |
1178 | if resp.StatusCode != tc.wantStatusCode {
1179 | bodyBytes, _ := io.ReadAll(resp.Body)
1180 | t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
1181 | }
1182 |
1183 | if tc.wantInError != "" {
1184 | errStr, ok := result["error"].(string)
1185 | if !ok {
1186 | t.Fatalf("expected 'error' field in response, got %v", result)
1187 | }
1188 | if !strings.Contains(errStr, tc.wantInError) {
1189 | t.Fatalf("expected error message to contain %q, but got %q", tc.wantInError, errStr)
1190 | }
1191 | }
1192 | if tc.wantResult != "" {
1193 | resStr, ok := result["result"].(string)
1194 | if !ok {
1195 | t.Fatalf("expected 'result' field in response, got %v", result)
1196 | }
1197 | if resStr != tc.wantResult {
1198 | t.Fatalf("unexpected result: got %q, want %q", resStr, tc.wantResult)
1199 | }
1200 | }
1201 | })
1202 | }
1203 | }
1204 |
// runBigQueryWriteModeProtectedTest verifies the "protected" write mode:
// write statements against a permanent dataset must be rejected, while
// SELECT statements and writes confined to the session's anonymous dataset
// (TEMP tables) must succeed.
//
// NOTE: the sub-tests are order-dependent — later cases read TEMP tables
// created by earlier cases within the same session, so the slice order must
// not be changed and the cases cannot run in parallel.
func runBigQueryWriteModeProtectedTest(t *testing.T, permanentDatasetName string) {
	testCases := []struct {
		name           string // sub-test name
		toolName       string // tool endpoint to invoke
		requestBody    string // JSON payload sent to the tool
		wantStatusCode int    // expected HTTP status code
		wantInError    string // substring expected in the "error" field ("" skips the check)
		wantResult     string // substring expected in the "result" field ("" skips the check)
	}{
		{
			name:           "CREATE TABLE to permanent dataset should fail",
			toolName:       "my-exec-sql-tool",
			requestBody:    fmt.Sprintf(`{"sql": "CREATE TABLE %s.new_table (x INT64)"}`, permanentDatasetName),
			wantStatusCode: http.StatusBadRequest,
			wantInError:    "protected write mode only supports SELECT statements, or write operations in the anonymous dataset",
			wantResult:     "",
		},
		{
			name:           "CREATE TEMP TABLE should succeed",
			toolName:       "my-exec-sql-tool",
			requestBody:    `{"sql": "CREATE TEMP TABLE my_shared_temp_table (x INT64)"}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `"Query executed successfully and returned no content."`,
		},
		{
			name:           "INSERT into TEMP TABLE should succeed",
			toolName:       "my-exec-sql-tool",
			requestBody:    `{"sql": "INSERT INTO my_shared_temp_table (x) VALUES (42)"}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `"Query executed successfully and returned no content."`,
		},
		{
			name:           "SELECT from TEMP TABLE with exec-sql should succeed",
			toolName:       "my-exec-sql-tool",
			requestBody:    `{"sql": "SELECT * FROM my_shared_temp_table"}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `[{"x":42}]`,
		},
		{
			name:           "SELECT from TEMP TABLE with sql-tool should succeed",
			toolName:       "my-sql-tool-protected",
			requestBody:    `{}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `[{"x":42}]`,
		},
		{
			name:           "CREATE TEMP TABLE for forecast should succeed",
			toolName:       "my-exec-sql-tool",
			requestBody:    `{"sql": "CREATE TEMP TABLE forecast_temp_table (ts TIMESTAMP, data FLOAT64) AS SELECT TIMESTAMP('2025-01-01T00:00:00Z') AS ts, 10.0 AS data UNION ALL SELECT TIMESTAMP('2025-01-01T01:00:00Z'), 11.0 UNION ALL SELECT TIMESTAMP('2025-01-01T02:00:00Z'), 12.0 UNION ALL SELECT TIMESTAMP('2025-01-01T03:00:00Z'), 13.0"}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `"Query executed successfully and returned no content."`,
		},
		{
			name:           "Forecast from TEMP TABLE should succeed",
			toolName:       "my-forecast-tool-protected",
			requestBody:    `{"history_data": "SELECT * FROM forecast_temp_table", "timestamp_col": "ts", "data_col": "data", "horizon": 1}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `"forecast_timestamp"`,
		},
		{
			name:           "CREATE TEMP TABLE for contribution analysis should succeed",
			toolName:       "my-exec-sql-tool",
			requestBody:    `{"sql": "CREATE TEMP TABLE contribution_temp_table (dim1 STRING, is_test BOOL, metric FLOAT64) AS SELECT 'a' as dim1, true as is_test, 100.0 as metric UNION ALL SELECT 'b', false, 120.0"}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `"Query executed successfully and returned no content."`,
		},
		{
			name:           "Analyze contribution from TEMP TABLE should succeed",
			toolName:       "my-analyze-contribution-tool-protected",
			requestBody:    `{"input_data": "SELECT * FROM contribution_temp_table", "contribution_metric": "SUM(metric)", "is_test_col": "is_test", "dimension_id_cols": ["dim1"]}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `"relative_difference"`,
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			// Invoke the tool over the HTTP API exposed by the locally
			// running test server.
			api := fmt.Sprintf("http://127.0.0.1:5000/api/tool/%s/invoke", tc.toolName)
			resp, result := runInvokeRequest(t, api, tc.requestBody, nil)
			defer resp.Body.Close()

			if resp.StatusCode != tc.wantStatusCode {
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
			}

			// When an error is expected, the "error" field must contain the
			// expected substring.
			if tc.wantInError != "" {
				errStr, ok := result["error"].(string)
				if !ok {
					t.Fatalf("expected 'error' field in response, got %v", result)
				}
				if !strings.Contains(errStr, tc.wantInError) {
					t.Fatalf("expected error message to contain %q, but got %q", tc.wantInError, errStr)
				}
			}

			// When a result is expected, the "result" field must contain the
			// expected substring.
			if tc.wantResult != "" {
				resStr, ok := result["result"].(string)
				if !ok {
					t.Fatalf("expected 'result' field in response, got %v", result)
				}
				if !strings.Contains(resStr, tc.wantResult) {
					t.Fatalf("expected %q to contain %q, but it did not", resStr, tc.wantResult)
				}
			}
		})
	}
}
1320 |
1321 | func runBigQueryExecuteSqlToolInvokeDryRunTest(t *testing.T, datasetName string) {
1322 | // Get ID token
1323 | idToken, err := tests.GetGoogleIdToken(tests.ClientId)
1324 | if err != nil {
1325 | t.Fatalf("error getting Google ID token: %s", err)
1326 | }
1327 |
1328 | newTableName := fmt.Sprintf("%s.new_dry_run_table_%s", datasetName, strings.ReplaceAll(uuid.New().String(), "-", ""))
1329 |
1330 | // Test tool invoke endpoint
1331 | invokeTcs := []struct {
1332 | name string
1333 | api string
1334 | requestHeader map[string]string
1335 | requestBody io.Reader
1336 | want string
1337 | isErr bool
1338 | }{
1339 | {
1340 | name: "invoke my-exec-sql-tool with dryRun",
1341 | api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
1342 | requestHeader: map[string]string{},
1343 | requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1", "dry_run": true}`)),
1344 | want: `\"statementType\": \"SELECT\"`,
1345 | isErr: false,
1346 | },
1347 | {
1348 | name: "invoke my-exec-sql-tool with dryRun create table",
1349 | api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
1350 | requestHeader: map[string]string{},
1351 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"sql":"CREATE TABLE %s (id INT64, name STRING)", "dry_run": true}`, newTableName))),
1352 | want: `\"statementType\": \"CREATE_TABLE\"`,
1353 | isErr: false,
1354 | },
1355 | {
1356 | name: "invoke my-exec-sql-tool with dryRun execute immediate",
1357 | api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
1358 | requestHeader: map[string]string{},
1359 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"sql":"EXECUTE IMMEDIATE \"CREATE TABLE %s (id INT64, name STRING)\"", "dry_run": true}`, newTableName))),
1360 | want: `\"statementType\": \"SCRIPT\"`,
1361 | isErr: false,
1362 | },
1363 | {
1364 | name: "Invoke my-auth-exec-sql-tool with dryRun and auth token",
1365 | api: "http://127.0.0.1:5000/api/tool/my-auth-exec-sql-tool/invoke",
1366 | requestHeader: map[string]string{"my-google-auth_token": idToken},
1367 | requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1", "dry_run": true}`)),
1368 | isErr: false,
1369 | want: `\"statementType\": \"SELECT\"`,
1370 | },
1371 | {
1372 | name: "Invoke my-auth-exec-sql-tool with dryRun and invalid auth token",
1373 | api: "http://127.0.0.1:5000/api/tool/my-auth-exec-sql-tool/invoke",
1374 | requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"},
1375 | requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1","dry_run": true}`)),
1376 | isErr: true,
1377 | },
1378 | {
1379 | name: "Invoke my-auth-exec-sql-tool with dryRun and without auth token",
1380 | api: "http://127.0.0.1:5000/api/tool/my-auth-exec-sql-tool/invoke",
1381 | requestHeader: map[string]string{},
1382 | requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1", "dry_run": true}`)),
1383 | isErr: true,
1384 | },
1385 | }
1386 | for _, tc := range invokeTcs {
1387 | t.Run(tc.name, func(t *testing.T) {
1388 | // Send Tool invocation request
1389 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
1390 | if err != nil {
1391 | t.Fatalf("unable to create request: %s", err)
1392 | }
1393 | req.Header.Add("Content-type", "application/json")
1394 | for k, v := range tc.requestHeader {
1395 | req.Header.Add(k, v)
1396 | }
1397 | resp, err := http.DefaultClient.Do(req)
1398 | if err != nil {
1399 | t.Fatalf("unable to send request: %s", err)
1400 | }
1401 | defer resp.Body.Close()
1402 |
1403 | if resp.StatusCode != http.StatusOK {
1404 | if tc.isErr {
1405 | return
1406 | }
1407 | bodyBytes, _ := io.ReadAll(resp.Body)
1408 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
1409 | }
1410 |
1411 | // Check response body
1412 | var body map[string]interface{}
1413 | err = json.NewDecoder(resp.Body).Decode(&body)
1414 | if err != nil {
1415 | t.Fatalf("error parsing response body")
1416 | }
1417 |
1418 | got, ok := body["result"].(string)
1419 | if !ok {
1420 | t.Fatalf("unable to find result in response body")
1421 | }
1422 |
1423 | if !strings.Contains(got, tc.want) {
1424 | t.Fatalf("expected %q to contain %q, but it did not", got, tc.want)
1425 | }
1426 | })
1427 | }
1428 | }
1429 |
1430 | func runBigQueryForecastToolInvokeTest(t *testing.T, tableName string) {
1431 | idToken, err := tests.GetGoogleIdToken(tests.ClientId)
1432 | if err != nil {
1433 | t.Fatalf("error getting Google ID token: %s", err)
1434 | }
1435 |
1436 | // Get access token
1437 | accessToken, err := sources.GetIAMAccessToken(t.Context())
1438 | if err != nil {
1439 | t.Fatalf("error getting access token from ADC: %s", err)
1440 | }
1441 | accessToken = "Bearer " + accessToken
1442 |
1443 | historyDataTable := strings.ReplaceAll(tableName, "`", "")
1444 | historyDataQuery := fmt.Sprintf("SELECT ts, data, id FROM %s", tableName)
1445 |
1446 | invokeTcs := []struct {
1447 | name string
1448 | api string
1449 | requestHeader map[string]string
1450 | requestBody io.Reader
1451 | want string
1452 | isErr bool
1453 | }{
1454 | {
1455 | name: "invoke my-forecast-tool without required params",
1456 | api: "http://127.0.0.1:5000/api/tool/my-forecast-tool/invoke",
1457 | requestHeader: map[string]string{},
1458 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s"}`, historyDataTable))),
1459 | isErr: true,
1460 | },
1461 | {
1462 | name: "invoke my-forecast-tool with table",
1463 | api: "http://127.0.0.1:5000/api/tool/my-forecast-tool/invoke",
1464 | requestHeader: map[string]string{},
1465 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": "data"}`, historyDataTable))),
1466 | want: `"forecast_timestamp"`,
1467 | isErr: false,
1468 | },
1469 | {
1470 | name: "invoke my-forecast-tool with query and horizon",
1471 | api: "http://127.0.0.1:5000/api/tool/my-forecast-tool/invoke",
1472 | requestHeader: map[string]string{},
1473 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": "data", "horizon": 5}`, historyDataQuery))),
1474 | want: `"forecast_timestamp"`,
1475 | isErr: false,
1476 | },
1477 | {
1478 | name: "invoke my-forecast-tool with id_cols",
1479 | api: "http://127.0.0.1:5000/api/tool/my-forecast-tool/invoke",
1480 | requestHeader: map[string]string{},
1481 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": "data", "id_cols": ["id"]}`, historyDataTable))),
1482 | want: `"id"`,
1483 | isErr: false,
1484 | },
1485 | {
1486 | name: "invoke my-auth-forecast-tool with auth token",
1487 | api: "http://127.0.0.1:5000/api/tool/my-auth-forecast-tool/invoke",
1488 | requestHeader: map[string]string{"my-google-auth_token": idToken},
1489 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": "data"}`, historyDataTable))),
1490 | want: `"forecast_timestamp"`,
1491 | isErr: false,
1492 | },
1493 | {
1494 | name: "invoke my-auth-forecast-tool with invalid auth token",
1495 | api: "http://127.0.0.1:5000/api/tool/my-auth-forecast-tool/invoke",
1496 | requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"},
1497 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": "data"}`, historyDataTable))),
1498 | isErr: true,
1499 | },
1500 | {
1501 | name: "Invoke my-client-auth-forecast-tool with auth token",
1502 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-forecast-tool/invoke",
1503 | requestHeader: map[string]string{"Authorization": accessToken},
1504 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": "data"}`, historyDataTable))),
1505 | want: `"forecast_timestamp"`,
1506 | isErr: false,
1507 | },
1508 | {
1509 | name: "Invoke my-client-auth-forecast-tool without auth token",
1510 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-forecast-tool/invoke",
1511 | requestHeader: map[string]string{},
1512 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": "data"}`, historyDataTable))),
1513 | isErr: true,
1514 | },
1515 | {
1516 |
1517 | name: "Invoke my-client-auth-forecast-tool with invalid auth token",
1518 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-forecast-tool/invoke",
1519 | requestHeader: map[string]string{"Authorization": "Bearer invalid-token"},
1520 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": "data"}`, historyDataTable))),
1521 | isErr: true,
1522 | },
1523 | }
1524 | for _, tc := range invokeTcs {
1525 | t.Run(tc.name, func(t *testing.T) {
1526 | // Send Tool invocation request
1527 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
1528 | if err != nil {
1529 | t.Fatalf("unable to create request: %s", err)
1530 | }
1531 | req.Header.Add("Content-type", "application/json")
1532 | for k, v := range tc.requestHeader {
1533 | req.Header.Add(k, v)
1534 | }
1535 | resp, err := http.DefaultClient.Do(req)
1536 | if err != nil {
1537 | t.Fatalf("unable to send request: %s", err)
1538 | }
1539 | defer resp.Body.Close()
1540 |
1541 | if resp.StatusCode != http.StatusOK {
1542 | if tc.isErr {
1543 | return
1544 | }
1545 | bodyBytes, _ := io.ReadAll(resp.Body)
1546 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
1547 | }
1548 |
1549 | // Check response body
1550 | var body map[string]interface{}
1551 | err = json.NewDecoder(resp.Body).Decode(&body)
1552 | if err != nil {
1553 | t.Fatalf("error parsing response body")
1554 | }
1555 |
1556 | got, ok := body["result"].(string)
1557 | if !ok {
1558 | t.Fatalf("unable to find result in response body")
1559 | }
1560 |
1561 | if !strings.Contains(got, tc.want) {
1562 | t.Fatalf("expected %q to contain %q, but it did not", got, tc.want)
1563 | }
1564 | })
1565 | }
1566 | }
1567 |
1568 | func runBigQueryAnalyzeContributionToolInvokeTest(t *testing.T, tableName string) {
1569 | idToken, err := tests.GetGoogleIdToken(tests.ClientId)
1570 | if err != nil {
1571 | t.Fatalf("error getting Google ID token: %s", err)
1572 | }
1573 |
1574 | // Get access token
1575 | accessToken, err := sources.GetIAMAccessToken(t.Context())
1576 | if err != nil {
1577 | t.Fatalf("error getting access token from ADC: %s", err)
1578 | }
1579 | accessToken = "Bearer " + accessToken
1580 |
1581 | dataTable := strings.ReplaceAll(tableName, "`", "")
1582 |
1583 | invokeTcs := []struct {
1584 | name string
1585 | api string
1586 | requestHeader map[string]string
1587 | requestBody io.Reader
1588 | want string
1589 | isErr bool
1590 | }{
1591 | {
1592 | name: "invoke my-analyze-contribution-tool without required params",
1593 | api: "http://127.0.0.1:5000/api/tool/my-analyze-contribution-tool/invoke",
1594 | requestHeader: map[string]string{},
1595 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"input_data": "%s"}`, dataTable))),
1596 | isErr: true,
1597 | },
1598 | {
1599 | name: "invoke my-analyze-contribution-tool with table",
1600 | api: "http://127.0.0.1:5000/api/tool/my-analyze-contribution-tool/invoke",
1601 | requestHeader: map[string]string{},
1602 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"input_data": "%s", "contribution_metric": "SUM(metric)", "is_test_col": "is_test", "dimension_id_cols": ["dim1", "dim2"]}`, dataTable))),
1603 | want: `"relative_difference"`,
1604 | isErr: false,
1605 | },
1606 | {
1607 | name: "invoke my-auth-analyze-contribution-tool with auth token",
1608 | api: "http://127.0.0.1:5000/api/tool/my-auth-analyze-contribution-tool/invoke",
1609 | requestHeader: map[string]string{"my-google-auth_token": idToken},
1610 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"input_data": "%s", "contribution_metric": "SUM(metric)", "is_test_col": "is_test", "dimension_id_cols": ["dim1", "dim2"]}`, dataTable))),
1611 | want: `"relative_difference"`,
1612 | isErr: false,
1613 | },
1614 | {
1615 | name: "invoke my-auth-analyze-contribution-tool with invalid auth token",
1616 | api: "http://127.0.0.1:5000/api/tool/my-auth-analyze-contribution-tool/invoke",
1617 | requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"},
1618 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"input_data": "%s", "contribution_metric": "SUM(metric)", "is_test_col": "is_test", "dimension_id_cols": ["dim1", "dim2"]}`, dataTable))),
1619 | isErr: true,
1620 | },
1621 | {
1622 | name: "Invoke my-client-auth-analyze-contribution-tool with auth token",
1623 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-analyze-contribution-tool/invoke",
1624 | requestHeader: map[string]string{"Authorization": accessToken},
1625 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"input_data": "%s", "contribution_metric": "SUM(metric)", "is_test_col": "is_test", "dimension_id_cols": ["dim1", "dim2"]}`, dataTable))),
1626 | want: `"relative_difference"`,
1627 | isErr: false,
1628 | },
1629 | {
1630 | name: "Invoke my-client-auth-analyze-contribution-tool without auth token",
1631 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-analyze-contribution-tool/invoke",
1632 | requestHeader: map[string]string{},
1633 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"input_data": "%s", "contribution_metric": "SUM(metric)", "is_test_col": "is_test", "dimension_id_cols": ["dim1", "dim2"]}`, dataTable))),
1634 | isErr: true,
1635 | },
1636 | {
1637 |
1638 | name: "Invoke my-client-auth-analyze-contribution-tool with invalid auth token",
1639 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-analyze-contribution-tool/invoke",
1640 | requestHeader: map[string]string{"Authorization": "Bearer invalid-token"},
1641 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"input_data": "%s", "contribution_metric": "SUM(metric)", "is_test_col": "is_test", "dimension_id_cols": ["dim1", "dim2"]}`, dataTable))),
1642 | isErr: true,
1643 | },
1644 | }
1645 | for _, tc := range invokeTcs {
1646 | t.Run(tc.name, func(t *testing.T) {
1647 | // Send Tool invocation request
1648 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
1649 | if err != nil {
1650 | t.Fatalf("unable to create request: %s", err)
1651 | }
1652 | req.Header.Add("Content-type", "application/json")
1653 | for k, v := range tc.requestHeader {
1654 | req.Header.Add(k, v)
1655 | }
1656 | resp, err := http.DefaultClient.Do(req)
1657 | if err != nil {
1658 | t.Fatalf("unable to send request: %s", err)
1659 | }
1660 | defer resp.Body.Close()
1661 |
1662 | if resp.StatusCode != http.StatusOK {
1663 | if tc.isErr {
1664 | return
1665 | }
1666 | bodyBytes, _ := io.ReadAll(resp.Body)
1667 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
1668 | }
1669 |
1670 | // Check response body
1671 | var body map[string]interface{}
1672 | err = json.NewDecoder(resp.Body).Decode(&body)
1673 | if err != nil {
1674 | t.Fatalf("error parsing response body")
1675 | }
1676 |
1677 | got, ok := body["result"].(string)
1678 | if !ok {
1679 | t.Fatalf("unable to find result in response body")
1680 | }
1681 |
1682 | if !strings.Contains(got, tc.want) {
1683 | t.Fatalf("expected %q to contain %q, but it did not", got, tc.want)
1684 | }
1685 | })
1686 | }
1687 | }
1688 |
// runBigQueryDataTypeTests verifies scalar and array parameter binding:
// scalar parameters of each supported type, missing required parameters,
// and array-typed parameters, asserting the exact JSON rows returned.
func runBigQueryDataTypeTests(t *testing.T) {
	// Test tool invoke endpoint
	invokeTcs := []struct {
		name          string            // sub-test name
		api           string            // tool invocation endpoint
		requestHeader map[string]string // extra headers (none needed here)
		requestBody   io.Reader         // JSON payload
		want          string            // exact expected "result" field value
		isErr         bool              // true when a non-200 response is expected
	}{
		{
			name:          "invoke my-scalar-datatype-tool with values",
			api:           "http://127.0.0.1:5000/api/tool/my-scalar-datatype-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(`{"int_val": 123, "string_val": "hello", "float_val": 3.14, "bool_val": true}`)),
			want:          `[{"bool_val":true,"float_val":3.14,"id":1,"int_val":123,"string_val":"hello"}]`,
			isErr:         false,
		},
		{
			name:          "invoke my-scalar-datatype-tool with missing params",
			api:           "http://127.0.0.1:5000/api/tool/my-scalar-datatype-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(`{"int_val": 123}`)),
			isErr:         true,
		},
		{
			name:          "invoke my-array-datatype-tool",
			api:           "http://127.0.0.1:5000/api/tool/my-array-datatype-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(`{"int_array": [123, 789], "string_array": ["hello", "test"], "float_array": [3.14, 100.1], "bool_array": [true]}`)),
			want:          `[{"bool_val":true,"float_val":3.14,"id":1,"int_val":123,"string_val":"hello"},{"bool_val":true,"float_val":100.1,"id":3,"int_val":789,"string_val":"test"}]`,
			isErr:         false,
		},
	}
	for _, tc := range invokeTcs {
		t.Run(tc.name, func(t *testing.T) {
			// Send Tool invocation request
			req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")
			for k, v := range tc.requestHeader {
				req.Header.Add(k, v)
			}
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()

			if tc.isErr {
				// Fail explicitly when an expected-error case succeeds, rather
				// than relying on the exact-match check below to fail with a
				// misleading "unexpected value" message.
				if resp.StatusCode == http.StatusOK {
					t.Fatalf("expected an error response, but got status 200")
				}
				return
			}
			if resp.StatusCode != http.StatusOK {
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
			}

			// Check response body
			var body map[string]interface{}
			err = json.NewDecoder(resp.Body).Decode(&body)
			if err != nil {
				t.Fatalf("error parsing response body")
			}

			got, ok := body["result"].(string)
			if !ok {
				t.Fatalf("unable to find result in response body")
			}

			// Data-type round-tripping is asserted exactly, not by substring.
			if got != tc.want {
				t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
			}
		})
	}
}
1766 |
1767 | func runBigQueryListDatasetToolInvokeTest(t *testing.T, datasetWant string) {
1768 | // Get ID token
1769 | idToken, err := tests.GetGoogleIdToken(tests.ClientId)
1770 | if err != nil {
1771 | t.Fatalf("error getting Google ID token: %s", err)
1772 | }
1773 |
1774 | // Get access token
1775 | accessToken, err := sources.GetIAMAccessToken(t.Context())
1776 | if err != nil {
1777 | t.Fatalf("error getting access token from ADC: %s", err)
1778 | }
1779 | accessToken = "Bearer " + accessToken
1780 |
1781 | // Test tool invoke endpoint
1782 | invokeTcs := []struct {
1783 | name string
1784 | api string
1785 | requestHeader map[string]string
1786 | requestBody io.Reader
1787 | want string
1788 | isErr bool
1789 | }{
1790 | {
1791 | name: "invoke my-list-dataset-ids-tool",
1792 | api: "http://127.0.0.1:5000/api/tool/my-list-dataset-ids-tool/invoke",
1793 | requestHeader: map[string]string{},
1794 | requestBody: bytes.NewBuffer([]byte(`{}`)),
1795 | isErr: false,
1796 | want: datasetWant,
1797 | },
1798 | {
1799 | name: "invoke my-list-dataset-ids-tool with project",
1800 | api: "http://127.0.0.1:5000/api/tool/my-auth-list-dataset-ids-tool/invoke",
1801 | requestHeader: map[string]string{"my-google-auth_token": idToken},
1802 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s\"}", BigqueryProject))),
1803 | isErr: false,
1804 | want: datasetWant,
1805 | },
1806 | {
1807 | name: "invoke my-list-dataset-ids-tool with non-existent project",
1808 | api: "http://127.0.0.1:5000/api/tool/my-auth-list-dataset-ids-tool/invoke",
1809 | requestHeader: map[string]string{"my-google-auth_token": idToken},
1810 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s-%s\"}", BigqueryProject, uuid.NewString()))),
1811 | isErr: true,
1812 | },
1813 | {
1814 | name: "invoke my-auth-list-dataset-ids-tool",
1815 | api: "http://127.0.0.1:5000/api/tool/my-auth-list-dataset-ids-tool/invoke",
1816 | requestHeader: map[string]string{"my-google-auth_token": idToken},
1817 | requestBody: bytes.NewBuffer([]byte(`{}`)),
1818 | isErr: false,
1819 | want: datasetWant,
1820 | },
1821 | {
1822 | name: "Invoke my-client-auth-list-dataset-ids-tool with auth token",
1823 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-list-dataset-ids-tool/invoke",
1824 | requestHeader: map[string]string{"Authorization": accessToken},
1825 | requestBody: bytes.NewBuffer([]byte(`{}`)),
1826 | isErr: false,
1827 | want: datasetWant,
1828 | },
1829 | {
1830 | name: "Invoke my-client-auth-list-dataset-ids-tool without auth token",
1831 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-list-dataset-ids-tool/invoke",
1832 | requestHeader: map[string]string{},
1833 | requestBody: bytes.NewBuffer([]byte(`{}`)),
1834 | isErr: true,
1835 | },
1836 | {
1837 |
1838 | name: "Invoke my-client-auth-list-dataset-ids-tool with invalid auth token",
1839 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-list-dataset-ids-tool/invoke",
1840 | requestHeader: map[string]string{"Authorization": "Bearer invalid-token"},
1841 | requestBody: bytes.NewBuffer([]byte(`{}`)),
1842 | isErr: true,
1843 | },
1844 | }
1845 | for _, tc := range invokeTcs {
1846 | t.Run(tc.name, func(t *testing.T) {
1847 | // Send Tool invocation request
1848 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
1849 | if err != nil {
1850 | t.Fatalf("unable to create request: %s", err)
1851 | }
1852 | req.Header.Add("Content-type", "application/json")
1853 | for k, v := range tc.requestHeader {
1854 | req.Header.Add(k, v)
1855 | }
1856 | resp, err := http.DefaultClient.Do(req)
1857 | if err != nil {
1858 | t.Fatalf("unable to send request: %s", err)
1859 | }
1860 | defer resp.Body.Close()
1861 |
1862 | if resp.StatusCode != http.StatusOK {
1863 | if tc.isErr {
1864 | return
1865 | }
1866 | bodyBytes, _ := io.ReadAll(resp.Body)
1867 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
1868 | }
1869 |
1870 | // Check response body
1871 | var body map[string]interface{}
1872 | err = json.NewDecoder(resp.Body).Decode(&body)
1873 | if err != nil {
1874 | t.Fatalf("error parsing response body")
1875 | }
1876 |
1877 | got, ok := body["result"].(string)
1878 | if !ok {
1879 | t.Fatalf("unable to find result in response body")
1880 | }
1881 |
1882 | if !strings.Contains(got, tc.want) {
1883 | t.Fatalf("expected %q to contain %q, but it did not", got, tc.want)
1884 | }
1885 | })
1886 | }
1887 | }
1888 |
1889 | func runBigQueryGetDatasetInfoToolInvokeTest(t *testing.T, datasetName, datasetInfoWant string) {
1890 | // Get ID token
1891 | idToken, err := tests.GetGoogleIdToken(tests.ClientId)
1892 | if err != nil {
1893 | t.Fatalf("error getting Google ID token: %s", err)
1894 | }
1895 |
1896 | // Get access token
1897 | accessToken, err := sources.GetIAMAccessToken(t.Context())
1898 | if err != nil {
1899 | t.Fatalf("error getting access token from ADC: %s", err)
1900 | }
1901 | accessToken = "Bearer " + accessToken
1902 |
1903 | // Test tool invoke endpoint
1904 | invokeTcs := []struct {
1905 | name string
1906 | api string
1907 | requestHeader map[string]string
1908 | requestBody io.Reader
1909 | want string
1910 | isErr bool
1911 | }{
1912 | {
1913 | name: "invoke my-get-dataset-info-tool without body",
1914 | api: "http://127.0.0.1:5000/api/tool/my-get-dataset-info-tool/invoke",
1915 | requestHeader: map[string]string{},
1916 | requestBody: bytes.NewBuffer([]byte(`{}`)),
1917 | isErr: true,
1918 | },
1919 | {
1920 | name: "invoke my-get-dataset-info-tool",
1921 | api: "http://127.0.0.1:5000/api/tool/my-get-dataset-info-tool/invoke",
1922 | requestHeader: map[string]string{},
1923 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
1924 | want: datasetInfoWant,
1925 | isErr: false,
1926 | },
1927 | {
1928 | name: "Invoke my-auth-get-dataset-info-tool with correct project",
1929 | api: "http://127.0.0.1:5000/api/tool/my-auth-get-dataset-info-tool/invoke",
1930 | requestHeader: map[string]string{"my-google-auth_token": idToken},
1931 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s\", \"dataset\":\"%s\"}", BigqueryProject, datasetName))),
1932 | want: datasetInfoWant,
1933 | isErr: false,
1934 | },
1935 | {
1936 | name: "Invoke my-auth-get-dataset-info-tool with non-existent project",
1937 | api: "http://127.0.0.1:5000/api/tool/my-auth-get-dataset-info-tool/invoke",
1938 | requestHeader: map[string]string{"my-google-auth_token": idToken},
1939 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s-%s\", \"dataset\":\"%s\"}", BigqueryProject, uuid.NewString(), datasetName))),
1940 | isErr: true,
1941 | },
1942 | {
1943 | name: "invoke my-auth-get-dataset-info-tool without body",
1944 | api: "http://127.0.0.1:5000/api/tool/my-get-dataset-info-tool/invoke",
1945 | requestHeader: map[string]string{},
1946 | requestBody: bytes.NewBuffer([]byte(`{}`)),
1947 | isErr: true,
1948 | },
1949 | {
1950 | name: "Invoke my-auth-get-dataset-info-tool with auth token",
1951 | api: "http://127.0.0.1:5000/api/tool/my-auth-get-dataset-info-tool/invoke",
1952 | requestHeader: map[string]string{"my-google-auth_token": idToken},
1953 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
1954 | want: datasetInfoWant,
1955 | isErr: false,
1956 | },
1957 | {
1958 | name: "Invoke my-auth-get-dataset-info-tool with invalid auth token",
1959 | api: "http://127.0.0.1:5000/api/tool/my-auth-get-dataset-info-tool/invoke",
1960 | requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"},
1961 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
1962 | isErr: true,
1963 | },
1964 | {
1965 | name: "Invoke my-auth-get-dataset-info-tool without auth token",
1966 | api: "http://127.0.0.1:5000/api/tool/my-auth-get-dataset-info-tool/invoke",
1967 | requestHeader: map[string]string{},
1968 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
1969 | isErr: true,
1970 | },
1971 | {
1972 | name: "Invoke my-client-auth-get-dataset-info-tool with auth token",
1973 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-get-dataset-info-tool/invoke",
1974 | requestHeader: map[string]string{"Authorization": accessToken},
1975 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
1976 | want: datasetInfoWant,
1977 | isErr: false,
1978 | },
1979 | {
1980 | name: "Invoke my-client-auth-get-dataset-info-tool without auth token",
1981 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-get-dataset-info-tool/invoke",
1982 | requestHeader: map[string]string{},
1983 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
1984 | isErr: true,
1985 | },
1986 | {
1987 |
1988 | name: "Invoke my-client-auth-get-dataset-info-tool with invalid auth token",
1989 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-get-dataset-info-tool/invoke",
1990 | requestHeader: map[string]string{"Authorization": "Bearer invalid-token"},
1991 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
1992 | isErr: true,
1993 | },
1994 | }
1995 | for _, tc := range invokeTcs {
1996 | t.Run(tc.name, func(t *testing.T) {
1997 | // Send Tool invocation request
1998 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
1999 | if err != nil {
2000 | t.Fatalf("unable to create request: %s", err)
2001 | }
2002 | req.Header.Add("Content-type", "application/json")
2003 | for k, v := range tc.requestHeader {
2004 | req.Header.Add(k, v)
2005 | }
2006 | resp, err := http.DefaultClient.Do(req)
2007 | if err != nil {
2008 | t.Fatalf("unable to send request: %s", err)
2009 | }
2010 | defer resp.Body.Close()
2011 |
2012 | if resp.StatusCode != http.StatusOK {
2013 | if tc.isErr {
2014 | return
2015 | }
2016 | bodyBytes, _ := io.ReadAll(resp.Body)
2017 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
2018 | }
2019 |
2020 | // Check response body
2021 | var body map[string]interface{}
2022 | err = json.NewDecoder(resp.Body).Decode(&body)
2023 | if err != nil {
2024 | t.Fatalf("error parsing response body")
2025 | }
2026 |
2027 | got, ok := body["result"].(string)
2028 | if !ok {
2029 | t.Fatalf("unable to find result in response body")
2030 | }
2031 |
2032 | if !strings.Contains(got, tc.want) {
2033 | t.Fatalf("expected %q to contain %q, but it did not", got, tc.want)
2034 | }
2035 | })
2036 | }
2037 | }
2038 |
2039 | func runBigQueryListTableIdsToolInvokeTest(t *testing.T, datasetName, tablename_want string) {
2040 | // Get ID token
2041 | idToken, err := tests.GetGoogleIdToken(tests.ClientId)
2042 | if err != nil {
2043 | t.Fatalf("error getting Google ID token: %s", err)
2044 | }
2045 |
2046 | // Get access token
2047 | accessToken, err := sources.GetIAMAccessToken(t.Context())
2048 | if err != nil {
2049 | t.Fatalf("error getting access token from ADC: %s", err)
2050 | }
2051 | accessToken = "Bearer " + accessToken
2052 |
2053 | // Test tool invoke endpoint
2054 | invokeTcs := []struct {
2055 | name string
2056 | api string
2057 | requestHeader map[string]string
2058 | requestBody io.Reader
2059 | want string
2060 | isErr bool
2061 | }{
2062 | {
2063 | name: "invoke my-list-table-ids-tool without body",
2064 | api: "http://127.0.0.1:5000/api/tool/my-list-table-ids-tool/invoke",
2065 | requestHeader: map[string]string{},
2066 | requestBody: bytes.NewBuffer([]byte(`{}`)),
2067 | isErr: true,
2068 | },
2069 | {
2070 | name: "invoke my-list-table-ids-tool",
2071 | api: "http://127.0.0.1:5000/api/tool/my-list-table-ids-tool/invoke",
2072 | requestHeader: map[string]string{},
2073 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
2074 | want: tablename_want,
2075 | isErr: false,
2076 | },
2077 | {
2078 | name: "invoke my-list-table-ids-tool without body",
2079 | api: "http://127.0.0.1:5000/api/tool/my-list-table-ids-tool/invoke",
2080 | requestHeader: map[string]string{},
2081 | requestBody: bytes.NewBuffer([]byte(`{}`)),
2082 | isErr: true,
2083 | },
2084 | {
2085 | name: "Invoke my-auth-list-table-ids-tool with auth token",
2086 | api: "http://127.0.0.1:5000/api/tool/my-auth-list-table-ids-tool/invoke",
2087 | requestHeader: map[string]string{"my-google-auth_token": idToken},
2088 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
2089 | want: tablename_want,
2090 | isErr: false,
2091 | },
2092 | {
2093 | name: "Invoke my-auth-list-table-ids-tool with correct project",
2094 | api: "http://127.0.0.1:5000/api/tool/my-auth-list-table-ids-tool/invoke",
2095 | requestHeader: map[string]string{"my-google-auth_token": idToken},
2096 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s\", \"dataset\":\"%s\"}", BigqueryProject, datasetName))),
2097 | want: tablename_want,
2098 | isErr: false,
2099 | },
2100 | {
2101 | name: "Invoke my-auth-list-table-ids-tool with non-existent project",
2102 | api: "http://127.0.0.1:5000/api/tool/my-auth-list-table-ids-tool/invoke",
2103 | requestHeader: map[string]string{"my-google-auth_token": idToken},
2104 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s-%s\", \"dataset\":\"%s\"}", BigqueryProject, uuid.NewString(), datasetName))),
2105 | isErr: true,
2106 | },
2107 | {
2108 | name: "Invoke my-auth-list-table-ids-tool with invalid auth token",
2109 | api: "http://127.0.0.1:5000/api/tool/my-auth-list-table-ids-tool/invoke",
2110 | requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"},
2111 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
2112 | isErr: true,
2113 | },
2114 | {
2115 | name: "Invoke my-auth-list-table-ids-tool without auth token",
2116 | api: "http://127.0.0.1:5000/api/tool/my-auth-list-table-ids-tool/invoke",
2117 | requestHeader: map[string]string{},
2118 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
2119 | isErr: true,
2120 | },
2121 | {
2122 | name: "Invoke my-client-auth-list-table-ids-tool with auth token",
2123 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-list-table-ids-tool/invoke",
2124 | requestHeader: map[string]string{"Authorization": accessToken},
2125 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
2126 | want: tablename_want,
2127 | isErr: false,
2128 | },
2129 | {
2130 | name: "Invoke my-client-auth-list-table-ids-tool without auth token",
2131 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-list-table-ids-tool/invoke",
2132 | requestHeader: map[string]string{},
2133 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
2134 | isErr: true,
2135 | },
2136 | {
2137 |
2138 | name: "Invoke my-client-auth-list-table-ids-tool with invalid auth token",
2139 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-list-table-ids-tool/invoke",
2140 | requestHeader: map[string]string{"Authorization": "Bearer invalid-token"},
2141 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))),
2142 | isErr: true,
2143 | },
2144 | }
2145 | for _, tc := range invokeTcs {
2146 | t.Run(tc.name, func(t *testing.T) {
2147 | // Send Tool invocation request
2148 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
2149 | if err != nil {
2150 | t.Fatalf("unable to create request: %s", err)
2151 | }
2152 | req.Header.Add("Content-type", "application/json")
2153 | for k, v := range tc.requestHeader {
2154 | req.Header.Add(k, v)
2155 | }
2156 | resp, err := http.DefaultClient.Do(req)
2157 | if err != nil {
2158 | t.Fatalf("unable to send request: %s", err)
2159 | }
2160 | defer resp.Body.Close()
2161 |
2162 | if resp.StatusCode != http.StatusOK {
2163 | if tc.isErr {
2164 | return
2165 | }
2166 | bodyBytes, _ := io.ReadAll(resp.Body)
2167 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
2168 | }
2169 |
2170 | // Check response body
2171 | var body map[string]interface{}
2172 | err = json.NewDecoder(resp.Body).Decode(&body)
2173 | if err != nil {
2174 | t.Fatalf("error parsing response body")
2175 | }
2176 |
2177 | got, ok := body["result"].(string)
2178 | if !ok {
2179 | t.Fatalf("unable to find result in response body")
2180 | }
2181 |
2182 | if !strings.Contains(got, tc.want) {
2183 | t.Fatalf("expected %q to contain %q, but it did not", got, tc.want)
2184 | }
2185 | })
2186 | }
2187 | }
2188 |
2189 | func runBigQueryGetTableInfoToolInvokeTest(t *testing.T, datasetName, tableName, tableInfoWant string) {
2190 | // Get ID token
2191 | idToken, err := tests.GetGoogleIdToken(tests.ClientId)
2192 | if err != nil {
2193 | t.Fatalf("error getting Google ID token: %s", err)
2194 | }
2195 |
2196 | // Get access token
2197 | accessToken, err := sources.GetIAMAccessToken(t.Context())
2198 | if err != nil {
2199 | t.Fatalf("error getting access token from ADC: %s", err)
2200 | }
2201 | accessToken = "Bearer " + accessToken
2202 |
2203 | // Test tool invoke endpoint
2204 | invokeTcs := []struct {
2205 | name string
2206 | api string
2207 | requestHeader map[string]string
2208 | requestBody io.Reader
2209 | want string
2210 | isErr bool
2211 | }{
2212 | {
2213 | name: "invoke my-get-table-info-tool without body",
2214 | api: "http://127.0.0.1:5000/api/tool/my-get-table-info-tool/invoke",
2215 | requestHeader: map[string]string{},
2216 | requestBody: bytes.NewBuffer([]byte(`{}`)),
2217 | isErr: true,
2218 | },
2219 | {
2220 | name: "invoke my-get-table-info-tool",
2221 | api: "http://127.0.0.1:5000/api/tool/my-get-table-info-tool/invoke",
2222 | requestHeader: map[string]string{},
2223 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\", \"table\":\"%s\"}", datasetName, tableName))),
2224 | want: tableInfoWant,
2225 | isErr: false,
2226 | },
2227 | {
2228 | name: "invoke my-auth-get-table-info-tool without body",
2229 | api: "http://127.0.0.1:5000/api/tool/my-get-table-info-tool/invoke",
2230 | requestHeader: map[string]string{},
2231 | requestBody: bytes.NewBuffer([]byte(`{}`)),
2232 | isErr: true,
2233 | },
2234 | {
2235 | name: "Invoke my-auth-get-table-info-tool with auth token",
2236 | api: "http://127.0.0.1:5000/api/tool/my-auth-get-table-info-tool/invoke",
2237 | requestHeader: map[string]string{"my-google-auth_token": idToken},
2238 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\", \"table\":\"%s\"}", datasetName, tableName))),
2239 | want: tableInfoWant,
2240 | isErr: false,
2241 | },
2242 | {
2243 | name: "Invoke my-auth-get-table-info-tool with correct project",
2244 | api: "http://127.0.0.1:5000/api/tool/my-auth-get-table-info-tool/invoke",
2245 | requestHeader: map[string]string{"my-google-auth_token": idToken},
2246 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s\", \"dataset\":\"%s\", \"table\":\"%s\"}", BigqueryProject, datasetName, tableName))),
2247 | want: tableInfoWant,
2248 | isErr: false,
2249 | },
2250 | {
2251 | name: "Invoke my-auth-get-table-info-tool with non-existent project",
2252 | api: "http://127.0.0.1:5000/api/tool/my-auth-get-table-info-tool/invoke",
2253 | requestHeader: map[string]string{"my-google-auth_token": idToken},
2254 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s-%s\", \"dataset\":\"%s\", \"table\":\"%s\"}", BigqueryProject, uuid.NewString(), datasetName, tableName))),
2255 | isErr: true,
2256 | },
2257 | {
2258 | name: "Invoke my-auth-get-table-info-tool with invalid auth token",
2259 | api: "http://127.0.0.1:5000/api/tool/my-auth-get-table-info-tool/invoke",
2260 | requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"},
2261 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\", \"table\":\"%s\"}", datasetName, tableName))),
2262 | isErr: true,
2263 | },
2264 | {
2265 | name: "Invoke my-auth-get-table-info-tool without auth token",
2266 | api: "http://127.0.0.1:5000/api/tool/my-auth-get-table-info-tool/invoke",
2267 | requestHeader: map[string]string{},
2268 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\", \"table\":\"%s\"}", datasetName, tableName))),
2269 | isErr: true,
2270 | },
2271 | {
2272 | name: "Invoke my-client-auth-get-table-info-tool with auth token",
2273 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-get-table-info-tool/invoke",
2274 | requestHeader: map[string]string{"Authorization": accessToken},
2275 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\", \"table\":\"%s\"}", datasetName, tableName))),
2276 | want: tableInfoWant,
2277 | isErr: false,
2278 | },
2279 | {
2280 | name: "Invoke my-client-auth-get-table-info-tool without auth token",
2281 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-get-table-info-tool/invoke",
2282 | requestHeader: map[string]string{},
2283 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\", \"table\":\"%s\"}", datasetName, tableName))),
2284 | isErr: true,
2285 | },
2286 | {
2287 |
2288 | name: "Invoke my-client-auth-get-table-info-tool with invalid auth token",
2289 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-get-table-info-tool/invoke",
2290 | requestHeader: map[string]string{"Authorization": "Bearer invalid-token"},
2291 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\", \"table\":\"%s\"}", datasetName, tableName))),
2292 | isErr: true,
2293 | },
2294 | }
2295 | for _, tc := range invokeTcs {
2296 | t.Run(tc.name, func(t *testing.T) {
2297 | // Send Tool invocation request
2298 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
2299 | if err != nil {
2300 | t.Fatalf("unable to create request: %s", err)
2301 | }
2302 | req.Header.Add("Content-type", "application/json")
2303 | for k, v := range tc.requestHeader {
2304 | req.Header.Add(k, v)
2305 | }
2306 | resp, err := http.DefaultClient.Do(req)
2307 | if err != nil {
2308 | t.Fatalf("unable to send request: %s", err)
2309 | }
2310 | defer resp.Body.Close()
2311 |
2312 | if resp.StatusCode != http.StatusOK {
2313 | if tc.isErr {
2314 | return
2315 | }
2316 | bodyBytes, _ := io.ReadAll(resp.Body)
2317 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
2318 | }
2319 |
2320 | // Check response body
2321 | var body map[string]interface{}
2322 | err = json.NewDecoder(resp.Body).Decode(&body)
2323 | if err != nil {
2324 | t.Fatalf("error parsing response body")
2325 | }
2326 |
2327 | got, ok := body["result"].(string)
2328 | if !ok {
2329 | t.Fatalf("unable to find result in response body")
2330 | }
2331 |
2332 | if !strings.Contains(got, tc.want) {
2333 | t.Fatalf("expected %q to contain %q, but it did not", got, tc.want)
2334 | }
2335 | })
2336 | }
2337 | }
2338 |
2339 | func runBigQueryConversationalAnalyticsInvokeTest(t *testing.T, datasetName, tableName, dataInsightsWant string) {
2340 | // Each test is expected to complete in under 10s, we set a 25s timeout with retries to avoid flaky tests.
2341 | const maxRetries = 3
2342 | const requestTimeout = 25 * time.Second
2343 | // Get ID token
2344 | idToken, err := tests.GetGoogleIdToken(tests.ClientId)
2345 | if err != nil {
2346 | t.Fatalf("error getting Google ID token: %s", err)
2347 | }
2348 |
2349 | // Get access token
2350 | accessToken, err := sources.GetIAMAccessToken(t.Context())
2351 | if err != nil {
2352 | t.Fatalf("error getting access token from ADC: %s", err)
2353 | }
2354 | accessToken = "Bearer " + accessToken
2355 |
2356 | tableRefsJSON := fmt.Sprintf(`[{"projectId":"%s","datasetId":"%s","tableId":"%s"}]`, BigqueryProject, datasetName, tableName)
2357 |
2358 | invokeTcs := []struct {
2359 | name string
2360 | api string
2361 | requestHeader map[string]string
2362 | requestBody io.Reader
2363 | want string
2364 | isErr bool
2365 | }{
2366 | {
2367 | name: "invoke my-conversational-analytics-tool successfully",
2368 | api: "http://127.0.0.1:5000/api/tool/my-conversational-analytics-tool/invoke",
2369 | requestHeader: map[string]string{},
2370 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(
2371 | `{"user_query_with_context": "What are the names in the table?", "table_references": %q}`,
2372 | tableRefsJSON,
2373 | ))),
2374 | want: dataInsightsWant,
2375 | isErr: false,
2376 | },
2377 | {
2378 | name: "invoke my-auth-conversational-analytics-tool with auth token",
2379 | api: "http://127.0.0.1:5000/api/tool/my-auth-conversational-analytics-tool/invoke",
2380 | requestHeader: map[string]string{"my-google-auth_token": idToken},
2381 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(
2382 | `{"user_query_with_context": "What are the names in the table?", "table_references": %q}`,
2383 | tableRefsJSON,
2384 | ))),
2385 | want: dataInsightsWant,
2386 | isErr: false,
2387 | },
2388 | {
2389 | name: "invoke my-auth-conversational-analytics-tool without auth token",
2390 | api: "http://127.0.0.1:5000/api/tool/my-auth-conversational-analytics-tool/invoke",
2391 | requestHeader: map[string]string{},
2392 | requestBody: bytes.NewBuffer([]byte(`{"user_query_with_context": "What are the names in the table?"}`)),
2393 | isErr: true,
2394 | },
2395 | {
2396 | name: "Invoke my-client-auth-conversational-analytics-tool with auth token",
2397 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-conversational-analytics-tool/invoke",
2398 | requestHeader: map[string]string{"Authorization": accessToken},
2399 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(
2400 | `{"user_query_with_context": "What are the names in the table?", "table_references": %q}`,
2401 | tableRefsJSON,
2402 | ))),
2403 | want: "[{\"f0_\":1}]",
2404 | isErr: false,
2405 | },
2406 | {
2407 | name: "Invoke my-client-auth-conversational-analytics-tool without auth token",
2408 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-conversational-analytics-tool/invoke",
2409 | requestHeader: map[string]string{},
2410 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(
2411 | `{"user_query_with_context": "What are the names in the table?", "table_references": %q}`,
2412 | tableRefsJSON,
2413 | ))),
2414 | isErr: true,
2415 | },
2416 | {
2417 |
2418 | name: "Invoke my-client-auth-conversational-analytics-tool with invalid auth token",
2419 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-conversational-analytics-tool/invoke",
2420 | requestHeader: map[string]string{"Authorization": "Bearer invalid-token"},
2421 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(
2422 | `{"user_query_with_context": "What are the names in the table?", "table_references": %q}`,
2423 | tableRefsJSON,
2424 | ))),
2425 | isErr: true,
2426 | },
2427 | }
2428 | for _, tc := range invokeTcs {
2429 | t.Run(tc.name, func(t *testing.T) {
2430 | var resp *http.Response
2431 | var err error
2432 |
2433 | bodyBytes, err := io.ReadAll(tc.requestBody)
2434 | if err != nil {
2435 | t.Fatalf("failed to read request body: %v", err)
2436 | }
2437 |
2438 | req, err := http.NewRequest(http.MethodPost, tc.api, nil)
2439 | if err != nil {
2440 | t.Fatalf("unable to create request: %s", err)
2441 | }
2442 | req.Header.Set("Content-type", "application/json")
2443 | for k, v := range tc.requestHeader {
2444 | req.Header.Add(k, v)
2445 | }
2446 |
2447 | for i := 0; i < maxRetries; i++ {
2448 | ctx, cancel := context.WithTimeout(context.Background(), requestTimeout)
2449 | defer cancel()
2450 |
2451 | req.Body = io.NopCloser(bytes.NewReader(bodyBytes))
2452 | req.GetBody = func() (io.ReadCloser, error) {
2453 | return io.NopCloser(bytes.NewReader(bodyBytes)), nil
2454 | }
2455 | reqWithCtx := req.WithContext(ctx)
2456 |
2457 | resp, err = http.DefaultClient.Do(reqWithCtx)
2458 | if err != nil {
2459 | // Retry on time out.
2460 | if os.IsTimeout(err) {
2461 | t.Logf("Request timed out (attempt %d/%d), retrying...", i+1, maxRetries)
2462 | time.Sleep(5 * time.Second)
2463 | continue
2464 | }
2465 | t.Fatalf("unable to send request: %s", err)
2466 | }
2467 | if resp.StatusCode == http.StatusServiceUnavailable {
2468 | t.Logf("Received 503 Service Unavailable (attempt %d/%d), retrying...", i+1, maxRetries)
2469 | time.Sleep(15 * time.Second)
2470 | continue
2471 | }
2472 | break
2473 | }
2474 |
2475 | if err != nil {
2476 | t.Fatalf("Request failed after %d retries: %v", maxRetries, err)
2477 | }
2478 | defer resp.Body.Close()
2479 |
2480 | if resp.StatusCode != http.StatusOK {
2481 | if tc.isErr {
2482 | return
2483 | }
2484 | bodyBytes, _ := io.ReadAll(resp.Body)
2485 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
2486 | }
2487 |
2488 | var body map[string]interface{}
2489 | err = json.NewDecoder(resp.Body).Decode(&body)
2490 | if err != nil {
2491 | t.Fatalf("error parsing response body: %v", err)
2492 | }
2493 |
2494 | got, ok := body["result"].(string)
2495 | if !ok {
2496 | t.Fatalf("unable to find result in response body")
2497 | }
2498 |
2499 | wantPattern := regexp.MustCompile(tc.want)
2500 | if !wantPattern.MatchString(got) {
2501 | t.Fatalf("response did not match the expected pattern.\nFull response:\n%s", got)
2502 | }
2503 | })
2504 | }
2505 | }
2506 |
2507 | func runListDatasetIdsWithRestriction(t *testing.T, allowedDatasetName1, allowedDatasetName2 string) {
2508 | testCases := []struct {
2509 | name string
2510 | wantStatusCode int
2511 | wantElements []string
2512 | }{
2513 | {
2514 | name: "invoke list-dataset-ids with restriction",
2515 | wantStatusCode: http.StatusOK,
2516 | wantElements: []string{
2517 | fmt.Sprintf("%s.%s", BigqueryProject, allowedDatasetName1),
2518 | fmt.Sprintf("%s.%s", BigqueryProject, allowedDatasetName2),
2519 | },
2520 | },
2521 | }
2522 |
2523 | for _, tc := range testCases {
2524 | t.Run(tc.name, func(t *testing.T) {
2525 | body := bytes.NewBuffer([]byte(`{}`))
2526 | resp, bodyBytes := tests.RunRequest(t, http.MethodPost, "http://127.0.0.1:5000/api/tool/list-dataset-ids-restricted/invoke", body, nil)
2527 |
2528 | if resp.StatusCode != tc.wantStatusCode {
2529 | t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
2530 | }
2531 |
2532 | var respBody map[string]interface{}
2533 | if err := json.Unmarshal(bodyBytes, &respBody); err != nil {
2534 | t.Fatalf("error parsing response body: %v", err)
2535 | }
2536 |
2537 | gotJSON, ok := respBody["result"].(string)
2538 | if !ok {
2539 | t.Fatalf("unable to find 'result' as a string in response body: %s", string(bodyBytes))
2540 | }
2541 |
2542 | // Unmarshal the result string into a slice to compare contents.
2543 | var gotElements []string
2544 | if err := json.Unmarshal([]byte(gotJSON), &gotElements); err != nil {
2545 | t.Fatalf("error parsing result field JSON %q: %v", gotJSON, err)
2546 | }
2547 |
2548 | sort.Strings(gotElements)
2549 | sort.Strings(tc.wantElements)
2550 | if !reflect.DeepEqual(gotElements, tc.wantElements) {
2551 | t.Errorf("unexpected result:\n got: %v\nwant: %v", gotElements, tc.wantElements)
2552 | }
2553 | })
2554 | }
2555 | }
2556 |
// runListTableIdsWithRestriction verifies dataset-restriction enforcement for
// the restricted list-table-ids tool: the allowed dataset returns exactly the
// allowed table IDs, while the disallowed dataset is rejected with an
// access-denied error.
func runListTableIdsWithRestriction(t *testing.T, allowedDatasetName, disallowedDatasetName string, allowedTableNames ...string) {
	// Sort a copy so the caller's slice is never mutated — a variadic
	// parameter can share its backing array with the caller when invoked
	// with `names...`.
	sortedTableNames := append([]string(nil), allowedTableNames...)
	sort.Strings(sortedTableNames)
	var quotedNames []string
	for _, name := range sortedTableNames {
		quotedNames = append(quotedNames, fmt.Sprintf(`"%s"`, name))
	}
	wantResult := fmt.Sprintf(`[%s]`, strings.Join(quotedNames, ","))

	testCases := []struct {
		name           string
		dataset        string
		wantStatusCode int
		wantInResult   string
		wantInError    string
	}{
		{
			name:           "invoke on allowed dataset",
			dataset:        allowedDatasetName,
			wantStatusCode: http.StatusOK,
			wantInResult:   wantResult,
		},
		{
			name:           "invoke on disallowed dataset",
			dataset:        disallowedDatasetName,
			wantStatusCode: http.StatusBadRequest, // Or the specific error code returned
			wantInError:    fmt.Sprintf("access denied to dataset '%s'", disallowedDatasetName),
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			body := bytes.NewBuffer([]byte(fmt.Sprintf(`{"dataset":"%s"}`, tc.dataset)))
			req, err := http.NewRequest(http.MethodPost, "http://127.0.0.1:5000/api/tool/list-table-ids-restricted/invoke", body)
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()

			if resp.StatusCode != tc.wantStatusCode {
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
			}

			if tc.wantInResult != "" {
				var respBody map[string]interface{}
				if err := json.NewDecoder(resp.Body).Decode(&respBody); err != nil {
					t.Fatalf("error parsing response body: %v", err)
				}
				got, ok := respBody["result"].(string)
				if !ok {
					t.Fatalf("unable to find result in response body")
				}

				// Sort the returned IDs before comparing so the check is
				// order-independent.
				var gotSlice []string
				if err := json.Unmarshal([]byte(got), &gotSlice); err != nil {
					t.Fatalf("error unmarshalling result: %v", err)
				}
				sort.Strings(gotSlice)
				sortedGotBytes, err := json.Marshal(gotSlice)
				if err != nil {
					t.Fatalf("error marshalling sorted result: %v", err)
				}

				if string(sortedGotBytes) != tc.wantInResult {
					t.Errorf("unexpected result: got %q, want %q", string(sortedGotBytes), tc.wantInResult)
				}
			}

			if tc.wantInError != "" {
				bodyBytes, _ := io.ReadAll(resp.Body)
				if !strings.Contains(string(bodyBytes), tc.wantInError) {
					t.Errorf("unexpected error message: got %q, want to contain %q", string(bodyBytes), tc.wantInError)
				}
			}
		})
	}
}
2639 |
// runGetDatasetInfoWithRestriction checks that the restricted
// get-dataset-info tool serves the allowed dataset and rejects the
// disallowed one with an access-denied error.
func runGetDatasetInfoWithRestriction(t *testing.T, allowedDatasetName, disallowedDatasetName string) {
	type testCase struct {
		name           string
		dataset        string
		wantStatusCode int
		wantInError    string
	}
	cases := []testCase{
		{
			name:           "invoke on allowed dataset",
			dataset:        allowedDatasetName,
			wantStatusCode: http.StatusOK,
		},
		{
			name:           "invoke on disallowed dataset",
			dataset:        disallowedDatasetName,
			wantStatusCode: http.StatusBadRequest,
			wantInError:    fmt.Sprintf("access denied to dataset '%s'", disallowedDatasetName),
		},
	}

	const api = "http://127.0.0.1:5000/api/tool/get-dataset-info-restricted/invoke"
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			payload := bytes.NewBufferString(fmt.Sprintf(`{"dataset":"%s"}`, tc.dataset))
			req, err := http.NewRequest(http.MethodPost, api, payload)
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()

			if resp.StatusCode != tc.wantStatusCode {
				raw, _ := io.ReadAll(resp.Body)
				t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(raw))
			}

			if tc.wantInError == "" {
				return
			}
			raw, _ := io.ReadAll(resp.Body)
			if !strings.Contains(string(raw), tc.wantInError) {
				t.Errorf("unexpected error message: got %q, want to contain %q", string(raw), tc.wantInError)
			}
		})
	}
}
2688 |
// runGetTableInfoWithRestriction checks that the restricted get-table-info
// tool serves a table in the allowed dataset and rejects a table in the
// disallowed dataset with an access-denied error.
func runGetTableInfoWithRestriction(t *testing.T, allowedDatasetName, disallowedDatasetName, allowedTableName, disallowedTableName string) {
	type testCase struct {
		name           string
		dataset        string
		table          string
		wantStatusCode int
		wantInError    string
	}
	cases := []testCase{
		{
			name:           "invoke on allowed table",
			dataset:        allowedDatasetName,
			table:          allowedTableName,
			wantStatusCode: http.StatusOK,
		},
		{
			name:           "invoke on disallowed table",
			dataset:        disallowedDatasetName,
			table:          disallowedTableName,
			wantStatusCode: http.StatusBadRequest,
			wantInError:    fmt.Sprintf("access denied to dataset '%s'", disallowedDatasetName),
		},
	}

	const api = "http://127.0.0.1:5000/api/tool/get-table-info-restricted/invoke"
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			payload := bytes.NewBufferString(fmt.Sprintf(`{"dataset":"%s", "table":"%s"}`, tc.dataset, tc.table))
			req, err := http.NewRequest(http.MethodPost, api, payload)
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()

			if resp.StatusCode != tc.wantStatusCode {
				raw, _ := io.ReadAll(resp.Body)
				t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(raw))
			}

			if tc.wantInError == "" {
				return
			}
			raw, _ := io.ReadAll(resp.Body)
			if !strings.Contains(string(raw), tc.wantInError) {
				t.Errorf("unexpected error message: got %q, want to contain %q", string(raw), tc.wantInError)
			}
		})
	}
}
2740 |
// runExecuteSqlWithRestriction verifies dataset-restriction enforcement for
// the restricted execute-sql tool: queries against the allowed table succeed,
// while queries touching other datasets and dataset-level / unanalyzable
// statements are rejected with specific error messages.
func runExecuteSqlWithRestriction(t *testing.T, allowedTableFullName, disallowedTableFullName string) {
	// Expect a fully-qualified `project.dataset.table` name (backticks
	// optional).
	allowedTableParts := strings.Split(strings.Trim(allowedTableFullName, "`"), ".")
	if len(allowedTableParts) != 3 {
		t.Fatalf("invalid allowed table name format: %s", allowedTableFullName)
	}
	allowedDatasetID := allowedTableParts[1]

	testCases := []struct {
		name           string
		sql            string
		wantStatusCode int
		wantInError    string
	}{
		{
			name:           "invoke on allowed table",
			sql:            fmt.Sprintf("SELECT * FROM %s", allowedTableFullName),
			wantStatusCode: http.StatusOK,
		},
		{
			name:           "invoke on disallowed table",
			sql:            fmt.Sprintf("SELECT * FROM %s", disallowedTableFullName),
			wantStatusCode: http.StatusBadRequest,
			wantInError: fmt.Sprintf("query accesses dataset '%s', which is not in the allowed list",
				strings.Join(
					strings.Split(strings.Trim(disallowedTableFullName, "`"), ".")[0:2],
					".")),
		},
		{
			name:           "disallowed create schema",
			sql:            "CREATE SCHEMA another_dataset",
			wantStatusCode: http.StatusBadRequest,
			wantInError:    "dataset-level operations like 'CREATE_SCHEMA' are not allowed",
		},
		{
			name:           "disallowed alter schema",
			sql:            fmt.Sprintf("ALTER SCHEMA %s SET OPTIONS(description='new one')", allowedDatasetID),
			wantStatusCode: http.StatusBadRequest,
			wantInError:    "dataset-level operations like 'ALTER_SCHEMA' are not allowed",
		},
		{
			name:           "disallowed create function",
			sql:            fmt.Sprintf("CREATE FUNCTION %s.my_func() RETURNS INT64 AS (1)", allowedDatasetID),
			wantStatusCode: http.StatusBadRequest,
			wantInError:    "creating stored routines ('CREATE_FUNCTION') is not allowed",
		},
		{
			name:           "disallowed create procedure",
			sql:            fmt.Sprintf("CREATE PROCEDURE %s.my_proc() BEGIN SELECT 1; END", allowedDatasetID),
			wantStatusCode: http.StatusBadRequest,
			wantInError:    "unanalyzable statements like 'CREATE PROCEDURE' are not allowed",
		},
		{
			name:           "disallowed execute immediate",
			sql:            "EXECUTE IMMEDIATE 'SELECT 1'",
			wantStatusCode: http.StatusBadRequest,
			wantInError:    "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place",
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			// Marshal the payload so SQL containing quotes or backslashes is
			// escaped correctly instead of corrupting the JSON body.
			reqBytes, err := json.Marshal(map[string]string{"sql": tc.sql})
			if err != nil {
				t.Fatalf("failed to marshal request body: %v", err)
			}
			req, err := http.NewRequest(http.MethodPost, "http://127.0.0.1:5000/api/tool/execute-sql-restricted/invoke", bytes.NewBuffer(reqBytes))
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()

			if resp.StatusCode != tc.wantStatusCode {
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
			}

			if tc.wantInError != "" {
				bodyBytes, _ := io.ReadAll(resp.Body)
				if !strings.Contains(string(bodyBytes), tc.wantInError) {
					t.Errorf("unexpected error message: got %q, want to contain %q", string(bodyBytes), tc.wantInError)
				}
			}
		})
	}
}
2828 |
2829 | func runConversationalAnalyticsWithRestriction(t *testing.T, allowedDatasetName, disallowedDatasetName, allowedTableName, disallowedTableName string) {
2830 | allowedTableRefsJSON := fmt.Sprintf(`[{"projectId":"%s","datasetId":"%s","tableId":"%s"}]`, BigqueryProject, allowedDatasetName, allowedTableName)
2831 | disallowedTableRefsJSON := fmt.Sprintf(`[{"projectId":"%s","datasetId":"%s","tableId":"%s"}]`, BigqueryProject, disallowedDatasetName, disallowedTableName)
2832 |
2833 | testCases := []struct {
2834 | name string
2835 | tableRefs string
2836 | wantStatusCode int
2837 | wantInResult string
2838 | wantInError string
2839 | }{
2840 | {
2841 | name: "invoke with allowed table",
2842 | tableRefs: allowedTableRefsJSON,
2843 | wantStatusCode: http.StatusOK,
2844 | wantInResult: `Answer`,
2845 | },
2846 | {
2847 | name: "invoke with disallowed table",
2848 | tableRefs: disallowedTableRefsJSON,
2849 | wantStatusCode: http.StatusBadRequest,
2850 | wantInError: fmt.Sprintf("access to dataset '%s.%s' (from table '%s') is not allowed", BigqueryProject, disallowedDatasetName, disallowedTableName),
2851 | },
2852 | }
2853 |
2854 | for _, tc := range testCases {
2855 | t.Run(tc.name, func(t *testing.T) {
2856 | requestBodyMap := map[string]any{
2857 | "user_query_with_context": "What is in the table?",
2858 | "table_references": tc.tableRefs,
2859 | }
2860 | bodyBytes, err := json.Marshal(requestBodyMap)
2861 | if err != nil {
2862 | t.Fatalf("failed to marshal request body: %v", err)
2863 | }
2864 | body := bytes.NewBuffer(bodyBytes)
2865 |
2866 | req, err := http.NewRequest(http.MethodPost, "http://127.0.0.1:5000/api/tool/conversational-analytics-restricted/invoke", body)
2867 | if err != nil {
2868 | t.Fatalf("unable to create request: %s", err)
2869 | }
2870 | req.Header.Add("Content-type", "application/json")
2871 | resp, err := http.DefaultClient.Do(req)
2872 | if err != nil {
2873 | t.Fatalf("unable to send request: %s", err)
2874 | }
2875 | defer resp.Body.Close()
2876 |
2877 | if resp.StatusCode != tc.wantStatusCode {
2878 | bodyBytes, _ := io.ReadAll(resp.Body)
2879 | t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
2880 | }
2881 |
2882 | if tc.wantInResult != "" {
2883 | var respBody map[string]interface{}
2884 | if err := json.NewDecoder(resp.Body).Decode(&respBody); err != nil {
2885 | t.Fatalf("error parsing response body: %v", err)
2886 | }
2887 | got, ok := respBody["result"].(string)
2888 | if !ok {
2889 | t.Fatalf("unable to find result in response body")
2890 | }
2891 | if !strings.Contains(got, tc.wantInResult) {
2892 | t.Errorf("unexpected result: got %q, want to contain %q", got, tc.wantInResult)
2893 | }
2894 | }
2895 |
2896 | if tc.wantInError != "" {
2897 | bodyBytes, _ := io.ReadAll(resp.Body)
2898 | if !strings.Contains(string(bodyBytes), tc.wantInError) {
2899 | t.Errorf("unexpected error message: got %q, want to contain %q", string(bodyBytes), tc.wantInError)
2900 | }
2901 | }
2902 | })
2903 | }
2904 | }
2905 |
2906 | func runBigQuerySearchCatalogToolInvokeTest(t *testing.T, datasetName string, tableName string) {
2907 | // Get ID token
2908 | idToken, err := tests.GetGoogleIdToken(tests.ClientId)
2909 | if err != nil {
2910 | t.Fatalf("error getting Google ID token: %s", err)
2911 | }
2912 |
2913 | // Get access token
2914 | accessToken, err := sources.GetIAMAccessToken(t.Context())
2915 | if err != nil {
2916 | t.Fatalf("error getting access token from ADC: %s", err)
2917 | }
2918 | accessToken = "Bearer " + accessToken
2919 |
2920 | // Test tool invoke endpoint
2921 | invokeTcs := []struct {
2922 | name string
2923 | api string
2924 | requestHeader map[string]string
2925 | requestBody io.Reader
2926 | wantKey string
2927 | isErr bool
2928 | }{
2929 | {
2930 | name: "invoke my-search-catalog-tool without body",
2931 | api: "http://127.0.0.1:5000/api/tool/my-search-catalog-tool/invoke",
2932 | requestHeader: map[string]string{},
2933 | requestBody: bytes.NewBuffer([]byte(`{}`)),
2934 | isErr: true,
2935 | },
2936 | {
2937 | name: "invoke my-search-catalog-tool",
2938 | api: "http://127.0.0.1:5000/api/tool/my-search-catalog-tool/invoke",
2939 | requestHeader: map[string]string{},
2940 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"prompt\":\"%s\", \"types\":[\"TABLE\"], \"datasetIds\":[\"%s\"]}", tableName, datasetName))),
2941 | wantKey: "DisplayName",
2942 | isErr: false,
2943 | },
2944 | {
2945 | name: "Invoke my-auth-search-catalog-tool with auth token",
2946 | api: "http://127.0.0.1:5000/api/tool/my-auth-search-catalog-tool/invoke",
2947 | requestHeader: map[string]string{"my-google-auth_token": idToken},
2948 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"prompt\":\"%s\", \"types\":[\"TABLE\"], \"datasetIds\":[\"%s\"]}", tableName, datasetName))),
2949 | wantKey: "DisplayName",
2950 | isErr: false,
2951 | },
2952 | {
2953 | name: "Invoke my-auth-search-catalog-tool with correct project",
2954 | api: "http://127.0.0.1:5000/api/tool/my-auth-search-catalog-tool/invoke",
2955 | requestHeader: map[string]string{"my-google-auth_token": idToken},
2956 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"prompt\":\"%s\", \"types\":[\"TABLE\"], \"projectIds\":[\"%s\"], \"datasetIds\":[\"%s\"]}", tableName, BigqueryProject, datasetName))),
2957 | wantKey: "DisplayName",
2958 | isErr: false,
2959 | },
2960 | {
2961 | name: "Invoke my-auth-search-catalog-tool with non-existent project",
2962 | api: "http://127.0.0.1:5000/api/tool/my-auth-search-catalog-tool/invoke",
2963 | requestHeader: map[string]string{"my-google-auth_token": idToken},
2964 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"prompt\":\"%s\", \"types\":[\"TABLE\"], \"projectIds\":[\"%s-%s\"], \"datasetIds\":[\"%s\"]}", tableName, BigqueryProject, uuid.NewString(), datasetName))),
2965 | isErr: true,
2966 | },
2967 | {
2968 | name: "Invoke my-auth-search-catalog-tool with invalid auth token",
2969 | api: "http://127.0.0.1:5000/api/tool/my-auth-search-catalog-tool/invoke",
2970 | requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"},
2971 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"prompt\":\"%s\", \"types\":[\"TABLE\"], \"datasetIds\":[\"%s\"]}", tableName, datasetName))),
2972 | isErr: true,
2973 | },
2974 | {
2975 | name: "Invoke my-auth-search-catalog-tool without auth token",
2976 | api: "http://127.0.0.1:5000/api/tool/my-auth-search-catalog-tool/invoke",
2977 | requestHeader: map[string]string{},
2978 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"prompt\":\"%s\", \"types\":[\"TABLE\"], \"datasetIds\":[\"%s\"]}", tableName, datasetName))),
2979 | isErr: true,
2980 | },
2981 | {
2982 | name: "Invoke my-client-auth-search-catalog-tool without auth token",
2983 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-search-catalog-tool/invoke",
2984 | requestHeader: map[string]string{},
2985 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"prompt\":\"%s\", \"types\":[\"TABLE\"], \"datasetIds\":[\"%s\"]}", tableName, datasetName))),
2986 | isErr: true,
2987 | },
2988 | {
2989 | name: "Invoke my-client-auth-search-catalog-tool with auth token",
2990 | api: "http://127.0.0.1:5000/api/tool/my-client-auth-search-catalog-tool/invoke",
2991 | requestHeader: map[string]string{"Authorization": accessToken},
2992 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"prompt\":\"%s\", \"types\":[\"TABLE\"], \"datasetIds\":[\"%s\"]}", tableName, datasetName))),
2993 | wantKey: "DisplayName",
2994 | isErr: false,
2995 | },
2996 | }
2997 | for _, tc := range invokeTcs {
2998 | t.Run(tc.name, func(t *testing.T) {
2999 | // Send Tool invocation request
3000 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
3001 | if err != nil {
3002 | t.Fatalf("unable to create request: %s", err)
3003 | }
3004 | req.Header.Add("Content-type", "application/json")
3005 | for k, v := range tc.requestHeader {
3006 | req.Header.Add(k, v)
3007 | }
3008 | resp, err := http.DefaultClient.Do(req)
3009 | if err != nil {
3010 | t.Fatalf("unable to send request: %s", err)
3011 | }
3012 | defer resp.Body.Close()
3013 |
3014 | if resp.StatusCode != http.StatusOK {
3015 | if tc.isErr {
3016 | return
3017 | }
3018 | bodyBytes, _ := io.ReadAll(resp.Body)
3019 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
3020 | }
3021 |
3022 | var result map[string]interface{}
3023 | if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
3024 | t.Fatalf("error parsing response body: %s", err)
3025 | }
3026 | resultStr, ok := result["result"].(string)
3027 | if !ok {
3028 | if result["result"] == nil && tc.isErr {
3029 | return
3030 | }
3031 | t.Fatalf("expected 'result' field to be a string, got %T", result["result"])
3032 | }
3033 | if tc.isErr && (resultStr == "" || resultStr == "[]") {
3034 | return
3035 | }
3036 | var entries []interface{}
3037 | if err := json.Unmarshal([]byte(resultStr), &entries); err != nil {
3038 | t.Fatalf("error unmarshalling result string: %v", err)
3039 | }
3040 |
3041 | if !tc.isErr {
3042 | if len(entries) != 1 {
3043 | t.Fatalf("expected exactly one entry, but got %d", len(entries))
3044 | }
3045 | entry, ok := entries[0].(map[string]interface{})
3046 | if !ok {
3047 | t.Fatalf("expected first entry to be a map, got %T", entries[0])
3048 | }
3049 | respTable, ok := entry[tc.wantKey]
3050 | if !ok {
3051 | t.Fatalf("expected entry to have key '%s', but it was not found in %v", tc.wantKey, entry)
3052 | }
3053 | if respTable != tableName {
3054 | t.Fatalf("expected key '%s' to have value '%s', but got %s", tc.wantKey, tableName, respTable)
3055 | }
3056 | } else {
3057 | if len(entries) != 0 {
3058 | t.Fatalf("expected 0 entries, but got %d", len(entries))
3059 | }
3060 | }
3061 | })
3062 | }
3063 | }
3064 |
// runForecastWithRestriction verifies dataset-level access control on the
// forecast-restricted tool. history_data may be either a bare table name or a
// SQL query: both forms must succeed for the allowed table (result contains
// "forecast_timestamp") and be rejected with a 400 for the disallowed one,
// with distinct error messages for the table-name and query paths.
func runForecastWithRestriction(t *testing.T, allowedTableFullName, disallowedTableFullName string) {
	// The tool expects unquoted table names; the dataset FQN is the first two
	// components (project.dataset) of the disallowed table's name.
	allowedTableUnquoted := strings.ReplaceAll(allowedTableFullName, "`", "")
	disallowedTableUnquoted := strings.ReplaceAll(disallowedTableFullName, "`", "")
	disallowedDatasetFQN := strings.Join(strings.Split(disallowedTableUnquoted, ".")[0:2], ".")

	testCases := []struct {
		name           string
		historyData    string
		wantStatusCode int
		wantInResult   string // substring expected in the "result" field on success
		wantInError    string // substring expected in the raw body on failure
	}{
		{
			name:           "invoke with allowed table name",
			historyData:    allowedTableUnquoted,
			wantStatusCode: http.StatusOK,
			wantInResult:   `"forecast_timestamp"`,
		},
		{
			name:           "invoke with disallowed table name",
			historyData:    disallowedTableUnquoted,
			wantStatusCode: http.StatusBadRequest,
			wantInError:    fmt.Sprintf("access to dataset '%s' (from table '%s') is not allowed", disallowedDatasetFQN, disallowedTableUnquoted),
		},
		{
			name:           "invoke with query on allowed table",
			historyData:    fmt.Sprintf("SELECT * FROM %s", allowedTableFullName),
			wantStatusCode: http.StatusOK,
			wantInResult:   `"forecast_timestamp"`,
		},
		{
			name:           "invoke with query on disallowed table",
			historyData:    fmt.Sprintf("SELECT * FROM %s", disallowedTableFullName),
			wantStatusCode: http.StatusBadRequest,
			wantInError:    fmt.Sprintf("query in history_data accesses dataset '%s', which is not in the allowed list", disallowedDatasetFQN),
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			requestBodyMap := map[string]any{
				"history_data":  tc.historyData,
				"timestamp_col": "ts",
				"data_col":      "data",
			}
			reqBytes, err := json.Marshal(requestBodyMap)
			if err != nil {
				t.Fatalf("failed to marshal request body: %v", err)
			}

			req, err := http.NewRequest(http.MethodPost, "http://127.0.0.1:5000/api/tool/forecast-restricted/invoke", bytes.NewBuffer(reqBytes))
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()

			// Read the body exactly once. The previous version consumed the
			// stream with a JSON decoder and then again with io.ReadAll, which
			// only worked because no case set both wantInResult and wantInError.
			respBytes, err := io.ReadAll(resp.Body)
			if err != nil {
				t.Fatalf("unable to read response body: %v", err)
			}

			if resp.StatusCode != tc.wantStatusCode {
				t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(respBytes))
			}

			if tc.wantInResult != "" {
				var respBody map[string]interface{}
				if err := json.Unmarshal(respBytes, &respBody); err != nil {
					t.Fatalf("error parsing response body: %v", err)
				}
				got, ok := respBody["result"].(string)
				if !ok {
					t.Fatalf("unable to find result in response body")
				}
				if !strings.Contains(got, tc.wantInResult) {
					t.Errorf("unexpected result: got %q, want to contain %q", got, tc.wantInResult)
				}
			}

			if tc.wantInError != "" {
				if !strings.Contains(string(respBytes), tc.wantInError) {
					t.Errorf("unexpected error message: got %q, want to contain %q", string(respBytes), tc.wantInError)
				}
			}
		})
	}
}
3155 |
3156 | func runAnalyzeContributionWithRestriction(t *testing.T, allowedTableFullName, disallowedTableFullName string) {
3157 | allowedTableUnquoted := strings.ReplaceAll(allowedTableFullName, "`", "")
3158 | disallowedTableUnquoted := strings.ReplaceAll(disallowedTableFullName, "`", "")
3159 | disallowedDatasetFQN := strings.Join(strings.Split(disallowedTableUnquoted, ".")[0:2], ".")
3160 |
3161 | testCases := []struct {
3162 | name string
3163 | inputData string
3164 | wantStatusCode int
3165 | wantInResult string
3166 | wantInError string
3167 | }{
3168 | {
3169 | name: "invoke with allowed table name",
3170 | inputData: allowedTableUnquoted,
3171 | wantStatusCode: http.StatusOK,
3172 | wantInResult: `"relative_difference"`,
3173 | },
3174 | {
3175 | name: "invoke with disallowed table name",
3176 | inputData: disallowedTableUnquoted,
3177 | wantStatusCode: http.StatusBadRequest,
3178 | wantInError: fmt.Sprintf("access to dataset '%s' (from table '%s') is not allowed", disallowedDatasetFQN, disallowedTableUnquoted),
3179 | },
3180 | {
3181 | name: "invoke with query on allowed table",
3182 | inputData: fmt.Sprintf("SELECT * FROM %s", allowedTableFullName),
3183 | wantStatusCode: http.StatusOK,
3184 | wantInResult: `"relative_difference"`,
3185 | },
3186 | {
3187 | name: "invoke with query on disallowed table",
3188 | inputData: fmt.Sprintf("SELECT * FROM %s", disallowedTableFullName),
3189 | wantStatusCode: http.StatusBadRequest,
3190 | wantInError: fmt.Sprintf("query in input_data accesses dataset '%s', which is not in the allowed list", disallowedDatasetFQN),
3191 | },
3192 | }
3193 |
3194 | for _, tc := range testCases {
3195 | t.Run(tc.name, func(t *testing.T) {
3196 | requestBodyMap := map[string]any{
3197 | "input_data": tc.inputData,
3198 | "contribution_metric": "SUM(metric)",
3199 | "is_test_col": "is_test",
3200 | "dimension_id_cols": []string{"dim1", "dim2"},
3201 | }
3202 | bodyBytes, err := json.Marshal(requestBodyMap)
3203 | if err != nil {
3204 | t.Fatalf("failed to marshal request body: %v", err)
3205 | }
3206 | body := bytes.NewBuffer(bodyBytes)
3207 |
3208 | resp, bodyBytes := tests.RunRequest(t, http.MethodPost, "http://127.0.0.1:5000/api/tool/analyze-contribution-restricted/invoke", body, nil)
3209 |
3210 | if resp.StatusCode != tc.wantStatusCode {
3211 | t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
3212 | }
3213 |
3214 | var respBody map[string]interface{}
3215 | if err := json.Unmarshal(bodyBytes, &respBody); err != nil {
3216 | t.Fatalf("error parsing response body: %v", err)
3217 | }
3218 |
3219 | if tc.wantInResult != "" {
3220 | got, ok := respBody["result"].(string)
3221 | if !ok {
3222 | t.Fatalf("unable to find result in response body")
3223 | }
3224 |
3225 | if !strings.Contains(got, tc.wantInResult) {
3226 | t.Errorf("unexpected result: got %q, want to contain %q", string(bodyBytes), tc.wantInResult)
3227 | }
3228 | }
3229 |
3230 | if tc.wantInError != "" {
3231 | if !strings.Contains(string(bodyBytes), tc.wantInError) {
3232 | t.Errorf("unexpected error message: got %q, want to contain %q", string(bodyBytes), tc.wantInError)
3233 | }
3234 | }
3235 | })
3236 | }
3237 | }
3238 |
```