This is page 33 of 33. Use http://codebase.md/googleapis/genai-toolbox?page={x} to view the full context. # Directory Structure ``` ├── .ci │ ├── continuous.release.cloudbuild.yaml │ ├── generate_release_table.sh │ ├── integration.cloudbuild.yaml │ ├── quickstart_test │ │ ├── go.integration.cloudbuild.yaml │ │ ├── js.integration.cloudbuild.yaml │ │ ├── py.integration.cloudbuild.yaml │ │ ├── run_go_tests.sh │ │ ├── run_js_tests.sh │ │ ├── run_py_tests.sh │ │ └── setup_hotels_sample.sql │ ├── test_with_coverage.sh │ └── versioned.release.cloudbuild.yaml ├── .github │ ├── auto-label.yaml │ ├── blunderbuss.yml │ ├── CODEOWNERS │ ├── header-checker-lint.yml │ ├── ISSUE_TEMPLATE │ │ ├── bug_report.yml │ │ ├── config.yml │ │ ├── feature_request.yml │ │ └── question.yml │ ├── label-sync.yml │ ├── labels.yaml │ ├── PULL_REQUEST_TEMPLATE.md │ ├── release-please.yml │ ├── renovate.json5 │ ├── sync-repo-settings.yaml │ └── workflows │ ├── cloud_build_failure_reporter.yml │ ├── deploy_dev_docs.yaml │ ├── deploy_previous_version_docs.yaml │ ├── deploy_versioned_docs.yaml │ ├── docs_deploy.yaml │ ├── docs_preview_clean.yaml │ ├── docs_preview_deploy.yaml │ ├── lint.yaml │ ├── schedule_reporter.yml │ ├── sync-labels.yaml │ └── tests.yaml ├── .gitignore ├── .gitmodules ├── .golangci.yaml ├── .hugo │ ├── archetypes │ │ └── default.md │ ├── assets │ │ ├── icons │ │ │ └── logo.svg │ │ └── scss │ │ ├── _styles_project.scss │ │ └── _variables_project.scss │ ├── go.mod │ ├── go.sum │ ├── hugo.toml │ ├── layouts │ │ ├── _default │ │ │ └── home.releases.releases │ │ ├── index.llms-full.txt │ │ ├── index.llms.txt │ │ ├── partials │ │ │ ├── hooks │ │ │ │ └── head-end.html │ │ │ ├── navbar-version-selector.html │ │ │ ├── page-meta-links.html │ │ │ └── td │ │ │ └── render-heading.html │ │ ├── robot.txt │ │ └── shortcodes │ │ ├── include.html │ │ ├── ipynb.html │ │ └── regionInclude.html │ ├── package-lock.json │ ├── package.json │ └── static │ ├── favicons │ │ ├── android-chrome-192x192.png │ 
│ ├── android-chrome-512x512.png │ │ ├── apple-touch-icon.png │ │ ├── favicon-16x16.png │ │ ├── favicon-32x32.png │ │ └── favicon.ico │ └── js │ └── w3.js ├── CHANGELOG.md ├── cmd │ ├── options_test.go │ ├── options.go │ ├── root_test.go │ ├── root.go │ └── version.txt ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── DEVELOPER.md ├── Dockerfile ├── docs │ └── en │ ├── _index.md │ ├── about │ │ ├── _index.md │ │ └── faq.md │ ├── concepts │ │ ├── _index.md │ │ └── telemetry │ │ ├── index.md │ │ ├── telemetry_flow.png │ │ └── telemetry_traces.png │ ├── getting-started │ │ ├── _index.md │ │ ├── colab_quickstart.ipynb │ │ ├── configure.md │ │ ├── introduction │ │ │ ├── _index.md │ │ │ └── architecture.png │ │ ├── local_quickstart_go.md │ │ ├── local_quickstart_js.md │ │ ├── local_quickstart.md │ │ ├── mcp_quickstart │ │ │ ├── _index.md │ │ │ ├── inspector_tools.png │ │ │ └── inspector.png │ │ └── quickstart │ │ ├── go │ │ │ ├── genAI │ │ │ │ ├── go.mod │ │ │ │ ├── go.sum │ │ │ │ └── quickstart.go │ │ │ ├── genkit │ │ │ │ ├── go.mod │ │ │ │ ├── go.sum │ │ │ │ └── quickstart.go │ │ │ ├── langchain │ │ │ │ ├── go.mod │ │ │ │ ├── go.sum │ │ │ │ └── quickstart.go │ │ │ ├── openAI │ │ │ │ ├── go.mod │ │ │ │ ├── go.sum │ │ │ │ └── quickstart.go │ │ │ └── quickstart_test.go │ │ ├── golden.txt │ │ ├── js │ │ │ ├── genAI │ │ │ │ ├── package-lock.json │ │ │ │ ├── package.json │ │ │ │ └── quickstart.js │ │ │ ├── genkit │ │ │ │ ├── package-lock.json │ │ │ │ ├── package.json │ │ │ │ └── quickstart.js │ │ │ ├── langchain │ │ │ │ ├── package-lock.json │ │ │ │ ├── package.json │ │ │ │ └── quickstart.js │ │ │ ├── llamaindex │ │ │ │ ├── package-lock.json │ │ │ │ ├── package.json │ │ │ │ └── quickstart.js │ │ │ └── quickstart.test.js │ │ ├── python │ │ │ ├── __init__.py │ │ │ ├── adk │ │ │ │ ├── quickstart.py │ │ │ │ └── requirements.txt │ │ │ ├── core │ │ │ │ ├── quickstart.py │ │ │ │ └── requirements.txt │ │ │ ├── langchain │ │ │ │ ├── quickstart.py │ │ │ │ └── requirements.txt │ │ │ ├── 
llamaindex │ │ │ │ ├── quickstart.py │ │ │ │ └── requirements.txt │ │ │ └── quickstart_test.py │ │ └── shared │ │ ├── cloud_setup.md │ │ ├── configure_toolbox.md │ │ └── database_setup.md │ ├── how-to │ │ ├── _index.md │ │ ├── connect_via_geminicli.md │ │ ├── connect_via_mcp.md │ │ ├── connect-ide │ │ │ ├── _index.md │ │ │ ├── alloydb_pg_admin_mcp.md │ │ │ ├── alloydb_pg_mcp.md │ │ │ ├── bigquery_mcp.md │ │ │ ├── cloud_sql_mssql_admin_mcp.md │ │ │ ├── cloud_sql_mssql_mcp.md │ │ │ ├── cloud_sql_mysql_admin_mcp.md │ │ │ ├── cloud_sql_mysql_mcp.md │ │ │ ├── cloud_sql_pg_admin_mcp.md │ │ │ ├── cloud_sql_pg_mcp.md │ │ │ ├── firestore_mcp.md │ │ │ ├── looker_mcp.md │ │ │ ├── mssql_mcp.md │ │ │ ├── mysql_mcp.md │ │ │ ├── neo4j_mcp.md │ │ │ ├── postgres_mcp.md │ │ │ ├── spanner_mcp.md │ │ │ └── sqlite_mcp.md │ │ ├── deploy_docker.md │ │ ├── deploy_gke.md │ │ ├── deploy_toolbox.md │ │ ├── export_telemetry.md │ │ └── toolbox-ui │ │ ├── edit-headers.gif │ │ ├── edit-headers.png │ │ ├── index.md │ │ ├── optional-param-checked.png │ │ ├── optional-param-unchecked.png │ │ ├── run-tool.gif │ │ ├── tools.png │ │ └── toolsets.png │ ├── reference │ │ ├── _index.md │ │ ├── cli.md │ │ └── prebuilt-tools.md │ ├── resources │ │ ├── _index.md │ │ ├── authServices │ │ │ ├── _index.md │ │ │ └── google.md │ │ ├── sources │ │ │ ├── _index.md │ │ │ ├── alloydb-admin.md │ │ │ ├── alloydb-pg.md │ │ │ ├── bigquery.md │ │ │ ├── bigtable.md │ │ │ ├── cassandra.md │ │ │ ├── clickhouse.md │ │ │ ├── cloud-monitoring.md │ │ │ ├── cloud-sql-admin.md │ │ │ ├── cloud-sql-mssql.md │ │ │ ├── cloud-sql-mysql.md │ │ │ ├── cloud-sql-pg.md │ │ │ ├── couchbase.md │ │ │ ├── dataplex.md │ │ │ ├── dgraph.md │ │ │ ├── firebird.md │ │ │ ├── firestore.md │ │ │ ├── http.md │ │ │ ├── looker.md │ │ │ ├── mongodb.md │ │ │ ├── mssql.md │ │ │ ├── mysql.md │ │ │ ├── neo4j.md │ │ │ ├── oceanbase.md │ │ │ ├── oracle.md │ │ │ ├── postgres.md │ │ │ ├── redis.md │ │ │ ├── spanner.md │ │ │ ├── sqlite.md │ │ │ ├── tidb.md │ │ │ 
├── trino.md │ │ │ ├── valkey.md │ │ │ └── yugabytedb.md │ │ └── tools │ │ ├── _index.md │ │ ├── alloydb │ │ │ ├── _index.md │ │ │ ├── alloydb-create-cluster.md │ │ │ ├── alloydb-create-instance.md │ │ │ ├── alloydb-create-user.md │ │ │ ├── alloydb-get-cluster.md │ │ │ ├── alloydb-get-instance.md │ │ │ ├── alloydb-get-user.md │ │ │ ├── alloydb-list-clusters.md │ │ │ ├── alloydb-list-instances.md │ │ │ ├── alloydb-list-users.md │ │ │ └── alloydb-wait-for-operation.md │ │ ├── alloydbainl │ │ │ ├── _index.md │ │ │ └── alloydb-ai-nl.md │ │ ├── bigquery │ │ │ ├── _index.md │ │ │ ├── bigquery-analyze-contribution.md │ │ │ ├── bigquery-conversational-analytics.md │ │ │ ├── bigquery-execute-sql.md │ │ │ ├── bigquery-forecast.md │ │ │ ├── bigquery-get-dataset-info.md │ │ │ ├── bigquery-get-table-info.md │ │ │ ├── bigquery-list-dataset-ids.md │ │ │ ├── bigquery-list-table-ids.md │ │ │ ├── bigquery-search-catalog.md │ │ │ └── bigquery-sql.md │ │ ├── bigtable │ │ │ ├── _index.md │ │ │ └── bigtable-sql.md │ │ ├── cassandra │ │ │ ├── _index.md │ │ │ └── cassandra-cql.md │ │ ├── clickhouse │ │ │ ├── _index.md │ │ │ ├── clickhouse-execute-sql.md │ │ │ ├── clickhouse-list-databases.md │ │ │ ├── clickhouse-list-tables.md │ │ │ └── clickhouse-sql.md │ │ ├── cloudmonitoring │ │ │ ├── _index.md │ │ │ └── cloud-monitoring-query-prometheus.md │ │ ├── cloudsql │ │ │ ├── _index.md │ │ │ ├── cloudsqlcreatedatabase.md │ │ │ ├── cloudsqlcreateusers.md │ │ │ ├── cloudsqlgetinstances.md │ │ │ ├── cloudsqllistdatabases.md │ │ │ ├── cloudsqllistinstances.md │ │ │ ├── cloudsqlmssqlcreateinstance.md │ │ │ ├── cloudsqlmysqlcreateinstance.md │ │ │ ├── cloudsqlpgcreateinstances.md │ │ │ └── cloudsqlwaitforoperation.md │ │ ├── couchbase │ │ │ ├── _index.md │ │ │ └── couchbase-sql.md │ │ ├── dataform │ │ │ ├── _index.md │ │ │ └── dataform-compile-local.md │ │ ├── dataplex │ │ │ ├── _index.md │ │ │ ├── dataplex-lookup-entry.md │ │ │ ├── dataplex-search-aspect-types.md │ │ │ └── dataplex-search-entries.md 
│ │ ├── dgraph │ │ │ ├── _index.md │ │ │ └── dgraph-dql.md │ │ ├── firebird │ │ │ ├── _index.md │ │ │ ├── firebird-execute-sql.md │ │ │ └── firebird-sql.md │ │ ├── firestore │ │ │ ├── _index.md │ │ │ ├── firestore-add-documents.md │ │ │ ├── firestore-delete-documents.md │ │ │ ├── firestore-get-documents.md │ │ │ ├── firestore-get-rules.md │ │ │ ├── firestore-list-collections.md │ │ │ ├── firestore-query-collection.md │ │ │ ├── firestore-query.md │ │ │ ├── firestore-update-document.md │ │ │ └── firestore-validate-rules.md │ │ ├── http │ │ │ ├── _index.md │ │ │ └── http.md │ │ ├── looker │ │ │ ├── _index.md │ │ │ ├── looker-add-dashboard-element.md │ │ │ ├── looker-conversational-analytics.md │ │ │ ├── looker-get-dashboards.md │ │ │ ├── looker-get-dimensions.md │ │ │ ├── looker-get-explores.md │ │ │ ├── looker-get-filters.md │ │ │ ├── looker-get-looks.md │ │ │ ├── looker-get-measures.md │ │ │ ├── looker-get-models.md │ │ │ ├── looker-get-parameters.md │ │ │ ├── looker-health-analyze.md │ │ │ ├── looker-health-pulse.md │ │ │ ├── looker-health-vacuum.md │ │ │ ├── looker-make-dashboard.md │ │ │ ├── looker-make-look.md │ │ │ ├── looker-query-sql.md │ │ │ ├── looker-query-url.md │ │ │ ├── looker-query.md │ │ │ └── looker-run-look.md │ │ ├── mongodb │ │ │ ├── _index.md │ │ │ ├── mongodb-aggregate.md │ │ │ ├── mongodb-delete-many.md │ │ │ ├── mongodb-delete-one.md │ │ │ ├── mongodb-find-one.md │ │ │ ├── mongodb-find.md │ │ │ ├── mongodb-insert-many.md │ │ │ ├── mongodb-insert-one.md │ │ │ ├── mongodb-update-many.md │ │ │ └── mongodb-update-one.md │ │ ├── mssql │ │ │ ├── _index.md │ │ │ ├── mssql-execute-sql.md │ │ │ ├── mssql-list-tables.md │ │ │ └── mssql-sql.md │ │ ├── mysql │ │ │ ├── _index.md │ │ │ ├── mysql-execute-sql.md │ │ │ ├── mysql-list-active-queries.md │ │ │ ├── mysql-list-table-fragmentation.md │ │ │ ├── mysql-list-tables-missing-unique-indexes.md │ │ │ ├── mysql-list-tables.md │ │ │ └── mysql-sql.md │ │ ├── neo4j │ │ │ ├── _index.md │ │ │ ├── neo4j-cypher.md 
│ │ │ ├── neo4j-execute-cypher.md │ │ │ └── neo4j-schema.md │ │ ├── oceanbase │ │ │ ├── _index.md │ │ │ ├── oceanbase-execute-sql.md │ │ │ └── oceanbase-sql.md │ │ ├── oracle │ │ │ ├── _index.md │ │ │ ├── oracle-execute-sql.md │ │ │ └── oracle-sql.md │ │ ├── postgres │ │ │ ├── _index.md │ │ │ ├── postgres-execute-sql.md │ │ │ ├── postgres-list-active-queries.md │ │ │ ├── postgres-list-available-extensions.md │ │ │ ├── postgres-list-installed-extensions.md │ │ │ ├── postgres-list-tables.md │ │ │ └── postgres-sql.md │ │ ├── redis │ │ │ ├── _index.md │ │ │ └── redis.md │ │ ├── spanner │ │ │ ├── _index.md │ │ │ ├── spanner-execute-sql.md │ │ │ ├── spanner-list-tables.md │ │ │ └── spanner-sql.md │ │ ├── sqlite │ │ │ ├── _index.md │ │ │ ├── sqlite-execute-sql.md │ │ │ └── sqlite-sql.md │ │ ├── tidb │ │ │ ├── _index.md │ │ │ ├── tidb-execute-sql.md │ │ │ └── tidb-sql.md │ │ ├── trino │ │ │ ├── _index.md │ │ │ ├── trino-execute-sql.md │ │ │ └── trino-sql.md │ │ ├── utility │ │ │ ├── _index.md │ │ │ └── wait.md │ │ ├── valkey │ │ │ ├── _index.md │ │ │ └── valkey.md │ │ └── yuagbytedb │ │ ├── _index.md │ │ └── yugabytedb-sql.md │ ├── samples │ │ ├── _index.md │ │ ├── alloydb │ │ │ ├── _index.md │ │ │ ├── ai-nl │ │ │ │ ├── alloydb_ai_nl.ipynb │ │ │ │ └── index.md │ │ │ └── mcp_quickstart.md │ │ ├── bigquery │ │ │ ├── _index.md │ │ │ ├── colab_quickstart_bigquery.ipynb │ │ │ ├── local_quickstart.md │ │ │ └── mcp_quickstart │ │ │ ├── _index.md │ │ │ ├── inspector_tools.png │ │ │ └── inspector.png │ │ └── looker │ │ ├── _index.md │ │ ├── looker_gemini_oauth │ │ │ ├── _index.md │ │ │ ├── authenticated.png │ │ │ ├── authorize.png │ │ │ └── registration.png │ │ ├── looker_gemini.md │ │ └── looker_mcp_inspector │ │ ├── _index.md │ │ ├── inspector_tools.png │ │ └── inspector.png │ └── sdks │ ├── _index.md │ ├── go-sdk.md │ ├── js-sdk.md │ └── python-sdk.md ├── go.mod ├── go.sum ├── internal │ ├── auth │ │ ├── auth.go │ │ └── google │ │ └── google.go │ ├── log │ │ ├── handler.go │ │ 
├── log_test.go │ │ ├── log.go │ │ └── logger.go │ ├── prebuiltconfigs │ │ ├── prebuiltconfigs_test.go │ │ ├── prebuiltconfigs.go │ │ └── tools │ │ ├── alloydb-postgres-admin.yaml │ │ ├── alloydb-postgres-observability.yaml │ │ ├── alloydb-postgres.yaml │ │ ├── bigquery.yaml │ │ ├── clickhouse.yaml │ │ ├── cloud-sql-mssql-admin.yaml │ │ ├── cloud-sql-mssql-observability.yaml │ │ ├── cloud-sql-mssql.yaml │ │ ├── cloud-sql-mysql-admin.yaml │ │ ├── cloud-sql-mysql-observability.yaml │ │ ├── cloud-sql-mysql.yaml │ │ ├── cloud-sql-postgres-admin.yaml │ │ ├── cloud-sql-postgres-observability.yaml │ │ ├── cloud-sql-postgres.yaml │ │ ├── dataplex.yaml │ │ ├── firestore.yaml │ │ ├── looker-conversational-analytics.yaml │ │ ├── looker.yaml │ │ ├── mssql.yaml │ │ ├── mysql.yaml │ │ ├── neo4j.yaml │ │ ├── oceanbase.yaml │ │ ├── postgres.yaml │ │ ├── spanner-postgres.yaml │ │ ├── spanner.yaml │ │ └── sqlite.yaml │ ├── server │ │ ├── api_test.go │ │ ├── api.go │ │ ├── common_test.go │ │ ├── config.go │ │ ├── mcp │ │ │ ├── jsonrpc │ │ │ │ ├── jsonrpc_test.go │ │ │ │ └── jsonrpc.go │ │ │ ├── mcp.go │ │ │ ├── util │ │ │ │ └── lifecycle.go │ │ │ ├── v20241105 │ │ │ │ ├── method.go │ │ │ │ └── types.go │ │ │ ├── v20250326 │ │ │ │ ├── method.go │ │ │ │ └── types.go │ │ │ └── v20250618 │ │ │ ├── method.go │ │ │ └── types.go │ │ ├── mcp_test.go │ │ ├── mcp.go │ │ ├── server_test.go │ │ ├── server.go │ │ ├── static │ │ │ ├── assets │ │ │ │ └── mcptoolboxlogo.png │ │ │ ├── css │ │ │ │ └── style.css │ │ │ ├── index.html │ │ │ ├── js │ │ │ │ ├── auth.js │ │ │ │ ├── loadTools.js │ │ │ │ ├── mainContent.js │ │ │ │ ├── navbar.js │ │ │ │ ├── runTool.js │ │ │ │ ├── toolDisplay.js │ │ │ │ ├── tools.js │ │ │ │ └── toolsets.js │ │ │ ├── tools.html │ │ │ └── toolsets.html │ │ ├── web_test.go │ │ └── web.go │ ├── sources │ │ ├── alloydbadmin │ │ │ ├── alloydbadmin_test.go │ │ │ └── alloydbadmin.go │ │ ├── alloydbpg │ │ │ ├── alloydb_pg_test.go │ │ │ └── alloydb_pg.go │ │ ├── bigquery │ │ │ ├── 
bigquery_test.go │ │ │ └── bigquery.go │ │ ├── bigtable │ │ │ ├── bigtable_test.go │ │ │ └── bigtable.go │ │ ├── cassandra │ │ │ ├── cassandra_test.go │ │ │ └── cassandra.go │ │ ├── clickhouse │ │ │ ├── clickhouse_test.go │ │ │ └── clickhouse.go │ │ ├── cloudmonitoring │ │ │ ├── cloud_monitoring_test.go │ │ │ └── cloud_monitoring.go │ │ ├── cloudsqladmin │ │ │ ├── cloud_sql_admin_test.go │ │ │ └── cloud_sql_admin.go │ │ ├── cloudsqlmssql │ │ │ ├── cloud_sql_mssql_test.go │ │ │ └── cloud_sql_mssql.go │ │ ├── cloudsqlmysql │ │ │ ├── cloud_sql_mysql_test.go │ │ │ └── cloud_sql_mysql.go │ │ ├── cloudsqlpg │ │ │ ├── cloud_sql_pg_test.go │ │ │ └── cloud_sql_pg.go │ │ ├── couchbase │ │ │ ├── couchbase_test.go │ │ │ └── couchbase.go │ │ ├── dataplex │ │ │ ├── dataplex_test.go │ │ │ └── dataplex.go │ │ ├── dgraph │ │ │ ├── dgraph_test.go │ │ │ └── dgraph.go │ │ ├── dialect.go │ │ ├── firebird │ │ │ ├── firebird_test.go │ │ │ └── firebird.go │ │ ├── firestore │ │ │ ├── firestore_test.go │ │ │ └── firestore.go │ │ ├── http │ │ │ ├── http_test.go │ │ │ └── http.go │ │ ├── ip_type.go │ │ ├── looker │ │ │ ├── looker_test.go │ │ │ └── looker.go │ │ ├── mongodb │ │ │ ├── mongodb_test.go │ │ │ └── mongodb.go │ │ ├── mssql │ │ │ ├── mssql_test.go │ │ │ └── mssql.go │ │ ├── mysql │ │ │ ├── mysql_test.go │ │ │ └── mysql.go │ │ ├── neo4j │ │ │ ├── neo4j_test.go │ │ │ └── neo4j.go │ │ ├── oceanbase │ │ │ ├── oceanbase_test.go │ │ │ └── oceanbase.go │ │ ├── oracle │ │ │ └── oracle.go │ │ ├── postgres │ │ │ ├── postgres_test.go │ │ │ └── postgres.go │ │ ├── redis │ │ │ ├── redis_test.go │ │ │ └── redis.go │ │ ├── sources.go │ │ ├── spanner │ │ │ ├── spanner_test.go │ │ │ └── spanner.go │ │ ├── sqlite │ │ │ ├── sqlite_test.go │ │ │ └── sqlite.go │ │ ├── tidb │ │ │ ├── tidb_test.go │ │ │ └── tidb.go │ │ ├── trino │ │ │ ├── trino_test.go │ │ │ └── trino.go │ │ ├── util.go │ │ ├── valkey │ │ │ ├── valkey_test.go │ │ │ └── valkey.go │ │ └── yugabytedb │ │ ├── yugabytedb_test.go │ │ └── 
yugabytedb.go │ ├── telemetry │ │ ├── instrumentation.go │ │ └── telemetry.go │ ├── testutils │ │ └── testutils.go │ ├── tools │ │ ├── alloydb │ │ │ ├── alloydbcreatecluster │ │ │ │ ├── alloydbcreatecluster_test.go │ │ │ │ └── alloydbcreatecluster.go │ │ │ ├── alloydbcreateinstance │ │ │ │ ├── alloydbcreateinstance_test.go │ │ │ │ └── alloydbcreateinstance.go │ │ │ ├── alloydbcreateuser │ │ │ │ ├── alloydbcreateuser_test.go │ │ │ │ └── alloydbcreateuser.go │ │ │ ├── alloydbgetcluster │ │ │ │ ├── alloydbgetcluster_test.go │ │ │ │ └── alloydbgetcluster.go │ │ │ ├── alloydbgetinstance │ │ │ │ ├── alloydbgetinstance_test.go │ │ │ │ └── alloydbgetinstance.go │ │ │ ├── alloydbgetuser │ │ │ │ ├── alloydbgetuser_test.go │ │ │ │ └── alloydbgetuser.go │ │ │ ├── alloydblistclusters │ │ │ │ ├── alloydblistclusters_test.go │ │ │ │ └── alloydblistclusters.go │ │ │ ├── alloydblistinstances │ │ │ │ ├── alloydblistinstances_test.go │ │ │ │ └── alloydblistinstances.go │ │ │ ├── alloydblistusers │ │ │ │ ├── alloydblistusers_test.go │ │ │ │ └── alloydblistusers.go │ │ │ └── alloydbwaitforoperation │ │ │ ├── alloydbwaitforoperation_test.go │ │ │ └── alloydbwaitforoperation.go │ │ ├── alloydbainl │ │ │ ├── alloydbainl_test.go │ │ │ └── alloydbainl.go │ │ ├── bigquery │ │ │ ├── bigqueryanalyzecontribution │ │ │ │ ├── bigqueryanalyzecontribution_test.go │ │ │ │ └── bigqueryanalyzecontribution.go │ │ │ ├── bigquerycommon │ │ │ │ ├── table_name_parser_test.go │ │ │ │ ├── table_name_parser.go │ │ │ │ └── util.go │ │ │ ├── bigqueryconversationalanalytics │ │ │ │ ├── bigqueryconversationalanalytics_test.go │ │ │ │ └── bigqueryconversationalanalytics.go │ │ │ ├── bigqueryexecutesql │ │ │ │ ├── bigqueryexecutesql_test.go │ │ │ │ └── bigqueryexecutesql.go │ │ │ ├── bigqueryforecast │ │ │ │ ├── bigqueryforecast_test.go │ │ │ │ └── bigqueryforecast.go │ │ │ ├── bigquerygetdatasetinfo │ │ │ │ ├── bigquerygetdatasetinfo_test.go │ │ │ │ └── bigquerygetdatasetinfo.go │ │ │ ├── bigquerygettableinfo │ │ 
│ │ ├── bigquerygettableinfo_test.go │ │ │ │ └── bigquerygettableinfo.go │ │ │ ├── bigquerylistdatasetids │ │ │ │ ├── bigquerylistdatasetids_test.go │ │ │ │ └── bigquerylistdatasetids.go │ │ │ ├── bigquerylisttableids │ │ │ │ ├── bigquerylisttableids_test.go │ │ │ │ └── bigquerylisttableids.go │ │ │ ├── bigquerysearchcatalog │ │ │ │ ├── bigquerysearchcatalog_test.go │ │ │ │ └── bigquerysearchcatalog.go │ │ │ └── bigquerysql │ │ │ ├── bigquerysql_test.go │ │ │ └── bigquerysql.go │ │ ├── bigtable │ │ │ ├── bigtable_test.go │ │ │ └── bigtable.go │ │ ├── cassandra │ │ │ └── cassandracql │ │ │ ├── cassandracql_test.go │ │ │ └── cassandracql.go │ │ ├── clickhouse │ │ │ ├── clickhouseexecutesql │ │ │ │ ├── clickhouseexecutesql_test.go │ │ │ │ └── clickhouseexecutesql.go │ │ │ ├── clickhouselistdatabases │ │ │ │ ├── clickhouselistdatabases_test.go │ │ │ │ └── clickhouselistdatabases.go │ │ │ ├── clickhouselisttables │ │ │ │ ├── clickhouselisttables_test.go │ │ │ │ └── clickhouselisttables.go │ │ │ └── clickhousesql │ │ │ ├── clickhousesql_test.go │ │ │ └── clickhousesql.go │ │ ├── cloudmonitoring │ │ │ ├── cloudmonitoring_test.go │ │ │ └── cloudmonitoring.go │ │ ├── cloudsql │ │ │ ├── cloudsqlcreatedatabase │ │ │ │ ├── cloudsqlcreatedatabase_test.go │ │ │ │ └── cloudsqlcreatedatabase.go │ │ │ ├── cloudsqlcreateusers │ │ │ │ ├── cloudsqlcreateusers_test.go │ │ │ │ └── cloudsqlcreateusers.go │ │ │ ├── cloudsqlgetinstances │ │ │ │ ├── cloudsqlgetinstances_test.go │ │ │ │ └── cloudsqlgetinstances.go │ │ │ ├── cloudsqllistdatabases │ │ │ │ ├── cloudsqllistdatabases_test.go │ │ │ │ └── cloudsqllistdatabases.go │ │ │ ├── cloudsqllistinstances │ │ │ │ ├── cloudsqllistinstances_test.go │ │ │ │ └── cloudsqllistinstances.go │ │ │ └── cloudsqlwaitforoperation │ │ │ ├── cloudsqlwaitforoperation_test.go │ │ │ └── cloudsqlwaitforoperation.go │ │ ├── cloudsqlmssql │ │ │ └── cloudsqlmssqlcreateinstance │ │ │ ├── cloudsqlmssqlcreateinstance_test.go │ │ │ └── cloudsqlmssqlcreateinstance.go │ 
│ ├── cloudsqlmysql │ │ │ └── cloudsqlmysqlcreateinstance │ │ │ ├── cloudsqlmysqlcreateinstance_test.go │ │ │ └── cloudsqlmysqlcreateinstance.go │ │ ├── cloudsqlpg │ │ │ └── cloudsqlpgcreateinstances │ │ │ ├── cloudsqlpgcreateinstances_test.go │ │ │ └── cloudsqlpgcreateinstances.go │ │ ├── common_test.go │ │ ├── common.go │ │ ├── couchbase │ │ │ ├── couchbase_test.go │ │ │ └── couchbase.go │ │ ├── dataform │ │ │ └── dataformcompilelocal │ │ │ ├── dataformcompilelocal_test.go │ │ │ └── dataformcompilelocal.go │ │ ├── dataplex │ │ │ ├── dataplexlookupentry │ │ │ │ ├── dataplexlookupentry_test.go │ │ │ │ └── dataplexlookupentry.go │ │ │ ├── dataplexsearchaspecttypes │ │ │ │ ├── dataplexsearchaspecttypes_test.go │ │ │ │ └── dataplexsearchaspecttypes.go │ │ │ └── dataplexsearchentries │ │ │ ├── dataplexsearchentries_test.go │ │ │ └── dataplexsearchentries.go │ │ ├── dgraph │ │ │ ├── dgraph_test.go │ │ │ └── dgraph.go │ │ ├── firebird │ │ │ ├── firebirdexecutesql │ │ │ │ ├── firebirdexecutesql_test.go │ │ │ │ └── firebirdexecutesql.go │ │ │ └── firebirdsql │ │ │ ├── firebirdsql_test.go │ │ │ └── firebirdsql.go │ │ ├── firestore │ │ │ ├── firestoreadddocuments │ │ │ │ ├── firestoreadddocuments_test.go │ │ │ │ └── firestoreadddocuments.go │ │ │ ├── firestoredeletedocuments │ │ │ │ ├── firestoredeletedocuments_test.go │ │ │ │ └── firestoredeletedocuments.go │ │ │ ├── firestoregetdocuments │ │ │ │ ├── firestoregetdocuments_test.go │ │ │ │ └── firestoregetdocuments.go │ │ │ ├── firestoregetrules │ │ │ │ ├── firestoregetrules_test.go │ │ │ │ └── firestoregetrules.go │ │ │ ├── firestorelistcollections │ │ │ │ ├── firestorelistcollections_test.go │ │ │ │ └── firestorelistcollections.go │ │ │ ├── firestorequery │ │ │ │ ├── firestorequery_test.go │ │ │ │ └── firestorequery.go │ │ │ ├── firestorequerycollection │ │ │ │ ├── firestorequerycollection_test.go │ │ │ │ └── firestorequerycollection.go │ │ │ ├── firestoreupdatedocument │ │ │ │ ├── firestoreupdatedocument_test.go │ │ │ │ 
└── firestoreupdatedocument.go │ │ │ ├── firestorevalidaterules │ │ │ │ ├── firestorevalidaterules_test.go │ │ │ │ └── firestorevalidaterules.go │ │ │ └── util │ │ │ ├── converter_test.go │ │ │ ├── converter.go │ │ │ ├── validator_test.go │ │ │ └── validator.go │ │ ├── http │ │ │ ├── http_test.go │ │ │ └── http.go │ │ ├── http_method.go │ │ ├── looker │ │ │ ├── lookeradddashboardelement │ │ │ │ ├── lookeradddashboardelement_test.go │ │ │ │ └── lookeradddashboardelement.go │ │ │ ├── lookercommon │ │ │ │ ├── lookercommon_test.go │ │ │ │ └── lookercommon.go │ │ │ ├── lookerconversationalanalytics │ │ │ │ ├── lookerconversationalanalytics_test.go │ │ │ │ └── lookerconversationalanalytics.go │ │ │ ├── lookergetdashboards │ │ │ │ ├── lookergetdashboards_test.go │ │ │ │ └── lookergetdashboards.go │ │ │ ├── lookergetdimensions │ │ │ │ ├── lookergetdimensions_test.go │ │ │ │ └── lookergetdimensions.go │ │ │ ├── lookergetexplores │ │ │ │ ├── lookergetexplores_test.go │ │ │ │ └── lookergetexplores.go │ │ │ ├── lookergetfilters │ │ │ │ ├── lookergetfilters_test.go │ │ │ │ └── lookergetfilters.go │ │ │ ├── lookergetlooks │ │ │ │ ├── lookergetlooks_test.go │ │ │ │ └── lookergetlooks.go │ │ │ ├── lookergetmeasures │ │ │ │ ├── lookergetmeasures_test.go │ │ │ │ └── lookergetmeasures.go │ │ │ ├── lookergetmodels │ │ │ │ ├── lookergetmodels_test.go │ │ │ │ └── lookergetmodels.go │ │ │ ├── lookergetparameters │ │ │ │ ├── lookergetparameters_test.go │ │ │ │ └── lookergetparameters.go │ │ │ ├── lookerhealthanalyze │ │ │ │ ├── lookerhealthanalyze_test.go │ │ │ │ └── lookerhealthanalyze.go │ │ │ ├── lookerhealthpulse │ │ │ │ ├── lookerhealthpulse_test.go │ │ │ │ └── lookerhealthpulse.go │ │ │ ├── lookerhealthvacuum │ │ │ │ ├── lookerhealthvacuum_test.go │ │ │ │ └── lookerhealthvacuum.go │ │ │ ├── lookermakedashboard │ │ │ │ ├── lookermakedashboard_test.go │ │ │ │ └── lookermakedashboard.go │ │ │ ├── lookermakelook │ │ │ │ ├── lookermakelook_test.go │ │ │ │ └── lookermakelook.go │ │ │ ├── 
lookerquery │ │ │ │ ├── lookerquery_test.go │ │ │ │ └── lookerquery.go │ │ │ ├── lookerquerysql │ │ │ │ ├── lookerquerysql_test.go │ │ │ │ └── lookerquerysql.go │ │ │ ├── lookerqueryurl │ │ │ │ ├── lookerqueryurl_test.go │ │ │ │ └── lookerqueryurl.go │ │ │ └── lookerrunlook │ │ │ ├── lookerrunlook_test.go │ │ │ └── lookerrunlook.go │ │ ├── mongodb │ │ │ ├── mongodbaggregate │ │ │ │ ├── mongodbaggregate_test.go │ │ │ │ └── mongodbaggregate.go │ │ │ ├── mongodbdeletemany │ │ │ │ ├── mongodbdeletemany_test.go │ │ │ │ └── mongodbdeletemany.go │ │ │ ├── mongodbdeleteone │ │ │ │ ├── mongodbdeleteone_test.go │ │ │ │ └── mongodbdeleteone.go │ │ │ ├── mongodbfind │ │ │ │ ├── mongodbfind_test.go │ │ │ │ └── mongodbfind.go │ │ │ ├── mongodbfindone │ │ │ │ ├── mongodbfindone_test.go │ │ │ │ └── mongodbfindone.go │ │ │ ├── mongodbinsertmany │ │ │ │ ├── mongodbinsertmany_test.go │ │ │ │ └── mongodbinsertmany.go │ │ │ ├── mongodbinsertone │ │ │ │ ├── mongodbinsertone_test.go │ │ │ │ └── mongodbinsertone.go │ │ │ ├── mongodbupdatemany │ │ │ │ ├── mongodbupdatemany_test.go │ │ │ │ └── mongodbupdatemany.go │ │ │ └── mongodbupdateone │ │ │ ├── mongodbupdateone_test.go │ │ │ └── mongodbupdateone.go │ │ ├── mssql │ │ │ ├── mssqlexecutesql │ │ │ │ ├── mssqlexecutesql_test.go │ │ │ │ └── mssqlexecutesql.go │ │ │ ├── mssqllisttables │ │ │ │ ├── mssqllisttables_test.go │ │ │ │ └── mssqllisttables.go │ │ │ └── mssqlsql │ │ │ ├── mssqlsql_test.go │ │ │ └── mssqlsql.go │ │ ├── mysql │ │ │ ├── mysqlcommon │ │ │ │ └── mysqlcommon.go │ │ │ ├── mysqlexecutesql │ │ │ │ ├── mysqlexecutesql_test.go │ │ │ │ └── mysqlexecutesql.go │ │ │ ├── mysqllistactivequeries │ │ │ │ ├── mysqllistactivequeries_test.go │ │ │ │ └── mysqllistactivequeries.go │ │ │ ├── mysqllisttablefragmentation │ │ │ │ ├── mysqllisttablefragmentation_test.go │ │ │ │ └── mysqllisttablefragmentation.go │ │ │ ├── mysqllisttables │ │ │ │ ├── mysqllisttables_test.go │ │ │ │ └── mysqllisttables.go │ │ │ ├── 
mysqllisttablesmissinguniqueindexes │ │ │ │ ├── mysqllisttablesmissinguniqueindexes_test.go │ │ │ │ └── mysqllisttablesmissinguniqueindexes.go │ │ │ └── mysqlsql │ │ │ ├── mysqlsql_test.go │ │ │ └── mysqlsql.go │ │ ├── neo4j │ │ │ ├── neo4jcypher │ │ │ │ ├── neo4jcypher_test.go │ │ │ │ └── neo4jcypher.go │ │ │ ├── neo4jexecutecypher │ │ │ │ ├── classifier │ │ │ │ │ ├── classifier_test.go │ │ │ │ │ └── classifier.go │ │ │ │ ├── neo4jexecutecypher_test.go │ │ │ │ └── neo4jexecutecypher.go │ │ │ └── neo4jschema │ │ │ ├── cache │ │ │ │ ├── cache_test.go │ │ │ │ └── cache.go │ │ │ ├── helpers │ │ │ │ ├── helpers_test.go │ │ │ │ └── helpers.go │ │ │ ├── neo4jschema_test.go │ │ │ ├── neo4jschema.go │ │ │ └── types │ │ │ └── types.go │ │ ├── oceanbase │ │ │ ├── oceanbaseexecutesql │ │ │ │ ├── oceanbaseexecutesql_test.go │ │ │ │ └── oceanbaseexecutesql.go │ │ │ └── oceanbasesql │ │ │ ├── oceanbasesql_test.go │ │ │ └── oceanbasesql.go │ │ ├── oracle │ │ │ ├── oracleexecutesql │ │ │ │ └── oracleexecutesql.go │ │ │ └── oraclesql │ │ │ └── oraclesql.go │ │ ├── parameters_test.go │ │ ├── parameters.go │ │ ├── postgres │ │ │ ├── postgresexecutesql │ │ │ │ ├── postgresexecutesql_test.go │ │ │ │ └── postgresexecutesql.go │ │ │ ├── postgreslistactivequeries │ │ │ │ ├── postgreslistactivequeries_test.go │ │ │ │ └── postgreslistactivequeries.go │ │ │ ├── postgreslistavailableextensions │ │ │ │ ├── postgreslistavailableextensions_test.go │ │ │ │ └── postgreslistavailableextensions.go │ │ │ ├── postgreslistinstalledextensions │ │ │ │ ├── postgreslistinstalledextensions_test.go │ │ │ │ └── postgreslistinstalledextensions.go │ │ │ ├── postgreslisttables │ │ │ │ ├── postgreslisttables_test.go │ │ │ │ └── postgreslisttables.go │ │ │ └── postgressql │ │ │ ├── postgressql_test.go │ │ │ └── postgressql.go │ │ ├── redis │ │ │ ├── redis_test.go │ │ │ └── redis.go │ │ ├── spanner │ │ │ ├── spannerexecutesql │ │ │ │ ├── spannerexecutesql_test.go │ │ │ │ └── spannerexecutesql.go │ │ │ ├── 
spannerlisttables │ │ │ │ ├── spannerlisttables_test.go │ │ │ │ └── spannerlisttables.go │ │ │ └── spannersql │ │ │ ├── spanner_test.go │ │ │ └── spannersql.go │ │ ├── sqlite │ │ │ ├── sqliteexecutesql │ │ │ │ ├── sqliteexecutesql_test.go │ │ │ │ └── sqliteexecutesql.go │ │ │ └── sqlitesql │ │ │ ├── sqlitesql_test.go │ │ │ └── sqlitesql.go │ │ ├── tidb │ │ │ ├── tidbexecutesql │ │ │ │ ├── tidbexecutesql_test.go │ │ │ │ └── tidbexecutesql.go │ │ │ └── tidbsql │ │ │ ├── tidbsql_test.go │ │ │ └── tidbsql.go │ │ ├── tools_test.go │ │ ├── tools.go │ │ ├── toolsets.go │ │ ├── trino │ │ │ ├── trinoexecutesql │ │ │ │ ├── trinoexecutesql_test.go │ │ │ │ └── trinoexecutesql.go │ │ │ └── trinosql │ │ │ ├── trinosql_test.go │ │ │ └── trinosql.go │ │ ├── utility │ │ │ └── wait │ │ │ ├── wait_test.go │ │ │ └── wait.go │ │ ├── valkey │ │ │ ├── valkey_test.go │ │ │ └── valkey.go │ │ └── yugabytedbsql │ │ ├── yugabytedbsql_test.go │ │ └── yugabytedbsql.go │ └── util │ └── util.go ├── LICENSE ├── logo.png ├── main.go ├── README.md └── tests ├── alloydb │ ├── alloydb_integration_test.go │ └── alloydb_wait_for_operation_test.go ├── alloydbainl │ └── alloydb_ai_nl_integration_test.go ├── alloydbpg │ └── alloydb_pg_integration_test.go ├── auth.go ├── bigquery │ └── bigquery_integration_test.go ├── bigtable │ └── bigtable_integration_test.go ├── cassandra │ └── cassandra_integration_test.go ├── clickhouse │ └── clickhouse_integration_test.go ├── cloudmonitoring │ └── cloud_monitoring_integration_test.go ├── cloudsql │ ├── cloud_sql_create_database_test.go │ ├── cloud_sql_create_users_test.go │ ├── cloud_sql_get_instances_test.go │ ├── cloud_sql_list_databases_test.go │ ├── cloudsql_list_instances_test.go │ └── cloudsql_wait_for_operation_test.go ├── cloudsqlmssql │ ├── cloud_sql_mssql_create_instance_integration_test.go │ └── cloud_sql_mssql_integration_test.go ├── cloudsqlmysql │ ├── cloud_sql_mysql_create_instance_integration_test.go │ └── cloud_sql_mysql_integration_test.go ├── 
cloudsqlpg │ ├── cloud_sql_pg_create_instances_test.go │ └── cloud_sql_pg_integration_test.go ├── common.go ├── couchbase │ └── couchbase_integration_test.go ├── dataform │ └── dataform_integration_test.go ├── dataplex │ └── dataplex_integration_test.go ├── dgraph │ └── dgraph_integration_test.go ├── firebird │ └── firebird_integration_test.go ├── firestore │ └── firestore_integration_test.go ├── http │ └── http_integration_test.go ├── looker │ └── looker_integration_test.go ├── mongodb │ └── mongodb_integration_test.go ├── mssql │ └── mssql_integration_test.go ├── mysql │ └── mysql_integration_test.go ├── neo4j │ └── neo4j_integration_test.go ├── oceanbase │ └── oceanbase_integration_test.go ├── option.go ├── oracle │ └── oracle_integration_test.go ├── postgres │ └── postgres_integration_test.go ├── redis │ └── redis_test.go ├── server.go ├── source.go ├── spanner │ └── spanner_integration_test.go ├── sqlite │ └── sqlite_integration_test.go ├── tidb │ └── tidb_integration_test.go ├── tool.go ├── trino │ └── trino_integration_test.go ├── utility │ └── wait_integration_test.go ├── valkey │ └── valkey_test.go └── yugabytedb └── yugabytedb_integration_test.go ``` # Files -------------------------------------------------------------------------------- /tests/bigquery/bigquery_integration_test.go: -------------------------------------------------------------------------------- ```go // Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
package bigquery

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"os"
	"reflect"
	"regexp"
	"sort"
	"strings"
	"testing"
	"time"

	bigqueryapi "cloud.google.com/go/bigquery"
	"github.com/google/uuid"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/googleapis/genai-toolbox/internal/testutils"
	"github.com/googleapis/genai-toolbox/tests"
	"golang.org/x/oauth2/google"
	"google.golang.org/api/googleapi"
	"google.golang.org/api/iterator"
	"google.golang.org/api/option"
)

var (
	BigquerySourceKind = "bigquery"
	BigqueryToolKind   = "bigquery-sql"
	BigqueryProject    = os.Getenv("BIGQUERY_PROJECT")
)

// getBigQueryVars returns the source configuration map for the BigQuery test
// instance. It fails the test immediately when the required BIGQUERY_PROJECT
// environment variable is not set.
func getBigQueryVars(t *testing.T) map[string]any {
	// A single env var is checked, so a plain if is clearer than the
	// switch-on-empty-string idiom used for multi-variable checks elsewhere.
	if BigqueryProject == "" {
		t.Fatal("'BIGQUERY_PROJECT' not set")
	}

	return map[string]any{
		"kind":    BigquerySourceKind,
		"project": BigqueryProject,
	}
}

// Copied over from bigquery.go
//
// initBigQueryConnection creates a BigQuery client for the given project using
// Application Default Credentials scoped for BigQuery.
func initBigQueryConnection(project string) (*bigqueryapi.Client, error) {
	ctx := context.Background()
	cred, err := google.FindDefaultCredentials(ctx, bigqueryapi.Scope)
	if err != nil {
		return nil, fmt.Errorf("failed to find default Google Cloud credentials with scope %q: %w", bigqueryapi.Scope, err)
	}

	client, err := bigqueryapi.NewClient(ctx, project, option.WithCredentials(cred))
	if err != nil {
		return nil, fmt.Errorf("failed to create BigQuery client for project %q: %w", project, err)
	}
	return client, nil
}

// TestBigQueryToolEndpoints provisions temporary datasets/tables, starts the
// toolbox server with a full BigQuery tool configuration, and runs the shared
// HTTP/MCP tool-invocation test suites against it.
func TestBigQueryToolEndpoints(t *testing.T) {
	sourceConfig := getBigQueryVars(t)
	ctx, cancel := context.WithTimeout(context.Background(), 7*time.Minute)
	defer cancel()

	var args []string

	client, err := initBigQueryConnection(BigqueryProject)
	if err != nil {
		// Fixed copy-paste error: message previously referred to a
		// "Cloud SQL connection pool" in this BigQuery test.
		t.Fatalf("unable to create BigQuery connection: %s", err)
	}

	// create table name with UUID
	datasetName := fmt.Sprintf("temp_toolbox_test_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
	tableName := fmt.Sprintf("param_table_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
	tableNameParam := fmt.Sprintf("`%s.%s.%s`",
		BigqueryProject,
		datasetName,
		tableName,
	)
	tableNameAuth := fmt.Sprintf("`%s.%s.auth_table_%s`",
		BigqueryProject,
		datasetName,
		strings.ReplaceAll(uuid.New().String(), "-", ""),
	)
	tableNameTemplateParam := fmt.Sprintf("`%s.%s.template_param_table_%s`",
		BigqueryProject,
		datasetName,
		strings.ReplaceAll(uuid.New().String(), "-", ""),
	)
	tableNameDataType := fmt.Sprintf("`%s.%s.datatype_table_%s`",
		BigqueryProject,
		datasetName,
		strings.ReplaceAll(uuid.New().String(), "-", ""),
	)
	tableNameForecast := fmt.Sprintf("`%s.%s.forecast_table_%s`",
		BigqueryProject,
		datasetName,
		strings.ReplaceAll(uuid.New().String(), "-", ""),
	)
	tableNameAnalyzeContribution := fmt.Sprintf("`%s.%s.analyze_contribution_table_%s`",
		BigqueryProject,
		datasetName,
		strings.ReplaceAll(uuid.New().String(), "-", ""),
	)

	// set up data for param tool
	createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := getBigQueryParamToolInfo(tableNameParam)
	teardownTable1 := setupBigQueryTable(t, ctx, client, createParamTableStmt, insertParamTableStmt, datasetName, tableNameParam, paramTestParams)
	defer teardownTable1(t)

	// set up data for auth tool
	createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := getBigQueryAuthToolInfo(tableNameAuth)
	teardownTable2 := setupBigQueryTable(t, ctx, client, createAuthTableStmt, insertAuthTableStmt, datasetName, tableNameAuth, authTestParams)
	defer teardownTable2(t)

	// set up data for data type test tool
	createDataTypeTableStmt, insertDataTypeTableStmt, dataTypeToolStmt, arrayDataTypeToolStmt, dataTypeTestParams := getBigQueryDataTypeTestInfo(tableNameDataType)
	teardownTable3 := setupBigQueryTable(t, ctx, client, createDataTypeTableStmt, insertDataTypeTableStmt, datasetName, tableNameDataType, dataTypeTestParams)
	defer teardownTable3(t)

	// set up data for forecast tool
	createForecastTableStmt, insertForecastTableStmt, forecastTestParams := getBigQueryForecastToolInfo(tableNameForecast)
	teardownTable4 := setupBigQueryTable(t, ctx, client, createForecastTableStmt, insertForecastTableStmt, datasetName, tableNameForecast, forecastTestParams)
	defer teardownTable4(t)

	// set up data for analyze contribution tool
	createAnalyzeContributionTableStmt, insertAnalyzeContributionTableStmt, analyzeContributionTestParams := getBigQueryAnalyzeContributionToolInfo(tableNameAnalyzeContribution)
	teardownTable5 := setupBigQueryTable(t, ctx, client, createAnalyzeContributionTableStmt, insertAnalyzeContributionTableStmt, datasetName, tableNameAnalyzeContribution, analyzeContributionTestParams)
	defer teardownTable5(t)

	// Write config into a file and pass it to command
	toolsFile := tests.GetToolsConfig(sourceConfig, BigqueryToolKind, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, authToolStmt)
	toolsFile = addClientAuthSourceConfig(t, toolsFile)
	toolsFile = addBigQuerySqlToolConfig(t, toolsFile, dataTypeToolStmt, arrayDataTypeToolStmt)
	toolsFile = addBigQueryPrebuiltToolsConfig(t, toolsFile)
	tmplSelectCombined, tmplSelectFilterCombined := getBigQueryTmplToolStatement()
	toolsFile = tests.AddTemplateParamConfig(t, toolsFile, BigqueryToolKind, tmplSelectCombined, tmplSelectFilterCombined, "")

	cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
	if err != nil {
		t.Fatalf("command initialization returned an error: %s", err)
	}
	defer cleanup()

	waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
	if err != nil {
		t.Logf("toolbox command logs: \n%s", out)
		t.Fatalf("toolbox didn't start successfully: %s", err)
	}

	// Get configs for tests
	select1Want := "[{\"f0_\":1}]"
	invokeParamWant := "[{\"id\":1,\"name\":\"Alice\"},{\"id\":3,\"name\":\"Sid\"}]"
	datasetInfoWant := "\"Location\":\"US\",\"DefaultTableExpiration\":0,\"Labels\":null,\"Access\":"
	tableInfoWant := "{\"Name\":\"\",\"Location\":\"US\",\"Description\":\"\",\"Schema\":[{\"Name\":\"id\""
	ddlWant := `"Query executed successfully and returned no content."`
	dataInsightsWant := `(?s)Schema Resolved.*Retrieval Query.*SQL Generated.*Answer`
	// Partial message; the full error message is too long.
	mcpMyFailToolWant := `{"jsonrpc":"2.0","id":"invoke-fail-tool","result":{"content":[{"type":"text","text":"query validation failed: failed to insert dry run job: googleapi: Error 400: Syntax error: Unexpected identifier \"SELEC\" at [1:1]`
	mcpSelect1Want := `{"jsonrpc":"2.0","id":"invoke my-auth-required-tool","result":{"content":[{"type":"text","text":"{\"f0_\":1}"}]}}`
	createColArray := `["id INT64", "name STRING", "age INT64"]`
	selectEmptyWant := `"The query returned 0 rows."`

	// Run tests
	tests.RunToolGetTest(t)
	tests.RunToolInvokeTest(t, select1Want, tests.DisableOptionalNullParamTest(), tests.EnableClientAuthTest())
	tests.RunMCPToolCallMethod(t, mcpMyFailToolWant, mcpSelect1Want, tests.EnableMcpClientAuthTest())
	tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam,
		tests.WithCreateColArray(createColArray),
		tests.WithDdlWant(ddlWant),
		tests.WithSelectEmptyWant(selectEmptyWant),
		tests.WithInsert1Want(ddlWant),
	)
	runBigQueryExecuteSqlToolInvokeTest(t, select1Want, invokeParamWant, tableNameParam, ddlWant)
	runBigQueryExecuteSqlToolInvokeDryRunTest(t, datasetName)
	runBigQueryForecastToolInvokeTest(t, tableNameForecast)
	runBigQueryAnalyzeContributionToolInvokeTest(t, tableNameAnalyzeContribution)
	runBigQueryDataTypeTests(t)
	runBigQueryListDatasetToolInvokeTest(t, datasetName)
	runBigQueryGetDatasetInfoToolInvokeTest(t, datasetName, datasetInfoWant)
	runBigQueryListTableIdsToolInvokeTest(t, datasetName, tableName)
	runBigQueryGetTableInfoToolInvokeTest(t, datasetName, tableName, tableInfoWant)
	runBigQueryConversationalAnalyticsInvokeTest(t, datasetName, tableName, dataInsightsWant)
	runBigQuerySearchCatalogToolInvokeTest(t, datasetName, tableName)
}

// TestBigQueryToolWithDatasetRestriction exercises tools against a source
// configured with allowedDatasets; assertions live in the run*WithRestriction
// helpers defined later in this file.
func TestBigQueryToolWithDatasetRestriction(t *testing.T) {
	ctx, cancel := context.WithTimeout(context.Background(), 3*time.Minute)
	defer cancel()

	client, err := initBigQueryConnection(BigqueryProject)
	if err != nil {
		t.Fatalf("unable to create BigQuery client: %s", err)
	}

	// Create two datasets, one allowed, one not.
	baseName := strings.ReplaceAll(uuid.New().String(), "-", "")
	allowedDatasetName1 := fmt.Sprintf("allowed_dataset_1_%s", baseName)
	allowedDatasetName2 := fmt.Sprintf("allowed_dataset_2_%s", baseName)
	disallowedDatasetName := fmt.Sprintf("disallowed_dataset_%s", baseName)
	allowedTableName1 := "allowed_table_1"
	allowedTableName2 := "allowed_table_2"
	disallowedTableName := "disallowed_table"
	allowedForecastTableName1 := "allowed_forecast_table_1"
	allowedForecastTableName2 := "allowed_forecast_table_2"
	disallowedForecastTableName := "disallowed_forecast_table"

	// Setup allowed table
	allowedTableNameParam1 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName1, allowedTableName1)
	createAllowedTableStmt1 := fmt.Sprintf("CREATE TABLE %s (id INT64)", allowedTableNameParam1)
	teardownAllowed1 := setupBigQueryTable(t, ctx, client, createAllowedTableStmt1, "", allowedDatasetName1, allowedTableNameParam1, nil)
	defer teardownAllowed1(t)
	allowedTableNameParam2 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject,
allowedDatasetName2, allowedTableName2)
	createAllowedTableStmt2 := fmt.Sprintf("CREATE TABLE %s (id INT64)", allowedTableNameParam2)
	teardownAllowed2 := setupBigQueryTable(t, ctx, client, createAllowedTableStmt2, "", allowedDatasetName2, allowedTableNameParam2, nil)
	defer teardownAllowed2(t)

	// Setup allowed forecast table
	allowedForecastTableFullName1 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName1, allowedForecastTableName1)
	createForecastStmt1, insertForecastStmt1, forecastParams1 := getBigQueryForecastToolInfo(allowedForecastTableFullName1)
	teardownAllowedForecast1 := setupBigQueryTable(t, ctx, client, createForecastStmt1, insertForecastStmt1, allowedDatasetName1, allowedForecastTableFullName1, forecastParams1)
	defer teardownAllowedForecast1(t)
	allowedForecastTableFullName2 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName2, allowedForecastTableName2)
	createForecastStmt2, insertForecastStmt2, forecastParams2 := getBigQueryForecastToolInfo(allowedForecastTableFullName2)
	teardownAllowedForecast2 := setupBigQueryTable(t, ctx, client, createForecastStmt2, insertForecastStmt2, allowedDatasetName2, allowedForecastTableFullName2, forecastParams2)
	defer teardownAllowedForecast2(t)

	// Setup disallowed table
	disallowedTableNameParam := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, disallowedDatasetName, disallowedTableName)
	createDisallowedTableStmt := fmt.Sprintf("CREATE TABLE %s (id INT64)", disallowedTableNameParam)
	teardownDisallowed := setupBigQueryTable(t, ctx, client, createDisallowedTableStmt, "", disallowedDatasetName, disallowedTableNameParam, nil)
	defer teardownDisallowed(t)

	// Setup disallowed forecast table
	disallowedForecastTableFullName := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, disallowedDatasetName, disallowedForecastTableName)
	createDisallowedForecastStmt, insertDisallowedForecastStmt, disallowedForecastParams := getBigQueryForecastToolInfo(disallowedForecastTableFullName)
	teardownDisallowedForecast := setupBigQueryTable(t, ctx, client, createDisallowedForecastStmt, insertDisallowedForecastStmt, disallowedDatasetName, disallowedForecastTableFullName, disallowedForecastParams)
	defer teardownDisallowedForecast(t)

	// Configure source with dataset restriction.
	sourceConfig := getBigQueryVars(t)
	sourceConfig["allowedDatasets"] = []string{allowedDatasetName1, allowedDatasetName2}

	// Configure tool
	toolsConfig := map[string]any{
		"list-dataset-ids-restricted": map[string]any{
			"kind":        "bigquery-list-dataset-ids",
			"source":      "my-instance",
			"description": "Tool to list dataset ids",
		},
		"list-table-ids-restricted": map[string]any{
			"kind":        "bigquery-list-table-ids",
			"source":      "my-instance",
			"description": "Tool to list table within a dataset",
		},
		"get-table-info-restricted": map[string]any{
			"kind":        "bigquery-get-table-info",
			"source":      "my-instance",
			"description": "Tool to get table info",
		},
		"execute-sql-restricted": map[string]any{
			"kind":        "bigquery-execute-sql",
			"source":      "my-instance",
			"description": "Tool to execute SQL",
		},
		"conversational-analytics-restricted": map[string]any{
			"kind":        "bigquery-conversational-analytics",
			"source":      "my-instance",
			"description": "Tool to ask BigQuery conversational analytics",
		},
		"forecast-restricted": map[string]any{
			"kind":        "bigquery-forecast",
			"source":      "my-instance",
			"description": "Tool to forecast",
		},
	}

	// Create config file
	config := map[string]any{
		"sources": map[string]any{
			"my-instance": sourceConfig,
		},
		"tools": toolsConfig,
	}

	// Start server
	cmd, cleanup, err := tests.StartCmd(ctx, config)
	if err != nil {
		t.Fatalf("command initialization returned an error: %s", err)
	}
	defer cleanup()

	waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
	if err != nil {
		t.Logf("toolbox command logs: \n%s", out)
		t.Fatalf("toolbox didn't start successfully: %s", err)
	}

	// Run tests: each helper checks the allowed name succeeds and the
	// disallowed counterpart is rejected (helpers defined later in this file).
	runListDatasetIdsWithRestriction(t, allowedDatasetName1, allowedDatasetName2)
	runListTableIdsWithRestriction(t, allowedDatasetName1, disallowedDatasetName, allowedTableName1, allowedForecastTableName1)
	runListTableIdsWithRestriction(t, allowedDatasetName2, disallowedDatasetName, allowedTableName2, allowedForecastTableName2)
	runGetTableInfoWithRestriction(t, allowedDatasetName1, disallowedDatasetName, allowedTableName1, disallowedTableName)
	runGetTableInfoWithRestriction(t, allowedDatasetName2, disallowedDatasetName, allowedTableName2, disallowedTableName)
	runExecuteSqlWithRestriction(t, allowedTableNameParam1, disallowedTableNameParam)
	runExecuteSqlWithRestriction(t, allowedTableNameParam2, disallowedTableNameParam)
	runConversationalAnalyticsWithRestriction(t, allowedDatasetName1, disallowedDatasetName, allowedTableName1, disallowedTableName)
	runConversationalAnalyticsWithRestriction(t, allowedDatasetName2, disallowedDatasetName, allowedTableName2, disallowedTableName)
	runForecastWithRestriction(t, allowedForecastTableFullName1, disallowedForecastTableFullName)
	runForecastWithRestriction(t, allowedForecastTableFullName2, disallowedForecastTableFullName)
}

// TestBigQueryWriteModeAllowed starts the server with writeMode "allowed" and
// an execute-sql tool; assertions live in runBigQueryWriteModeAllowedTest.
func TestBigQueryWriteModeAllowed(t *testing.T) {
	sourceConfig := getBigQueryVars(t)
	sourceConfig["writeMode"] = "allowed"
	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	defer cancel()

	datasetName := fmt.Sprintf("temp_toolbox_test_allowed_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
	client, err := initBigQueryConnection(BigqueryProject)
	if err != nil {
		t.Fatalf("unable to create BigQuery connection: %s", err)
	}
	dataset := client.Dataset(datasetName)
	if err := dataset.Create(ctx, &bigqueryapi.DatasetMetadata{Name: datasetName}); err != nil {
		t.Fatalf("Failed to create dataset %q: %v", datasetName, err)
	}
	// Best-effort cleanup; failure to delete is logged, not fatal.
	defer func() {
		if err := dataset.DeleteWithContents(ctx); err != nil {
			t.Logf("failed to cleanup dataset %s: %v", datasetName, err)
		}
	}()

	toolsFile := map[string]any{
		"sources": map[string]any{
			"my-instance":
sourceConfig,
		},
		"tools": map[string]any{
			"my-exec-sql-tool": map[string]any{
				"kind":        "bigquery-execute-sql",
				"source":      "my-instance",
				"description": "Tool to execute sql",
			},
		},
	}

	cmd, cleanup, err := tests.StartCmd(ctx, toolsFile)
	if err != nil {
		t.Fatalf("command initialization returned an error: %s", err)
	}
	defer cleanup()

	waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
	if err != nil {
		t.Logf("toolbox command logs: \n%s", out)
		t.Fatalf("toolbox didn't start successfully: %s", err)
	}

	runBigQueryWriteModeAllowedTest(t, datasetName)
}

// TestBigQueryWriteModeBlocked starts the server with writeMode "blocked" and
// a pre-populated table; assertions live in runBigQueryWriteModeBlockedTest.
func TestBigQueryWriteModeBlocked(t *testing.T) {
	sourceConfig := getBigQueryVars(t)
	sourceConfig["writeMode"] = "blocked"
	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	defer cancel()

	datasetName := fmt.Sprintf("temp_toolbox_test_blocked_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
	tableName := fmt.Sprintf("param_table_blocked_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
	tableNameParam := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, datasetName, tableName)

	client, err := initBigQueryConnection(BigqueryProject)
	if err != nil {
		t.Fatalf("unable to create BigQuery connection: %s", err)
	}
	createParamTableStmt, insertParamTableStmt, _, _, _, _, paramTestParams := getBigQueryParamToolInfo(tableNameParam)
	teardownTable := setupBigQueryTable(t, ctx, client, createParamTableStmt, insertParamTableStmt, datasetName, tableNameParam, paramTestParams)
	defer teardownTable(t)

	toolsFile := map[string]any{
		"sources": map[string]any{"my-instance": sourceConfig},
		"tools": map[string]any{
			"my-exec-sql-tool": map[string]any{"kind": "bigquery-execute-sql", "source": "my-instance", "description": "Tool to execute sql"},
		},
	}

	cmd, cleanup, err := tests.StartCmd(ctx, toolsFile)
	if err != nil {
		t.Fatalf("command initialization returned an error: %s", err)
	}
	defer cleanup()

	waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
	if err != nil {
		t.Logf("toolbox command logs: \n%s", out)
		t.Fatalf("toolbox didn't start successfully: %s", err)
	}

	runBigQueryWriteModeBlockedTest(t, tableNameParam, datasetName)
}

// TestBigQueryWriteModeProtected starts the server with writeMode "protected"
// plus tools that read from session temp tables; assertions live in
// runBigQueryWriteModeProtectedTest.
func TestBigQueryWriteModeProtected(t *testing.T) {
	sourceConfig := getBigQueryVars(t)
	sourceConfig["writeMode"] = "protected"
	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	defer cancel()

	permanentDatasetName := fmt.Sprintf("perm_dataset_protected_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
	client, err := initBigQueryConnection(BigqueryProject)
	if err != nil {
		t.Fatalf("unable to create BigQuery connection: %s", err)
	}
	dataset := client.Dataset(permanentDatasetName)
	if err := dataset.Create(ctx, &bigqueryapi.DatasetMetadata{Name: permanentDatasetName}); err != nil {
		t.Fatalf("Failed to create dataset %q: %v", permanentDatasetName, err)
	}
	// Best-effort cleanup; failure to delete is logged, not fatal.
	defer func() {
		if err := dataset.DeleteWithContents(ctx); err != nil {
			t.Logf("failed to cleanup dataset %s: %v", permanentDatasetName, err)
		}
	}()

	toolsFile := map[string]any{
		"sources": map[string]any{"my-instance": sourceConfig},
		"tools": map[string]any{
			"my-exec-sql-tool": map[string]any{"kind": "bigquery-execute-sql", "source": "my-instance", "description": "Tool to execute sql"},
			"my-sql-tool-protected": map[string]any{
				"kind":        "bigquery-sql",
				"source":      "my-instance",
				"description": "Tool to query from the session",
				"statement":   "SELECT * FROM my_shared_temp_table",
			},
			"my-forecast-tool-protected": map[string]any{
				"kind":        "bigquery-forecast",
				"source":      "my-instance",
				"description": "Tool to forecast from session temp table",
			},
			"my-analyze-contribution-tool-protected": map[string]any{
				"kind":        "bigquery-analyze-contribution",
				"source":      "my-instance",
				"description": "Tool to analyze contribution from session temp table",
			},
		},
	}

	cmd, cleanup, err := tests.StartCmd(ctx, toolsFile)
	if err != nil {
		t.Fatalf("command initialization returned an error: %s", err)
	}
	defer cleanup()

	waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
	if err != nil {
		t.Logf("toolbox command logs: \n%s", out)
		t.Fatalf("toolbox didn't start successfully: %s", err)
	}

	runBigQueryWriteModeProtectedTest(t, permanentDatasetName)
}

// getBigQueryParamToolInfo returns statements and param for my-tool for bigquery kind
func getBigQueryParamToolInfo(tableName string) (string, string, string, string, string, string, []bigqueryapi.QueryParameter) {
	createStatement := fmt.Sprintf(` CREATE TABLE IF NOT EXISTS %s (id INT64, name STRING);`, tableName)
	insertStatement := fmt.Sprintf(` INSERT INTO %s (id, name) VALUES (?, ?), (?, ?), (?, ?), (?, NULL);`, tableName)
	toolStatement := fmt.Sprintf(`SELECT * FROM %s WHERE id = ? OR name = ? ORDER BY id;`, tableName)
	idToolStatement := fmt.Sprintf(`SELECT * FROM %s WHERE id = ? ORDER BY id;`, tableName)
	nameToolStatement := fmt.Sprintf(`SELECT * FROM %s WHERE name = ?
ORDER BY id;`, tableName)
	// NOTE(review): "arrayToolStatememt" is a long-standing identifier typo
	// (should be "arrayToolStatement"); kept as-is here.
	arrayToolStatememt := fmt.Sprintf(`SELECT * FROM %s WHERE id IN UNNEST(@idArray) AND name IN UNNEST(@nameArray) ORDER BY id;`, tableName)
	// Positional params: one (id, name) pair per row; the last row's name is NULL.
	params := []bigqueryapi.QueryParameter{
		{Value: int64(1)}, {Value: "Alice"},
		{Value: int64(2)}, {Value: "Jane"},
		{Value: int64(3)}, {Value: "Sid"},
		{Value: int64(4)},
	}
	return createStatement, insertStatement, toolStatement, idToolStatement, nameToolStatement, arrayToolStatememt, params
}

// getBigQueryAuthToolInfo returns statements and param of my-auth-tool for bigquery kind
func getBigQueryAuthToolInfo(tableName string) (string, string, string, []bigqueryapi.QueryParameter) {
	createStatement := fmt.Sprintf(` CREATE TABLE IF NOT EXISTS %s (id INT64, name STRING, email STRING)`, tableName)
	insertStatement := fmt.Sprintf(` INSERT INTO %s (id, name, email) VALUES (?, ?, ?), (?, ?, ?)`, tableName)
	toolStatement := fmt.Sprintf(` SELECT name FROM %s WHERE email = ?`, tableName)
	params := []bigqueryapi.QueryParameter{
		{Value: int64(1)}, {Value: "Alice"}, {Value: tests.ServiceAccountEmail},
		{Value: int64(2)}, {Value: "Jane"}, {Value: "[email protected]"},
	}
	return createStatement, insertStatement, toolStatement, params
}

// getBigQueryDataTypeTestInfo returns statements and params for data type tests.
func getBigQueryDataTypeTestInfo(tableName string) (string, string, string, string, []bigqueryapi.QueryParameter) {
	createStatement := fmt.Sprintf(` CREATE TABLE IF NOT EXISTS %s (id INT64, int_val INT64, string_val STRING, float_val FLOAT64, bool_val BOOL);`, tableName)
	insertStatement := fmt.Sprintf(` INSERT INTO %s (id, int_val, string_val, float_val, bool_val) VALUES (?, ?, ?, ?, ?), (?, ?, ?, ?, ?), (?, ?, ?, ?, ?);`, tableName)
	toolStatement := fmt.Sprintf(`SELECT * FROM %s WHERE int_val = ? AND string_val = ? AND float_val = ?
AND bool_val = ?;`, tableName)
	arrayToolStatement := fmt.Sprintf(`SELECT * FROM %s WHERE int_val IN UNNEST(@int_array) AND string_val IN UNNEST(@string_array) AND float_val IN UNNEST(@float_array) AND bool_val IN UNNEST(@bool_array) ORDER BY id;`, tableName)
	// Three rows of (id, int_val, string_val, float_val, bool_val).
	params := []bigqueryapi.QueryParameter{
		{Value: int64(1)}, {Value: int64(123)}, {Value: "hello"}, {Value: 3.14}, {Value: true},
		{Value: int64(2)}, {Value: int64(-456)}, {Value: "world"}, {Value: -0.55}, {Value: false},
		{Value: int64(3)}, {Value: int64(789)}, {Value: "test"}, {Value: 100.1}, {Value: true},
	}
	return createStatement, insertStatement, toolStatement, arrayToolStatement, params
}

// getBigQueryForecastToolInfo returns statements and params for the forecast tool.
func getBigQueryForecastToolInfo(tableName string) (string, string, []bigqueryapi.QueryParameter) {
	createStatement := fmt.Sprintf(` CREATE TABLE IF NOT EXISTS %s (ts TIMESTAMP, data FLOAT64, id STRING);`, tableName)
	insertStatement := fmt.Sprintf(` INSERT INTO %s (ts, data, id) VALUES (?, ?, ?), (?, ?, ?), (?, ?, ?), (?, ?, ?), (?, ?, ?), (?, ?, ?);`, tableName)
	// Two series ("a" and "b") with three hourly points each.
	params := []bigqueryapi.QueryParameter{
		{Value: "2025-01-01T00:00:00Z"}, {Value: 10.0}, {Value: "a"},
		{Value: "2025-01-01T01:00:00Z"}, {Value: 11.0}, {Value: "a"},
		{Value: "2025-01-01T02:00:00Z"}, {Value: 12.0}, {Value: "a"},
		{Value: "2025-01-01T00:00:00Z"}, {Value: 20.0}, {Value: "b"},
		{Value: "2025-01-01T01:00:00Z"}, {Value: 21.0}, {Value: "b"},
		{Value: "2025-01-01T02:00:00Z"}, {Value: 22.0}, {Value: "b"},
	}
	return createStatement, insertStatement, params
}

// getBigQueryAnalyzeContributionToolInfo returns statements and params for the analyze-contribution tool.
func getBigQueryAnalyzeContributionToolInfo(tableName string) (string, string, []bigqueryapi.QueryParameter) {
	createStatement := fmt.Sprintf(` CREATE TABLE IF NOT EXISTS %s (dim1 STRING, dim2 STRING, is_test BOOL, metric FLOAT64);`, tableName)
	// NOTE(review): the statement lists 4 value groups (16 placeholders) while
	// params below supplies 8 rows (32 values) — confirm the placeholder count
	// matches the parameter list, as a positional-parameter mismatch would fail
	// the insert job.
	insertStatement := fmt.Sprintf(` INSERT INTO %s (dim1, dim2, is_test, metric) VALUES (?, ?, ?, ?), (?, ?, ?, ?), (?, ?, ?, ?), (?, ?, ?, ?);`, tableName)
	params := []bigqueryapi.QueryParameter{
		{Value: "a"}, {Value: "x"}, {Value: true}, {Value: 100.0},
		{Value: "a"}, {Value: "x"}, {Value: false}, {Value: 110.0},
		{Value: "a"}, {Value: "y"}, {Value: true}, {Value: 120.0},
		{Value: "a"}, {Value: "y"}, {Value: false}, {Value: 100.0},
		{Value: "b"}, {Value: "x"}, {Value: true}, {Value: 40.0},
		{Value: "b"}, {Value: "x"}, {Value: false}, {Value: 100.0},
		{Value: "b"}, {Value: "y"}, {Value: true}, {Value: 60.0},
		{Value: "b"}, {Value: "y"}, {Value: false}, {Value: 60.0},
	}
	return createStatement, insertStatement, params
}

// getBigQueryTmplToolStatement returns statements for template parameter test cases for bigquery kind
func getBigQueryTmplToolStatement() (string, string) {
	tmplSelectCombined := "SELECT * FROM {{.tableName}} WHERE id = ? ORDER BY id"
	tmplSelectFilterCombined := "SELECT * FROM {{.tableName}} WHERE {{.columnFilter}} = ? ORDER BY id"
	return tmplSelectCombined, tmplSelectFilterCombined
}

// setupBigQueryTable ensures the dataset exists, creates the table, optionally
// inserts test data, and returns a teardown func that drops the table (and the
// dataset, once it is empty).
func setupBigQueryTable(t *testing.T, ctx context.Context, client *bigqueryapi.Client, createStatement, insertStatement, datasetName string, tableName string, params []bigqueryapi.QueryParameter) func(*testing.T) {
	// Create dataset
	dataset := client.Dataset(datasetName)
	_, err := dataset.Metadata(ctx)
	if err != nil {
		// Only a 404 (dataset missing) is expected; anything else is fatal.
		apiErr, ok := err.(*googleapi.Error)
		if !ok || apiErr.Code != 404 {
			t.Fatalf("Failed to check dataset %q existence: %v", datasetName, err)
		}
		metadataToCreate := &bigqueryapi.DatasetMetadata{Name: datasetName}
		if err := dataset.Create(ctx, metadataToCreate); err != nil {
			t.Fatalf("Failed to create dataset %q: %v", datasetName, err)
		}
	}

	// Create table
	createJob, err := client.Query(createStatement).Run(ctx)
	if err != nil {
		t.Fatalf("Failed to start create table job for %s: %v", tableName, err)
	}
	createStatus, err := createJob.Wait(ctx)
	if err != nil {
		t.Fatalf("Failed to wait for create table job for %s: %v", tableName, err)
	}
	if err := createStatus.Err(); err != nil {
		t.Fatalf("Create table job for %s failed: %v", tableName, err)
	}

	if len(params) > 0 {
		// Insert test data
		insertQuery := client.Query(insertStatement)
		insertQuery.Parameters = params
		insertJob, err := insertQuery.Run(ctx)
		if err != nil {
			t.Fatalf("Failed to start insert job for %s: %v", tableName, err)
		}
		insertStatus, err := insertJob.Wait(ctx)
		if err != nil {
			t.Fatalf("Failed to wait for insert job for %s: %v", tableName, err)
		}
		if err := insertStatus.Err(); err != nil {
			t.Fatalf("Insert job for %s failed: %v", tableName, err)
		}
	}

	return func(t *testing.T) {
		// tear down table; teardown uses t.Errorf (not Fatalf) so sibling
		// cleanups still run.
		dropSQL := fmt.Sprintf("drop table %s", tableName)
		dropJob, err := client.Query(dropSQL).Run(ctx)
		if err != nil {
			t.Errorf("Failed to start drop table job for %s: %v", tableName, err)
			return
		}
		dropStatus, err := dropJob.Wait(ctx)
		if err != nil {
			t.Errorf("Failed to wait for drop table job for %s: %v", tableName, err)
			return
		}
		if err :=
dropStatus.Err(); err != nil {
			t.Errorf("Error dropping table %s: %v", tableName, err)
		}

		// tear down dataset: delete it only after its last table is gone.
		datasetToTeardown := client.Dataset(datasetName)
		tablesIterator := datasetToTeardown.Tables(ctx)
		_, err = tablesIterator.Next()

		if err == iterator.Done {
			if err := datasetToTeardown.Delete(ctx); err != nil {
				t.Errorf("Failed to delete dataset %s: %v", datasetName, err)
			}
		} else if err != nil {
			t.Errorf("Failed to list tables in dataset %s to check emptiness: %v.", datasetName, err)
		}
	}
}

// addBigQueryPrebuiltToolsConfig registers the prebuilt BigQuery tools (plain,
// auth-required, and client-OAuth variants of each) into the config's "tools"
// map and returns the updated config.
func addBigQueryPrebuiltToolsConfig(t *testing.T, config map[string]any) map[string]any {
	tools, ok := config["tools"].(map[string]any)
	if !ok {
		t.Fatalf("unable to get tools from config")
	}
	tools["my-exec-sql-tool"] = map[string]any{
		"kind":        "bigquery-execute-sql",
		"source":      "my-instance",
		"description": "Tool to execute sql",
	}
	tools["my-auth-exec-sql-tool"] = map[string]any{
		"kind":        "bigquery-execute-sql",
		"source":      "my-instance",
		"description": "Tool to execute sql",
		"authRequired": []string{
			"my-google-auth",
		},
	}
	tools["my-client-auth-exec-sql-tool"] = map[string]any{
		"kind":        "bigquery-execute-sql",
		"source":      "my-client-auth-source",
		"description": "Tool to execute sql",
	}
	tools["my-forecast-tool"] = map[string]any{
		"kind":        "bigquery-forecast",
		"source":      "my-instance",
		"description": "Tool to forecast time series data.",
	}
	tools["my-auth-forecast-tool"] = map[string]any{
		"kind":        "bigquery-forecast",
		"source":      "my-instance",
		"description": "Tool to forecast time series data with auth.",
		"authRequired": []string{
			"my-google-auth",
		},
	}
	tools["my-client-auth-forecast-tool"] = map[string]any{
		"kind":        "bigquery-forecast",
		"source":      "my-client-auth-source",
		"description": "Tool to forecast time series data with auth.",
	}
	tools["my-analyze-contribution-tool"] = map[string]any{
		"kind":        "bigquery-analyze-contribution",
		"source":      "my-instance",
		"description": "Tool to analyze contribution.",
	}
	tools["my-auth-analyze-contribution-tool"] = map[string]any{
		"kind":        "bigquery-analyze-contribution",
		"source":      "my-instance",
		"description": "Tool to analyze contribution with auth.",
		"authRequired": []string{
			"my-google-auth",
		},
	}
	tools["my-client-auth-analyze-contribution-tool"] = map[string]any{
		"kind":        "bigquery-analyze-contribution",
		"source":      "my-client-auth-source",
		"description": "Tool to analyze contribution with auth.",
	}
	tools["my-list-dataset-ids-tool"] = map[string]any{
		"kind":        "bigquery-list-dataset-ids",
		"source":      "my-instance",
		"description": "Tool to list dataset",
	}
	tools["my-auth-list-dataset-ids-tool"] = map[string]any{
		"kind":        "bigquery-list-dataset-ids",
		"source":      "my-instance",
		"description": "Tool to list dataset",
		"authRequired": []string{
			"my-google-auth",
		},
	}
	tools["my-client-auth-list-dataset-ids-tool"] = map[string]any{
		"kind":        "bigquery-list-dataset-ids",
		"source":      "my-client-auth-source",
		"description": "Tool to list dataset",
	}
	tools["my-get-dataset-info-tool"] = map[string]any{
		"kind":        "bigquery-get-dataset-info",
		"source":      "my-instance",
		"description": "Tool to show dataset metadata",
	}
	tools["my-auth-get-dataset-info-tool"] = map[string]any{
		"kind":        "bigquery-get-dataset-info",
		"source":      "my-instance",
		"description": "Tool to show dataset metadata",
		"authRequired": []string{
			"my-google-auth",
		},
	}
	tools["my-client-auth-get-dataset-info-tool"] = map[string]any{
		"kind":        "bigquery-get-dataset-info",
		"source":      "my-client-auth-source",
		"description": "Tool to show dataset metadata",
	}
	tools["my-list-table-ids-tool"] = map[string]any{
		"kind":        "bigquery-list-table-ids",
		"source":      "my-instance",
		"description": "Tool to list table within a dataset",
	}
	tools["my-auth-list-table-ids-tool"] = map[string]any{
		"kind":        "bigquery-list-table-ids",
		"source":      "my-instance",
		"description": "Tool to list table within a dataset",
		"authRequired": []string{
			"my-google-auth",
		},
	}
	tools["my-client-auth-list-table-ids-tool"] = map[string]any{
		"kind":        "bigquery-list-table-ids",
		"source":      "my-client-auth-source",
		"description": "Tool to list table within a dataset",
	}
	tools["my-get-table-info-tool"] = map[string]any{
		"kind":        "bigquery-get-table-info",
		"source":      "my-instance",
		"description": "Tool to show dataset metadata",
	}
	tools["my-auth-get-table-info-tool"] = map[string]any{
		"kind":        "bigquery-get-table-info",
		"source":      "my-instance",
		"description": "Tool to show dataset metadata",
		"authRequired": []string{
			"my-google-auth",
		},
	}
	tools["my-client-auth-get-table-info-tool"] = map[string]any{
		"kind":        "bigquery-get-table-info",
		"source":      "my-client-auth-source",
		"description": "Tool to show dataset metadata",
	}
	tools["my-conversational-analytics-tool"] = map[string]any{
		"kind":        "bigquery-conversational-analytics",
		"source":      "my-instance",
		"description": "Tool to ask BigQuery conversational analytics",
	}
	tools["my-auth-conversational-analytics-tool"] = map[string]any{
		"kind":        "bigquery-conversational-analytics",
		"source":      "my-instance",
		"description": "Tool to ask BigQuery conversational analytics",
		"authRequired": []string{
			"my-google-auth",
		},
	}
	tools["my-client-auth-conversational-analytics-tool"] = map[string]any{
		"kind":        "bigquery-conversational-analytics",
		"source":      "my-client-auth-source",
		"description": "Tool to ask BigQuery conversational analytics",
	}
	// NOTE(review): "BiqQuery" below is a typo for "BigQuery" in a runtime
	// description string; left unchanged here.
	tools["my-search-catalog-tool"] = map[string]any{
		"kind":        "bigquery-search-catalog",
		"source":      "my-instance",
		"description": "Tool to search the BiqQuery catalog",
	}
	tools["my-auth-search-catalog-tool"] = map[string]any{
		"kind":        "bigquery-search-catalog",
		"source":      "my-instance",
		"description": "Tool to search the BiqQuery catalog",
		"authRequired": []string{
			"my-google-auth",
		},
	}
	tools["my-client-auth-search-catalog-tool"] = map[string]any{
		"kind":        "bigquery-search-catalog",
		"source":      "my-client-auth-source",
		"description": "Tool to search the BiqQuery catalog",
	}
	config["tools"] = tools
	return config
}

// addClientAuthSourceConfig adds a second BigQuery source that authenticates
// with the client's OAuth token (useClientOAuth) and returns the updated config.
func addClientAuthSourceConfig(t *testing.T, config map[string]any) map[string]any {
	sources, ok := config["sources"].(map[string]any)
	if !ok {
		t.Fatalf("unable to get sources from config")
	}
	sources["my-client-auth-source"] = map[string]any{
		"kind":           BigquerySourceKind,
		"project":        BigqueryProject,
		"useClientOAuth": true,
	}
	config["sources"] = sources
	return config
}

// addBigQuerySqlToolConfig registers bigquery-sql tools exercising scalar and
// array parameter data types and returns the updated config.
func addBigQuerySqlToolConfig(t *testing.T, config map[string]any, toolStatement, arrayToolStatement string) map[string]any {
	tools, ok := config["tools"].(map[string]any)
	if !ok {
		t.Fatalf("unable to get tools from config")
	}
	tools["my-scalar-datatype-tool"] = map[string]any{
		"kind":        "bigquery-sql",
		"source":      "my-instance",
		"description": "Tool to test various scalar data types.",
		"statement":   toolStatement,
		"parameters": []any{
			map[string]any{"name": "int_val", "type": "integer", "description": "an integer value"},
			map[string]any{"name": "string_val", "type": "string", "description": "a string value"},
			map[string]any{"name": "float_val", "type": "float", "description": "a float value"},
			map[string]any{"name": "bool_val", "type": "boolean", "description": "a boolean value"},
		},
	}
	tools["my-array-datatype-tool"] = map[string]any{
		"kind":        "bigquery-sql",
		"source":      "my-instance",
		"description": "Tool to test various array data types.",
		"statement":   arrayToolStatement,
		"parameters": []any{
			map[string]any{"name": "int_array", "type": "array", "description": "an array of integer values", "items": map[string]any{"name": "item", "type": "integer", "description": "desc"}},
			map[string]any{"name": "string_array", "type": "array", "description": "an array of string values", "items": map[string]any{"name": "item", "type": "string", "description": "desc"}},
			map[string]any{"name": "float_array", "type": "array", "description": "an array of float values", "items": map[string]any{"name": "item", "type": "float", "description": "desc"}},
			map[string]any{"name": "bool_array", "type": "array", "description": "an array of boolean values", "items": map[string]any{"name": "item", "type": "boolean", "description": "desc"}},
}, } tools["my-client-auth-tool"] = map[string]any{ "kind": "bigquery-sql", "source": "my-client-auth-source", "description": "Tool to test client authorization.", "statement": "SELECT 1", } config["tools"] = tools return config } func runBigQueryExecuteSqlToolInvokeTest(t *testing.T, select1Want, invokeParamWant, tableNameParam, ddlWant string) { // Get ID token idToken, err := tests.GetGoogleIdToken(tests.ClientId) if err != nil { t.Fatalf("error getting Google ID token: %s", err) } // Get access token accessToken, err := sources.GetIAMAccessToken(t.Context()) if err != nil { t.Fatalf("error getting access token from ADC: %s", err) } accessToken = "Bearer " + accessToken // Test tool invoke endpoint invokeTcs := []struct { name string api string requestHeader map[string]string requestBody io.Reader want string isErr bool }{ { name: "invoke my-exec-sql-tool without body", api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(`{}`)), isErr: true, }, { name: "invoke my-exec-sql-tool", api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)), want: select1Want, isErr: false, }, { name: "invoke my-exec-sql-tool create table", api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(`{"sql":"CREATE TABLE t (id SERIAL PRIMARY KEY, name TEXT)"}`)), want: ddlWant, isErr: true, }, { name: "invoke my-exec-sql-tool with data present in table", api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"sql\":\"SELECT * FROM %s WHERE id = 3 OR name = 'Alice' ORDER BY id\"}", tableNameParam))), want: invokeParamWant, isErr: false, }, { name: "invoke my-exec-sql-tool with no matching rows", api: 
"http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"sql\":\"SELECT * FROM %s WHERE id = 999\"}", tableNameParam))), want: `"The query returned 0 rows."`, isErr: false, }, { name: "invoke my-exec-sql-tool drop table", api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(`{"sql":"DROP TABLE t"}`)), want: ddlWant, isErr: true, }, { name: "invoke my-exec-sql-tool insert entry", api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"sql\":\"INSERT INTO %s (id, name) VALUES (4, 'test_name')\"}", tableNameParam))), want: ddlWant, isErr: false, }, { name: "invoke my-exec-sql-tool without body", api: "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(`{}`)), isErr: true, }, { name: "Invoke my-auth-exec-sql-tool with auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-exec-sql-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": idToken}, requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)), isErr: false, want: select1Want, }, { name: "Invoke my-auth-exec-sql-tool with invalid auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-exec-sql-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"}, requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)), isErr: true, }, { name: "Invoke my-auth-exec-sql-tool without auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-exec-sql-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)), isErr: true, }, { name: "Invoke my-client-auth-exec-sql-tool with auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-exec-sql-tool/invoke", requestHeader: 
map[string]string{"Authorization": accessToken}, requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)), want: "[{\"f0_\":1}]", isErr: false, }, { name: "Invoke my-client-auth-exec-sql-tool without auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-exec-sql-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)), isErr: true, }, { name: "Invoke my-client-auth-exec-sql-tool with invalid auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-exec-sql-tool/invoke", requestHeader: map[string]string{"Authorization": "Bearer invalid-token"}, requestBody: bytes.NewBuffer([]byte(`{"sql":"SELECT 1"}`)), isErr: true, }, } for _, tc := range invokeTcs { t.Run(tc.name, func(t *testing.T) { // Send Tool invocation request req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody) if err != nil { t.Fatalf("unable to create request: %s", err) } req.Header.Add("Content-type", "application/json") for k, v := range tc.requestHeader { req.Header.Add(k, v) } resp, err := http.DefaultClient.Do(req) if err != nil { t.Fatalf("unable to send request: %s", err) } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { if tc.isErr { return } bodyBytes, _ := io.ReadAll(resp.Body) t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes)) } // Check response body var body map[string]interface{} err = json.NewDecoder(resp.Body).Decode(&body) if err != nil { t.Fatalf("error parsing response body") } got, ok := body["result"].(string) if !ok { t.Fatalf("unable to find result in response body") } if got != tc.want { t.Fatalf("unexpected value: got %q, want %q", got, tc.want) } }) } } // runInvokeRequest sends a POST request to the given API endpoint and returns the response and parsed JSON body. 
// The response body is buffered and re-wrapped so callers may read it again
// (e.g. for logging on failure). Any transport or decode error fails the test.
func runInvokeRequest(t *testing.T, api, body string, headers map[string]string) (*http.Response, map[string]interface{}) {
	t.Helper()
	req, err := http.NewRequest(http.MethodPost, api, bytes.NewBufferString(body))
	if err != nil {
		t.Fatalf("failed to create request: %v", err)
	}
	req.Header.Set("Content-Type", "application/json")
	for k, v := range headers {
		req.Header.Add(k, v)
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		t.Fatalf("failed to send request: %v", err)
	}
	var result map[string]interface{}
	// Read the whole body once, then replace resp.Body with a fresh reader so
	// the body stays readable by the caller (for logging on failure).
	bodyBytes, err := io.ReadAll(resp.Body)
	if err != nil {
		t.Fatalf("failed to read response body: %v", err)
	}
	resp.Body.Close()                                    // Close original body
	resp.Body = io.NopCloser(bytes.NewBuffer(bodyBytes)) // Replace with a new reader
	if err := json.Unmarshal(bodyBytes, &result); err != nil {
		t.Logf("Failed to decode response body: %s", string(bodyBytes))
		t.Fatalf("failed to decode response: %v", err)
	}
	return resp, result
}

// runBigQueryWriteModeAllowedTest verifies that DDL succeeds through
// my-exec-sql-tool when the source's write mode is "allowed".
func runBigQueryWriteModeAllowedTest(t *testing.T, datasetName string) {
	t.Run("CREATE TABLE should succeed", func(t *testing.T) {
		sql := fmt.Sprintf("CREATE TABLE %s.new_table (x INT64)", datasetName)
		body := fmt.Sprintf(`{"sql": "%s"}`, sql)
		resp, result := runInvokeRequest(t, "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke", body, nil)
		defer resp.Body.Close()
		if resp.StatusCode != http.StatusOK {
			bodyBytes, _ := io.ReadAll(resp.Body)
			t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, http.StatusOK, string(bodyBytes))
		}
		resStr, ok := result["result"].(string)
		if !ok {
			t.Fatalf("expected 'result' field in response, got %v", result)
		}
		if resStr != `"Query executed successfully and returned no content."` {
			t.Errorf("unexpected result: got %q, want %q", resStr, `"Query executed successfully and returned no content."`)
		}
	})
}

// runBigQueryWriteModeBlockedTest verifies that write mode "blocked" permits
// SELECT but rejects INSERT and DDL with a 400 and an explanatory error.
func runBigQueryWriteModeBlockedTest(t *testing.T, tableNameParam, datasetName string) {
	testCases := []struct {
		name           string
		sql            string
		wantStatusCode int
		wantInError    string // substring expected in the "error" field; "" skips the check
		wantResult     string // exact expected "result" field; "" skips the check
	}{
		{"SELECT statement should succeed", fmt.Sprintf("SELECT * FROM %s WHERE id = 1", tableNameParam), http.StatusOK, "", `[{"id":1,"name":"Alice"}]`},
		{"INSERT statement should fail", fmt.Sprintf("INSERT INTO %s (id, name) VALUES (10, 'test')", tableNameParam), http.StatusBadRequest, "write mode is 'blocked', only SELECT statements are allowed", ""},
		{"CREATE TABLE statement should fail", fmt.Sprintf("CREATE TABLE %s.new_table (x INT64)", datasetName), http.StatusBadRequest, "write mode is 'blocked', only SELECT statements are allowed", ""},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			body := fmt.Sprintf(`{"sql": "%s"}`, tc.sql)
			resp, result := runInvokeRequest(t, "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke", body, nil)
			defer resp.Body.Close()
			if resp.StatusCode != tc.wantStatusCode {
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
			}
			if tc.wantInError != "" {
				errStr, ok := result["error"].(string)
				if !ok {
					t.Fatalf("expected 'error' field in response, got %v", result)
				}
				if !strings.Contains(errStr, tc.wantInError) {
					t.Fatalf("expected error message to contain %q, but got %q", tc.wantInError, errStr)
				}
			}
			if tc.wantResult != "" {
				resStr, ok := result["result"].(string)
				if !ok {
					t.Fatalf("expected 'result' field in response, got %v", result)
				}
				if resStr != tc.wantResult {
					t.Fatalf("unexpected result: got %q, want %q", resStr, tc.wantResult)
				}
			}
		})
	}
}

// runBigQueryWriteModeProtectedTest verifies write mode "protected": writes to
// permanent datasets are rejected, while TEMP-table writes (in the session's
// anonymous dataset) succeed and remain visible to the other protected tools.
// Cases are order-dependent: later cases read TEMP tables created earlier.
func runBigQueryWriteModeProtectedTest(t *testing.T, permanentDatasetName string) {
	testCases := []struct {
		name           string
		toolName       string
		requestBody    string
		wantStatusCode int
		wantInError    string // substring expected in "error"; "" skips the check
		wantResult     string // substring expected in "result"; "" skips the check
	}{
		{
			name:           "CREATE TABLE to permanent dataset should fail",
			toolName:       "my-exec-sql-tool",
			requestBody:    fmt.Sprintf(`{"sql": "CREATE TABLE %s.new_table (x INT64)"}`, permanentDatasetName),
			wantStatusCode: http.StatusBadRequest,
			wantInError:    "protected write mode only supports SELECT statements, or write operations in the anonymous dataset",
			wantResult:     "",
		},
		{
			name:           "CREATE TEMP TABLE should succeed",
			toolName:       "my-exec-sql-tool",
			requestBody:    `{"sql": "CREATE TEMP TABLE my_shared_temp_table (x INT64)"}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `"Query executed successfully and returned no content."`,
		},
		{
			name:           "INSERT into TEMP TABLE should succeed",
			toolName:       "my-exec-sql-tool",
			requestBody:    `{"sql": "INSERT INTO my_shared_temp_table (x) VALUES (42)"}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `"Query executed successfully and returned no content."`,
		},
		{
			name:           "SELECT from TEMP TABLE with exec-sql should succeed",
			toolName:       "my-exec-sql-tool",
			requestBody:    `{"sql": "SELECT * FROM my_shared_temp_table"}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `[{"x":42}]`,
		},
		{
			name:           "SELECT from TEMP TABLE with sql-tool should succeed",
			toolName:       "my-sql-tool-protected",
			requestBody:    `{}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `[{"x":42}]`,
		},
		{
			name:           "CREATE TEMP TABLE for forecast should succeed",
			toolName:       "my-exec-sql-tool",
			requestBody:    `{"sql": "CREATE TEMP TABLE forecast_temp_table (ts TIMESTAMP, data FLOAT64) AS SELECT TIMESTAMP('2025-01-01T00:00:00Z') AS ts, 10.0 AS data UNION ALL SELECT TIMESTAMP('2025-01-01T01:00:00Z'), 11.0 UNION ALL SELECT TIMESTAMP('2025-01-01T02:00:00Z'), 12.0 UNION ALL SELECT TIMESTAMP('2025-01-01T03:00:00Z'), 13.0"}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `"Query executed successfully and returned no content."`,
		},
		{
			name:           "Forecast from TEMP TABLE should succeed",
			toolName:       "my-forecast-tool-protected",
			requestBody:    `{"history_data": "SELECT * FROM forecast_temp_table", "timestamp_col": "ts", "data_col": "data", "horizon": 1}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `"forecast_timestamp"`,
		},
		{
			name:           "CREATE TEMP TABLE for contribution analysis should succeed",
			toolName:       "my-exec-sql-tool",
			requestBody:    `{"sql": "CREATE TEMP TABLE contribution_temp_table (dim1 STRING, is_test BOOL, metric FLOAT64) AS SELECT 'a' as dim1, true as is_test, 100.0 as metric UNION ALL SELECT 'b', false, 120.0"}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `"Query executed successfully and returned no content."`,
		},
		{
			name:           "Analyze contribution from TEMP TABLE should succeed",
			toolName:       "my-analyze-contribution-tool-protected",
			requestBody:    `{"input_data": "SELECT * FROM contribution_temp_table", "contribution_metric": "SUM(metric)", "is_test_col": "is_test", "dimension_id_cols": ["dim1"]}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `"relative_difference"`,
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			api := fmt.Sprintf("http://127.0.0.1:5000/api/tool/%s/invoke", tc.toolName)
			resp, result := runInvokeRequest(t, api, tc.requestBody, nil)
			defer
resp.Body.Close()
			if resp.StatusCode != tc.wantStatusCode {
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
			}
			if tc.wantInError != "" {
				errStr, ok := result["error"].(string)
				if !ok {
					t.Fatalf("expected 'error' field in response, got %v", result)
				}
				if !strings.Contains(errStr, tc.wantInError) {
					t.Fatalf("expected error message to contain %q, but got %q", tc.wantInError, errStr)
				}
			}
			if tc.wantResult != "" {
				resStr, ok := result["result"].(string)
				if !ok {
					t.Fatalf("expected 'result' field in response, got %v", result)
				}
				// Substring match: results carry variable data (timestamps etc.).
				if !strings.Contains(resStr, tc.wantResult) {
					t.Fatalf("expected %q to contain %q, but it did not", resStr, tc.wantResult)
				}
			}
		})
	}
}

// runBigQueryExecuteSqlToolInvokeDryRunTest verifies the dry_run flag of the
// exec-sql tools: statements are validated (job statistics returned, e.g.
// statementType) without being executed. Uses a UUID-suffixed table name so
// repeated runs cannot collide.
func runBigQueryExecuteSqlToolInvokeDryRunTest(t *testing.T, datasetName string) {
	// Get ID token
	idToken, err := tests.GetGoogleIdToken(tests.ClientId)
	if err != nil {
		t.Fatalf("error getting Google ID token: %s", err)
	}
	newTableName := fmt.Sprintf("%s.new_dry_run_table_%s", datasetName, strings.ReplaceAll(uuid.New().String(), "-", ""))
	// Test tool invoke endpoint
	invokeTcs := []struct {
		name          string
		api           string
		requestHeader map[string]string
		requestBody   io.Reader
		want          string // substring expected in "result"; unused when isErr
		isErr         bool
	}{
		{
			name:          "invoke my-exec-sql-tool with dryRun",
			api:           "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(`{"sql":"SELECT 1", "dry_run": true}`)),
			want:          `\"statementType\": \"SELECT\"`,
			isErr:         false,
		},
		{
			name:          "invoke my-exec-sql-tool with dryRun create table",
			api:           "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(fmt.Sprintf(`{"sql":"CREATE TABLE %s (id INT64, name STRING)", "dry_run": true}`, newTableName))),
			want:          `\"statementType\": \"CREATE_TABLE\"`,
			isErr:         false,
		},
		{
			name:          "invoke my-exec-sql-tool with dryRun execute immediate",
			api:           "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(fmt.Sprintf(`{"sql":"EXECUTE IMMEDIATE \"CREATE TABLE %s (id INT64, name STRING)\"", "dry_run": true}`, newTableName))),
			want:          `\"statementType\": \"SCRIPT\"`,
			isErr:         false,
		},
		{
			name:          "Invoke my-auth-exec-sql-tool with dryRun and auth token",
			api:           "http://127.0.0.1:5000/api/tool/my-auth-exec-sql-tool/invoke",
			requestHeader: map[string]string{"my-google-auth_token": idToken},
			requestBody:   bytes.NewBuffer([]byte(`{"sql":"SELECT 1", "dry_run": true}`)),
			isErr:         false,
			want:          `\"statementType\": \"SELECT\"`,
		},
		{
			name:          "Invoke my-auth-exec-sql-tool with dryRun and invalid auth token",
			api:           "http://127.0.0.1:5000/api/tool/my-auth-exec-sql-tool/invoke",
			requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"},
			requestBody:   bytes.NewBuffer([]byte(`{"sql":"SELECT 1","dry_run": true}`)),
			isErr:         true,
		},
		{
			name:          "Invoke my-auth-exec-sql-tool with dryRun and without auth token",
			api:           "http://127.0.0.1:5000/api/tool/my-auth-exec-sql-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(`{"sql":"SELECT 1", "dry_run": true}`)),
			isErr:         true,
		},
	}
	for _, tc := range invokeTcs {
		t.Run(tc.name, func(t *testing.T) {
			// Send Tool invocation request
			req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")
			for k, v := range tc.requestHeader {
				req.Header.Add(k, v)
			}
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()
			if resp.StatusCode != http.StatusOK {
				if tc.isErr {
					return
				}
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
			}
			// Check response body
			var body map[string]interface{}
			err = json.NewDecoder(resp.Body).Decode(&body)
			if err !=
nil { t.Fatalf("error parsing response body") } got, ok := body["result"].(string) if !ok { t.Fatalf("unable to find result in response body") } if !strings.Contains(got, tc.want) { t.Fatalf("expected %q to contain %q, but it did not", got, tc.want) } }) } } func runBigQueryForecastToolInvokeTest(t *testing.T, tableName string) { idToken, err := tests.GetGoogleIdToken(tests.ClientId) if err != nil { t.Fatalf("error getting Google ID token: %s", err) } // Get access token accessToken, err := sources.GetIAMAccessToken(t.Context()) if err != nil { t.Fatalf("error getting access token from ADC: %s", err) } accessToken = "Bearer " + accessToken historyDataTable := strings.ReplaceAll(tableName, "`", "") historyDataQuery := fmt.Sprintf("SELECT ts, data, id FROM %s", tableName) invokeTcs := []struct { name string api string requestHeader map[string]string requestBody io.Reader want string isErr bool }{ { name: "invoke my-forecast-tool without required params", api: "http://127.0.0.1:5000/api/tool/my-forecast-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s"}`, historyDataTable))), isErr: true, }, { name: "invoke my-forecast-tool with table", api: "http://127.0.0.1:5000/api/tool/my-forecast-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": "data"}`, historyDataTable))), want: `"forecast_timestamp"`, isErr: false, }, { name: "invoke my-forecast-tool with query and horizon", api: "http://127.0.0.1:5000/api/tool/my-forecast-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": "data", "horizon": 5}`, historyDataQuery))), want: `"forecast_timestamp"`, isErr: false, }, { name: "invoke my-forecast-tool with id_cols", api: "http://127.0.0.1:5000/api/tool/my-forecast-tool/invoke", requestHeader: 
map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": "data", "id_cols": ["id"]}`, historyDataTable))), want: `"id"`, isErr: false, }, { name: "invoke my-auth-forecast-tool with auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-forecast-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": idToken}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": "data"}`, historyDataTable))), want: `"forecast_timestamp"`, isErr: false, }, { name: "invoke my-auth-forecast-tool with invalid auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-forecast-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": "data"}`, historyDataTable))), isErr: true, }, { name: "Invoke my-client-auth-forecast-tool with auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-forecast-tool/invoke", requestHeader: map[string]string{"Authorization": accessToken}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": "data"}`, historyDataTable))), want: `"forecast_timestamp"`, isErr: false, }, { name: "Invoke my-client-auth-forecast-tool without auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-forecast-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": "data"}`, historyDataTable))), isErr: true, }, { name: "Invoke my-client-auth-forecast-tool with invalid auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-forecast-tool/invoke", requestHeader: map[string]string{"Authorization": "Bearer invalid-token"}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"history_data": "%s", "timestamp_col": "ts", "data_col": 
"data"}`, historyDataTable))), isErr: true, }, } for _, tc := range invokeTcs { t.Run(tc.name, func(t *testing.T) { // Send Tool invocation request req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody) if err != nil { t.Fatalf("unable to create request: %s", err) } req.Header.Add("Content-type", "application/json") for k, v := range tc.requestHeader { req.Header.Add(k, v) } resp, err := http.DefaultClient.Do(req) if err != nil { t.Fatalf("unable to send request: %s", err) } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { if tc.isErr { return } bodyBytes, _ := io.ReadAll(resp.Body) t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes)) } // Check response body var body map[string]interface{} err = json.NewDecoder(resp.Body).Decode(&body) if err != nil { t.Fatalf("error parsing response body") } got, ok := body["result"].(string) if !ok { t.Fatalf("unable to find result in response body") } if !strings.Contains(got, tc.want) { t.Fatalf("expected %q to contain %q, but it did not", got, tc.want) } }) } } func runBigQueryAnalyzeContributionToolInvokeTest(t *testing.T, tableName string) { idToken, err := tests.GetGoogleIdToken(tests.ClientId) if err != nil { t.Fatalf("error getting Google ID token: %s", err) } // Get access token accessToken, err := sources.GetIAMAccessToken(t.Context()) if err != nil { t.Fatalf("error getting access token from ADC: %s", err) } accessToken = "Bearer " + accessToken dataTable := strings.ReplaceAll(tableName, "`", "") invokeTcs := []struct { name string api string requestHeader map[string]string requestBody io.Reader want string isErr bool }{ { name: "invoke my-analyze-contribution-tool without required params", api: "http://127.0.0.1:5000/api/tool/my-analyze-contribution-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"input_data": "%s"}`, dataTable))), isErr: true, }, { name: "invoke my-analyze-contribution-tool with 
table", api: "http://127.0.0.1:5000/api/tool/my-analyze-contribution-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"input_data": "%s", "contribution_metric": "SUM(metric)", "is_test_col": "is_test", "dimension_id_cols": ["dim1", "dim2"]}`, dataTable))), want: `"relative_difference"`, isErr: false, }, { name: "invoke my-auth-analyze-contribution-tool with auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-analyze-contribution-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": idToken}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"input_data": "%s", "contribution_metric": "SUM(metric)", "is_test_col": "is_test", "dimension_id_cols": ["dim1", "dim2"]}`, dataTable))), want: `"relative_difference"`, isErr: false, }, { name: "invoke my-auth-analyze-contribution-tool with invalid auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-analyze-contribution-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"input_data": "%s", "contribution_metric": "SUM(metric)", "is_test_col": "is_test", "dimension_id_cols": ["dim1", "dim2"]}`, dataTable))), isErr: true, }, { name: "Invoke my-client-auth-analyze-contribution-tool with auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-analyze-contribution-tool/invoke", requestHeader: map[string]string{"Authorization": accessToken}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"input_data": "%s", "contribution_metric": "SUM(metric)", "is_test_col": "is_test", "dimension_id_cols": ["dim1", "dim2"]}`, dataTable))), want: `"relative_difference"`, isErr: false, }, { name: "Invoke my-client-auth-analyze-contribution-tool without auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-analyze-contribution-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"input_data": "%s", 
"contribution_metric": "SUM(metric)", "is_test_col": "is_test", "dimension_id_cols": ["dim1", "dim2"]}`, dataTable))), isErr: true, }, { name: "Invoke my-client-auth-analyze-contribution-tool with invalid auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-analyze-contribution-tool/invoke", requestHeader: map[string]string{"Authorization": "Bearer invalid-token"}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"input_data": "%s", "contribution_metric": "SUM(metric)", "is_test_col": "is_test", "dimension_id_cols": ["dim1", "dim2"]}`, dataTable))), isErr: true, }, } for _, tc := range invokeTcs { t.Run(tc.name, func(t *testing.T) { // Send Tool invocation request req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody) if err != nil { t.Fatalf("unable to create request: %s", err) } req.Header.Add("Content-type", "application/json") for k, v := range tc.requestHeader { req.Header.Add(k, v) } resp, err := http.DefaultClient.Do(req) if err != nil { t.Fatalf("unable to send request: %s", err) } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { if tc.isErr { return } bodyBytes, _ := io.ReadAll(resp.Body) t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes)) } // Check response body var body map[string]interface{} err = json.NewDecoder(resp.Body).Decode(&body) if err != nil { t.Fatalf("error parsing response body") } got, ok := body["result"].(string) if !ok { t.Fatalf("unable to find result in response body") } if !strings.Contains(got, tc.want) { t.Fatalf("expected %q to contain %q, but it did not", got, tc.want) } }) } } func runBigQueryDataTypeTests(t *testing.T) { // Test tool invoke endpoint invokeTcs := []struct { name string api string requestHeader map[string]string requestBody io.Reader want string isErr bool }{ { name: "invoke my-scalar-datatype-tool with values", api: "http://127.0.0.1:5000/api/tool/my-scalar-datatype-tool/invoke", requestHeader: map[string]string{}, requestBody: 
bytes.NewBuffer([]byte(`{"int_val": 123, "string_val": "hello", "float_val": 3.14, "bool_val": true}`)),
			want:          `[{"bool_val":true,"float_val":3.14,"id":1,"int_val":123,"string_val":"hello"}]`,
			isErr:         false,
		},
		{
			name:          "invoke my-scalar-datatype-tool with missing params",
			api:           "http://127.0.0.1:5000/api/tool/my-scalar-datatype-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(`{"int_val": 123}`)),
			isErr:         true,
		},
		{
			name:          "invoke my-array-datatype-tool",
			api:           "http://127.0.0.1:5000/api/tool/my-array-datatype-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(`{"int_array": [123, 789], "string_array": ["hello", "test"], "float_array": [3.14, 100.1], "bool_array": [true]}`)),
			want:          `[{"bool_val":true,"float_val":3.14,"id":1,"int_val":123,"string_val":"hello"},{"bool_val":true,"float_val":100.1,"id":3,"int_val":789,"string_val":"test"}]`,
			isErr:         false,
		},
	}
	for _, tc := range invokeTcs {
		t.Run(tc.name, func(t *testing.T) {
			// Send Tool invocation request
			req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")
			for k, v := range tc.requestHeader {
				req.Header.Add(k, v)
			}
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()
			if resp.StatusCode != http.StatusOK {
				if tc.isErr {
					return
				}
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
			}
			// Check response body
			var body map[string]interface{}
			err = json.NewDecoder(resp.Body).Decode(&body)
			if err != nil {
				t.Fatalf("error parsing response body")
			}
			got, ok := body["result"].(string)
			if !ok {
				t.Fatalf("unable to find result in response body")
			}
			// Exact match here (unlike the Contains-based checks elsewhere):
			// data-type round-trips must be byte-precise.
			if got != tc.want {
				t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
			}
		})
	}
}

// runBigQueryListDatasetToolInvokeTest drives the bigquery-list-dataset-ids
// tool variants; datasetWant is a substring expected in every successful
// listing (the fixture dataset id).
func runBigQueryListDatasetToolInvokeTest(t *testing.T, datasetWant string) {
	// Get ID token
	idToken, err := tests.GetGoogleIdToken(tests.ClientId)
	if err != nil {
		t.Fatalf("error getting Google ID token: %s", err)
	}
	// Get access token
	accessToken, err := sources.GetIAMAccessToken(t.Context())
	if err != nil {
		t.Fatalf("error getting access token from ADC: %s", err)
	}
	accessToken = "Bearer " + accessToken
	// Test tool invoke endpoint
	invokeTcs := []struct {
		name          string
		api           string
		requestHeader map[string]string
		requestBody   io.Reader
		want          string // substring expected in "result"; unused when isErr
		isErr         bool
	}{
		{
			name:          "invoke my-list-dataset-ids-tool",
			api:           "http://127.0.0.1:5000/api/tool/my-list-dataset-ids-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(`{}`)),
			isErr:         false,
			want:          datasetWant,
		},
		{
			name:          "invoke my-list-dataset-ids-tool with project",
			api:           "http://127.0.0.1:5000/api/tool/my-auth-list-dataset-ids-tool/invoke",
			requestHeader: map[string]string{"my-google-auth_token": idToken},
			requestBody:   bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s\"}", BigqueryProject))),
			isErr:         false,
			want:          datasetWant,
		},
		{
			// Random UUID suffix guarantees the project id does not exist.
			name:          "invoke my-list-dataset-ids-tool with non-existent project",
			api:           "http://127.0.0.1:5000/api/tool/my-auth-list-dataset-ids-tool/invoke",
			requestHeader: map[string]string{"my-google-auth_token": idToken},
			requestBody:   bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s-%s\"}", BigqueryProject, uuid.NewString()))),
			isErr:         true,
		},
		{
			name:          "invoke my-auth-list-dataset-ids-tool",
			api:           "http://127.0.0.1:5000/api/tool/my-auth-list-dataset-ids-tool/invoke",
			requestHeader: map[string]string{"my-google-auth_token": idToken},
			requestBody:   bytes.NewBuffer([]byte(`{}`)),
			isErr:         false,
			want:          datasetWant,
		},
		{
			name:          "Invoke my-client-auth-list-dataset-ids-tool with auth token",
			api:           "http://127.0.0.1:5000/api/tool/my-client-auth-list-dataset-ids-tool/invoke",
			requestHeader: map[string]string{"Authorization": accessToken},
			requestBody:   bytes.NewBuffer([]byte(`{}`)),
			isErr:         false,
			want:          datasetWant,
		},
		{
			name:          "Invoke my-client-auth-list-dataset-ids-tool without auth token",
			api:           "http://127.0.0.1:5000/api/tool/my-client-auth-list-dataset-ids-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(`{}`)),
			isErr:         true,
		},
		{
			name:          "Invoke my-client-auth-list-dataset-ids-tool with invalid auth token",
			api:           "http://127.0.0.1:5000/api/tool/my-client-auth-list-dataset-ids-tool/invoke",
			requestHeader: map[string]string{"Authorization": "Bearer invalid-token"},
			requestBody:   bytes.NewBuffer([]byte(`{}`)),
			isErr:         true,
		},
	}
	for _, tc := range invokeTcs {
		t.Run(tc.name, func(t *testing.T) {
			// Send Tool invocation request
			req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")
			for k, v := range tc.requestHeader {
				req.Header.Add(k, v)
			}
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()
			if resp.StatusCode != http.StatusOK {
				if tc.isErr {
					return
				}
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
			}
			// Check response body
			var body map[string]interface{}
			err = json.NewDecoder(resp.Body).Decode(&body)
			if err != nil {
				t.Fatalf("error parsing response body")
			}
			got, ok := body["result"].(string)
			if !ok {
				t.Fatalf("unable to find result in response body")
			}
			if !strings.Contains(got, tc.want) {
				t.Fatalf("expected %q to contain %q, but it did not", got, tc.want)
			}
		})
	}
}

// runBigQueryGetDatasetInfoToolInvokeTest drives the bigquery-get-dataset-info
// tool variants; datasetInfoWant is a substring expected in the metadata of
// datasetName.
func runBigQueryGetDatasetInfoToolInvokeTest(t *testing.T, datasetName, datasetInfoWant string) {
	// Get ID token
	idToken, err := tests.GetGoogleIdToken(tests.ClientId)
	if err != nil {
		t.Fatalf("error getting Google ID token: %s", err)
	}
	// Get access token
	accessToken, err := sources.GetIAMAccessToken(t.Context())
	if err != nil {
		t.Fatalf("error getting access token from ADC: %s", err)
	}
	accessToken = "Bearer " + accessToken
// Test tool invoke endpoint invokeTcs := []struct { name string api string requestHeader map[string]string requestBody io.Reader want string isErr bool }{ { name: "invoke my-get-dataset-info-tool without body", api: "http://127.0.0.1:5000/api/tool/my-get-dataset-info-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(`{}`)), isErr: true, }, { name: "invoke my-get-dataset-info-tool", api: "http://127.0.0.1:5000/api/tool/my-get-dataset-info-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))), want: datasetInfoWant, isErr: false, }, { name: "Invoke my-auth-get-dataset-info-tool with correct project", api: "http://127.0.0.1:5000/api/tool/my-auth-get-dataset-info-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": idToken}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s\", \"dataset\":\"%s\"}", BigqueryProject, datasetName))), want: datasetInfoWant, isErr: false, }, { name: "Invoke my-auth-get-dataset-info-tool with non-existent project", api: "http://127.0.0.1:5000/api/tool/my-auth-get-dataset-info-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": idToken}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s-%s\", \"dataset\":\"%s\"}", BigqueryProject, uuid.NewString(), datasetName))), isErr: true, }, { name: "invoke my-auth-get-dataset-info-tool without body", api: "http://127.0.0.1:5000/api/tool/my-get-dataset-info-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(`{}`)), isErr: true, }, { name: "Invoke my-auth-get-dataset-info-tool with auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-get-dataset-info-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": idToken}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))), want: datasetInfoWant, isErr: false, }, { name: "Invoke 
my-auth-get-dataset-info-tool with invalid auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-get-dataset-info-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))), isErr: true, }, { name: "Invoke my-auth-get-dataset-info-tool without auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-get-dataset-info-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))), isErr: true, }, { name: "Invoke my-client-auth-get-dataset-info-tool with auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-get-dataset-info-tool/invoke", requestHeader: map[string]string{"Authorization": accessToken}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))), want: datasetInfoWant, isErr: false, }, { name: "Invoke my-client-auth-get-dataset-info-tool without auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-get-dataset-info-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))), isErr: true, }, { name: "Invoke my-client-auth-get-dataset-info-tool with invalid auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-get-dataset-info-tool/invoke", requestHeader: map[string]string{"Authorization": "Bearer invalid-token"}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))), isErr: true, }, } for _, tc := range invokeTcs { t.Run(tc.name, func(t *testing.T) { // Send Tool invocation request req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody) if err != nil { t.Fatalf("unable to create request: %s", err) } req.Header.Add("Content-type", "application/json") for k, v := range tc.requestHeader { req.Header.Add(k, v) } resp, err := http.DefaultClient.Do(req) if err != nil { t.Fatalf("unable to 
send request: %s", err) } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { if tc.isErr { return } bodyBytes, _ := io.ReadAll(resp.Body) t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes)) } // Check response body var body map[string]interface{} err = json.NewDecoder(resp.Body).Decode(&body) if err != nil { t.Fatalf("error parsing response body") } got, ok := body["result"].(string) if !ok { t.Fatalf("unable to find result in response body") } if !strings.Contains(got, tc.want) { t.Fatalf("expected %q to contain %q, but it did not", got, tc.want) } }) } } func runBigQueryListTableIdsToolInvokeTest(t *testing.T, datasetName, tablename_want string) { // Get ID token idToken, err := tests.GetGoogleIdToken(tests.ClientId) if err != nil { t.Fatalf("error getting Google ID token: %s", err) } // Get access token accessToken, err := sources.GetIAMAccessToken(t.Context()) if err != nil { t.Fatalf("error getting access token from ADC: %s", err) } accessToken = "Bearer " + accessToken // Test tool invoke endpoint invokeTcs := []struct { name string api string requestHeader map[string]string requestBody io.Reader want string isErr bool }{ { name: "invoke my-list-table-ids-tool without body", api: "http://127.0.0.1:5000/api/tool/my-list-table-ids-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(`{}`)), isErr: true, }, { name: "invoke my-list-table-ids-tool", api: "http://127.0.0.1:5000/api/tool/my-list-table-ids-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))), want: tablename_want, isErr: false, }, { name: "invoke my-list-table-ids-tool without body", api: "http://127.0.0.1:5000/api/tool/my-list-table-ids-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(`{}`)), isErr: true, }, { name: "Invoke my-auth-list-table-ids-tool with auth token", api: 
"http://127.0.0.1:5000/api/tool/my-auth-list-table-ids-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": idToken}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))), want: tablename_want, isErr: false, }, { name: "Invoke my-auth-list-table-ids-tool with correct project", api: "http://127.0.0.1:5000/api/tool/my-auth-list-table-ids-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": idToken}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s\", \"dataset\":\"%s\"}", BigqueryProject, datasetName))), want: tablename_want, isErr: false, }, { name: "Invoke my-auth-list-table-ids-tool with non-existent project", api: "http://127.0.0.1:5000/api/tool/my-auth-list-table-ids-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": idToken}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s-%s\", \"dataset\":\"%s\"}", BigqueryProject, uuid.NewString(), datasetName))), isErr: true, }, { name: "Invoke my-auth-list-table-ids-tool with invalid auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-list-table-ids-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))), isErr: true, }, { name: "Invoke my-auth-list-table-ids-tool without auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-list-table-ids-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))), isErr: true, }, { name: "Invoke my-client-auth-list-table-ids-tool with auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-list-table-ids-tool/invoke", requestHeader: map[string]string{"Authorization": accessToken}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))), want: tablename_want, isErr: false, }, { name: "Invoke 
my-client-auth-list-table-ids-tool without auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-list-table-ids-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))), isErr: true, }, { name: "Invoke my-client-auth-list-table-ids-tool with invalid auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-list-table-ids-tool/invoke", requestHeader: map[string]string{"Authorization": "Bearer invalid-token"}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\"}", datasetName))), isErr: true, }, } for _, tc := range invokeTcs { t.Run(tc.name, func(t *testing.T) { // Send Tool invocation request req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody) if err != nil { t.Fatalf("unable to create request: %s", err) } req.Header.Add("Content-type", "application/json") for k, v := range tc.requestHeader { req.Header.Add(k, v) } resp, err := http.DefaultClient.Do(req) if err != nil { t.Fatalf("unable to send request: %s", err) } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { if tc.isErr { return } bodyBytes, _ := io.ReadAll(resp.Body) t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes)) } // Check response body var body map[string]interface{} err = json.NewDecoder(resp.Body).Decode(&body) if err != nil { t.Fatalf("error parsing response body") } got, ok := body["result"].(string) if !ok { t.Fatalf("unable to find result in response body") } if !strings.Contains(got, tc.want) { t.Fatalf("expected %q to contain %q, but it did not", got, tc.want) } }) } } func runBigQueryGetTableInfoToolInvokeTest(t *testing.T, datasetName, tableName, tableInfoWant string) { // Get ID token idToken, err := tests.GetGoogleIdToken(tests.ClientId) if err != nil { t.Fatalf("error getting Google ID token: %s", err) } // Get access token accessToken, err := sources.GetIAMAccessToken(t.Context()) if err != nil { 
t.Fatalf("error getting access token from ADC: %s", err) } accessToken = "Bearer " + accessToken // Test tool invoke endpoint invokeTcs := []struct { name string api string requestHeader map[string]string requestBody io.Reader want string isErr bool }{ { name: "invoke my-get-table-info-tool without body", api: "http://127.0.0.1:5000/api/tool/my-get-table-info-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(`{}`)), isErr: true, }, { name: "invoke my-get-table-info-tool", api: "http://127.0.0.1:5000/api/tool/my-get-table-info-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\", \"table\":\"%s\"}", datasetName, tableName))), want: tableInfoWant, isErr: false, }, { name: "invoke my-auth-get-table-info-tool without body", api: "http://127.0.0.1:5000/api/tool/my-get-table-info-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(`{}`)), isErr: true, }, { name: "Invoke my-auth-get-table-info-tool with auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-get-table-info-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": idToken}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\", \"table\":\"%s\"}", datasetName, tableName))), want: tableInfoWant, isErr: false, }, { name: "Invoke my-auth-get-table-info-tool with correct project", api: "http://127.0.0.1:5000/api/tool/my-auth-get-table-info-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": idToken}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s\", \"dataset\":\"%s\", \"table\":\"%s\"}", BigqueryProject, datasetName, tableName))), want: tableInfoWant, isErr: false, }, { name: "Invoke my-auth-get-table-info-tool with non-existent project", api: "http://127.0.0.1:5000/api/tool/my-auth-get-table-info-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": idToken}, requestBody: 
bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s-%s\", \"dataset\":\"%s\", \"table\":\"%s\"}", BigqueryProject, uuid.NewString(), datasetName, tableName))), isErr: true, }, { name: "Invoke my-auth-get-table-info-tool with invalid auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-get-table-info-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\", \"table\":\"%s\"}", datasetName, tableName))), isErr: true, }, { name: "Invoke my-auth-get-table-info-tool without auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-get-table-info-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\", \"table\":\"%s\"}", datasetName, tableName))), isErr: true, }, { name: "Invoke my-client-auth-get-table-info-tool with auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-get-table-info-tool/invoke", requestHeader: map[string]string{"Authorization": accessToken}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\", \"table\":\"%s\"}", datasetName, tableName))), want: tableInfoWant, isErr: false, }, { name: "Invoke my-client-auth-get-table-info-tool without auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-get-table-info-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\", \"table\":\"%s\"}", datasetName, tableName))), isErr: true, }, { name: "Invoke my-client-auth-get-table-info-tool with invalid auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-get-table-info-tool/invoke", requestHeader: map[string]string{"Authorization": "Bearer invalid-token"}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"dataset\":\"%s\", \"table\":\"%s\"}", datasetName, tableName))), isErr: true, }, } for _, tc := range invokeTcs { t.Run(tc.name, func(t *testing.T) { // Send Tool invocation request 
req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody) if err != nil { t.Fatalf("unable to create request: %s", err) } req.Header.Add("Content-type", "application/json") for k, v := range tc.requestHeader { req.Header.Add(k, v) } resp, err := http.DefaultClient.Do(req) if err != nil { t.Fatalf("unable to send request: %s", err) } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { if tc.isErr { return } bodyBytes, _ := io.ReadAll(resp.Body) t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes)) } // Check response body var body map[string]interface{} err = json.NewDecoder(resp.Body).Decode(&body) if err != nil { t.Fatalf("error parsing response body") } got, ok := body["result"].(string) if !ok { t.Fatalf("unable to find result in response body") } if !strings.Contains(got, tc.want) { t.Fatalf("expected %q to contain %q, but it did not", got, tc.want) } }) } } func runBigQueryConversationalAnalyticsInvokeTest(t *testing.T, datasetName, tableName, dataInsightsWant string) { // Each test is expected to complete in under 10s, we set a 25s timeout with retries to avoid flaky tests. 
const maxRetries = 3 const requestTimeout = 25 * time.Second // Get ID token idToken, err := tests.GetGoogleIdToken(tests.ClientId) if err != nil { t.Fatalf("error getting Google ID token: %s", err) } // Get access token accessToken, err := sources.GetIAMAccessToken(t.Context()) if err != nil { t.Fatalf("error getting access token from ADC: %s", err) } accessToken = "Bearer " + accessToken tableRefsJSON := fmt.Sprintf(`[{"projectId":"%s","datasetId":"%s","tableId":"%s"}]`, BigqueryProject, datasetName, tableName) invokeTcs := []struct { name string api string requestHeader map[string]string requestBody io.Reader want string isErr bool }{ { name: "invoke my-conversational-analytics-tool successfully", api: "http://127.0.0.1:5000/api/tool/my-conversational-analytics-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf( `{"user_query_with_context": "What are the names in the table?", "table_references": %q}`, tableRefsJSON, ))), want: dataInsightsWant, isErr: false, }, { name: "invoke my-auth-conversational-analytics-tool with auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-conversational-analytics-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": idToken}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf( `{"user_query_with_context": "What are the names in the table?", "table_references": %q}`, tableRefsJSON, ))), want: dataInsightsWant, isErr: false, }, { name: "invoke my-auth-conversational-analytics-tool without auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-conversational-analytics-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(`{"user_query_with_context": "What are the names in the table?"}`)), isErr: true, }, { name: "Invoke my-client-auth-conversational-analytics-tool with auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-conversational-analytics-tool/invoke", requestHeader: map[string]string{"Authorization": accessToken}, 
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf( `{"user_query_with_context": "What are the names in the table?", "table_references": %q}`, tableRefsJSON, ))), want: "[{\"f0_\":1}]", isErr: false, }, { name: "Invoke my-client-auth-conversational-analytics-tool without auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-conversational-analytics-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf( `{"user_query_with_context": "What are the names in the table?", "table_references": %q}`, tableRefsJSON, ))), isErr: true, }, { name: "Invoke my-client-auth-conversational-analytics-tool with invalid auth token", api: "http://127.0.0.1:5000/api/tool/my-client-auth-conversational-analytics-tool/invoke", requestHeader: map[string]string{"Authorization": "Bearer invalid-token"}, requestBody: bytes.NewBuffer([]byte(fmt.Sprintf( `{"user_query_with_context": "What are the names in the table?", "table_references": %q}`, tableRefsJSON, ))), isErr: true, }, } for _, tc := range invokeTcs { t.Run(tc.name, func(t *testing.T) { var resp *http.Response var err error bodyBytes, err := io.ReadAll(tc.requestBody) if err != nil { t.Fatalf("failed to read request body: %v", err) } req, err := http.NewRequest(http.MethodPost, tc.api, nil) if err != nil { t.Fatalf("unable to create request: %s", err) } req.Header.Set("Content-type", "application/json") for k, v := range tc.requestHeader { req.Header.Add(k, v) } for i := 0; i < maxRetries; i++ { ctx, cancel := context.WithTimeout(context.Background(), requestTimeout) defer cancel() req.Body = io.NopCloser(bytes.NewReader(bodyBytes)) req.GetBody = func() (io.ReadCloser, error) { return io.NopCloser(bytes.NewReader(bodyBytes)), nil } reqWithCtx := req.WithContext(ctx) resp, err = http.DefaultClient.Do(reqWithCtx) if err != nil { // Retry on time out. 
if os.IsTimeout(err) { t.Logf("Request timed out (attempt %d/%d), retrying...", i+1, maxRetries) time.Sleep(5 * time.Second) continue } t.Fatalf("unable to send request: %s", err) } if resp.StatusCode == http.StatusServiceUnavailable { t.Logf("Received 503 Service Unavailable (attempt %d/%d), retrying...", i+1, maxRetries) time.Sleep(15 * time.Second) continue } break } if err != nil { t.Fatalf("Request failed after %d retries: %v", maxRetries, err) } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { if tc.isErr { return } bodyBytes, _ := io.ReadAll(resp.Body) t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes)) } var body map[string]interface{} err = json.NewDecoder(resp.Body).Decode(&body) if err != nil { t.Fatalf("error parsing response body: %v", err) } got, ok := body["result"].(string) if !ok { t.Fatalf("unable to find result in response body") } wantPattern := regexp.MustCompile(tc.want) if !wantPattern.MatchString(got) { t.Fatalf("response did not match the expected pattern.\nFull response:\n%s", got) } }) } } func runListDatasetIdsWithRestriction(t *testing.T, allowedDatasetName1, allowedDatasetName2 string) { testCases := []struct { name string wantStatusCode int wantElements []string }{ { name: "invoke list-dataset-ids with restriction", wantStatusCode: http.StatusOK, wantElements: []string{ fmt.Sprintf("%s.%s", BigqueryProject, allowedDatasetName1), fmt.Sprintf("%s.%s", BigqueryProject, allowedDatasetName2), }, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { body := bytes.NewBuffer([]byte(`{}`)) resp, bodyBytes := tests.RunRequest(t, http.MethodPost, "http://127.0.0.1:5000/api/tool/list-dataset-ids-restricted/invoke", body, nil) if resp.StatusCode != tc.wantStatusCode { t.Fatalf("unexpected status code: got %d, want %d. 
Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes)) } var respBody map[string]interface{} if err := json.Unmarshal(bodyBytes, &respBody); err != nil { t.Fatalf("error parsing response body: %v", err) } gotJSON, ok := respBody["result"].(string) if !ok { t.Fatalf("unable to find 'result' as a string in response body: %s", string(bodyBytes)) } // Unmarshal the result string into a slice to compare contents. var gotElements []string if err := json.Unmarshal([]byte(gotJSON), &gotElements); err != nil { t.Fatalf("error parsing result field JSON %q: %v", gotJSON, err) } sort.Strings(gotElements) sort.Strings(tc.wantElements) if !reflect.DeepEqual(gotElements, tc.wantElements) { t.Errorf("unexpected result:\n got: %v\nwant: %v", gotElements, tc.wantElements) } }) } } func runListTableIdsWithRestriction(t *testing.T, allowedDatasetName, disallowedDatasetName string, allowedTableNames ...string) { sort.Strings(allowedTableNames) var quotedNames []string for _, name := range allowedTableNames { quotedNames = append(quotedNames, fmt.Sprintf(`"%s"`, name)) } wantResult := fmt.Sprintf(`[%s]`, strings.Join(quotedNames, ",")) testCases := []struct { name string dataset string wantStatusCode int wantInResult string wantInError string }{ { name: "invoke on allowed dataset", dataset: allowedDatasetName, wantStatusCode: http.StatusOK, wantInResult: wantResult, }, { name: "invoke on disallowed dataset", dataset: disallowedDatasetName, wantStatusCode: http.StatusBadRequest, // Or the specific error code returned wantInError: fmt.Sprintf("access denied to dataset '%s'", disallowedDatasetName), }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { body := bytes.NewBuffer([]byte(fmt.Sprintf(`{"dataset":"%s"}`, tc.dataset))) req, err := http.NewRequest(http.MethodPost, "http://127.0.0.1:5000/api/tool/list-table-ids-restricted/invoke", body) if err != nil { t.Fatalf("unable to create request: %s", err) } req.Header.Add("Content-type", "application/json") 
resp, err := http.DefaultClient.Do(req) if err != nil { t.Fatalf("unable to send request: %s", err) } defer resp.Body.Close() if resp.StatusCode != tc.wantStatusCode { bodyBytes, _ := io.ReadAll(resp.Body) t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes)) } if tc.wantInResult != "" { var respBody map[string]interface{} if err := json.NewDecoder(resp.Body).Decode(&respBody); err != nil { t.Fatalf("error parsing response body: %v", err) } got, ok := respBody["result"].(string) if !ok { t.Fatalf("unable to find result in response body") } var gotSlice []string if err := json.Unmarshal([]byte(got), &gotSlice); err != nil { t.Fatalf("error unmarshalling result: %v", err) } sort.Strings(gotSlice) sortedGotBytes, err := json.Marshal(gotSlice) if err != nil { t.Fatalf("error marshalling sorted result: %v", err) } if string(sortedGotBytes) != tc.wantInResult { t.Errorf("unexpected result: got %q, want %q", string(sortedGotBytes), tc.wantInResult) } } if tc.wantInError != "" { bodyBytes, _ := io.ReadAll(resp.Body) if !strings.Contains(string(bodyBytes), tc.wantInError) { t.Errorf("unexpected error message: got %q, want to contain %q", string(bodyBytes), tc.wantInError) } } }) } } func runGetTableInfoWithRestriction(t *testing.T, allowedDatasetName, disallowedDatasetName, allowedTableName, disallowedTableName string) { testCases := []struct { name string dataset string table string wantStatusCode int wantInError string }{ { name: "invoke on allowed table", dataset: allowedDatasetName, table: allowedTableName, wantStatusCode: http.StatusOK, }, { name: "invoke on disallowed table", dataset: disallowedDatasetName, table: disallowedTableName, wantStatusCode: http.StatusBadRequest, wantInError: fmt.Sprintf("access denied to dataset '%s'", disallowedDatasetName), }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { body := bytes.NewBuffer([]byte(fmt.Sprintf(`{"dataset":"%s", "table":"%s"}`, 
tc.dataset, tc.table))) req, err := http.NewRequest(http.MethodPost, "http://127.0.0.1:5000/api/tool/get-table-info-restricted/invoke", body) if err != nil { t.Fatalf("unable to create request: %s", err) } req.Header.Add("Content-type", "application/json") resp, err := http.DefaultClient.Do(req) if err != nil { t.Fatalf("unable to send request: %s", err) } defer resp.Body.Close() if resp.StatusCode != tc.wantStatusCode { bodyBytes, _ := io.ReadAll(resp.Body) t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes)) } if tc.wantInError != "" { bodyBytes, _ := io.ReadAll(resp.Body) if !strings.Contains(string(bodyBytes), tc.wantInError) { t.Errorf("unexpected error message: got %q, want to contain %q", string(bodyBytes), tc.wantInError) } } }) } } func runExecuteSqlWithRestriction(t *testing.T, allowedTableFullName, disallowedTableFullName string) { allowedTableParts := strings.Split(strings.Trim(allowedTableFullName, "`"), ".") if len(allowedTableParts) != 3 { t.Fatalf("invalid allowed table name format: %s", allowedTableFullName) } allowedDatasetID := allowedTableParts[1] testCases := []struct { name string sql string wantStatusCode int wantInError string }{ { name: "invoke on allowed table", sql: fmt.Sprintf("SELECT * FROM %s", allowedTableFullName), wantStatusCode: http.StatusOK, }, { name: "invoke on disallowed table", sql: fmt.Sprintf("SELECT * FROM %s", disallowedTableFullName), wantStatusCode: http.StatusBadRequest, wantInError: fmt.Sprintf("query accesses dataset '%s', which is not in the allowed list", strings.Join( strings.Split(strings.Trim(disallowedTableFullName, "`"), ".")[0:2], ".")), }, { name: "disallowed create schema", sql: "CREATE SCHEMA another_dataset", wantStatusCode: http.StatusBadRequest, wantInError: "dataset-level operations like 'CREATE_SCHEMA' are not allowed", }, { name: "disallowed alter schema", sql: fmt.Sprintf("ALTER SCHEMA %s SET OPTIONS(description='new one')", 
allowedDatasetID), wantStatusCode: http.StatusBadRequest, wantInError: "dataset-level operations like 'ALTER_SCHEMA' are not allowed", }, { name: "disallowed create function", sql: fmt.Sprintf("CREATE FUNCTION %s.my_func() RETURNS INT64 AS (1)", allowedDatasetID), wantStatusCode: http.StatusBadRequest, wantInError: "creating stored routines ('CREATE_FUNCTION') is not allowed", }, { name: "disallowed create procedure", sql: fmt.Sprintf("CREATE PROCEDURE %s.my_proc() BEGIN SELECT 1; END", allowedDatasetID), wantStatusCode: http.StatusBadRequest, wantInError: "unanalyzable statements like 'CREATE PROCEDURE' are not allowed", }, { name: "disallowed execute immediate", sql: "EXECUTE IMMEDIATE 'SELECT 1'", wantStatusCode: http.StatusBadRequest, wantInError: "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place", }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { body := bytes.NewBuffer([]byte(fmt.Sprintf(`{"sql":"%s"}`, tc.sql))) req, err := http.NewRequest(http.MethodPost, "http://127.0.0.1:5000/api/tool/execute-sql-restricted/invoke", body) if err != nil { t.Fatalf("unable to create request: %s", err) } req.Header.Add("Content-type", "application/json") resp, err := http.DefaultClient.Do(req) if err != nil { t.Fatalf("unable to send request: %s", err) } defer resp.Body.Close() if resp.StatusCode != tc.wantStatusCode { bodyBytes, _ := io.ReadAll(resp.Body) t.Fatalf("unexpected status code: got %d, want %d. 
Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
			}
			if tc.wantInError != "" {
				// Error-path check: body must mention the expected error substring.
				bodyBytes, _ := io.ReadAll(resp.Body)
				if !strings.Contains(string(bodyBytes), tc.wantInError) {
					t.Errorf("unexpected error message: got %q, want to contain %q", string(bodyBytes), tc.wantInError)
				}
			}
		})
	}
}

// runConversationalAnalyticsWithRestriction verifies dataset-level access
// restriction on the "conversational-analytics-restricted" tool endpoint:
// a request referencing the allowed table must succeed (response contains
// `Answer`), while one referencing the disallowed table must be rejected
// with 400 and a specific "not allowed" error message.
// NOTE(review): assumes a toolbox server is already listening on
// 127.0.0.1:5000 — started elsewhere in this test suite.
func runConversationalAnalyticsWithRestriction(t *testing.T, allowedDatasetName, disallowedDatasetName, allowedTableName, disallowedTableName string) {
	// table_references payloads are JSON arrays with a single table ref each.
	allowedTableRefsJSON := fmt.Sprintf(`[{"projectId":"%s","datasetId":"%s","tableId":"%s"}]`, BigqueryProject, allowedDatasetName, allowedTableName)
	disallowedTableRefsJSON := fmt.Sprintf(`[{"projectId":"%s","datasetId":"%s","tableId":"%s"}]`, BigqueryProject, disallowedDatasetName, disallowedTableName)
	testCases := []struct {
		name           string
		tableRefs      string // JSON-encoded table_references parameter
		wantStatusCode int
		wantInResult   string // substring expected in the "result" field (success cases)
		wantInError    string // substring expected in the response body (error cases)
	}{
		{
			name:           "invoke with allowed table",
			tableRefs:      allowedTableRefsJSON,
			wantStatusCode: http.StatusOK,
			wantInResult:   `Answer`,
		},
		{
			name:           "invoke with disallowed table",
			tableRefs:      disallowedTableRefsJSON,
			wantStatusCode: http.StatusBadRequest,
			wantInError:    fmt.Sprintf("access to dataset '%s.%s' (from table '%s') is not allowed", BigqueryProject, disallowedDatasetName, disallowedTableName),
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			requestBodyMap := map[string]any{
				"user_query_with_context": "What is in the table?",
				"table_references":        tc.tableRefs,
			}
			bodyBytes, err := json.Marshal(requestBodyMap)
			if err != nil {
				t.Fatalf("failed to marshal request body: %v", err)
			}
			body := bytes.NewBuffer(bodyBytes)
			req, err := http.NewRequest(http.MethodPost, "http://127.0.0.1:5000/api/tool/conversational-analytics-restricted/invoke", body)
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()
			if resp.StatusCode != tc.wantStatusCode {
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
			}
			if tc.wantInResult != "" {
				// Success path: decode the JSON envelope and check the "result" string.
				var respBody map[string]interface{}
				if err := json.NewDecoder(resp.Body).Decode(&respBody); err != nil {
					t.Fatalf("error parsing response body: %v", err)
				}
				got, ok := respBody["result"].(string)
				if !ok {
					t.Fatalf("unable to find result in response body")
				}
				if !strings.Contains(got, tc.wantInResult) {
					t.Errorf("unexpected result: got %q, want to contain %q", got, tc.wantInResult)
				}
			}
			if tc.wantInError != "" {
				// Error path: raw body must contain the expected error message.
				bodyBytes, _ := io.ReadAll(resp.Body)
				if !strings.Contains(string(bodyBytes), tc.wantInError) {
					t.Errorf("unexpected error message: got %q, want to contain %q", string(bodyBytes), tc.wantInError)
				}
			}
		})
	}
}

// runBigQuerySearchCatalogToolInvokeTest exercises the search-catalog tool
// variants (unauthenticated, Google-ID-token auth, and client OAuth auth)
// against the local toolbox server, asserting that a successful search
// returns exactly one catalog entry whose wantKey field (e.g. "DisplayName")
// equals tableName, and that the various bad-auth / bad-project cases fail.
func runBigQuerySearchCatalogToolInvokeTest(t *testing.T, datasetName string, tableName string) {
	// Get ID token
	idToken, err := tests.GetGoogleIdToken(tests.ClientId)
	if err != nil {
		t.Fatalf("error getting Google ID token: %s", err)
	}
	// Get access token
	accessToken, err := sources.GetIAMAccessToken(t.Context())
	if err != nil {
		t.Fatalf("error getting access token from ADC: %s", err)
	}
	accessToken = "Bearer " + accessToken

	// Test tool invoke endpoint
	invokeTcs := []struct {
		name          string
		api           string            // tool invoke URL
		requestHeader map[string]string // extra headers (auth tokens)
		requestBody   io.Reader         // JSON request payload
		wantKey       string            // entry key expected to hold tableName on success
		isErr         bool              // true when the invocation is expected to fail
	}{
		{
			name:          "invoke my-search-catalog-tool without body",
			api:           "http://127.0.0.1:5000/api/tool/my-search-catalog-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(`{}`)),
			isErr:         true,
		},
		{
			name:          "invoke my-search-catalog-tool",
			api:           "http://127.0.0.1:5000/api/tool/my-search-catalog-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(fmt.Sprintf("{\"prompt\":\"%s\", \"types\":[\"TABLE\"], \"datasetIds\":[\"%s\"]}", tableName, datasetName))),
			wantKey:       "DisplayName",
			isErr:         false,
		},
		{
			name:          "Invoke my-auth-search-catalog-tool with auth token",
			api:           "http://127.0.0.1:5000/api/tool/my-auth-search-catalog-tool/invoke",
			requestHeader: map[string]string{"my-google-auth_token": idToken},
			requestBody:   bytes.NewBuffer([]byte(fmt.Sprintf("{\"prompt\":\"%s\", \"types\":[\"TABLE\"], \"datasetIds\":[\"%s\"]}", tableName, datasetName))),
			wantKey:       "DisplayName",
			isErr:         false,
		},
		{
			name:          "Invoke my-auth-search-catalog-tool with correct project",
			api:           "http://127.0.0.1:5000/api/tool/my-auth-search-catalog-tool/invoke",
			requestHeader: map[string]string{"my-google-auth_token": idToken},
			requestBody:   bytes.NewBuffer([]byte(fmt.Sprintf("{\"prompt\":\"%s\", \"types\":[\"TABLE\"], \"projectIds\":[\"%s\"], \"datasetIds\":[\"%s\"]}", tableName, BigqueryProject, datasetName))),
			wantKey:       "DisplayName",
			isErr:         false,
		},
		{
			// A random UUID suffix guarantees the project ID does not exist.
			name:          "Invoke my-auth-search-catalog-tool with non-existent project",
			api:           "http://127.0.0.1:5000/api/tool/my-auth-search-catalog-tool/invoke",
			requestHeader: map[string]string{"my-google-auth_token": idToken},
			requestBody:   bytes.NewBuffer([]byte(fmt.Sprintf("{\"prompt\":\"%s\", \"types\":[\"TABLE\"], \"projectIds\":[\"%s-%s\"], \"datasetIds\":[\"%s\"]}", tableName, BigqueryProject, uuid.NewString(), datasetName))),
			isErr:         true,
		},
		{
			name:          "Invoke my-auth-search-catalog-tool with invalid auth token",
			api:           "http://127.0.0.1:5000/api/tool/my-auth-search-catalog-tool/invoke",
			requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"},
			requestBody:   bytes.NewBuffer([]byte(fmt.Sprintf("{\"prompt\":\"%s\", \"types\":[\"TABLE\"], \"datasetIds\":[\"%s\"]}", tableName, datasetName))),
			isErr:         true,
		},
		{
			name:          "Invoke my-auth-search-catalog-tool without auth token",
			api:           "http://127.0.0.1:5000/api/tool/my-auth-search-catalog-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(fmt.Sprintf("{\"prompt\":\"%s\", \"types\":[\"TABLE\"], \"datasetIds\":[\"%s\"]}", tableName, datasetName))),
			isErr:         true,
		},
		{
			name:          "Invoke my-client-auth-search-catalog-tool without auth token",
			api:           "http://127.0.0.1:5000/api/tool/my-client-auth-search-catalog-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(fmt.Sprintf("{\"prompt\":\"%s\", \"types\":[\"TABLE\"], \"datasetIds\":[\"%s\"]}", tableName, datasetName))),
			isErr:         true,
		},
		{
			name:          "Invoke my-client-auth-search-catalog-tool with auth token",
			api:           "http://127.0.0.1:5000/api/tool/my-client-auth-search-catalog-tool/invoke",
			requestHeader: map[string]string{"Authorization": accessToken},
			requestBody:   bytes.NewBuffer([]byte(fmt.Sprintf("{\"prompt\":\"%s\", \"types\":[\"TABLE\"], \"datasetIds\":[\"%s\"]}", tableName, datasetName))),
			wantKey:       "DisplayName",
			isErr:         false,
		},
	}
	for _, tc := range invokeTcs {
		t.Run(tc.name, func(t *testing.T) {
			// Send Tool invocation request
			req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")
			for k, v := range tc.requestHeader {
				req.Header.Add(k, v)
			}
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()
			if resp.StatusCode != http.StatusOK {
				// Expected-error cases may legitimately surface as non-200.
				if tc.isErr {
					return
				}
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
			}
			var result map[string]interface{}
			if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
				t.Fatalf("error parsing response body: %s", err)
			}
			resultStr, ok := result["result"].(string)
			if !ok {
				// Error cases may also surface as a missing/nil result.
				if result["result"] == nil && tc.isErr {
					return
				}
				t.Fatalf("expected 'result' field to be a string, got %T", result["result"])
			}
			// ...or as an empty result list.
			if tc.isErr && (resultStr == "" || resultStr == "[]") {
				return
			}
			// The result is itself a JSON-encoded array of catalog entries.
			var entries []interface{}
			if err := json.Unmarshal([]byte(resultStr), &entries); err != nil {
				t.Fatalf("error unmarshalling result string: %v", err)
			}
			if !tc.isErr {
				// Success: exactly one entry whose wantKey equals tableName.
				if len(entries) != 1 {
					t.Fatalf("expected exactly one entry, but got %d", len(entries))
				}
				entry, ok := entries[0].(map[string]interface{})
				if !ok {
					t.Fatalf("expected first entry to be a map, got %T", entries[0])
				}
				respTable, ok := entry[tc.wantKey]
				if !ok {
					t.Fatalf("expected entry to have key '%s', but it was not found in %v", tc.wantKey, entry)
				}
				if respTable != tableName {
					t.Fatalf("expected key '%s' to have value '%s', but got %s", tc.wantKey, tableName, respTable)
				}
			} else {
				if len(entries) != 0 {
					t.Fatalf("expected 0 entries, but got %d", len(entries))
				}
			}
		})
	}
}

// runForecastWithRestriction verifies dataset-level access restriction on the
// "forecast-restricted" tool, for both forms of history_data: a bare table
// name and a SQL query referencing a table. Allowed inputs must return 200
// with a `"forecast_timestamp"` field in the result; disallowed inputs must
// return 400 with the matching restriction error message.
func runForecastWithRestriction(t *testing.T, allowedTableFullName, disallowedTableFullName string) {
	// Strip BigQuery backtick quoting to get plain project.dataset.table names.
	allowedTableUnquoted := strings.ReplaceAll(allowedTableFullName, "`", "")
	disallowedTableUnquoted := strings.ReplaceAll(disallowedTableFullName, "`", "")
	// First two dotted components form the fully-qualified dataset name.
	disallowedDatasetFQN := strings.Join(strings.Split(disallowedTableUnquoted, ".")[0:2], ".")
	testCases := []struct {
		name           string
		historyData    string // table name or SQL query fed to history_data
		wantStatusCode int
		wantInResult   string // substring expected in "result" (success cases)
		wantInError    string // substring expected in body (error cases)
	}{
		{
			name:           "invoke with allowed table name",
			historyData:    allowedTableUnquoted,
			wantStatusCode: http.StatusOK,
			wantInResult:   `"forecast_timestamp"`,
		},
		{
			name:           "invoke with disallowed table name",
			historyData:    disallowedTableUnquoted,
			wantStatusCode: http.StatusBadRequest,
			wantInError:    fmt.Sprintf("access to dataset '%s' (from table '%s') is not allowed", disallowedDatasetFQN, disallowedTableUnquoted),
		},
		{
			name:           "invoke with query on allowed table",
			historyData:    fmt.Sprintf("SELECT * FROM %s", allowedTableFullName),
			wantStatusCode: http.StatusOK,
			wantInResult:   `"forecast_timestamp"`,
		},
		{
			name:           "invoke with query on disallowed table",
			historyData:    fmt.Sprintf("SELECT * FROM %s", disallowedTableFullName),
			wantStatusCode: http.StatusBadRequest,
			wantInError:    fmt.Sprintf("query in history_data accesses dataset '%s', which is not in the allowed list", disallowedDatasetFQN),
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			requestBodyMap := map[string]any{
				"history_data":  tc.historyData,
				"timestamp_col": "ts",
				"data_col":      "data",
			}
			bodyBytes, err := json.Marshal(requestBodyMap)
			if err != nil {
				t.Fatalf("failed to marshal request body: %v", err)
			}
			body := bytes.NewBuffer(bodyBytes)
			req, err := http.NewRequest(http.MethodPost, "http://127.0.0.1:5000/api/tool/forecast-restricted/invoke", body)
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()
			if resp.StatusCode != tc.wantStatusCode {
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
			}
			if tc.wantInResult != "" {
				// Success path: decode the JSON envelope and check the "result" string.
				var respBody map[string]interface{}
				if err := json.NewDecoder(resp.Body).Decode(&respBody); err != nil {
					t.Fatalf("error parsing response body: %v", err)
				}
				got, ok := respBody["result"].(string)
				if !ok {
					t.Fatalf("unable to find result in response body")
				}
				if !strings.Contains(got, tc.wantInResult) {
					t.Errorf("unexpected result: got %q, want to contain %q", got, tc.wantInResult)
				}
			}
			if tc.wantInError != "" {
				// Error path: raw body must contain the expected error message.
				bodyBytes, _ := io.ReadAll(resp.Body)
				if !strings.Contains(string(bodyBytes), tc.wantInError) {
					t.Errorf("unexpected error message: got %q, want to contain %q", string(bodyBytes), tc.wantInError)
				}
			}
		})
	}
}
```