#
tokens: 46729/50000 7/807 files (page 25/35)
lines: off (toggle) GitHub
raw markdown copy
This is page 25 of 35. Use http://codebase.md/googleapis/genai-toolbox?lines=false&page={x} to view the full context.

# Directory Structure

```
├── .ci
│   ├── continuous.release.cloudbuild.yaml
│   ├── generate_release_table.sh
│   ├── integration.cloudbuild.yaml
│   ├── quickstart_test
│   │   ├── go.integration.cloudbuild.yaml
│   │   ├── js.integration.cloudbuild.yaml
│   │   ├── py.integration.cloudbuild.yaml
│   │   ├── run_go_tests.sh
│   │   ├── run_js_tests.sh
│   │   ├── run_py_tests.sh
│   │   └── setup_hotels_sample.sql
│   ├── test_with_coverage.sh
│   └── versioned.release.cloudbuild.yaml
├── .github
│   ├── auto-label.yaml
│   ├── blunderbuss.yml
│   ├── CODEOWNERS
│   ├── header-checker-lint.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.yml
│   │   ├── config.yml
│   │   ├── feature_request.yml
│   │   └── question.yml
│   ├── label-sync.yml
│   ├── labels.yaml
│   ├── PULL_REQUEST_TEMPLATE.md
│   ├── release-please.yml
│   ├── renovate.json5
│   ├── sync-repo-settings.yaml
│   └── workflows
│       ├── cloud_build_failure_reporter.yml
│       ├── deploy_dev_docs.yaml
│       ├── deploy_previous_version_docs.yaml
│       ├── deploy_versioned_docs.yaml
│       ├── docs_deploy.yaml
│       ├── docs_preview_clean.yaml
│       ├── docs_preview_deploy.yaml
│       ├── lint.yaml
│       ├── schedule_reporter.yml
│       ├── sync-labels.yaml
│       └── tests.yaml
├── .gitignore
├── .gitmodules
├── .golangci.yaml
├── .hugo
│   ├── archetypes
│   │   └── default.md
│   ├── assets
│   │   ├── icons
│   │   │   └── logo.svg
│   │   └── scss
│   │       ├── _styles_project.scss
│   │       └── _variables_project.scss
│   ├── go.mod
│   ├── go.sum
│   ├── hugo.toml
│   ├── layouts
│   │   ├── _default
│   │   │   └── home.releases.releases
│   │   ├── index.llms-full.txt
│   │   ├── index.llms.txt
│   │   ├── partials
│   │   │   ├── hooks
│   │   │   │   └── head-end.html
│   │   │   ├── navbar-version-selector.html
│   │   │   ├── page-meta-links.html
│   │   │   └── td
│   │   │       └── render-heading.html
│   │   ├── robot.txt
│   │   └── shortcodes
│   │       ├── include.html
│   │       ├── ipynb.html
│   │       └── regionInclude.html
│   ├── package-lock.json
│   ├── package.json
│   └── static
│       ├── favicons
│       │   ├── android-chrome-192x192.png
│       │   ├── android-chrome-512x512.png
│       │   ├── apple-touch-icon.png
│       │   ├── favicon-16x16.png
│       │   ├── favicon-32x32.png
│       │   └── favicon.ico
│       └── js
│           └── w3.js
├── CHANGELOG.md
├── cmd
│   ├── options_test.go
│   ├── options.go
│   ├── root_test.go
│   ├── root.go
│   └── version.txt
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── DEVELOPER.md
├── Dockerfile
├── docs
│   └── en
│       ├── _index.md
│       ├── about
│       │   ├── _index.md
│       │   └── faq.md
│       ├── concepts
│       │   ├── _index.md
│       │   └── telemetry
│       │       ├── index.md
│       │       ├── telemetry_flow.png
│       │       └── telemetry_traces.png
│       ├── getting-started
│       │   ├── _index.md
│       │   ├── colab_quickstart.ipynb
│       │   ├── configure.md
│       │   ├── introduction
│       │   │   ├── _index.md
│       │   │   └── architecture.png
│       │   ├── local_quickstart_go.md
│       │   ├── local_quickstart_js.md
│       │   ├── local_quickstart.md
│       │   ├── mcp_quickstart
│       │   │   ├── _index.md
│       │   │   ├── inspector_tools.png
│       │   │   └── inspector.png
│       │   └── quickstart
│       │       ├── go
│       │       │   ├── genAI
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── genkit
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── langchain
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── openAI
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   └── quickstart_test.go
│       │       ├── golden.txt
│       │       ├── js
│       │       │   ├── genAI
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── genkit
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── langchain
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── llamaindex
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   └── quickstart.test.js
│       │       ├── python
│       │       │   ├── __init__.py
│       │       │   ├── adk
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── core
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── langchain
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── llamaindex
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   └── quickstart_test.py
│       │       └── shared
│       │           ├── cloud_setup.md
│       │           ├── configure_toolbox.md
│       │           └── database_setup.md
│       ├── how-to
│       │   ├── _index.md
│       │   ├── connect_via_geminicli.md
│       │   ├── connect_via_mcp.md
│       │   ├── connect-ide
│       │   │   ├── _index.md
│       │   │   ├── alloydb_pg_admin_mcp.md
│       │   │   ├── alloydb_pg_mcp.md
│       │   │   ├── bigquery_mcp.md
│       │   │   ├── cloud_sql_mssql_admin_mcp.md
│       │   │   ├── cloud_sql_mssql_mcp.md
│       │   │   ├── cloud_sql_mysql_admin_mcp.md
│       │   │   ├── cloud_sql_mysql_mcp.md
│       │   │   ├── cloud_sql_pg_admin_mcp.md
│       │   │   ├── cloud_sql_pg_mcp.md
│       │   │   ├── firestore_mcp.md
│       │   │   ├── looker_mcp.md
│       │   │   ├── mssql_mcp.md
│       │   │   ├── mysql_mcp.md
│       │   │   ├── neo4j_mcp.md
│       │   │   ├── postgres_mcp.md
│       │   │   ├── spanner_mcp.md
│       │   │   └── sqlite_mcp.md
│       │   ├── deploy_docker.md
│       │   ├── deploy_gke.md
│       │   ├── deploy_toolbox.md
│       │   ├── export_telemetry.md
│       │   └── toolbox-ui
│       │       ├── edit-headers.gif
│       │       ├── edit-headers.png
│       │       ├── index.md
│       │       ├── optional-param-checked.png
│       │       ├── optional-param-unchecked.png
│       │       ├── run-tool.gif
│       │       ├── tools.png
│       │       └── toolsets.png
│       ├── reference
│       │   ├── _index.md
│       │   ├── cli.md
│       │   └── prebuilt-tools.md
│       ├── resources
│       │   ├── _index.md
│       │   ├── authServices
│       │   │   ├── _index.md
│       │   │   └── google.md
│       │   ├── sources
│       │   │   ├── _index.md
│       │   │   ├── alloydb-admin.md
│       │   │   ├── alloydb-pg.md
│       │   │   ├── bigquery.md
│       │   │   ├── bigtable.md
│       │   │   ├── cassandra.md
│       │   │   ├── clickhouse.md
│       │   │   ├── cloud-monitoring.md
│       │   │   ├── cloud-sql-admin.md
│       │   │   ├── cloud-sql-mssql.md
│       │   │   ├── cloud-sql-mysql.md
│       │   │   ├── cloud-sql-pg.md
│       │   │   ├── couchbase.md
│       │   │   ├── dataplex.md
│       │   │   ├── dgraph.md
│       │   │   ├── firebird.md
│       │   │   ├── firestore.md
│       │   │   ├── http.md
│       │   │   ├── looker.md
│       │   │   ├── mongodb.md
│       │   │   ├── mssql.md
│       │   │   ├── mysql.md
│       │   │   ├── neo4j.md
│       │   │   ├── oceanbase.md
│       │   │   ├── oracle.md
│       │   │   ├── postgres.md
│       │   │   ├── redis.md
│       │   │   ├── spanner.md
│       │   │   ├── sqlite.md
│       │   │   ├── tidb.md
│       │   │   ├── trino.md
│       │   │   ├── valkey.md
│       │   │   └── yugabytedb.md
│       │   └── tools
│       │       ├── _index.md
│       │       ├── alloydb
│       │       │   ├── _index.md
│       │       │   ├── alloydb-create-cluster.md
│       │       │   ├── alloydb-create-instance.md
│       │       │   ├── alloydb-create-user.md
│       │       │   ├── alloydb-get-cluster.md
│       │       │   ├── alloydb-get-instance.md
│       │       │   ├── alloydb-get-user.md
│       │       │   ├── alloydb-list-clusters.md
│       │       │   ├── alloydb-list-instances.md
│       │       │   ├── alloydb-list-users.md
│       │       │   └── alloydb-wait-for-operation.md
│       │       ├── alloydbainl
│       │       │   ├── _index.md
│       │       │   └── alloydb-ai-nl.md
│       │       ├── bigquery
│       │       │   ├── _index.md
│       │       │   ├── bigquery-analyze-contribution.md
│       │       │   ├── bigquery-conversational-analytics.md
│       │       │   ├── bigquery-execute-sql.md
│       │       │   ├── bigquery-forecast.md
│       │       │   ├── bigquery-get-dataset-info.md
│       │       │   ├── bigquery-get-table-info.md
│       │       │   ├── bigquery-list-dataset-ids.md
│       │       │   ├── bigquery-list-table-ids.md
│       │       │   ├── bigquery-search-catalog.md
│       │       │   └── bigquery-sql.md
│       │       ├── bigtable
│       │       │   ├── _index.md
│       │       │   └── bigtable-sql.md
│       │       ├── cassandra
│       │       │   ├── _index.md
│       │       │   └── cassandra-cql.md
│       │       ├── clickhouse
│       │       │   ├── _index.md
│       │       │   ├── clickhouse-execute-sql.md
│       │       │   ├── clickhouse-list-databases.md
│       │       │   ├── clickhouse-list-tables.md
│       │       │   └── clickhouse-sql.md
│       │       ├── cloudmonitoring
│       │       │   ├── _index.md
│       │       │   └── cloud-monitoring-query-prometheus.md
│       │       ├── cloudsql
│       │       │   ├── _index.md
│       │       │   ├── cloudsqlcreatedatabase.md
│       │       │   ├── cloudsqlcreateusers.md
│       │       │   ├── cloudsqlgetinstances.md
│       │       │   ├── cloudsqllistdatabases.md
│       │       │   ├── cloudsqllistinstances.md
│       │       │   ├── cloudsqlmssqlcreateinstance.md
│       │       │   ├── cloudsqlmysqlcreateinstance.md
│       │       │   ├── cloudsqlpgcreateinstances.md
│       │       │   └── cloudsqlwaitforoperation.md
│       │       ├── couchbase
│       │       │   ├── _index.md
│       │       │   └── couchbase-sql.md
│       │       ├── dataform
│       │       │   ├── _index.md
│       │       │   └── dataform-compile-local.md
│       │       ├── dataplex
│       │       │   ├── _index.md
│       │       │   ├── dataplex-lookup-entry.md
│       │       │   ├── dataplex-search-aspect-types.md
│       │       │   └── dataplex-search-entries.md
│       │       ├── dgraph
│       │       │   ├── _index.md
│       │       │   └── dgraph-dql.md
│       │       ├── firebird
│       │       │   ├── _index.md
│       │       │   ├── firebird-execute-sql.md
│       │       │   └── firebird-sql.md
│       │       ├── firestore
│       │       │   ├── _index.md
│       │       │   ├── firestore-add-documents.md
│       │       │   ├── firestore-delete-documents.md
│       │       │   ├── firestore-get-documents.md
│       │       │   ├── firestore-get-rules.md
│       │       │   ├── firestore-list-collections.md
│       │       │   ├── firestore-query-collection.md
│       │       │   ├── firestore-query.md
│       │       │   ├── firestore-update-document.md
│       │       │   └── firestore-validate-rules.md
│       │       ├── http
│       │       │   ├── _index.md
│       │       │   └── http.md
│       │       ├── looker
│       │       │   ├── _index.md
│       │       │   ├── looker-add-dashboard-element.md
│       │       │   ├── looker-conversational-analytics.md
│       │       │   ├── looker-create-project-file.md
│       │       │   ├── looker-delete-project-file.md
│       │       │   ├── looker-dev-mode.md
│       │       │   ├── looker-get-dashboards.md
│       │       │   ├── looker-get-dimensions.md
│       │       │   ├── looker-get-explores.md
│       │       │   ├── looker-get-filters.md
│       │       │   ├── looker-get-looks.md
│       │       │   ├── looker-get-measures.md
│       │       │   ├── looker-get-models.md
│       │       │   ├── looker-get-parameters.md
│       │       │   ├── looker-get-project-file.md
│       │       │   ├── looker-get-project-files.md
│       │       │   ├── looker-get-projects.md
│       │       │   ├── looker-health-analyze.md
│       │       │   ├── looker-health-pulse.md
│       │       │   ├── looker-health-vacuum.md
│       │       │   ├── looker-make-dashboard.md
│       │       │   ├── looker-make-look.md
│       │       │   ├── looker-query-sql.md
│       │       │   ├── looker-query-url.md
│       │       │   ├── looker-query.md
│       │       │   ├── looker-run-look.md
│       │       │   └── looker-update-project-file.md
│       │       ├── mongodb
│       │       │   ├── _index.md
│       │       │   ├── mongodb-aggregate.md
│       │       │   ├── mongodb-delete-many.md
│       │       │   ├── mongodb-delete-one.md
│       │       │   ├── mongodb-find-one.md
│       │       │   ├── mongodb-find.md
│       │       │   ├── mongodb-insert-many.md
│       │       │   ├── mongodb-insert-one.md
│       │       │   ├── mongodb-update-many.md
│       │       │   └── mongodb-update-one.md
│       │       ├── mssql
│       │       │   ├── _index.md
│       │       │   ├── mssql-execute-sql.md
│       │       │   ├── mssql-list-tables.md
│       │       │   └── mssql-sql.md
│       │       ├── mysql
│       │       │   ├── _index.md
│       │       │   ├── mysql-execute-sql.md
│       │       │   ├── mysql-list-active-queries.md
│       │       │   ├── mysql-list-table-fragmentation.md
│       │       │   ├── mysql-list-tables-missing-unique-indexes.md
│       │       │   ├── mysql-list-tables.md
│       │       │   └── mysql-sql.md
│       │       ├── neo4j
│       │       │   ├── _index.md
│       │       │   ├── neo4j-cypher.md
│       │       │   ├── neo4j-execute-cypher.md
│       │       │   └── neo4j-schema.md
│       │       ├── oceanbase
│       │       │   ├── _index.md
│       │       │   ├── oceanbase-execute-sql.md
│       │       │   └── oceanbase-sql.md
│       │       ├── oracle
│       │       │   ├── _index.md
│       │       │   ├── oracle-execute-sql.md
│       │       │   └── oracle-sql.md
│       │       ├── postgres
│       │       │   ├── _index.md
│       │       │   ├── postgres-execute-sql.md
│       │       │   ├── postgres-list-active-queries.md
│       │       │   ├── postgres-list-available-extensions.md
│       │       │   ├── postgres-list-installed-extensions.md
│       │       │   ├── postgres-list-tables.md
│       │       │   └── postgres-sql.md
│       │       ├── redis
│       │       │   ├── _index.md
│       │       │   └── redis.md
│       │       ├── spanner
│       │       │   ├── _index.md
│       │       │   ├── spanner-execute-sql.md
│       │       │   ├── spanner-list-tables.md
│       │       │   └── spanner-sql.md
│       │       ├── sqlite
│       │       │   ├── _index.md
│       │       │   ├── sqlite-execute-sql.md
│       │       │   └── sqlite-sql.md
│       │       ├── tidb
│       │       │   ├── _index.md
│       │       │   ├── tidb-execute-sql.md
│       │       │   └── tidb-sql.md
│       │       ├── trino
│       │       │   ├── _index.md
│       │       │   ├── trino-execute-sql.md
│       │       │   └── trino-sql.md
│       │       ├── utility
│       │       │   ├── _index.md
│       │       │   └── wait.md
│       │       ├── valkey
│       │       │   ├── _index.md
│       │       │   └── valkey.md
│       │       └── yuagbytedb
│       │           ├── _index.md
│       │           └── yugabytedb-sql.md
│       ├── samples
│       │   ├── _index.md
│       │   ├── alloydb
│       │   │   ├── _index.md
│       │   │   ├── ai-nl
│       │   │   │   ├── alloydb_ai_nl.ipynb
│       │   │   │   └── index.md
│       │   │   └── mcp_quickstart.md
│       │   ├── bigquery
│       │   │   ├── _index.md
│       │   │   ├── colab_quickstart_bigquery.ipynb
│       │   │   ├── local_quickstart.md
│       │   │   └── mcp_quickstart
│       │   │       ├── _index.md
│       │   │       ├── inspector_tools.png
│       │   │       └── inspector.png
│       │   └── looker
│       │       ├── _index.md
│       │       ├── looker_gemini_oauth
│       │       │   ├── _index.md
│       │       │   ├── authenticated.png
│       │       │   ├── authorize.png
│       │       │   └── registration.png
│       │       ├── looker_gemini.md
│       │       └── looker_mcp_inspector
│       │           ├── _index.md
│       │           ├── inspector_tools.png
│       │           └── inspector.png
│       └── sdks
│           ├── _index.md
│           ├── go-sdk.md
│           ├── js-sdk.md
│           └── python-sdk.md
├── gemini-extension.json
├── go.mod
├── go.sum
├── internal
│   ├── auth
│   │   ├── auth.go
│   │   └── google
│   │       └── google.go
│   ├── log
│   │   ├── handler.go
│   │   ├── log_test.go
│   │   ├── log.go
│   │   └── logger.go
│   ├── prebuiltconfigs
│   │   ├── prebuiltconfigs_test.go
│   │   ├── prebuiltconfigs.go
│   │   └── tools
│   │       ├── alloydb-postgres-admin.yaml
│   │       ├── alloydb-postgres-observability.yaml
│   │       ├── alloydb-postgres.yaml
│   │       ├── bigquery.yaml
│   │       ├── clickhouse.yaml
│   │       ├── cloud-sql-mssql-admin.yaml
│   │       ├── cloud-sql-mssql-observability.yaml
│   │       ├── cloud-sql-mssql.yaml
│   │       ├── cloud-sql-mysql-admin.yaml
│   │       ├── cloud-sql-mysql-observability.yaml
│   │       ├── cloud-sql-mysql.yaml
│   │       ├── cloud-sql-postgres-admin.yaml
│   │       ├── cloud-sql-postgres-observability.yaml
│   │       ├── cloud-sql-postgres.yaml
│   │       ├── dataplex.yaml
│   │       ├── firestore.yaml
│   │       ├── looker-conversational-analytics.yaml
│   │       ├── looker.yaml
│   │       ├── mssql.yaml
│   │       ├── mysql.yaml
│   │       ├── neo4j.yaml
│   │       ├── oceanbase.yaml
│   │       ├── postgres.yaml
│   │       ├── spanner-postgres.yaml
│   │       ├── spanner.yaml
│   │       └── sqlite.yaml
│   ├── server
│   │   ├── api_test.go
│   │   ├── api.go
│   │   ├── common_test.go
│   │   ├── config.go
│   │   ├── mcp
│   │   │   ├── jsonrpc
│   │   │   │   ├── jsonrpc_test.go
│   │   │   │   └── jsonrpc.go
│   │   │   ├── mcp.go
│   │   │   ├── util
│   │   │   │   └── lifecycle.go
│   │   │   ├── v20241105
│   │   │   │   ├── method.go
│   │   │   │   └── types.go
│   │   │   ├── v20250326
│   │   │   │   ├── method.go
│   │   │   │   └── types.go
│   │   │   └── v20250618
│   │   │       ├── method.go
│   │   │       └── types.go
│   │   ├── mcp_test.go
│   │   ├── mcp.go
│   │   ├── server_test.go
│   │   ├── server.go
│   │   ├── static
│   │   │   ├── assets
│   │   │   │   └── mcptoolboxlogo.png
│   │   │   ├── css
│   │   │   │   └── style.css
│   │   │   ├── index.html
│   │   │   ├── js
│   │   │   │   ├── auth.js
│   │   │   │   ├── loadTools.js
│   │   │   │   ├── mainContent.js
│   │   │   │   ├── navbar.js
│   │   │   │   ├── runTool.js
│   │   │   │   ├── toolDisplay.js
│   │   │   │   ├── tools.js
│   │   │   │   └── toolsets.js
│   │   │   ├── tools.html
│   │   │   └── toolsets.html
│   │   ├── web_test.go
│   │   └── web.go
│   ├── sources
│   │   ├── alloydbadmin
│   │   │   ├── alloydbadmin_test.go
│   │   │   └── alloydbadmin.go
│   │   ├── alloydbpg
│   │   │   ├── alloydb_pg_test.go
│   │   │   └── alloydb_pg.go
│   │   ├── bigquery
│   │   │   ├── bigquery_test.go
│   │   │   └── bigquery.go
│   │   ├── bigtable
│   │   │   ├── bigtable_test.go
│   │   │   └── bigtable.go
│   │   ├── cassandra
│   │   │   ├── cassandra_test.go
│   │   │   └── cassandra.go
│   │   ├── clickhouse
│   │   │   ├── clickhouse_test.go
│   │   │   └── clickhouse.go
│   │   ├── cloudmonitoring
│   │   │   ├── cloud_monitoring_test.go
│   │   │   └── cloud_monitoring.go
│   │   ├── cloudsqladmin
│   │   │   ├── cloud_sql_admin_test.go
│   │   │   └── cloud_sql_admin.go
│   │   ├── cloudsqlmssql
│   │   │   ├── cloud_sql_mssql_test.go
│   │   │   └── cloud_sql_mssql.go
│   │   ├── cloudsqlmysql
│   │   │   ├── cloud_sql_mysql_test.go
│   │   │   └── cloud_sql_mysql.go
│   │   ├── cloudsqlpg
│   │   │   ├── cloud_sql_pg_test.go
│   │   │   └── cloud_sql_pg.go
│   │   ├── couchbase
│   │   │   ├── couchbase_test.go
│   │   │   └── couchbase.go
│   │   ├── dataplex
│   │   │   ├── dataplex_test.go
│   │   │   └── dataplex.go
│   │   ├── dgraph
│   │   │   ├── dgraph_test.go
│   │   │   └── dgraph.go
│   │   ├── dialect.go
│   │   ├── firebird
│   │   │   ├── firebird_test.go
│   │   │   └── firebird.go
│   │   ├── firestore
│   │   │   ├── firestore_test.go
│   │   │   └── firestore.go
│   │   ├── http
│   │   │   ├── http_test.go
│   │   │   └── http.go
│   │   ├── ip_type.go
│   │   ├── looker
│   │   │   ├── looker_test.go
│   │   │   └── looker.go
│   │   ├── mongodb
│   │   │   ├── mongodb_test.go
│   │   │   └── mongodb.go
│   │   ├── mssql
│   │   │   ├── mssql_test.go
│   │   │   └── mssql.go
│   │   ├── mysql
│   │   │   ├── mysql_test.go
│   │   │   └── mysql.go
│   │   ├── neo4j
│   │   │   ├── neo4j_test.go
│   │   │   └── neo4j.go
│   │   ├── oceanbase
│   │   │   ├── oceanbase_test.go
│   │   │   └── oceanbase.go
│   │   ├── oracle
│   │   │   └── oracle.go
│   │   ├── postgres
│   │   │   ├── postgres_test.go
│   │   │   └── postgres.go
│   │   ├── redis
│   │   │   ├── redis_test.go
│   │   │   └── redis.go
│   │   ├── sources.go
│   │   ├── spanner
│   │   │   ├── spanner_test.go
│   │   │   └── spanner.go
│   │   ├── sqlite
│   │   │   ├── sqlite_test.go
│   │   │   └── sqlite.go
│   │   ├── tidb
│   │   │   ├── tidb_test.go
│   │   │   └── tidb.go
│   │   ├── trino
│   │   │   ├── trino_test.go
│   │   │   └── trino.go
│   │   ├── util.go
│   │   ├── valkey
│   │   │   ├── valkey_test.go
│   │   │   └── valkey.go
│   │   └── yugabytedb
│   │       ├── yugabytedb_test.go
│   │       └── yugabytedb.go
│   ├── telemetry
│   │   ├── instrumentation.go
│   │   └── telemetry.go
│   ├── testutils
│   │   └── testutils.go
│   ├── tools
│   │   ├── alloydb
│   │   │   ├── alloydbcreatecluster
│   │   │   │   ├── alloydbcreatecluster_test.go
│   │   │   │   └── alloydbcreatecluster.go
│   │   │   ├── alloydbcreateinstance
│   │   │   │   ├── alloydbcreateinstance_test.go
│   │   │   │   └── alloydbcreateinstance.go
│   │   │   ├── alloydbcreateuser
│   │   │   │   ├── alloydbcreateuser_test.go
│   │   │   │   └── alloydbcreateuser.go
│   │   │   ├── alloydbgetcluster
│   │   │   │   ├── alloydbgetcluster_test.go
│   │   │   │   └── alloydbgetcluster.go
│   │   │   ├── alloydbgetinstance
│   │   │   │   ├── alloydbgetinstance_test.go
│   │   │   │   └── alloydbgetinstance.go
│   │   │   ├── alloydbgetuser
│   │   │   │   ├── alloydbgetuser_test.go
│   │   │   │   └── alloydbgetuser.go
│   │   │   ├── alloydblistclusters
│   │   │   │   ├── alloydblistclusters_test.go
│   │   │   │   └── alloydblistclusters.go
│   │   │   ├── alloydblistinstances
│   │   │   │   ├── alloydblistinstances_test.go
│   │   │   │   └── alloydblistinstances.go
│   │   │   ├── alloydblistusers
│   │   │   │   ├── alloydblistusers_test.go
│   │   │   │   └── alloydblistusers.go
│   │   │   └── alloydbwaitforoperation
│   │   │       ├── alloydbwaitforoperation_test.go
│   │   │       └── alloydbwaitforoperation.go
│   │   ├── alloydbainl
│   │   │   ├── alloydbainl_test.go
│   │   │   └── alloydbainl.go
│   │   ├── bigquery
│   │   │   ├── bigqueryanalyzecontribution
│   │   │   │   ├── bigqueryanalyzecontribution_test.go
│   │   │   │   └── bigqueryanalyzecontribution.go
│   │   │   ├── bigquerycommon
│   │   │   │   ├── table_name_parser_test.go
│   │   │   │   ├── table_name_parser.go
│   │   │   │   └── util.go
│   │   │   ├── bigqueryconversationalanalytics
│   │   │   │   ├── bigqueryconversationalanalytics_test.go
│   │   │   │   └── bigqueryconversationalanalytics.go
│   │   │   ├── bigqueryexecutesql
│   │   │   │   ├── bigqueryexecutesql_test.go
│   │   │   │   └── bigqueryexecutesql.go
│   │   │   ├── bigqueryforecast
│   │   │   │   ├── bigqueryforecast_test.go
│   │   │   │   └── bigqueryforecast.go
│   │   │   ├── bigquerygetdatasetinfo
│   │   │   │   ├── bigquerygetdatasetinfo_test.go
│   │   │   │   └── bigquerygetdatasetinfo.go
│   │   │   ├── bigquerygettableinfo
│   │   │   │   ├── bigquerygettableinfo_test.go
│   │   │   │   └── bigquerygettableinfo.go
│   │   │   ├── bigquerylistdatasetids
│   │   │   │   ├── bigquerylistdatasetids_test.go
│   │   │   │   └── bigquerylistdatasetids.go
│   │   │   ├── bigquerylisttableids
│   │   │   │   ├── bigquerylisttableids_test.go
│   │   │   │   └── bigquerylisttableids.go
│   │   │   ├── bigquerysearchcatalog
│   │   │   │   ├── bigquerysearchcatalog_test.go
│   │   │   │   └── bigquerysearchcatalog.go
│   │   │   └── bigquerysql
│   │   │       ├── bigquerysql_test.go
│   │   │       └── bigquerysql.go
│   │   ├── bigtable
│   │   │   ├── bigtable_test.go
│   │   │   └── bigtable.go
│   │   ├── cassandra
│   │   │   └── cassandracql
│   │   │       ├── cassandracql_test.go
│   │   │       └── cassandracql.go
│   │   ├── clickhouse
│   │   │   ├── clickhouseexecutesql
│   │   │   │   ├── clickhouseexecutesql_test.go
│   │   │   │   └── clickhouseexecutesql.go
│   │   │   ├── clickhouselistdatabases
│   │   │   │   ├── clickhouselistdatabases_test.go
│   │   │   │   └── clickhouselistdatabases.go
│   │   │   ├── clickhouselisttables
│   │   │   │   ├── clickhouselisttables_test.go
│   │   │   │   └── clickhouselisttables.go
│   │   │   └── clickhousesql
│   │   │       ├── clickhousesql_test.go
│   │   │       └── clickhousesql.go
│   │   ├── cloudmonitoring
│   │   │   ├── cloudmonitoring_test.go
│   │   │   └── cloudmonitoring.go
│   │   ├── cloudsql
│   │   │   ├── cloudsqlcreatedatabase
│   │   │   │   ├── cloudsqlcreatedatabase_test.go
│   │   │   │   └── cloudsqlcreatedatabase.go
│   │   │   ├── cloudsqlcreateusers
│   │   │   │   ├── cloudsqlcreateusers_test.go
│   │   │   │   └── cloudsqlcreateusers.go
│   │   │   ├── cloudsqlgetinstances
│   │   │   │   ├── cloudsqlgetinstances_test.go
│   │   │   │   └── cloudsqlgetinstances.go
│   │   │   ├── cloudsqllistdatabases
│   │   │   │   ├── cloudsqllistdatabases_test.go
│   │   │   │   └── cloudsqllistdatabases.go
│   │   │   ├── cloudsqllistinstances
│   │   │   │   ├── cloudsqllistinstances_test.go
│   │   │   │   └── cloudsqllistinstances.go
│   │   │   └── cloudsqlwaitforoperation
│   │   │       ├── cloudsqlwaitforoperation_test.go
│   │   │       └── cloudsqlwaitforoperation.go
│   │   ├── cloudsqlmssql
│   │   │   └── cloudsqlmssqlcreateinstance
│   │   │       ├── cloudsqlmssqlcreateinstance_test.go
│   │   │       └── cloudsqlmssqlcreateinstance.go
│   │   ├── cloudsqlmysql
│   │   │   └── cloudsqlmysqlcreateinstance
│   │   │       ├── cloudsqlmysqlcreateinstance_test.go
│   │   │       └── cloudsqlmysqlcreateinstance.go
│   │   ├── cloudsqlpg
│   │   │   └── cloudsqlpgcreateinstances
│   │   │       ├── cloudsqlpgcreateinstances_test.go
│   │   │       └── cloudsqlpgcreateinstances.go
│   │   ├── common_test.go
│   │   ├── common.go
│   │   ├── couchbase
│   │   │   ├── couchbase_test.go
│   │   │   └── couchbase.go
│   │   ├── dataform
│   │   │   └── dataformcompilelocal
│   │   │       ├── dataformcompilelocal_test.go
│   │   │       └── dataformcompilelocal.go
│   │   ├── dataplex
│   │   │   ├── dataplexlookupentry
│   │   │   │   ├── dataplexlookupentry_test.go
│   │   │   │   └── dataplexlookupentry.go
│   │   │   ├── dataplexsearchaspecttypes
│   │   │   │   ├── dataplexsearchaspecttypes_test.go
│   │   │   │   └── dataplexsearchaspecttypes.go
│   │   │   └── dataplexsearchentries
│   │   │       ├── dataplexsearchentries_test.go
│   │   │       └── dataplexsearchentries.go
│   │   ├── dgraph
│   │   │   ├── dgraph_test.go
│   │   │   └── dgraph.go
│   │   ├── firebird
│   │   │   ├── firebirdexecutesql
│   │   │   │   ├── firebirdexecutesql_test.go
│   │   │   │   └── firebirdexecutesql.go
│   │   │   └── firebirdsql
│   │   │       ├── firebirdsql_test.go
│   │   │       └── firebirdsql.go
│   │   ├── firestore
│   │   │   ├── firestoreadddocuments
│   │   │   │   ├── firestoreadddocuments_test.go
│   │   │   │   └── firestoreadddocuments.go
│   │   │   ├── firestoredeletedocuments
│   │   │   │   ├── firestoredeletedocuments_test.go
│   │   │   │   └── firestoredeletedocuments.go
│   │   │   ├── firestoregetdocuments
│   │   │   │   ├── firestoregetdocuments_test.go
│   │   │   │   └── firestoregetdocuments.go
│   │   │   ├── firestoregetrules
│   │   │   │   ├── firestoregetrules_test.go
│   │   │   │   └── firestoregetrules.go
│   │   │   ├── firestorelistcollections
│   │   │   │   ├── firestorelistcollections_test.go
│   │   │   │   └── firestorelistcollections.go
│   │   │   ├── firestorequery
│   │   │   │   ├── firestorequery_test.go
│   │   │   │   └── firestorequery.go
│   │   │   ├── firestorequerycollection
│   │   │   │   ├── firestorequerycollection_test.go
│   │   │   │   └── firestorequerycollection.go
│   │   │   ├── firestoreupdatedocument
│   │   │   │   ├── firestoreupdatedocument_test.go
│   │   │   │   └── firestoreupdatedocument.go
│   │   │   ├── firestorevalidaterules
│   │   │   │   ├── firestorevalidaterules_test.go
│   │   │   │   └── firestorevalidaterules.go
│   │   │   └── util
│   │   │       ├── converter_test.go
│   │   │       ├── converter.go
│   │   │       ├── validator_test.go
│   │   │       └── validator.go
│   │   ├── http
│   │   │   ├── http_test.go
│   │   │   └── http.go
│   │   ├── http_method.go
│   │   ├── looker
│   │   │   ├── lookeradddashboardelement
│   │   │   │   ├── lookeradddashboardelement_test.go
│   │   │   │   └── lookeradddashboardelement.go
│   │   │   ├── lookercommon
│   │   │   │   ├── lookercommon_test.go
│   │   │   │   └── lookercommon.go
│   │   │   ├── lookerconversationalanalytics
│   │   │   │   ├── lookerconversationalanalytics_test.go
│   │   │   │   └── lookerconversationalanalytics.go
│   │   │   ├── lookercreateprojectfile
│   │   │   │   ├── lookercreateprojectfile_test.go
│   │   │   │   └── lookercreateprojectfile.go
│   │   │   ├── lookerdeleteprojectfile
│   │   │   │   ├── lookerdeleteprojectfile_test.go
│   │   │   │   └── lookerdeleteprojectfile.go
│   │   │   ├── lookerdevmode
│   │   │   │   ├── lookerdevmode_test.go
│   │   │   │   └── lookerdevmode.go
│   │   │   ├── lookergetdashboards
│   │   │   │   ├── lookergetdashboards_test.go
│   │   │   │   └── lookergetdashboards.go
│   │   │   ├── lookergetdimensions
│   │   │   │   ├── lookergetdimensions_test.go
│   │   │   │   └── lookergetdimensions.go
│   │   │   ├── lookergetexplores
│   │   │   │   ├── lookergetexplores_test.go
│   │   │   │   └── lookergetexplores.go
│   │   │   ├── lookergetfilters
│   │   │   │   ├── lookergetfilters_test.go
│   │   │   │   └── lookergetfilters.go
│   │   │   ├── lookergetlooks
│   │   │   │   ├── lookergetlooks_test.go
│   │   │   │   └── lookergetlooks.go
│   │   │   ├── lookergetmeasures
│   │   │   │   ├── lookergetmeasures_test.go
│   │   │   │   └── lookergetmeasures.go
│   │   │   ├── lookergetmodels
│   │   │   │   ├── lookergetmodels_test.go
│   │   │   │   └── lookergetmodels.go
│   │   │   ├── lookergetparameters
│   │   │   │   ├── lookergetparameters_test.go
│   │   │   │   └── lookergetparameters.go
│   │   │   ├── lookergetprojectfile
│   │   │   │   ├── lookergetprojectfile_test.go
│   │   │   │   └── lookergetprojectfile.go
│   │   │   ├── lookergetprojectfiles
│   │   │   │   ├── lookergetprojectfiles_test.go
│   │   │   │   └── lookergetprojectfiles.go
│   │   │   ├── lookergetprojects
│   │   │   │   ├── lookergetprojects_test.go
│   │   │   │   └── lookergetprojects.go
│   │   │   ├── lookerhealthanalyze
│   │   │   │   ├── lookerhealthanalyze_test.go
│   │   │   │   └── lookerhealthanalyze.go
│   │   │   ├── lookerhealthpulse
│   │   │   │   ├── lookerhealthpulse_test.go
│   │   │   │   └── lookerhealthpulse.go
│   │   │   ├── lookerhealthvacuum
│   │   │   │   ├── lookerhealthvacuum_test.go
│   │   │   │   └── lookerhealthvacuum.go
│   │   │   ├── lookermakedashboard
│   │   │   │   ├── lookermakedashboard_test.go
│   │   │   │   └── lookermakedashboard.go
│   │   │   ├── lookermakelook
│   │   │   │   ├── lookermakelook_test.go
│   │   │   │   └── lookermakelook.go
│   │   │   ├── lookerquery
│   │   │   │   ├── lookerquery_test.go
│   │   │   │   └── lookerquery.go
│   │   │   ├── lookerquerysql
│   │   │   │   ├── lookerquerysql_test.go
│   │   │   │   └── lookerquerysql.go
│   │   │   ├── lookerqueryurl
│   │   │   │   ├── lookerqueryurl_test.go
│   │   │   │   └── lookerqueryurl.go
│   │   │   ├── lookerrunlook
│   │   │   │   ├── lookerrunlook_test.go
│   │   │   │   └── lookerrunlook.go
│   │   │   └── lookerupdateprojectfile
│   │   │       ├── lookerupdateprojectfile_test.go
│   │   │       └── lookerupdateprojectfile.go
│   │   ├── mongodb
│   │   │   ├── mongodbaggregate
│   │   │   │   ├── mongodbaggregate_test.go
│   │   │   │   └── mongodbaggregate.go
│   │   │   ├── mongodbdeletemany
│   │   │   │   ├── mongodbdeletemany_test.go
│   │   │   │   └── mongodbdeletemany.go
│   │   │   ├── mongodbdeleteone
│   │   │   │   ├── mongodbdeleteone_test.go
│   │   │   │   └── mongodbdeleteone.go
│   │   │   ├── mongodbfind
│   │   │   │   ├── mongodbfind_test.go
│   │   │   │   └── mongodbfind.go
│   │   │   ├── mongodbfindone
│   │   │   │   ├── mongodbfindone_test.go
│   │   │   │   └── mongodbfindone.go
│   │   │   ├── mongodbinsertmany
│   │   │   │   ├── mongodbinsertmany_test.go
│   │   │   │   └── mongodbinsertmany.go
│   │   │   ├── mongodbinsertone
│   │   │   │   ├── mongodbinsertone_test.go
│   │   │   │   └── mongodbinsertone.go
│   │   │   ├── mongodbupdatemany
│   │   │   │   ├── mongodbupdatemany_test.go
│   │   │   │   └── mongodbupdatemany.go
│   │   │   └── mongodbupdateone
│   │   │       ├── mongodbupdateone_test.go
│   │   │       └── mongodbupdateone.go
│   │   ├── mssql
│   │   │   ├── mssqlexecutesql
│   │   │   │   ├── mssqlexecutesql_test.go
│   │   │   │   └── mssqlexecutesql.go
│   │   │   ├── mssqllisttables
│   │   │   │   ├── mssqllisttables_test.go
│   │   │   │   └── mssqllisttables.go
│   │   │   └── mssqlsql
│   │   │       ├── mssqlsql_test.go
│   │   │       └── mssqlsql.go
│   │   ├── mysql
│   │   │   ├── mysqlcommon
│   │   │   │   └── mysqlcommon.go
│   │   │   ├── mysqlexecutesql
│   │   │   │   ├── mysqlexecutesql_test.go
│   │   │   │   └── mysqlexecutesql.go
│   │   │   ├── mysqllistactivequeries
│   │   │   │   ├── mysqllistactivequeries_test.go
│   │   │   │   └── mysqllistactivequeries.go
│   │   │   ├── mysqllisttablefragmentation
│   │   │   │   ├── mysqllisttablefragmentation_test.go
│   │   │   │   └── mysqllisttablefragmentation.go
│   │   │   ├── mysqllisttables
│   │   │   │   ├── mysqllisttables_test.go
│   │   │   │   └── mysqllisttables.go
│   │   │   ├── mysqllisttablesmissinguniqueindexes
│   │   │   │   ├── mysqllisttablesmissinguniqueindexes_test.go
│   │   │   │   └── mysqllisttablesmissinguniqueindexes.go
│   │   │   └── mysqlsql
│   │   │       ├── mysqlsql_test.go
│   │   │       └── mysqlsql.go
│   │   ├── neo4j
│   │   │   ├── neo4jcypher
│   │   │   │   ├── neo4jcypher_test.go
│   │   │   │   └── neo4jcypher.go
│   │   │   ├── neo4jexecutecypher
│   │   │   │   ├── classifier
│   │   │   │   │   ├── classifier_test.go
│   │   │   │   │   └── classifier.go
│   │   │   │   ├── neo4jexecutecypher_test.go
│   │   │   │   └── neo4jexecutecypher.go
│   │   │   └── neo4jschema
│   │   │       ├── cache
│   │   │       │   ├── cache_test.go
│   │   │       │   └── cache.go
│   │   │       ├── helpers
│   │   │       │   ├── helpers_test.go
│   │   │       │   └── helpers.go
│   │   │       ├── neo4jschema_test.go
│   │   │       ├── neo4jschema.go
│   │   │       └── types
│   │   │           └── types.go
│   │   ├── oceanbase
│   │   │   ├── oceanbaseexecutesql
│   │   │   │   ├── oceanbaseexecutesql_test.go
│   │   │   │   └── oceanbaseexecutesql.go
│   │   │   └── oceanbasesql
│   │   │       ├── oceanbasesql_test.go
│   │   │       └── oceanbasesql.go
│   │   ├── oracle
│   │   │   ├── oracleexecutesql
│   │   │   │   └── oracleexecutesql.go
│   │   │   └── oraclesql
│   │   │       └── oraclesql.go
│   │   ├── parameters_test.go
│   │   ├── parameters.go
│   │   ├── postgres
│   │   │   ├── postgresexecutesql
│   │   │   │   ├── postgresexecutesql_test.go
│   │   │   │   └── postgresexecutesql.go
│   │   │   ├── postgreslistactivequeries
│   │   │   │   ├── postgreslistactivequeries_test.go
│   │   │   │   └── postgreslistactivequeries.go
│   │   │   ├── postgreslistavailableextensions
│   │   │   │   ├── postgreslistavailableextensions_test.go
│   │   │   │   └── postgreslistavailableextensions.go
│   │   │   ├── postgreslistinstalledextensions
│   │   │   │   ├── postgreslistinstalledextensions_test.go
│   │   │   │   └── postgreslistinstalledextensions.go
│   │   │   ├── postgreslisttables
│   │   │   │   ├── postgreslisttables_test.go
│   │   │   │   └── postgreslisttables.go
│   │   │   └── postgressql
│   │   │       ├── postgressql_test.go
│   │   │       └── postgressql.go
│   │   ├── redis
│   │   │   ├── redis_test.go
│   │   │   └── redis.go
│   │   ├── spanner
│   │   │   ├── spannerexecutesql
│   │   │   │   ├── spannerexecutesql_test.go
│   │   │   │   └── spannerexecutesql.go
│   │   │   ├── spannerlisttables
│   │   │   │   ├── spannerlisttables_test.go
│   │   │   │   └── spannerlisttables.go
│   │   │   └── spannersql
│   │   │       ├── spanner_test.go
│   │   │       └── spannersql.go
│   │   ├── sqlite
│   │   │   ├── sqliteexecutesql
│   │   │   │   ├── sqliteexecutesql_test.go
│   │   │   │   └── sqliteexecutesql.go
│   │   │   └── sqlitesql
│   │   │       ├── sqlitesql_test.go
│   │   │       └── sqlitesql.go
│   │   ├── tidb
│   │   │   ├── tidbexecutesql
│   │   │   │   ├── tidbexecutesql_test.go
│   │   │   │   └── tidbexecutesql.go
│   │   │   └── tidbsql
│   │   │       ├── tidbsql_test.go
│   │   │       └── tidbsql.go
│   │   ├── tools_test.go
│   │   ├── tools.go
│   │   ├── toolsets.go
│   │   ├── trino
│   │   │   ├── trinoexecutesql
│   │   │   │   ├── trinoexecutesql_test.go
│   │   │   │   └── trinoexecutesql.go
│   │   │   └── trinosql
│   │   │       ├── trinosql_test.go
│   │   │       └── trinosql.go
│   │   ├── utility
│   │   │   └── wait
│   │   │       ├── wait_test.go
│   │   │       └── wait.go
│   │   ├── valkey
│   │   │   ├── valkey_test.go
│   │   │   └── valkey.go
│   │   └── yugabytedbsql
│   │       ├── yugabytedbsql_test.go
│   │       └── yugabytedbsql.go
│   └── util
│       └── util.go
├── LICENSE
├── logo.png
├── main.go
├── MCP-TOOLBOX-EXTENSION.md
├── README.md
└── tests
    ├── alloydb
    │   ├── alloydb_integration_test.go
    │   └── alloydb_wait_for_operation_test.go
    ├── alloydbainl
    │   └── alloydb_ai_nl_integration_test.go
    ├── alloydbpg
    │   └── alloydb_pg_integration_test.go
    ├── auth.go
    ├── bigquery
    │   └── bigquery_integration_test.go
    ├── bigtable
    │   └── bigtable_integration_test.go
    ├── cassandra
    │   └── cassandra_integration_test.go
    ├── clickhouse
    │   └── clickhouse_integration_test.go
    ├── cloudmonitoring
    │   └── cloud_monitoring_integration_test.go
    ├── cloudsql
    │   ├── cloud_sql_create_database_test.go
    │   ├── cloud_sql_create_users_test.go
    │   ├── cloud_sql_get_instances_test.go
    │   ├── cloud_sql_list_databases_test.go
    │   ├── cloudsql_list_instances_test.go
    │   └── cloudsql_wait_for_operation_test.go
    ├── cloudsqlmssql
    │   ├── cloud_sql_mssql_create_instance_integration_test.go
    │   └── cloud_sql_mssql_integration_test.go
    ├── cloudsqlmysql
    │   ├── cloud_sql_mysql_create_instance_integration_test.go
    │   └── cloud_sql_mysql_integration_test.go
    ├── cloudsqlpg
    │   ├── cloud_sql_pg_create_instances_test.go
    │   └── cloud_sql_pg_integration_test.go
    ├── common.go
    ├── couchbase
    │   └── couchbase_integration_test.go
    ├── dataform
    │   └── dataform_integration_test.go
    ├── dataplex
    │   └── dataplex_integration_test.go
    ├── dgraph
    │   └── dgraph_integration_test.go
    ├── firebird
    │   └── firebird_integration_test.go
    ├── firestore
    │   └── firestore_integration_test.go
    ├── http
    │   └── http_integration_test.go
    ├── looker
    │   └── looker_integration_test.go
    ├── mongodb
    │   └── mongodb_integration_test.go
    ├── mssql
    │   └── mssql_integration_test.go
    ├── mysql
    │   └── mysql_integration_test.go
    ├── neo4j
    │   └── neo4j_integration_test.go
    ├── oceanbase
    │   └── oceanbase_integration_test.go
    ├── option.go
    ├── oracle
    │   └── oracle_integration_test.go
    ├── postgres
    │   └── postgres_integration_test.go
    ├── redis
    │   └── redis_test.go
    ├── server.go
    ├── source.go
    ├── spanner
    │   └── spanner_integration_test.go
    ├── sqlite
    │   └── sqlite_integration_test.go
    ├── tidb
    │   └── tidb_integration_test.go
    ├── tool.go
    ├── trino
    │   └── trino_integration_test.go
    ├── utility
    │   └── wait_integration_test.go
    ├── valkey
    │   └── valkey_test.go
    └── yugabytedb
        └── yugabytedb_integration_test.go
```

# Files

--------------------------------------------------------------------------------
/internal/tools/bigquery/bigqueryconversationalanalytics/bigqueryconversationalanalytics.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package bigqueryconversationalanalytics

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"strings"

	bigqueryapi "cloud.google.com/go/bigquery"
	yaml "github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	bigqueryds "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
	"github.com/googleapis/genai-toolbox/internal/tools"
	"golang.org/x/oauth2"
)

// kind is the unique tool-kind identifier this tool registers under.
const kind string = "bigquery-conversational-analytics"

// instructions is prepended to every user query sent to the Gemini Data
// Analytics API; it constrains the model to plain-text, chart-free answers.
const instructions = `**INSTRUCTIONS - FOLLOW THESE RULES:**
1. **CONTENT:** Your answer should present the supporting data and then provide a conclusion based on that data.
2. **OUTPUT FORMAT:** Your entire response MUST be in plain text format ONLY.
3. **NO CHARTS:** You are STRICTLY FORBIDDEN from generating any charts, graphs, images, or any other form of visualization.`

// init registers this tool kind with the global tool registry; a duplicate
// registration indicates a programming error and aborts startup.
func init() {
	ok := tools.Register(kind, newConfig)
	if !ok {
		panic(fmt.Sprintf("tool kind %q already registered", kind))
	}
}

// newConfig decodes a YAML tool definition into a Config carrying the given
// tool name. It is the factory registered for this tool kind.
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
	cfg := Config{Name: name}
	err := decoder.DecodeContext(ctx, &cfg)
	if err != nil {
		return nil, err
	}
	return cfg, nil
}

// compatibleSource is the set of capabilities this tool requires from its
// backing source; only source kinds implementing all of these methods (see
// compatibleSources) can be configured for it.
type compatibleSource interface {
	BigQueryClient() *bigqueryapi.Client
	BigQueryTokenSourceWithScope(ctx context.Context, scope string) (oauth2.TokenSource, error)
	BigQueryProject() string
	BigQueryLocation() string
	GetMaxQueryResultRows() int
	UseClientAuthorization() bool
	IsDatasetAllowed(projectID, datasetID string) bool
	BigQueryAllowedDatasets() []string
}

// BQTableReference identifies a single BigQuery table by project, dataset,
// and table ID; it mirrors the API's JSON table-reference shape.
type BQTableReference struct {
	ProjectID string `json:"projectId"`
	DatasetID string `json:"datasetId"`
	TableID   string `json:"tableId"`
}

// Structs for building the JSON payload

// UserMessage holds the raw user question text.
type UserMessage struct {
	Text string `json:"text"`
}

// Message wraps one user turn sent to the chat API.
type Message struct {
	UserMessage UserMessage `json:"userMessage"`
}

// BQDatasource lists the BigQuery tables supplied as context.
type BQDatasource struct {
	TableReferences []BQTableReference `json:"tableReferences"`
}

// DatasourceReferences groups datasources by type; only BigQuery is used here.
type DatasourceReferences struct {
	BQ BQDatasource `json:"bq"`
}

// ImageOptions with an empty NoImage object requests that no images be produced.
type ImageOptions struct {
	NoImage map[string]any `json:"noImage"`
}

// ChartOptions controls chart rendering in API responses.
type ChartOptions struct {
	Image ImageOptions `json:"image"`
}

// Options is the top-level response-options container.
type Options struct {
	Chart ChartOptions `json:"chart"`
}

// InlineContext bundles the datasources and options for a chat request.
type InlineContext struct {
	DatasourceReferences DatasourceReferences `json:"datasourceReferences"`
	Options              Options              `json:"options"`
}

// CAPayload is the full request body for the conversational analytics
// ":chat" endpoint.
type CAPayload struct {
	Project       string        `json:"project"`
	Messages      []Message     `json:"messages"`
	InlineContext InlineContext `json:"inlineContext"`
	ClientIdEnum  string        `json:"clientIdEnum"`
}

// validate compatible sources are still compatible
var _ compatibleSource = &bigqueryds.Source{}

// compatibleSources names the source kinds accepted by this tool; it is
// interpolated into the error message for incompatible configurations.
var compatibleSources = [...]string{bigqueryds.SourceKind}

// Config is the YAML configuration for a single
// bigquery-conversational-analytics tool entry.
type Config struct {
	Name         string   `yaml:"name" validate:"required"`
	Kind         string   `yaml:"kind" validate:"required"`
	Source       string   `yaml:"source" validate:"required"`
	Description  string   `yaml:"description" validate:"required"`
	AuthRequired []string `yaml:"authRequired"`
}

// validate interface
var _ tools.ToolConfig = Config{}

// ToolConfigKind returns the tool-kind string this config registers under.
func (cfg Config) ToolConfigKind() string {
	return kind
}

// Initialize builds the runtime Tool from this config, wiring in the
// BigQuery source named by cfg.Source. It fails if that source is missing or
// of an incompatible kind, and — when client OAuth is disabled — eagerly
// resolves a cloud-platform-scoped token source so misconfiguration
// surfaces at startup rather than on first invocation.
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
	// Resolve and type-check the configured source.
	raw, found := srcs[cfg.Source]
	if !found {
		return nil, fmt.Errorf("no source named %q configured", cfg.Source)
	}
	src, isCompatible := raw.(compatibleSource)
	if !isCompatible {
		return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
	}

	// Build the description for the table-references parameter, appending the
	// dataset allow-list when one is configured.
	allowedDatasets := src.BigQueryAllowedDatasets()
	tableRefsDescription := `A JSON string of a list of BigQuery tables to use as context. Each object in the list must contain 'projectId', 'datasetId', and 'tableId'. Example: '[{"projectId": "my-gcp-project", "datasetId": "my_dataset", "tableId": "my_table"}]'.`
	if len(allowedDatasets) > 0 {
		quoted := make([]string, 0, len(allowedDatasets))
		for _, ds := range allowedDatasets {
			quoted = append(quoted, fmt.Sprintf("`%s`", ds))
		}
		tableRefsDescription += fmt.Sprintf(" The tables must only be from datasets in the following list: %s.", strings.Join(quoted, ", "))
	}

	parameters := tools.Parameters{
		tools.NewStringParameter("user_query_with_context", "The user's question, potentially including conversation history and system instructions for context."),
		tools.NewStringParameter("table_references", tableRefsDescription),
	}
	mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, parameters)

	// Server-side auth: obtain a cloud-platform token source for the Gemini
	// Data Analytics API now, during initialization.
	var tokenSource oauth2.TokenSource
	if !src.UseClientAuthorization() {
		ts, err := src.BigQueryTokenSourceWithScope(context.Background(), "https://www.googleapis.com/auth/cloud-platform")
		if err != nil {
			return nil, fmt.Errorf("failed to get cloud-platform token source: %w", err)
		}
		tokenSource = ts
	}

	return Tool{
		Name:               cfg.Name,
		Kind:               kind,
		Project:            src.BigQueryProject(),
		Location:           src.BigQueryLocation(),
		Parameters:         parameters,
		AuthRequired:       cfg.AuthRequired,
		Client:             src.BigQueryClient(),
		UseClientOAuth:     src.UseClientAuthorization(),
		TokenSource:        tokenSource,
		manifest:           tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
		mcpManifest:        mcpManifest,
		MaxQueryResultRows: src.GetMaxQueryResultRows(),
		IsDatasetAllowed:   src.IsDatasetAllowed,
		AllowedDatasets:    allowedDatasets,
	}, nil
}

// validate interface
var _ tools.Tool = Tool{}

// Tool is the runtime instance of the conversational analytics tool; it
// carries everything Invoke needs to call the Gemini Data Analytics API.
type Tool struct {
	Name           string           `yaml:"name"`
	Kind           string           `yaml:"kind"`
	AuthRequired   []string         `yaml:"authRequired"`
	UseClientOAuth bool             `yaml:"useClientOAuth"`
	Parameters     tools.Parameters `yaml:"parameters"`

	Project            string
	Location           string
	Client             *bigqueryapi.Client
	TokenSource        oauth2.TokenSource // cloud-platform scoped; nil when UseClientOAuth is true
	manifest           tools.Manifest
	mcpManifest        tools.McpManifest
	MaxQueryResultRows int
	IsDatasetAllowed   func(projectID, datasetID string) bool
	AllowedDatasets    []string
}

// Invoke sends the user's question — prefixed with mandatory plain-text
// formatting instructions — to the Gemini Data Analytics chat endpoint and
// returns the streamed response rendered as a string. Table references are
// validated against the dataset allow-list before the call is made.
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
	// Resolve the bearer token: either the caller-supplied OAuth token or the
	// tool's own cloud-platform token source.
	var tokenStr string
	if t.UseClientOAuth {
		if accessToken == "" {
			return nil, fmt.Errorf("tool is configured for client OAuth but no token was provided in the request header: %w", tools.ErrUnauthorized)
		}
		parsed, err := accessToken.ParseBearerToken()
		if err != nil {
			return nil, fmt.Errorf("error parsing access token: %w", err)
		}
		tokenStr = parsed
	} else {
		if t.TokenSource == nil {
			return nil, fmt.Errorf("cloud-platform token source is missing")
		}
		token, err := t.TokenSource.Token()
		if err != nil {
			return nil, fmt.Errorf("failed to get token from cloud-platform token source: %w", err)
		}
		tokenStr = token.AccessToken
	}

	paramMap := params.AsMap()
	userQuery, _ := paramMap["user_query_with_context"].(string)
	finalQueryText := fmt.Sprintf("%s\n**User Query and Context:**\n%s", instructions, userQuery)

	// Decode the optional table-references JSON list.
	var tableRefs []BQTableReference
	if tableRefsJSON, _ := paramMap["table_references"].(string); tableRefsJSON != "" {
		if err := json.Unmarshal([]byte(tableRefsJSON), &tableRefs); err != nil {
			return nil, fmt.Errorf("failed to parse 'table_references' JSON string: %w", err)
		}
	}

	// Enforce the dataset allow-list, when one is configured.
	if len(t.AllowedDatasets) > 0 {
		for _, ref := range tableRefs {
			if !t.IsDatasetAllowed(ref.ProjectID, ref.DatasetID) {
				return nil, fmt.Errorf("access to dataset '%s.%s' (from table '%s') is not allowed", ref.ProjectID, ref.DatasetID, ref.TableID)
			}
		}
	}

	// Construct URL, headers, and payload; location defaults to "us".
	location := t.Location
	if location == "" {
		location = "us"
	}
	caURL := fmt.Sprintf("https://geminidataanalytics.googleapis.com/v1alpha/projects/%s/locations/%s:chat", t.Project, location)

	headers := map[string]string{
		"Authorization": fmt.Sprintf("Bearer %s", tokenStr),
		"Content-Type":  "application/json",
	}

	payload := CAPayload{
		Project:  fmt.Sprintf("projects/%s", t.Project),
		Messages: []Message{{UserMessage: UserMessage{Text: finalQueryText}}},
		InlineContext: InlineContext{
			DatasourceReferences: DatasourceReferences{
				BQ: BQDatasource{TableReferences: tableRefs},
			},
			Options: Options{Chart: ChartOptions{Image: ImageOptions{NoImage: map[string]any{}}}},
		},
		ClientIdEnum: "GENAI_TOOLBOX",
	}

	// Call the streaming API and return the rendered result.
	response, err := getStream(caURL, payload, headers, t.MaxQueryResultRows)
	if err != nil {
		return nil, fmt.Errorf("failed to get response from conversational analytics API: %w", err)
	}
	return response, nil
}

// ParseParams validates raw request data against the tool's parameter
// definitions, resolving any auth-claim substitutions.
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
	return tools.ParseParams(t.Parameters, data, claims)
}

// Manifest returns the tool's manifest for the native Toolbox API.
func (t Tool) Manifest() tools.Manifest {
	return t.manifest
}

// McpManifest returns the tool's manifest for the MCP protocol.
func (t Tool) McpManifest() tools.McpManifest {
	return t.mcpManifest
}

// Authorized reports whether the request's verified auth services satisfy
// this tool's authRequired list.
func (t Tool) Authorized(verifiedAuthServices []string) bool {
	return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}

// RequiresClientAuthorization reports whether callers must supply their own
// OAuth access token with each request.
func (t Tool) RequiresClientAuthorization() bool {
	return t.UseClientOAuth
}

// StreamMessage represents a single message object from the streaming API
// response. At most one of its fields is populated per message.
type StreamMessage struct {
	SystemMessage *SystemMessage `json:"systemMessage,omitempty"`
	Error         *ErrorResponse `json:"error,omitempty"`
}

// SystemMessage contains different types of system-generated content.
// At most one of Text, Schema, or Data is set per message.
type SystemMessage struct {
	Text   *TextResponse   `json:"text,omitempty"`
	Schema *SchemaResponse `json:"schema,omitempty"`
	Data   *DataResponse   `json:"data,omitempty"`
}

// TextResponse contains textual parts of a message; parts are concatenated
// in order to form the answer.
type TextResponse struct {
	Parts []string `json:"parts"`
}

// SchemaResponse contains schema-related information.
type SchemaResponse struct {
	Query  *SchemaQuery  `json:"query,omitempty"`
	Result *SchemaResult `json:"result,omitempty"`
}

// SchemaQuery holds the question that prompted a schema lookup.
type SchemaQuery struct {
	Question string `json:"question"`
}

// SchemaResult contains the datasources with their schemas.
type SchemaResult struct {
	Datasources []Datasource `json:"datasources"`
}

// Datasource represents a data source with its reference and schema.
type Datasource struct {
	BigQueryTableReference *BQTableReference `json:"bigqueryTableReference,omitempty"`
	Schema                 *BQSchema         `json:"schema,omitempty"`
}

// BQSchema defines the structure of a BigQuery table.
type BQSchema struct {
	Fields []BQField `json:"fields"`
}

// BQField describes a single column in a BigQuery table.
type BQField struct {
	Name        string `json:"name"`
	Type        string `json:"type"`
	Description string `json:"description"`
	Mode        string `json:"mode"`
}

// DataResponse contains data-related information, like queries and results.
// At most one of Query, GeneratedSQL, or Result is expected per message.
type DataResponse struct {
	Query        *DataQuery  `json:"query,omitempty"`
	GeneratedSQL string      `json:"generatedSql,omitempty"`
	Result       *DataResult `json:"result,omitempty"`
}

// DataQuery holds information about a data retrieval query.
type DataQuery struct {
	Name     string `json:"name"`
	Question string `json:"question"`
}

// DataResult contains the schema and rows of a query result.
type DataResult struct {
	Schema BQSchema         `json:"schema"`
	Data   []map[string]any `json:"data"`
}

// ErrorResponse represents an error message from the API.
type ErrorResponse struct {
	Code    float64 `json:"code"` // JSON numbers are float64 by default
	Message string  `json:"message"`
}

// getStream POSTs the payload to the conversational analytics chat endpoint
// and consumes the streamed response, which is a single JSON array of
// messages. Each message is summarized into a map (answer text, schema
// lookup, generated SQL, data rows capped at maxRows, or an error);
// intermediate "Data Retrieved" entries are collapsed by appendMessage so
// only the final data payload survives. The summaries are returned as
// pretty-printed JSON objects separated by newlines.
func getStream(url string, payload CAPayload, headers map[string]string, maxRows int) (string, error) {
	payloadBytes, err := json.Marshal(payload)
	if err != nil {
		return "", fmt.Errorf("failed to marshal payload: %w", err)
	}

	req, err := http.NewRequest("POST", url, bytes.NewBuffer(payloadBytes))
	if err != nil {
		return "", fmt.Errorf("failed to create request: %w", err)
	}
	for k, v := range headers {
		req.Header.Set(k, v)
	}

	// NOTE(review): no client Timeout is set, presumably because the API
	// streams long responses; consider wiring a context deadline through
	// http.NewRequestWithContext instead — confirm with callers.
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return "", fmt.Errorf("failed to send request: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body)
		return "", fmt.Errorf("API returned non-200 status: %d %s", resp.StatusCode, string(body))
	}

	var messages []map[string]any
	decoder := json.NewDecoder(resp.Body)

	// The response is a JSON array: read the opening token and verify it is
	// actually '[' so a malformed (non-array) body fails fast with a clear
	// error instead of confusing element-decode failures below.
	tok, err := decoder.Token()
	if err != nil {
		if err == io.EOF {
			return "", nil // Empty response is valid
		}
		return "", fmt.Errorf("error reading start of json array: %w", err)
	}
	if delim, ok := tok.(json.Delim); !ok || delim != '[' {
		return "", fmt.Errorf("error reading start of json array: unexpected token %v", tok)
	}

	for decoder.More() {
		var msg StreamMessage
		if err := decoder.Decode(&msg); err != nil {
			if err == io.EOF {
				break
			}
			return "", fmt.Errorf("error decoding stream message: %w", err)
		}

		// Dispatch on whichever single field of the message is populated.
		var newMessage map[string]any
		if msg.SystemMessage != nil {
			if msg.SystemMessage.Text != nil {
				newMessage = handleTextResponse(msg.SystemMessage.Text)
			} else if msg.SystemMessage.Schema != nil {
				newMessage = handleSchemaResponse(msg.SystemMessage.Schema)
			} else if msg.SystemMessage.Data != nil {
				newMessage = handleDataResponse(msg.SystemMessage.Data, maxRows)
			}
		} else if msg.Error != nil {
			newMessage = handleError(msg.Error)
		}
		messages = appendMessage(messages, newMessage)
	}

	// Render each summary map as an indented JSON object, newline-separated.
	var acc strings.Builder
	for i, msg := range messages {
		jsonBytes, err := json.MarshalIndent(msg, "", "  ")
		if err != nil {
			return "", fmt.Errorf("error marshalling message: %w", err)
		}
		acc.Write(jsonBytes)
		if i < len(messages)-1 {
			acc.WriteString("\n")
		}
	}

	return acc.String(), nil
}

// formatBqTableRef renders a table reference as "project.dataset.table".
func formatBqTableRef(tableRef *BQTableReference) string {
	parts := []string{tableRef.ProjectID, tableRef.DatasetID, tableRef.TableID}
	return strings.Join(parts, ".")
}

// formatSchemaAsDict converts a BigQuery schema into a headers/rows table
// representation; a nil schema yields an empty row set.
func formatSchemaAsDict(data *BQSchema) map[string]any {
	headers := []string{"Column", "Type", "Description", "Mode"}
	if data == nil {
		return map[string]any{"headers": headers, "rows": []any{}}
	}

	// Keep rows as a nil slice when there are no fields so downstream JSON
	// rendering matches the original output.
	var rows [][]any
	for i := range data.Fields {
		f := &data.Fields[i]
		rows = append(rows, []any{f.Name, f.Type, f.Description, f.Mode})
	}
	return map[string]any{"headers": headers, "rows": rows}
}

// formatDatasourceAsDict summarizes a datasource as its fully-qualified
// table name plus a tabular rendering of its schema; absent pieces are left
// as the empty string / nil respectively.
func formatDatasourceAsDict(datasource *Datasource) map[string]any {
	sourceName := ""
	if ref := datasource.BigQueryTableReference; ref != nil {
		sourceName = formatBqTableRef(ref)
	}

	var schema map[string]any
	if datasource.Schema != nil {
		schema = formatSchemaAsDict(datasource.Schema)
	}

	return map[string]any{"source_name": sourceName, "schema": schema}
}

// handleTextResponse flattens the message's text parts into a single answer.
func handleTextResponse(resp *TextResponse) map[string]any {
	var sb strings.Builder
	for _, part := range resp.Parts {
		sb.WriteString(part)
	}
	return map[string]any{"Answer": sb.String()}
}

// handleSchemaResponse maps a schema message to either the question that
// triggered the lookup or the resolved datasource schemas; nil if neither
// field is populated.
func handleSchemaResponse(resp *SchemaResponse) map[string]any {
	switch {
	case resp.Query != nil:
		return map[string]any{"Question": resp.Query.Question}
	case resp.Result != nil:
		var formatted []map[string]any
		for i := range resp.Result.Datasources {
			formatted = append(formatted, formatDatasourceAsDict(&resp.Result.Datasources[i]))
		}
		return map[string]any{"Schema Resolved": formatted}
	default:
		return nil
	}
}

// handleDataResponse maps a data message to one of three summaries: the
// retrieval query metadata, the generated SQL, or the result rows
// (truncated to maxRows, with a summary line noting any truncation).
// Returns nil when none of the fields are populated.
func handleDataResponse(resp *DataResponse, maxRows int) map[string]any {
	if resp.Query != nil {
		return map[string]any{
			"Retrieval Query": map[string]any{
				"Query Name": resp.Query.Name,
				"Question":   resp.Query.Question,
			},
		}
	}
	if resp.GeneratedSQL != "" {
		return map[string]any{"SQL Generated": resp.GeneratedSQL}
	}
	if resp.Result == nil {
		return nil
	}

	var headers []string
	for _, field := range resp.Result.Schema.Fields {
		headers = append(headers, field.Name)
	}

	totalRows := len(resp.Result.Data)
	shown := totalRows
	if shown > maxRows {
		shown = maxRows
	}

	// Re-project each row map into a compact slice ordered by the headers.
	var compactRows [][]any
	for _, row := range resp.Result.Data[:shown] {
		var values []any
		for _, header := range headers {
			values = append(values, row[header])
		}
		compactRows = append(compactRows, values)
	}

	summary := fmt.Sprintf("Showing all %d rows.", totalRows)
	if totalRows > maxRows {
		summary = fmt.Sprintf("Showing the first %d of %d total rows.", shown, totalRows)
	}

	return map[string]any{
		"Data Retrieved": map[string]any{
			"headers": headers,
			"rows":    compactRows,
			"summary": summary,
		},
	}
}

// handleError converts an API error message into a summary map, narrowing
// the JSON float64 code to an int.
func handleError(resp *ErrorResponse) map[string]any {
	details := map[string]any{
		"Code":    int(resp.Code),
		"Message": resp.Message,
	}
	return map[string]any{"Error": details}
}

// appendMessage appends newMessage to messages, first discarding a trailing
// "Data Retrieved" entry so that only the most recent data payload is kept.
// A nil newMessage leaves messages unchanged.
func appendMessage(messages []map[string]any, newMessage map[string]any) []map[string]any {
	if newMessage == nil {
		return messages
	}
	if n := len(messages); n > 0 {
		if _, hasData := messages[n-1]["Data Retrieved"]; hasData {
			messages = messages[:n-1]
		}
	}
	return append(messages, newMessage)
}

```

--------------------------------------------------------------------------------
/docs/en/resources/sources/dataplex.md:
--------------------------------------------------------------------------------

```markdown
---
title: "Dataplex"
type: docs
weight: 1
description: >
  Dataplex Universal Catalog is a unified, intelligent governance solution for data and AI assets in Google Cloud. Dataplex Universal Catalog powers AI, analytics, and business intelligence at scale.
---

# Dataplex Source

[Dataplex][dataplex-docs] Universal Catalog is a unified, intelligent governance
solution for data and AI assets in Google Cloud. Dataplex Universal Catalog
powers AI, analytics, and business intelligence at scale.

At the heart of these governance capabilities is a catalog that contains a
centralized inventory of the data assets in your organization. Dataplex
Universal Catalog holds business, technical, and runtime metadata for all of
your data. It helps you discover relationships and semantics in the metadata by
applying artificial intelligence and machine learning.

[dataplex-docs]: https://cloud.google.com/dataplex/docs

## Example

```yaml
sources:
  my-dataplex-source:
    kind: "dataplex"
    project: "my-project-id"
```

## Sample System Prompt

You can use the following system prompt as "Custom Instructions" in your client
application.

```
# Objective
Your primary objective is to help discover, organize, and manage metadata related to data assets.

# Tone and Style
1. Adopt the persona of a senior subject matter expert
2. Your communication style must be:
    1. Concise: Always favor brevity.
    2. Direct: Avoid greetings (e.g., "Hi there!", "Certainly!"). Get straight to the point.  
        Example (Incorrect): Hi there! I see that you are looking for...  
        Example (Correct): This problem likely stems from...
3. Do not reiterate or summarize the question in the answer.
4. Crucially, always convey a tone of uncertainty and caution. Since you are interpreting metadata and have no way to externally verify your answers, never express complete confidence. Frame your responses as interpretations based solely on the provided metadata. Use a suggestive tone, not a prescriptive one:
    Example (Correct): "The entry describes..."  
    Example (Correct): "According to catalog,..."  
    Example (Correct): "Based on the metadata,..."  
    Example (Correct): "Based on the search results,..."  
5. Do not make assumptions

# Data Model
## Entries
Entry represents a specific data asset. Entry acts as a metadata record for something that is managed by Catalog, such as:

- A BigQuery table or dataset
- A Cloud Storage bucket or folder
- An on-premises SQL table

## Aspects
While the Entry itself is a container, the rich descriptive information about the asset (e.g., schema, data types, business descriptions, classifications) is stored in associated components called Aspects. Aspects are created based on pre-defined blueprints known as Aspect Types.

## Aspect Types
Aspect Type is a reusable template that defines the schema for a set of metadata fields. Think of an Aspect Type as a structure for the kind of metadata that is organized in the catalog within the Entry. 

Examples:
- projects/dataplex-types/locations/global/aspectTypes/analytics-hub-exchange
- projects/dataplex-types/locations/global/aspectTypes/analytics-hub
- projects/dataplex-types/locations/global/aspectTypes/analytics-hub-listing
- projects/dataplex-types/locations/global/aspectTypes/bigquery-connection
- projects/dataplex-types/locations/global/aspectTypes/bigquery-data-policy
- projects/dataplex-types/locations/global/aspectTypes/bigquery-dataset
- projects/dataplex-types/locations/global/aspectTypes/bigquery-model
- projects/dataplex-types/locations/global/aspectTypes/bigquery-policy
- projects/dataplex-types/locations/global/aspectTypes/bigquery-routine
- projects/dataplex-types/locations/global/aspectTypes/bigquery-row-access-policy
- projects/dataplex-types/locations/global/aspectTypes/bigquery-table
- projects/dataplex-types/locations/global/aspectTypes/bigquery-view
- projects/dataplex-types/locations/global/aspectTypes/cloud-bigtable-instance
- projects/dataplex-types/locations/global/aspectTypes/cloud-bigtable-table
- projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-database
- projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-instance
- projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-table
- projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-view
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-database
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-instance
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-schema
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-table
- projects/dataplex-types/locations/global/aspectTypes/cloudsql-view
- projects/dataplex-types/locations/global/aspectTypes/contacts
- projects/dataplex-types/locations/global/aspectTypes/dataform-code-asset
- projects/dataplex-types/locations/global/aspectTypes/dataform-repository
- projects/dataplex-types/locations/global/aspectTypes/dataform-workspace
- projects/dataplex-types/locations/global/aspectTypes/dataproc-metastore-database
- projects/dataplex-types/locations/global/aspectTypes/dataproc-metastore-service
- projects/dataplex-types/locations/global/aspectTypes/dataproc-metastore-table
- projects/dataplex-types/locations/global/aspectTypes/data-product
- projects/dataplex-types/locations/global/aspectTypes/data-quality-scorecard
- projects/dataplex-types/locations/global/aspectTypes/external-connection
- projects/dataplex-types/locations/global/aspectTypes/overview
- projects/dataplex-types/locations/global/aspectTypes/pubsub-topic
- projects/dataplex-types/locations/global/aspectTypes/schema
- projects/dataplex-types/locations/global/aspectTypes/sensitive-data-protection-job-result
- projects/dataplex-types/locations/global/aspectTypes/sensitive-data-protection-profile
- projects/dataplex-types/locations/global/aspectTypes/sql-access
- projects/dataplex-types/locations/global/aspectTypes/storage-bucket
- projects/dataplex-types/locations/global/aspectTypes/storage-folder
- projects/dataplex-types/locations/global/aspectTypes/storage
- projects/dataplex-types/locations/global/aspectTypes/usage

## Entry Types
Every Entry must conform to an Entry Type. The Entry Type acts as a template, defining the structure, required aspects, and constraints for Entries of that type. 

Examples:
- projects/dataplex-types/locations/global/entryTypes/analytics-hub-exchange
- projects/dataplex-types/locations/global/entryTypes/analytics-hub-listing
- projects/dataplex-types/locations/global/entryTypes/bigquery-connection
- projects/dataplex-types/locations/global/entryTypes/bigquery-data-policy
- projects/dataplex-types/locations/global/entryTypes/bigquery-dataset
- projects/dataplex-types/locations/global/entryTypes/bigquery-model
- projects/dataplex-types/locations/global/entryTypes/bigquery-routine
- projects/dataplex-types/locations/global/entryTypes/bigquery-row-access-policy
- projects/dataplex-types/locations/global/entryTypes/bigquery-table
- projects/dataplex-types/locations/global/entryTypes/bigquery-view
- projects/dataplex-types/locations/global/entryTypes/cloud-bigtable-instance
- projects/dataplex-types/locations/global/entryTypes/cloud-bigtable-table
- projects/dataplex-types/locations/global/entryTypes/cloud-spanner-database
- projects/dataplex-types/locations/global/entryTypes/cloud-spanner-instance
- projects/dataplex-types/locations/global/entryTypes/cloud-spanner-table
- projects/dataplex-types/locations/global/entryTypes/cloud-spanner-view
- projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-database
- projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-instance
- projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-table
- projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-view
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-database
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-instance
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-schema
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-table
- projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-view
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-database
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-instance
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-schema
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-table
- projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-view
- projects/dataplex-types/locations/global/entryTypes/dataform-code-asset
- projects/dataplex-types/locations/global/entryTypes/dataform-repository
- projects/dataplex-types/locations/global/entryTypes/dataform-workspace
- projects/dataplex-types/locations/global/entryTypes/dataproc-metastore-database
- projects/dataplex-types/locations/global/entryTypes/dataproc-metastore-service
- projects/dataplex-types/locations/global/entryTypes/dataproc-metastore-table
- projects/dataplex-types/locations/global/entryTypes/pubsub-topic
- projects/dataplex-types/locations/global/entryTypes/storage-bucket
- projects/dataplex-types/locations/global/entryTypes/storage-folder
- projects/dataplex-types/locations/global/entryTypes/vertexai-dataset
- projects/dataplex-types/locations/global/entryTypes/vertexai-feature-group
- projects/dataplex-types/locations/global/entryTypes/vertexai-feature-online-store

## Entry Groups
Entries are organized within Entry Groups, which are logical groupings of Entries. An Entry Group acts as a namespace for its Entries.

## Entry Links
Entries can be linked together using EntryLinks to represent relationships between data assets (e.g. foreign keys).

# Tool instructions
## Tool: dataplex_search_entries
## General
- Do not try to search within search results on your own.
- Do not fetch multiple pages of results unless explicitly asked.

## Search syntax

### Simple search
In its simplest form, a search query consists of a single predicate. Such a predicate can match several pieces of metadata:

- A substring of a name, display name, or description of a resource
- A substring of the type of a resource
- A substring of a column name (or nested column name) in the schema of a resource
- A substring of a project ID
- A string from an overview description

For example, the predicate foo matches the following resources:
- Resource with the name foo.bar
- Resource with the display name Foo Bar
- Resource with the description This is the foo script
- Resource with the exact type foo
- Column foo_bar in the schema of a resource
- Nested column foo_bar in the schema of a resource
- Project prod-foo-bar
- Resource with an overview containing the word foo


### Qualified predicates
You can qualify a predicate by prefixing it with a key that restricts the matching to a specific piece of metadata:
- An equal sign (=) restricts the search to an exact match.
- A colon (:) after the key matches the predicate to either a substring or a token within the value in the search results.

Tokenization splits the stream of text into a series of tokens, with each token usually corresponding to a single word. For example:
- name:foo selects resources with names that contain the foo substring, like foo1 and barfoo.
- description:foo selects resources with the foo token in the description, like bar and foo.
- location=foo matches resources in a specified location with foo as the location name.

The predicate keys type, system, location, and orgid support only the exact match (=) qualifier, not the substring qualifier (:). For example, type=foo or orgid=number.

Search syntax supports the following qualifiers:
- "name:x" - Matches x as a substring of the resource ID.
- "displayname:x" - Match x as a substring of the resource display name.
- "column:x" - Matches x as a substring of the column name (or nested column name) in the schema of the resource.
- "description:x" - Matches x as a token in the resource description.
- "label:bar" - Matches BigQuery resources that have a label (with some value) and the label key has bar as a substring.
- "label=bar" - Matches BigQuery resources that have a label (with some value) and the label key equals bar as a string.
- "label:bar:x" - Matches x as a substring in the value of a label with a key bar attached to a BigQuery resource.
- "label=foo:bar" - Matches BigQuery resources where the key equals foo and the key value equals bar.
- "label.foo=bar" - Matches BigQuery resources where the key equals foo and the key value equals bar.
- "label.foo" - Matches BigQuery resources that have a label whose key equals foo as a string.
- "type=TYPE" - Matches resources of a specific entry type or its type alias.
- "projectid:bar" - Matches resources within Google Cloud projects that match bar as a substring in the ID.
- "parent:x" - Matches x as a substring of the hierarchical path of a resource. It supports the same syntax as the `name` predicate.
- "orgid=number" - Matches resources within a Google Cloud organization with the exact ID value of the number.
- "system=SYSTEM" - Matches resources from a specified system. For example, system=bigquery matches BigQuery resources.
- "location=LOCATION" - Matches resources in a specified location with an exact name. For example, location=us-central1 matches assets hosted in Iowa. BigQuery Omni assets support this qualifier by using the BigQuery Omni location name. For example, location=aws-us-east-1 matches BigQuery Omni assets in Northern Virginia.
- "createtime" - Finds resources that were created within, before, or after a given date or time. For example, "createtime:2019-01-01" matches resources created on 2019-01-01.
- "updatetime" - Finds resources that were updated within, before, or after a given date or time. For example "updatetime>2019-01-01" matches resources updated after 2019-01-01.

### Aspect Search
To search for entries based on their attached aspects, use the following query syntax.

- aspect:x - Matches x as a substring of the full path to the aspect type of an aspect that is attached to the entry, in the format projectid.location.ASPECT_TYPE_ID
- aspect=x - Matches x as the full path to the aspect type of an aspect that is attached to the entry, in the format projectid.location.ASPECT_TYPE_ID
- aspect:xOPERATORvalue - Searches for aspect field values. Matches x as a substring of the full path to the aspect type and field name of an aspect that is attached to the entry, in the format projectid.location.ASPECT_TYPE_ID.FIELD_NAME

The list of supported {OPERATOR}s depends on the type of field in the aspect, as follows:
- String: = (exact match) and : (substring)
- All number types: =, :, <, >, <=, >=, =>, =<
- Enum: =
- Datetime: same as for numbers, but the values to compare are treated as datetimes instead of numbers
- Boolean: =

Only top-level fields of the aspect are searchable. For example, all of the following queries match entries where the value of the is-enrolled field in the employee-info aspect type is true. Other entries that match on the substring are also returned.
- aspect:example-project.us-central1.employee-info.is-enrolled=true
- aspect:example-project.us-central1.employee=true
- aspect:employee=true

Example:
You can use following filters
- dataplex-types.global.bigquery-table.type={BIGLAKE_TABLE, BIGLAKE_OBJECT_TABLE, EXTERNAL_TABLE, TABLE}
- dataplex-types.global.storage.type={STRUCTURED, UNSTRUCTURED}

### Logical operators
A query can consist of several predicates with logical operators. If you don't specify an operator, logical AND is implied. For example, foo bar returns resources that match both predicate foo and predicate bar.
Logical AND and logical OR are supported. For example, foo OR bar.

You can negate a predicate with a - (hyphen) or NOT prefix. For example, -name:foo returns resources with names that don't match the predicate foo.
Logical operators are case-sensitive. `OR` and `AND` are acceptable whereas `or` and `and` are not.

### Request
1. Always try to rewrite the prompt using search syntax.

### Response
1. If there are multiple search results found
    1. Present the list of search results
    2. Format the output in nested ordered list, for example:  
    Given
    ```
    {
        results: [
            {
                name: "projects/test-project/locations/us/entryGroups/@bigquery-aws-us-east-1/entries/users"
                entrySource: {
                displayName: "Users"
                description: "Table contains list of users."
                location: "aws-us-east-1"
                system: "BigQuery"
                }
            },
            {
                name: "projects/another_project/locations/us-central1/entryGroups/@bigquery/entries/top_customers"
                entrySource: {
                displayName: "Top customers",
                description: "Table contains list of best customers."
                location: "us-central1"
                system: "BigQuery"
                }
            },
        ]
    }
    ```
    Return output formatted as markdown nested list:
    ```
    * Users:
        - projectId: test_project
        - location: aws-us-east-1
        - description: Table contains list of users.
    * Top customers:
        - projectId: another_project
        - location: us-central1
        - description: Table contains list of best customers.
    ```
    3. Ask to select one of the presented search results
2. If there is only one search result found
    1. Present the search result immediately.
3. If there are no search result found
    1. Explain that no search result was found
    2. Suggest to provide a more specific search query.

## Tool: dataplex_lookup_entry
### Request
1. Always try to limit the size of the response by specifying the `aspect_types` parameter. Make sure to select view=CUSTOM when using the `aspect_types` parameter. If you do not know the name of the aspect type, use the `dataplex_search_aspect_types` tool.
2. If you do not know the name of the entry, use `dataplex_search_entries` tool
### Response
1. Unless asked for a specific aspect, respond with all aspects attached to the entry.
```

## Reference

| **field** | **type** | **required** | **description**                                                                  |
|-----------|:--------:|:------------:|----------------------------------------------------------------------------------|
| kind      |  string  |     true     | Must be "dataplex".                                                              |
| project   |  string  |     true     | ID of the GCP project used for quota and billing purposes (e.g. "my-project-id").|
```

--------------------------------------------------------------------------------
/tests/http/http_integration_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package http

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
	"reflect"
	"regexp"
	"strings"
	"testing"
	"time"

	"github.com/googleapis/genai-toolbox/internal/testutils"
	"github.com/googleapis/genai-toolbox/internal/tools"
	"github.com/googleapis/genai-toolbox/tests"
)

var (
	HttpSourceKind = "http"
	HttpToolKind   = "http"
)

// getHTTPSourceConfig builds an HTTP source configuration whose requests carry
// a Google ID token in the Authorization header. The test fails immediately if
// a token cannot be obtained.
func getHTTPSourceConfig(t *testing.T) map[string]any {
	token, err := tests.GetGoogleIdToken(tests.ClientId)
	if err != nil {
		t.Fatalf("error getting ID token: %s", err)
	}

	return map[string]any{
		"kind":    HttpSourceKind,
		"headers": map[string]string{"Authorization": "Bearer " + token},
	}
}

// handler function for the test server
func multiTool(w http.ResponseWriter, r *http.Request) {
	path := r.URL.Path
	path = strings.TrimPrefix(path, "/") // Remove leading slash

	switch path {
	case "tool0":
		handleTool0(w, r)
	case "tool1":
		handleTool1(w, r)
	case "tool1id":
		handleTool1Id(w, r)
	case "tool1name":
		handleTool1Name(w, r)
	case "tool2":
		handleTool2(w, r)
	case "tool3":
		handleTool3(w, r)
	case "toolQueryTest":
		handleQueryTest(w, r)
	default:
		http.NotFound(w, r) // Return 404 for unknown paths
	}
}

// handleQueryTest simply returns the raw query string it received so the test
// can verify it's formatted correctly.
func handleQueryTest(w http.ResponseWriter, r *http.Request) {
    // expect GET method
    if r.Method != http.MethodGet {
        errorMessage := fmt.Sprintf("expected GET method but got: %s", string(r.Method))
        http.Error(w, errorMessage, http.StatusBadRequest)
        return
    }

    w.WriteHeader(http.StatusOK)
    enc := json.NewEncoder(w)
    enc.SetEscapeHTML(false) 

    err := enc.Encode(r.URL.RawQuery)
    if err != nil {
        http.Error(w, "Failed to write response", http.StatusInternalServerError)
        return
    }
}

// handler function for the test server
func handleTool0(w http.ResponseWriter, r *http.Request) {
	// expect POST method
	if r.Method != http.MethodPost {
		errorMessage := fmt.Sprintf("expected POST method but got: %s", string(r.Method))
		http.Error(w, errorMessage, http.StatusBadRequest)
		return
	}
	w.WriteHeader(http.StatusOK)
	response := "hello world"
	err := json.NewEncoder(w).Encode(response)
	if err != nil {
		http.Error(w, "Failed to encode JSON", http.StatusInternalServerError)
		return
	}
}

// handler function for the test server
func handleTool1(w http.ResponseWriter, r *http.Request) {
	// expect GET method
	if r.Method != http.MethodGet {
		errorMessage := fmt.Sprintf("expected GET method but got: %s", string(r.Method))
		http.Error(w, errorMessage, http.StatusBadRequest)
		return
	}
	// Parse request body
	var requestBody map[string]interface{}
	bodyBytes, readErr := io.ReadAll(r.Body)
	if readErr != nil {
		http.Error(w, "Bad Request: Failed to read request body", http.StatusBadRequest)
		return
	}
	defer r.Body.Close()
	err := json.Unmarshal(bodyBytes, &requestBody)
	if err != nil {
		errorMessage := fmt.Sprintf("Bad Request: Error unmarshalling request body: %s, Raw body: %s", err, string(bodyBytes))
		http.Error(w, errorMessage, http.StatusBadRequest)
		return
	}

	// Extract name
	name, ok := requestBody["name"].(string)
	if !ok || name == "" {
		http.Error(w, "Bad Request: Missing or invalid name", http.StatusBadRequest)
		return
	}

	if name == "Alice" {
		response := `[{"id":1,"name":"Alice"},{"id":3,"name":"Sid"}]`
		_, err := w.Write([]byte(response))
		if err != nil {
			http.Error(w, "Failed to write response", http.StatusInternalServerError)
		}
		return
	}

	http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
}

// handler function for the test server
func handleTool1Id(w http.ResponseWriter, r *http.Request) {
	// expect GET method
	if r.Method != http.MethodGet {
		errorMessage := fmt.Sprintf("expected GET method but got: %s", string(r.Method))
		http.Error(w, errorMessage, http.StatusBadRequest)
		return
	}

	id := r.URL.Query().Get("id")
	if id == "4" {
		response := `[{"id":4,"name":null}]`
		_, err := w.Write([]byte(response))
		if err != nil {
			http.Error(w, "Failed to write response", http.StatusInternalServerError)
		}
		return
	}
	http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
}

// handler function for the test server
func handleTool1Name(w http.ResponseWriter, r *http.Request) {
	// expect GET method
	if r.Method != http.MethodGet {
		errorMessage := fmt.Sprintf("expected GET method but got: %s", string(r.Method))
		http.Error(w, errorMessage, http.StatusBadRequest)
		return
	}

	if !r.URL.Query().Has("name") {
		response := "null"
		_, err := w.Write([]byte(response))
		if err != nil {
			http.Error(w, "Failed to write response", http.StatusInternalServerError)
		}
		return
	}
	http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
}

// handler function for the test server
func handleTool2(w http.ResponseWriter, r *http.Request) {
	// expect GET method
	if r.Method != http.MethodGet {
		errorMessage := fmt.Sprintf("expected GET method but got: %s", string(r.Method))
		http.Error(w, errorMessage, http.StatusBadRequest)
		return
	}
	email := r.URL.Query().Get("email")
	if email != "" {
		response := `[{"name":"Alice"}]`
		_, err := w.Write([]byte(response))
		if err != nil {
			http.Error(w, "Failed to write response", http.StatusInternalServerError)
		}
		return
	}

	http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
}

// handler function for the test server
func handleTool3(w http.ResponseWriter, r *http.Request) {
	// expect GET method
	if r.Method != http.MethodGet {
		errorMessage := fmt.Sprintf("expected GET method but got: %s", string(r.Method))
		http.Error(w, errorMessage, http.StatusBadRequest)
		return
	}

	// Check request headers
	expectedHeaders := map[string]string{
		"Content-Type":    "application/json",
		"X-Custom-Header": "example",
		"X-Other-Header":  "test",
	}
	for header, expectedValue := range expectedHeaders {
		if r.Header.Get(header) != expectedValue {
			errorMessage := fmt.Sprintf("Bad Request: Missing or incorrect header: %s", header)
			http.Error(w, errorMessage, http.StatusBadRequest)
			return
		}
	}

	// Check query parameters
	expectedQueryParams := map[string][]string{
		"id":      []string{"2", "1", "3"},
		"country": []string{"US"},
	}
	query := r.URL.Query()
	for param, expectedValueSlice := range expectedQueryParams {
		values, ok := query[param]
		if ok {
			if !reflect.DeepEqual(expectedValueSlice, values) {
				errorMessage := fmt.Sprintf("Bad Request: Incorrect query parameter: %s, actual: %s", param, query[param])
				http.Error(w, errorMessage, http.StatusBadRequest)
				return
			}
		} else {
			errorMessage := fmt.Sprintf("Bad Request: Missing query parameter: %s, actual: %s", param, query[param])
			http.Error(w, errorMessage, http.StatusBadRequest)
			return
		}
	}

	// Parse request body
	var requestBody map[string]interface{}
	bodyBytes, readErr := io.ReadAll(r.Body)
	if readErr != nil {
		http.Error(w, "Bad Request: Failed to read request body", http.StatusBadRequest)
		return
	}
	defer r.Body.Close()
	err := json.Unmarshal(bodyBytes, &requestBody)
	if err != nil {
		errorMessage := fmt.Sprintf("Bad Request: Error unmarshalling request body: %s, Raw body: %s", err, string(bodyBytes))
		http.Error(w, errorMessage, http.StatusBadRequest)
		return
	}

	// Check request body
	expectedBody := map[string]interface{}{
		"place":   "zoo",
		"animals": []any{"rabbit", "ostrich", "whale"},
	}

	if !reflect.DeepEqual(requestBody, expectedBody) {
		errorMessage := fmt.Sprintf("Bad Request: Incorrect request body. Expected: %v, Got: %v", expectedBody, requestBody)
		http.Error(w, errorMessage, http.StatusBadRequest)
		return
	}

	response := "hello world"
	err = json.NewEncoder(w).Encode(response)
	if err != nil {
		http.Error(w, "Failed to encode JSON", http.StatusInternalServerError)
		return
	}
}

// TestHttpToolEndpoints spins up an in-process HTTP test server (multiTool),
// points a toolbox instance at it via a generated tools config, waits for the
// toolbox to report readiness, and then runs the shared and HTTP-specific
// tool-invocation test suites against it.
func TestHttpToolEndpoints(t *testing.T) {
	// start a test server
	server := httptest.NewServer(http.HandlerFunc(multiTool))
	defer server.Close()

	// Point the HTTP source at the just-started test server.
	sourceConfig := getHTTPSourceConfig(t)
	sourceConfig["baseUrl"] = server.URL
	ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
	defer cancel()

	var args []string

	// Launch the toolbox with a config that defines the test tools.
	toolsFile := getHTTPToolsConfig(sourceConfig, HttpToolKind)
	cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
	if err != nil {
		t.Fatalf("command initialization returned an error: %s", err)
	}
	defer cleanup()

	// Block until the toolbox logs readiness (or time out after 10s).
	waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
	if err != nil {
		t.Logf("toolbox command logs: \n%s", out)
		t.Fatalf("toolbox didn't start successfully: %s", err)
	}

	// Run tests
	tests.RunToolGetTest(t)
	tests.RunToolInvokeTest(t, `"hello world"`, tests.DisableArrayTest())
	runAdvancedHTTPInvokeTest(t)
	runQueryParamInvokeTest(t)
}

// runQueryParamInvokeTest runs the tool invoke endpoint for the query param test tool
func runQueryParamInvokeTest(t *testing.T) {
	invokeTcs := []struct {
		name        string
		api         string
		requestBody io.Reader
		want        string
		isErr       bool
	}{
		{
			name:        "invoke query-param-tool (optional omitted)",
			api:         "http://127.0.0.1:5000/api/tool/my-query-param-tool/invoke",
			requestBody: bytes.NewBuffer([]byte(`{"reqId": "test1"}`)),
			want:        `"reqId=test1"`,
		},
		{
			name:        "invoke query-param-tool (some optional nil)",
			api:         "http://127.0.0.1:5000/api/tool/my-query-param-tool/invoke",
			requestBody: bytes.NewBuffer([]byte(`{"reqId": "test2", "page": "5", "filter": null}`)),
			want:        `"page=5\u0026reqId=test2"`, // 'filter' omitted
		},
		{
			name:        "invoke query-param-tool (some optional absent)",
			api:         "http://127.0.0.1:5000/api/tool/my-query-param-tool/invoke",
			requestBody: bytes.NewBuffer([]byte(`{"reqId": "test2", "page": "5"}`)),
			want:        `"page=5\u0026reqId=test2"`, // 'filter' omitted
		},
		{
			name:        "invoke query-param-tool (required param nil)",
			api:         "http://127.0.0.1:5000/api/tool/my-query-param-tool/invoke",
			requestBody: bytes.NewBuffer([]byte(`{"reqId": null, "page": "1"}`)),
			want:        `"page=1\u0026reqId="`, // reqId becomes "",
		},
	}
	for _, tc := range invokeTcs {
		t.Run(tc.name, func(t *testing.T) {
			// Send Tool invocation request
			req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")

			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()

			if resp.StatusCode != http.StatusOK {
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
			}

			// Check response body
			var body map[string]interface{}
			err = json.NewDecoder(resp.Body).Decode(&body)
			if err != nil {
				t.Fatalf("error parsing response body: %v", err)
			}
			got, ok := body["result"].(string)
			if !ok {
				bodyBytes, _ := json.Marshal(body)
				t.Fatalf("unable to find result in response body, got: %s", string(bodyBytes))
			}

			if got != tc.want {
				t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
			}
		})
	}
}

// runAdvancedHTTPInvokeTest tests the HTTP tool invoke endpoint for the
// advanced tool configuration, which combines path, query, header, and
// body parameters in a single request.
func runAdvancedHTTPInvokeTest(t *testing.T) {
	// Test HTTP tool invoke endpoint
	invokeTcs := []struct {
		name          string
		api           string
		requestHeader map[string]string
		requestBody   io.Reader
		want          string
		isErr         bool // true when the server is expected to reject the request
	}{
		{
			name:          "invoke my-advanced-tool",
			api:           "http://127.0.0.1:5000/api/tool/my-advanced-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(`{"animalArray": ["rabbit", "ostrich", "whale"], "id": 3, "path": "tool3", "country": "US", "X-Other-Header": "test"}`)),
			want:          `"hello world"`,
			isErr:         false,
		},
		{
			name:          "invoke my-advanced-tool with wrong params",
			api:           "http://127.0.0.1:5000/api/tool/my-advanced-tool/invoke",
			requestHeader: map[string]string{},
			requestBody:   bytes.NewBuffer([]byte(`{"animalArray": ["rabbit", "ostrich", "whale"], "id": 4, "path": "tool3", "country": "US", "X-Other-Header": "test"}`)),
			isErr:         true,
		},
	}
	for _, tc := range invokeTcs {
		t.Run(tc.name, func(t *testing.T) {
			// Send Tool invocation request
			req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")
			for k, v := range tc.requestHeader {
				req.Header.Add(k, v)
			}
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()

			if resp.StatusCode != http.StatusOK {
				// A non-200 status is the expected outcome for error cases.
				if tc.isErr {
					return
				}
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
			}
			if tc.isErr {
				t.Fatalf("expected an error response, but got status 200")
			}

			// Check response body
			var body map[string]interface{}
			err = json.NewDecoder(resp.Body).Decode(&body)
			if err != nil {
				t.Fatalf("error parsing response body: %v", err)
			}
			got, ok := body["result"].(string)
			if !ok {
				bodyBytes, _ := json.Marshal(body)
				t.Fatalf("unable to find result in response body, got: %s", string(bodyBytes))
			}

			if got != tc.want {
				t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
			}
		})
	}
}

// getHTTPToolsConfig returns a mock HTTP tool's config file.
//
// It builds a tools-file map containing two sources (the provided
// sourceConfig plus a copy with extra default headers/query params), a
// Google auth service, and a set of tools of the given toolKind that
// exercise query, path, header, and body parameter handling.
func getHTTPToolsConfig(sourceConfig map[string]any, toolKind string) map[string]any {
	// Write config into a file and pass it to command.
	// Copy sourceConfig so the caller's map is not mutated.
	otherSourceConfig := make(map[string]any)
	for k, v := range sourceConfig {
		otherSourceConfig[k] = v
	}
	// Source-level defaults; tool-level values are expected to override these.
	otherSourceConfig["headers"] = map[string]string{"X-Custom-Header": "unexpected", "Content-Type": "application/json"}
	otherSourceConfig["queryParams"] = map[string]any{"id": 1, "name": "Sid"}

	toolsFile := map[string]any{
		"sources": map[string]any{
			"my-instance":    sourceConfig,
			"other-instance": otherSourceConfig,
		},
		"authServices": map[string]any{
			"my-google-auth": map[string]any{
				"kind":     "google",
				"clientId": tests.ClientId,
			},
		},
		"tools": map[string]any{
			"my-simple-tool": map[string]any{
				"kind":        toolKind,
				"path":        "/tool0",
				"method":      "POST",
				"source":      "my-instance",
				"requestBody": "{}",
				"description": "Simple tool to test end to end functionality.",
			},
			"my-tool": map[string]any{
				"kind":        toolKind,
				"source":      "my-instance",
				"method":      "GET",
				"path":        "/tool1",
				"description": "some description",
				"queryParams": []tools.Parameter{
					tools.NewIntParameter("id", "user ID")},
				"requestBody": `{
"age": 36,
"name": "{{.name}}"
}
`,
				"bodyParams": []tools.Parameter{tools.NewStringParameter("name", "user name")},
				"headers":    map[string]string{"Content-Type": "application/json"},
			},
			"my-tool-by-id": map[string]any{
				"kind":        toolKind,
				"source":      "my-instance",
				"method":      "GET",
				"path":        "/tool1id",
				"description": "some description",
				"queryParams": []tools.Parameter{
					tools.NewIntParameter("id", "user ID")},
				"headers": map[string]string{"Content-Type": "application/json"},
			},
			"my-tool-by-name": map[string]any{
				"kind":        toolKind,
				"source":      "my-instance",
				"method":      "GET",
				"path":        "/tool1name",
				"description": "some description",
				"queryParams": []tools.Parameter{
					tools.NewStringParameterWithRequired("name", "user name", false)},
				"headers": map[string]string{"Content-Type": "application/json"},
			},
			"my-query-param-tool": map[string]any{
				"kind":        toolKind,
				"source":      "my-instance",
				"method":      "GET",
				"path":        "/toolQueryTest",
				"description": "Tool to test optional query parameters.",
				"queryParams": []tools.Parameter{
					tools.NewStringParameterWithRequired("reqId", "required ID", true),
					tools.NewStringParameterWithRequired("page", "optional page number", false),
					tools.NewStringParameterWithRequired("filter", "optional filter string", false),
				},
			},
			"my-auth-tool": map[string]any{
				"kind":        toolKind,
				"source":      "my-instance",
				"method":      "GET",
				"path":        "/tool2",
				"description": "some description",
				"requestBody": "{}",
				"queryParams": []tools.Parameter{
					tools.NewStringParameterWithAuth("email", "some description",
						[]tools.ParamAuthService{{Name: "my-google-auth", Field: "email"}}),
				},
			},
			"my-auth-required-tool": map[string]any{
				"kind":         toolKind,
				"source":       "my-instance",
				"method":       "POST",
				"path":         "/tool0",
				"description":  "some description",
				"requestBody":  "{}",
				"authRequired": []string{"my-google-auth"},
			},
			// Exercises path params, query params, header params, and body
			// params together against the source-level defaults above.
			"my-advanced-tool": map[string]any{
				"kind":        toolKind,
				"source":      "other-instance",
				"method":      "get",
				"path":        "/{{.path}}?id=2",
				"description": "some description",
				"headers": map[string]string{
					"X-Custom-Header": "example",
				},
				"pathParams": []tools.Parameter{
					&tools.StringParameter{
						CommonParameter: tools.CommonParameter{Name: "path", Type: "string", Desc: "path param"},
					},
				},
				"queryParams": []tools.Parameter{
					tools.NewIntParameter("id", "user ID"), tools.NewStringParameter("country", "country"),
				},
				"requestBody": `{
					"place": "zoo",
					"animals": {{json .animalArray }}
					}
					`,
				"bodyParams":   []tools.Parameter{tools.NewArrayParameter("animalArray", "animals in the zoo", tools.NewStringParameter("animals", "desc"))},
				"headerParams": []tools.Parameter{tools.NewStringParameter("X-Other-Header", "custom header")},
			},
		},
	}
	return toolsFile
}
```

--------------------------------------------------------------------------------
/internal/prebuiltconfigs/tools/alloydb-postgres-observability.yaml:
--------------------------------------------------------------------------------

```yaml
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
sources:
  cloud-monitoring-source:
    kind: cloud-monitoring
tools:
  get_system_metrics:
    kind: cloud-monitoring-query-prometheus
    source: cloud-monitoring-source
    description: |
      Fetches system level cloudmonitoring data (timeseries metrics) for an AlloyDB cluster, instance.
      To use this tool, you must provide the Google Cloud `projectId` and a PromQL `query`.

      Generate the PromQL `query` for AlloyDB system metrics using the provided metrics and rules. Get labels like `cluster_id` and `instance_id` from the user's intent.

      Defaults:
      1. Interval: Use a default interval of `5m` for `_over_time` aggregation functions unless a different window is specified by the user.

      PromQL Query Examples:
      1. Basic Time Series: `avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance"}[5m])`
      2. Top K: `topk(30, avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance"}[5m]))`
      3. Mean: `avg(avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="my-instance","cluster_id"="my-cluster"}[5m]))`
      4. Minimum: `min(min_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
      5. Maximum: `max(max_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
      6. Sum: `sum(avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
      7. Count streams: `count(avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
      8. Percentile with groupby on instanceid, clusterid: `quantile by ("instance_id","cluster_id")(0.99,avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","cluster_id"="my-cluster","instance_id"="my-instance"}[5m]))`

      Available Metrics List: metricname. description. monitored resource. labels
      1. `alloydb.googleapis.com/instance/cpu/average_utilization`: The percentage of CPU being used on an instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
      2. `alloydb.googleapis.com/instance/cpu/maximum_utilization`: Maximum CPU utilization across all currently serving nodes of the instance from 0 to 100. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
      3. `alloydb.googleapis.com/cluster/storage/usage`: The total AlloyDB storage in bytes across the entire cluster. `alloydb.googleapis.com/Cluster`. `cluster_id`.
      4. `alloydb.googleapis.com/instance/postgres/replication/replicas`: The number of read replicas connected to the primary instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `state`, `replica_instance_id`.
      5. `alloydb.googleapis.com/instance/postgres/replication/maximum_lag`: The maximum replication time lag calculated across all serving read replicas of the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `replica_instance_id`.
      6. `alloydb.googleapis.com/instance/memory/min_available_memory`: The minimum available memory across all currently serving nodes of the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
      7. `alloydb.googleapis.com/instance/postgres/instances`: The number of nodes in the instance, along with their status, which can be either up or down. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `status`.
      8. `alloydb.googleapis.com/database/postgresql/tuples`: Number of tuples (rows) by state per database in the instance. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`, `state`.
      9. `alloydb.googleapis.com/database/postgresql/temp_bytes_written_for_top_databases`: The total amount of data(in bytes) written to temporary files by the queries per database for top 500 dbs. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
      10. `alloydb.googleapis.com/database/postgresql/temp_files_written_for_top_databases`: The number of temporary files used for writing data per database while performing internal algorithms like join, sort etc for top 500 dbs. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
      11. `alloydb.googleapis.com/database/postgresql/inserted_tuples_count_for_top_databases`: The total number of rows inserted per db for top 500 dbs as a result of the queries in the instance. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
      12. `alloydb.googleapis.com/database/postgresql/updated_tuples_count_for_top_databases`: The total number of rows updated per db for top 500 dbs as a result of the queries in the instance. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
      13. `alloydb.googleapis.com/database/postgresql/deleted_tuples_count_for_top_databases`: The total number of rows deleted per db for top 500 dbs as a result of the queries in the instance. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
      14. `alloydb.googleapis.com/database/postgresql/backends_for_top_databases`: The current number of connections per database to the instance for top 500 dbs. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
      15. `alloydb.googleapis.com/instance/postgresql/backends_by_state`: The current number of connections to the instance grouped by the state like idle, active, idle_in_transaction, idle_in_transaction_aborted, disabled, and fastpath_function_call. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `state`.
      16. `alloydb.googleapis.com/instance/postgresql/backends_for_top_applications`: The current number of connections to the AlloyDB instance, grouped by applications for top 500 applications. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `application_name`.
      17. `alloydb.googleapis.com/database/postgresql/new_connections_for_top_databases`: Total number of new connections added per database for top 500 databases to the instance. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
      18. `alloydb.googleapis.com/database/postgresql/deadlock_count_for_top_databases`: Total number of deadlocks detected in the instance per database for top 500 dbs. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
      19. `alloydb.googleapis.com/database/postgresql/statements_executed_count`: Total count of statements executed in the instance per database per operation_type. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`, `operation_type`.
      20. `alloydb.googleapis.com/instance/postgresql/returned_tuples_count`: Number of rows scanned while processing the queries in the instance since the last sample. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
      21. `alloydb.googleapis.com/instance/postgresql/fetched_tuples_count`: Number of rows fetched while processing the queries in the instance since the last sample. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
      22. `alloydb.googleapis.com/instance/postgresql/updated_tuples_count`: Number of rows updated while processing the queries in the instance since the last sample. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
      23. `alloydb.googleapis.com/instance/postgresql/inserted_tuples_count`: Number of rows inserted while processing the queries in the instance since the last sample. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
      24. `alloydb.googleapis.com/instance/postgresql/deleted_tuples_count`: Number of rows deleted while processing the queries in the instance since the last sample. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
      25. `alloydb.googleapis.com/instance/postgresql/written_tuples_count`: Number of rows written while processing the queries in the instance since the last sample. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
      26. `alloydb.googleapis.com/instance/postgresql/deadlock_count`: Number of deadlocks detected in the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
      27. `alloydb.googleapis.com/instance/postgresql/blks_read`: Number of blocks read by Postgres that were not in the buffer cache. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
      28. `alloydb.googleapis.com/instance/postgresql/blks_hit`: Number of times Postgres found the requested block in the buffer cache. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
      29. `alloydb.googleapis.com/instance/postgresql/temp_bytes_written_count`: The total amount of data(in bytes) written to temporary files by the queries while performing internal algorithms like join, sort etc. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
      30. `alloydb.googleapis.com/instance/postgresql/temp_files_written_count`: The number of temporary files used for writing data in the instance while performing internal algorithms like join, sort etc. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
      31. `alloydb.googleapis.com/instance/postgresql/new_connections_count`: The number of new connections added to the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
      32. `alloydb.googleapis.com/instance/postgresql/wait_count`: Total number of times processes waited for each wait event in the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `wait_event_type`, `wait_event_name`.
      33. `alloydb.googleapis.com/instance/postgresql/wait_time`: Total elapsed wait time for each wait event in the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `wait_event_type`, `wait_event_name`.
      34. `alloydb.googleapis.com/instance/postgres/transaction_count`: The number of committed and rolled back transactions across all serving nodes of the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.

  get_query_metrics:
    kind: cloud-monitoring-query-prometheus
    source: cloud-monitoring-source
    description: |
      Fetches query level cloudmonitoring data (timeseries metrics) for queries running in an AlloyDB instance.
      To use this tool, you must provide the Google Cloud `projectId` and a PromQL `query`.

      Generate the PromQL `query` for AlloyDB query metrics using the provided metrics and rules. Get labels like `cluster_id`, `instance_id`, and `query_hash` from the user's intent. If `query_hash` is provided, use the per-query metrics.

      Defaults:
      1. Interval: Use a default interval of `5m` for `_over_time` aggregation functions unless a different window is specified by the user.

      PromQL Query Examples:
      1. Basic Time Series: `avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance"}[5m])`
      2. Top K: `topk(30, avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance"}[5m]))`
      3. Mean: `avg(avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="my-instance","cluster_id"="my-cluster"}[5m]))`
      4. Minimum: `min(min_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
      5. Maximum: `max(max_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
      6. Sum: `sum(avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
      7. Count streams: `count(avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
      8. Percentile with groupby on instanceid, clusterid: `quantile by ("instance_id","cluster_id")(0.99,avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","cluster_id"="my-cluster","instance_id"="my-instance"}[5m]))`

      Available Metrics List: metricname. description. monitored resource. labels. aggregate is the aggregated values for all query stats, Use aggregate metrics if query id is not provided. For perquery metrics do not fetch querystring unless specified by user specifically. Have the aggregation on query hash to avoid fetching the querystring. Do not use latency metrics for anything.
      1. `alloydb.googleapis.com/database/postgresql/insights/aggregate/latencies`: Aggregated query latency distribution. `alloydb.googleapis.com/Database`. `user`, `client_addr`.
      2. `alloydb.googleapis.com/database/postgresql/insights/aggregate/execution_time`: Accumulated aggregated query execution time since the last sample. `alloydb.googleapis.com/Database`. `user`, `client_addr`.
      3. `alloydb.googleapis.com/database/postgresql/insights/aggregate/io_time`: Accumulated aggregated IO time since the last sample. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `io_type`.
      4. `alloydb.googleapis.com/database/postgresql/insights/aggregate/lock_time`: Accumulated aggregated lock wait time since the last sample. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `lock_type`.
      5. `alloydb.googleapis.com/database/postgresql/insights/aggregate/row_count`: Aggregated number of retrieved or affected rows since the last sample. `alloydb.googleapis.com/Database`. `user`, `client_addr`.
      6. `alloydb.googleapis.com/database/postgresql/insights/aggregate/shared_blk_access_count`: Aggregated shared blocks accessed by statement execution. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `access_type`.
      7. `alloydb.googleapis.com/database/postgresql/insights/perquery/latencies`: Per query latency distribution. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `querystring`, `query_hash`.
      8. `alloydb.googleapis.com/database/postgresql/insights/perquery/execution_time`: Accumulated execution times per user per database per query. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `querystring`, `query_hash`.
      9. `alloydb.googleapis.com/database/postgresql/insights/perquery/io_time`: Accumulated IO time since the last sample per query. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `io_type`, `querystring`, `query_hash`.
      10. `alloydb.googleapis.com/database/postgresql/insights/perquery/lock_time`: Accumulated lock wait time since the last sample per query. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `lock_type`, `querystring`, `query_hash`.
      11. `alloydb.googleapis.com/database/postgresql/insights/perquery/row_count`: The number of retrieved or affected rows since the last sample per query. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `querystring`, `query_hash`.
      12. `alloydb.googleapis.com/database/postgresql/insights/perquery/shared_blk_access_count`: Shared blocks accessed by statement execution per query. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `access_type`, `querystring`, `query_hash`.
      13. `alloydb.googleapis.com/database/postgresql/insights/pertag/latencies`: Query latency distribution. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `tag_hash`.
      14. `alloydb.googleapis.com/database/postgresql/insights/pertag/execution_time`: Accumulated execution times since the last sample. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `tag_hash`.
      15. `alloydb.googleapis.com/database/postgresql/insights/pertag/io_time`: Accumulated IO time since the last sample per tag. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `io_type`, `tag_hash`.
      16. `alloydb.googleapis.com/database/postgresql/insights/pertag/lock_time`: Accumulated lock wait time since the last sample per tag. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `lock_type`, `tag_hash`.
      17. `alloydb.googleapis.com/database/postgresql/insights/pertag/shared_blk_access_count`: Shared blocks accessed by statement execution per tag. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `access_type`, `tag_hash`.
      18. `alloydb.googleapis.com/database/postgresql/insights/pertag/row_count`: The number of retrieved or affected rows since the last sample per tag. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `tag_hash`.

toolsets:
  alloydb_postgres_cloud_monitoring_tools:
    - get_system_metrics
    - get_query_metrics

```

--------------------------------------------------------------------------------
/internal/tools/bigquery/bigquerycommon/table_name_parser_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package bigquerycommon_test

import (
	"sort"
	"strings"
	"testing"

	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerycommon"
)

// TestTableParser is a table-driven test for bigquerycommon.TableParser.
// Per the expectations below, the parser extracts the distinct, fully
// qualified table names referenced by a BigQuery SQL string, qualifies
// partial "dataset.table" references with defaultProjectID (erroring when no
// default is available), ignores tables mentioned only inside comments and
// string literals, and rejects statements that cannot be safely analyzed
// (EXECUTE IMMEDIATE, CALL, dataset-level DDL, CREATE PROCEDURE/FUNCTION).
func TestTableParser(t *testing.T) {
	testCases := []struct {
		name             string   // scenario description
		sql              string   // input SQL fed to TableParser
		defaultProjectID string   // project used to qualify partial references; may be empty
		want             []string // expected fully qualified table names (compared order-independently)
		wantErr          bool     // whether TableParser is expected to return an error
		wantErrMsg       string   // substring the error must contain (checked only when non-empty)
	}{
		{
			name:             "single fully qualified table",
			sql:              "SELECT * FROM `my-project.my_dataset.my_table`",
			defaultProjectID: "default-proj",
			want:             []string{"my-project.my_dataset.my_table"},
			wantErr:          false,
		},
		{
			name:             "multiple statements with same table",
			sql:              "select * from proj1.data1.tbl1 limit 1; select A.b from proj1.data1.tbl1 as A limit 1;",
			defaultProjectID: "default-proj",
			want:             []string{"proj1.data1.tbl1"},
			wantErr:          false,
		},
		{
			name:             "multiple fully qualified tables",
			sql:              "SELECT * FROM `proj1.data1`.`tbl1` JOIN proj2.`data2.tbl2` ON id",
			defaultProjectID: "default-proj",
			want:             []string{"proj1.data1.tbl1", "proj2.data2.tbl2"},
			wantErr:          false,
		},
		{
			name:             "duplicate tables",
			sql:              "SELECT * FROM `proj1.data1.tbl1` JOIN proj1.data1.tbl1 ON id",
			defaultProjectID: "default-proj",
			want:             []string{"proj1.data1.tbl1"},
			wantErr:          false,
		},
		{
			name:             "partial table with default project",
			sql:              "SELECT * FROM `my_dataset`.my_table",
			defaultProjectID: "default-proj",
			want:             []string{"default-proj.my_dataset.my_table"},
			wantErr:          false,
		},
		{
			// A two-part reference with no default project cannot be resolved.
			name:             "partial table without default project",
			sql:              "SELECT * FROM `my_dataset.my_table`",
			defaultProjectID: "",
			want:             nil,
			wantErr:          true,
		},
		{
			name:             "mixed fully qualified and partial tables",
			sql:              "SELECT t1.*, t2.* FROM `proj1.data1.tbl1` AS t1 JOIN `data2.tbl2` AS t2 ON t1.id = t2.id",
			defaultProjectID: "default-proj",
			want:             []string{"proj1.data1.tbl1", "default-proj.data2.tbl2"},
			wantErr:          false,
		},
		{
			name:             "no tables",
			sql:              "SELECT 1+1",
			defaultProjectID: "default-proj",
			want:             []string{},
			wantErr:          false,
		},
		{
			name:             "ignore single part identifiers (like CTEs)",
			sql:              "WITH my_cte AS (SELECT 1) SELECT * FROM `my_cte`",
			defaultProjectID: "default-proj",
			want:             []string{},
			wantErr:          false,
		},
		{
			name:             "complex CTE",
			sql:              "WITH cte1 AS (SELECT * FROM `real.table.one`), cte2 AS (SELECT * FROM cte1) SELECT * FROM cte2 JOIN `real.table.two` ON true",
			defaultProjectID: "default-proj",
			want:             []string{"real.table.one", "real.table.two"},
			wantErr:          false,
		},
		{
			name:             "nested subquery should be parsed",
			sql:              "SELECT * FROM (SELECT a FROM (SELECT A.b FROM `real.table.nested` AS A))",
			defaultProjectID: "default-proj",
			want:             []string{"real.table.nested"},
			wantErr:          false,
		},
		{
			name:             "from clause with unnest",
			sql:              "SELECT event.name FROM `my-project.my_dataset.my_table` AS A, UNNEST(A.events) AS event",
			defaultProjectID: "default-proj",
			want:             []string{"my-project.my_dataset.my_table"},
			wantErr:          false,
		},
		{
			// Identifiers with 4+ dot-separated parts are not table names.
			name:             "ignore more than 3 parts",
			sql:              "SELECT * FROM `proj.data.tbl.col`",
			defaultProjectID: "default-proj",
			want:             []string{},
			wantErr:          false,
		},
		{
			name:             "complex query",
			sql:              "SELECT name FROM (SELECT name FROM `proj1.data1.tbl1`) UNION ALL SELECT name FROM `data2.tbl2`",
			defaultProjectID: "default-proj",
			want:             []string{"proj1.data1.tbl1", "default-proj.data2.tbl2"},
			wantErr:          false,
		},
		{
			name:             "empty sql",
			sql:              "",
			defaultProjectID: "default-proj",
			want:             []string{},
			wantErr:          false,
		},
		{
			// Tables named only inside -- or # comments must not be reported.
			name:             "with comments",
			sql:              "SELECT * FROM `proj1.data1.tbl1`; -- comment `fake.table.one` \n SELECT * FROM `proj2.data2.tbl2`; # comment `fake.table.two`",
			defaultProjectID: "default-proj",
			want:             []string{"proj1.data1.tbl1", "proj2.data2.tbl2"},
			wantErr:          false,
		},
		{
			name:             "multi-statement with semicolon",
			sql:              "SELECT * FROM `proj1.data1.tbl1`; SELECT * FROM `proj2.data2.tbl2`",
			defaultProjectID: "default-proj",
			want:             []string{"proj1.data1.tbl1", "proj2.data2.tbl2"},
			wantErr:          false,
		},
		// EXECUTE IMMEDIATE carries dynamic SQL that cannot be analyzed, so it
		// must be rejected in every spelling (extra spaces, newlines, comments,
		// nesting, inside BEGIN...END).
		{
			name:             "simple execute immediate",
			sql:              "EXECUTE IMMEDIATE 'SELECT * FROM `exec.proj.tbl`'",
			defaultProjectID: "default-proj",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place",
		},
		{
			name:             "execute immediate with multiple spaces",
			sql:              "EXECUTE  IMMEDIATE 'SELECT 1'",
			defaultProjectID: "default-proj",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place",
		},
		{
			name:             "execute immediate with newline",
			sql:              "EXECUTE\nIMMEDIATE 'SELECT 1'",
			defaultProjectID: "default-proj",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place",
		},
		{
			name:             "execute immediate with comment",
			sql:              "EXECUTE -- some comment\n IMMEDIATE 'SELECT * FROM `exec.proj.tbl`'",
			defaultProjectID: "default-proj",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place",
		},
		{
			name:             "nested execute immediate",
			sql:              "EXECUTE IMMEDIATE \"EXECUTE IMMEDIATE '''SELECT * FROM `nested.exec.tbl`'''\"",
			defaultProjectID: "default-proj",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place",
		},
		{
			name:             "begin execute immediate",
			sql:              "BEGIN EXECUTE IMMEDIATE 'SELECT * FROM `exec.proj.tbl`'; END;",
			defaultProjectID: "default-proj",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place",
		},
		// String-literal handling: table-like text inside quoted strings (with
		// escapes, raw strings, and triple quoting) must be ignored.
		{
			name:             "table inside string literal should be ignored",
			sql:              "SELECT * FROM `real.table.one` WHERE name = 'select * from `fake.table.two`'",
			defaultProjectID: "default-proj",
			want:             []string{"real.table.one"},
			wantErr:          false,
		},
		{
			name:             "string with escaped single quote",
			sql:              "SELECT 'this is a string with an escaped quote \\' and a fake table `fake.table.one`' FROM `real.table.two`",
			defaultProjectID: "default-proj",
			want:             []string{"real.table.two"},
			wantErr:          false,
		},
		{
			name:             "string with escaped double quote",
			sql:              `SELECT "this is a string with an escaped quote \" and a fake table ` + "`fake.table.one`" + `" FROM ` + "`real.table.two`",
			defaultProjectID: "default-proj",
			want:             []string{"real.table.two"},
			wantErr:          false,
		},
		{
			name:             "multi-line comment",
			sql:              "/* `fake.table.1` */ SELECT * FROM `real.table.2`",
			defaultProjectID: "default-proj",
			want:             []string{"real.table.2"},
			wantErr:          false,
		},
		{
			name:             "raw string with backslash should be ignored",
			sql:              "SELECT * FROM `real.table.one` WHERE name = r'a raw string with a \\ and a fake table `fake.table.two`'",
			defaultProjectID: "default-proj",
			want:             []string{"real.table.one"},
			wantErr:          false,
		},
		{
			name:             "capital R raw string with quotes inside should be ignored",
			sql:              `SELECT * FROM ` + "`real.table.one`" + ` WHERE name = R"""a raw string with a ' and a " and a \ and a fake table ` + "`fake.table.two`" + `"""`,
			defaultProjectID: "default-proj",
			want:             []string{"real.table.one"},
			wantErr:          false,
		},
		{
			name:             "triple quoted raw string should be ignored",
			sql:              "SELECT * FROM `real.table.one` WHERE name = r'''a raw string with a ' and a \" and a \\ and a fake table `fake.table.two`'''",
			defaultProjectID: "default-proj",
			want:             []string{"real.table.one"},
			wantErr:          false,
		},
		{
			name:             "triple quoted capital R raw string should be ignored",
			sql:              `SELECT * FROM ` + "`real.table.one`" + ` WHERE name = R"""a raw string with a ' and a " and a \ and a fake table ` + "`fake.table.two`" + `"""`,
			defaultProjectID: "default-proj",
			want:             []string{"real.table.one"},
			wantErr:          false,
		},
		// Unquoted and partially quoted identifiers must resolve the same way
		// as fully backquoted ones.
		{
			name:             "unquoted fully qualified table",
			sql:              "SELECT * FROM my-project.my_dataset.my_table",
			defaultProjectID: "default-proj",
			want:             []string{"my-project.my_dataset.my_table"},
			wantErr:          false,
		},
		{
			name:             "unquoted partial table with default project",
			sql:              "SELECT * FROM my_dataset.my_table",
			defaultProjectID: "default-proj",
			want:             []string{"default-proj.my_dataset.my_table"},
			wantErr:          false,
		},
		{
			name:             "unquoted partial table without default project",
			sql:              "SELECT * FROM my_dataset.my_table",
			defaultProjectID: "",
			want:             nil,
			wantErr:          true,
		},
		{
			name:             "mixed quoting style 1",
			sql:              "SELECT * FROM `my-project`.my_dataset.my_table",
			defaultProjectID: "default-proj",
			want:             []string{"my-project.my_dataset.my_table"},
			wantErr:          false,
		},
		{
			name:             "mixed quoting style 2",
			sql:              "SELECT * FROM `my-project`.`my_dataset`.my_table",
			defaultProjectID: "default-proj",
			want:             []string{"my-project.my_dataset.my_table"},
			wantErr:          false,
		},
		{
			name:             "mixed quoting style 3",
			sql:              "SELECT * FROM `my-project`.`my_dataset`.`my_table`",
			defaultProjectID: "default-proj",
			want:             []string{"my-project.my_dataset.my_table"},
			wantErr:          false,
		},
		{
			name:             "mixed quoted and unquoted tables",
			sql:              "SELECT * FROM `proj1.data1.tbl1` JOIN proj2.data2.tbl2 ON id",
			defaultProjectID: "default-proj",
			want:             []string{"proj1.data1.tbl1", "proj2.data2.tbl2"},
			wantErr:          false,
		},
		// DML/DDL statements that target tables are analyzable and report the
		// tables they touch.
		{
			name:             "create table statement",
			sql:              "CREATE TABLE `my-project.my_dataset.my_table` (x INT64)",
			defaultProjectID: "default-proj",
			want:             []string{"my-project.my_dataset.my_table"},
			wantErr:          false,
		},
		{
			name:             "insert into statement",
			sql:              "INSERT INTO `my-project.my_dataset.my_table` (x) VALUES (1)",
			defaultProjectID: "default-proj",
			want:             []string{"my-project.my_dataset.my_table"},
			wantErr:          false,
		},
		{
			name:             "update statement",
			sql:              "UPDATE `my-project.my_dataset.my_table` SET x = 2 WHERE true",
			defaultProjectID: "default-proj",
			want:             []string{"my-project.my_dataset.my_table"},
			wantErr:          false,
		},
		{
			name:             "delete from statement",
			sql:              "DELETE FROM `my-project.my_dataset.my_table` WHERE true",
			defaultProjectID: "default-proj",
			want:             []string{"my-project.my_dataset.my_table"},
			wantErr:          false,
		},
		{
			name:             "merge into statement",
			sql:              "MERGE `proj.data.target` T USING `proj.data.source` S ON T.id = S.id WHEN NOT MATCHED THEN INSERT ROW",
			defaultProjectID: "default-proj",
			want:             []string{"proj.data.source", "proj.data.target"},
			wantErr:          false,
		},
		// Dataset-level DDL (CREATE/DROP/ALTER SCHEMA or DATASET) is rejected
		// outright when dataset restrictions apply.
		{
			name:             "create schema statement",
			sql:              "CREATE SCHEMA `my-project.my_dataset`",
			defaultProjectID: "default-proj",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "dataset-level operations like 'CREATE SCHEMA' are not allowed",
		},
		{
			name:             "create dataset statement",
			sql:              "CREATE DATASET `my-project.my_dataset`",
			defaultProjectID: "default-proj",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "dataset-level operations like 'CREATE DATASET' are not allowed",
		},
		{
			name:             "drop schema statement",
			sql:              "DROP SCHEMA `my-project.my_dataset`",
			defaultProjectID: "default-proj",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "dataset-level operations like 'DROP SCHEMA' are not allowed",
		},
		{
			name:             "drop dataset statement",
			sql:              "DROP DATASET `my-project.my_dataset`",
			defaultProjectID: "default-proj",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "dataset-level operations like 'DROP DATASET' are not allowed",
		},
		{
			name:             "alter schema statement",
			sql:              "ALTER SCHEMA my_dataset SET OPTIONS(description='new description')",
			defaultProjectID: "default-proj",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "dataset-level operations like 'ALTER SCHEMA' are not allowed",
		},
		{
			name:             "alter dataset statement",
			sql:              "ALTER DATASET my_dataset SET OPTIONS(description='new description')",
			defaultProjectID: "default-proj",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "dataset-level operations like 'ALTER DATASET' are not allowed",
		},
		{
			name:             "begin...end block",
			sql:              "BEGIN CREATE TABLE `proj.data.tbl1` (x INT64); INSERT `proj.data.tbl2` (y) VALUES (1); END;",
			defaultProjectID: "default-proj",
			want:             []string{"proj.data.tbl1", "proj.data.tbl2"},
			wantErr:          false,
		},
		{
			name: "complex begin...end block with comments and different quoting",
			sql: `
				BEGIN
					-- Create a new table
					CREATE TABLE proj.data.tbl1 (x INT64);
					/* Insert some data from another table */
					INSERT INTO ` + "`proj.data.tbl2`" + ` (y) SELECT y FROM proj.data.source;
				END;`,
			defaultProjectID: "default-proj",
			want:             []string{"proj.data.source", "proj.data.tbl1", "proj.data.tbl2"},
			wantErr:          false,
		},
		// CALL is rejected because the procedure body cannot be inspected for
		// the tables it touches.
		{
			name:             "call fully qualified procedure",
			sql:              "CALL my-project.my_dataset.my_procedure()",
			defaultProjectID: "default-proj",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "CALL is not allowed when dataset restrictions are in place",
		},
		{
			name:             "call partially qualified procedure",
			sql:              "CALL my_dataset.my_procedure()",
			defaultProjectID: "default-proj",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "CALL is not allowed when dataset restrictions are in place",
		},
		{
			name:             "call procedure in begin...end block",
			sql:              "BEGIN CALL proj.data.proc1(); SELECT * FROM proj.data.tbl1; END;",
			defaultProjectID: "default-proj",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "CALL is not allowed when dataset restrictions are in place",
		},
		{
			name:             "call procedure with newline",
			sql:              "CALL\nmy_dataset.my_procedure()",
			defaultProjectID: "default-proj",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "CALL is not allowed when dataset restrictions are in place",
		},
		{
			name:             "call procedure without default project should fail",
			sql:              "CALL my_dataset.my_procedure()",
			defaultProjectID: "",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "CALL is not allowed when dataset restrictions are in place",
		},
		// Routine definitions are likewise unanalyzable and rejected.
		{
			name:             "create procedure statement",
			sql:              "CREATE PROCEDURE my_dataset.my_procedure() BEGIN SELECT 1; END;",
			defaultProjectID: "default-proj",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "unanalyzable statements like 'CREATE PROCEDURE' are not allowed",
		},
		{
			name:             "create or replace procedure statement",
			sql:              "CREATE\n OR \nREPLACE \nPROCEDURE my_dataset.my_procedure() BEGIN SELECT 1; END;",
			defaultProjectID: "default-proj",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "unanalyzable statements like 'CREATE OR REPLACE PROCEDURE' are not allowed",
		},
		{
			name:             "create function statement",
			sql:              "CREATE FUNCTION my_dataset.my_function() RETURNS INT64 AS (1);",
			defaultProjectID: "default-proj",
			want:             nil,
			wantErr:          true,
			wantErrMsg:       "unanalyzable statements like 'CREATE FUNCTION' are not allowed",
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			got, err := bigquerycommon.TableParser(tc.sql, tc.defaultProjectID)
			// Fail fast when the presence/absence of an error does not match
			// the expectation; the remaining checks would be meaningless.
			if (err != nil) != tc.wantErr {
				t.Errorf("TableParser() error = %v, wantErr %v", err, tc.wantErr)
				return
			}
			// When a specific message is expected, it only needs to be
			// contained in the error text, not match it exactly.
			if tc.wantErr && tc.wantErrMsg != "" {
				if err == nil || !strings.Contains(err.Error(), tc.wantErrMsg) {
					t.Errorf("TableParser() error = %v, want err containing %q", err, tc.wantErrMsg)
				}
			}
			// Sort slices to ensure comparison is order-independent.
			sort.Strings(got)
			sort.Strings(tc.want)
			if diff := cmp.Diff(tc.want, got); diff != "" {
				t.Errorf("TableParser() mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

```

--------------------------------------------------------------------------------
/docs/en/resources/tools/looker/looker-query-url.md:
--------------------------------------------------------------------------------

```markdown
---
title: "looker-query-url"
type: docs
weight: 1
description: >
  "looker-query-url" generates a url link to a Looker explore.
aliases:
- /resources/tools/looker-query-url
---

## About

The `looker-query-url` tool generates a URL link to an explore in
Looker so that the query can be investigated further.

It's compatible with the following sources:

- [looker](../../sources/looker.md)

`looker-query-url` takes nine parameters:

1. the `model`
2. the `explore`
3. the `fields` list
4. an optional set of `filters`
5. an optional set of `pivots`
6. an optional set of `sorts`
7. an optional `limit`
8. an optional `tz`
9. an optional `vis_config`

## Example

```yaml
tools:
    query_url:
        kind: looker-query-url
        source: looker-source
        description: |
          Query URL Tool

          This tool is used to generate the URL of a query in Looker.
          The user can then explore the query further inside Looker.
          The tool also returns the query_id and slug. The parameters
          are the same as the query tool with an additional vis_config
          parameter.

          The vis_config is optional. If provided, it will be used to
          control the default visualization for the query. Here are
          some notes on making visualizations.

          ### Cartesian Charts (Area, Bar, Column, Line, Scatter)

          These chart types share a large number of configuration options.

          **General**
          *   `type`: The type of visualization (`looker_area`, `looker_bar`, `looker_column`, `looker_line`, `looker_scatter`).
          *   `series_types`: Override the chart type for individual series.
          *   `show_view_names`: Display view names in labels and tooltips (`true`/`false`).
          *   `series_labels`: Provide custom names for series.

          **Styling & Colors**
          *   `colors`: An array of color values to be used for the chart series.
          *   `series_colors`: A mapping of series names to specific color values.
          *   `color_application`: Advanced controls for color palette application (collection, palette, reverse, etc.).
          *   `font_size`: Font size for labels (e.g., '12px').

          **Legend**
          *   `hide_legend`: Show or hide the chart legend (`true`/`false`).
          *   `legend_position`: Placement of the legend (`'center'`, `'left'`, `'right'`).

          **Axes**
          *   `swap_axes`: Swap the X and Y axes (`true`/`false`).
          *   `x_axis_scale`: Scale of the x-axis (`'auto'`, `'ordinal'`, `'linear'`, `'time'`).
          *   `x_axis_reversed`, `y_axis_reversed`: Reverse the direction of an axis (`true`/`false`).
          *   `x_axis_gridlines`, `y_axis_gridlines`: Display gridlines for an axis (`true`/`false`).
          *   `show_x_axis_label`, `show_y_axis_label`: Show or hide the axis title (`true`/`false`).
          *   `show_x_axis_ticks`, `show_y_axis_ticks`: Show or hide axis tick marks (`true`/`false`).
          *   `x_axis_label`, `y_axis_label`: Set a custom title for an axis.
          *   `x_axis_datetime_label`: A format string for datetime labels on the x-axis (e.g., `'%Y-%m'`).
          *   `x_padding_left`, `x_padding_right`: Adjust padding on the ends of the x-axis.
          *   `x_axis_label_rotation`, `x_axis_label_rotation_bar`: Set rotation for x-axis labels.
          *   `x_axis_zoom`, `y_axis_zoom`: Enable zooming on an axis (`true`/`false`).
          *   `y_axes`: An array of configuration objects for multiple y-axes.

          **Data & Series**
          *   `stacking`: How to stack series (`''` for none, `'normal'`, `'percent'`).
          *   `ordering`: Order of series in a stack (`'none'`, etc.).
          *   `limit_displayed_rows`: Enable or disable limiting the number of rows displayed (`true`/`false`).
          *   `limit_displayed_rows_values`: Configuration for the row limit (e.g., `{ "first_last": "first", "show_hide": "show", "num_rows": 10 }`).
          *   `discontinuous_nulls`: How to render null values in line charts (`true`/`false`).
          *   `point_style`: Style for points on line and area charts (`'none'`, `'circle'`, `'circle_outline'`).
          *   `series_point_styles`: Override point styles for individual series.
          *   `interpolation`: Line interpolation style (`'linear'`, `'monotone'`, `'step'`, etc.).
          *   `show_value_labels`: Display values on data points (`true`/`false`).
          *   `label_value_format`: A format string for value labels.
          *   `show_totals_labels`: Display total labels on stacked charts (`true`/`false`).
          *   `totals_color`: Color for total labels.
          *   `show_silhouette`: Display a "silhouette" of hidden series in stacked charts (`true`/`false`).
          *   `hidden_series`: An array of series names to hide from the visualization.

          **Scatter/Bubble Specific**
          *   `size_by_field`: The field used to determine the size of bubbles.
          *   `color_by_field`: The field used to determine the color of bubbles.
          *   `plot_size_by_field`: Whether to display the size-by field in the legend.
          *   `cluster_points`: Group nearby points into clusters (`true`/`false`).
          *   `quadrants_enabled`: Display quadrants on the chart (`true`/`false`).
          *   `quadrant_properties`: Configuration for quadrant labels and colors.
          *   `custom_quadrant_value_x`, `custom_quadrant_value_y`: Set quadrant boundaries as a percentage.
          *   `custom_quadrant_point_x`, `custom_quadrant_point_y`: Set quadrant boundaries to a specific value.

          **Miscellaneous**
          *   `reference_lines`: Configuration for displaying reference lines.
          *   `trend_lines`: Configuration for displaying trend lines.
          *   `trellis`: Configuration for creating trellis (small multiple) charts.
          *   `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering interactions.

          ### Boxplot

          *   Inherits most of the Cartesian chart options.
          *   `type`: Must be `looker_boxplot`.

          ### Funnel

          *   `type`: Must be `looker_funnel`.
          *   `orientation`: How data is read (`'automatic'`, `'dataInRows'`, `'dataInColumns'`).
          *   `percentType`: How percentages are calculated (`'percentOfMaxValue'`, `'percentOfPriorRow'`).
          *   `labelPosition`, `valuePosition`, `percentPosition`: Placement of labels (`'left'`, `'right'`, `'inline'`, `'hidden'`).
          *   `labelColor`, `labelColorEnabled`: Set a custom color for labels.
          *   `labelOverlap`: Allow labels to overlap (`true`/`false`).
          *   `barColors`: An array of colors for the funnel steps.
          *   `color_application`: Advanced color palette controls.
          *   `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering.

          ### Pie / Donut

          *   `type`: Must be `looker_pie`.
          *   `value_labels`: Where to display values (`'legend'`, `'labels'`).
          *   `label_type`: The format of data labels (`'labPer'`, `'labVal'`, `'lab'`, `'val'`, `'per'`).
          *   `start_angle`, `end_angle`: The start and end angles of the pie chart.
          *   `inner_radius`: The inner radius, used to create a donut chart.
          *   `series_colors`, `series_labels`: Override colors and labels for specific slices.
          *   `color_application`: Advanced color palette controls.
          *   `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering.
          *   `advanced_vis_config`: A string containing JSON for advanced Highcharts configuration.

          ### Waterfall

          *   Inherits most of the Cartesian chart options.
          *   `type`: Must be `looker_waterfall`.
          *   `up_color`: Color for positive (increasing) values.
          *   `down_color`: Color for negative (decreasing) values.
          *   `total_color`: Color for the total bar.

          ### Word Cloud

          *   `type`: Must be `looker_wordcloud`.
          *   `rotation`: Enable random word rotation (`true`/`false`).
          *   `colors`: An array of colors for the words.
          *   `color_application`: Advanced color palette controls.
          *   `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering.

          These are some sample vis_config settings.

          A bar chart -
          {{
            "defaults_version": 1,
            "label_density": 25,
            "legend_position": "center",
            "limit_displayed_rows": false,
            "ordering": "none",
            "plot_size_by_field": false,
            "point_style": "none",
            "show_null_labels": false,
            "show_silhouette": false,
            "show_totals_labels": false,
            "show_value_labels": false,
            "show_view_names": false,
            "show_x_axis_label": true,
            "show_x_axis_ticks": true,
            "show_y_axis_labels": true,
            "show_y_axis_ticks": true,
            "stacking": "normal",
            "totals_color": "#808080",
            "trellis": "",
            "type": "looker_bar",
            "x_axis_gridlines": false,
            "x_axis_reversed": false,
            "x_axis_scale": "auto",
            "x_axis_zoom": true,
            "y_axis_combined": true,
            "y_axis_gridlines": true,
            "y_axis_reversed": false,
            "y_axis_scale_mode": "linear",
            "y_axis_tick_density": "default",
            "y_axis_tick_density_custom": 5,
            "y_axis_zoom": true
          }}

          A column chart with an optional advanced_vis_config -
          {{
            "advanced_vis_config": "{ chart: { type: 'pie', spacingBottom: 50, spacingLeft: 50, spacingRight: 50, spacingTop: 50, }, legend: { enabled: false, }, plotOptions: { pie: { dataLabels: { enabled: true, format: '\u003cb\u003e{key}\u003c/b\u003e\u003cspan style=\"font-weight: normal\"\u003e - {percentage:.2f}%\u003c/span\u003e', }, showInLegend: false, }, }, series: [], }",
            "colors": [
              "grey"
            ],
            "defaults_version": 1,
            "hidden_fields": [],
            "label_density": 25,
            "legend_position": "center",
            "limit_displayed_rows": false,
            "note_display": "below",
            "note_state": "collapsed",
            "note_text": "Unsold inventory only",
            "ordering": "none",
            "plot_size_by_field": false,
            "point_style": "none",
            "series_colors": {},
            "show_null_labels": false,
            "show_silhouette": false,
            "show_totals_labels": false,
            "show_value_labels": true,
            "show_view_names": false,
            "show_x_axis_label": true,
            "show_x_axis_ticks": true,
            "show_y_axis_labels": true,
            "show_y_axis_ticks": true,
            "stacking": "normal",
            "totals_color": "#808080",
            "trellis": "",
            "type": "looker_column",
            "x_axis_gridlines": false,
            "x_axis_reversed": false,
            "x_axis_scale": "auto",
            "x_axis_zoom": true,
            "y_axes": [],
            "y_axis_combined": true,
            "y_axis_gridlines": true,
            "y_axis_reversed": false,
            "y_axis_scale_mode": "linear",
            "y_axis_tick_density": "default",
            "y_axis_tick_density_custom": 5,
            "y_axis_zoom": true
          }}

          A line chart -
          {{
            "defaults_version": 1,
            "hidden_pivots": {},
            "hidden_series": [],
            "interpolation": "linear",
            "label_density": 25,
            "legend_position": "center",
            "limit_displayed_rows": false,
            "plot_size_by_field": false,
            "point_style": "none",
            "series_types": {},
            "show_null_points": true,
            "show_value_labels": false,
            "show_view_names": false,
            "show_x_axis_label": true,
            "show_x_axis_ticks": true,
            "show_y_axis_labels": true,
            "show_y_axis_ticks": true,
            "stacking": "",
            "trellis": "",
            "type": "looker_line",
            "x_axis_gridlines": false,
            "x_axis_reversed": false,
            "x_axis_scale": "auto",
            "y_axis_combined": true,
            "y_axis_gridlines": true,
            "y_axis_reversed": false,
            "y_axis_scale_mode": "linear",
            "y_axis_tick_density": "default",
            "y_axis_tick_density_custom": 5
          }}

          An area chart -
          {{
            "defaults_version": 1,
            "interpolation": "linear",
            "label_density": 25,
            "legend_position": "center",
            "limit_displayed_rows": false,
            "plot_size_by_field": false,
            "point_style": "none",
            "series_types": {},
            "show_null_points": true,
            "show_silhouette": false,
            "show_totals_labels": false,
            "show_value_labels": false,
            "show_view_names": false,
            "show_x_axis_label": true,
            "show_x_axis_ticks": true,
            "show_y_axis_labels": true,
            "show_y_axis_ticks": true,
            "stacking": "normal",
            "totals_color": "#808080",
            "trellis": "",
            "type": "looker_area",
            "x_axis_gridlines": false,
            "x_axis_reversed": false,
            "x_axis_scale": "auto",
            "x_axis_zoom": true,
            "y_axis_combined": true,
            "y_axis_gridlines": true,
            "y_axis_reversed": false,
            "y_axis_scale_mode": "linear",
            "y_axis_tick_density": "default",
            "y_axis_tick_density_custom": 5,
            "y_axis_zoom": true
          }}

          A scatter plot -
          {{
            "cluster_points": false,
            "custom_quadrant_point_x": 5,
            "custom_quadrant_point_y": 5,
            "custom_value_label_column": "",
            "custom_x_column": "",
            "custom_y_column": "",
            "defaults_version": 1,
            "hidden_fields": [],
            "hidden_pivots": {},
            "hidden_points_if_no": [],
            "hidden_series": [],
            "interpolation": "linear",
            "label_density": 25,
            "legend_position": "center",
            "limit_displayed_rows": false,
            "limit_displayed_rows_values": {
              "first_last": "first",
              "num_rows": 0,
              "show_hide": "hide"
            },
            "plot_size_by_field": false,
            "point_style": "circle",
            "quadrant_properties": {
              "0": {
                "color": "",
                "label": "Quadrant 1"
              },
              "1": {
                "color": "",
                "label": "Quadrant 2"
              },
              "2": {
                "color": "",
                "label": "Quadrant 3"
              },
              "3": {
                "color": "",
                "label": "Quadrant 4"
              }
            },
            "quadrants_enabled": false,
            "series_labels": {},
            "series_types": {},
            "show_null_points": false,
            "show_value_labels": false,
            "show_view_names": true,
            "show_x_axis_label": true,
            "show_x_axis_ticks": true,
            "show_y_axis_labels": true,
            "show_y_axis_ticks": true,
            "size_by_field": "roi",
            "stacking": "normal",
            "swap_axes": true,
            "trellis": "",
            "type": "looker_scatter",
            "x_axis_gridlines": false,
            "x_axis_reversed": false,
            "x_axis_scale": "auto",
            "x_axis_zoom": true,
            "y_axes": [
              {
                "label": "",
                "orientation": "bottom",
                "series": [
                  {
                    "axisId": "Channel_0 - average_of_roi_first",
                    "id": "Channel_0 - average_of_roi_first",
                    "name": "Channel_0"
                  },
                  {
                    "axisId": "Channel_1 - average_of_roi_first",
                    "id": "Channel_1 - average_of_roi_first",
                    "name": "Channel_1"
                  },
                  {
                    "axisId": "Channel_2 - average_of_roi_first",
                    "id": "Channel_2 - average_of_roi_first",
                    "name": "Channel_2"
                  },
                  {
                    "axisId": "Channel_3 - average_of_roi_first",
                    "id": "Channel_3 - average_of_roi_first",
                    "name": "Channel_3"
                  },
                  {
                    "axisId": "Channel_4 - average_of_roi_first",
                    "id": "Channel_4 - average_of_roi_first",
                    "name": "Channel_4"
                  }
                ],
                "showLabels": true,
                "showValues": true,
                "tickDensity": "custom",
                "tickDensityCustom": 100,
                "type": "linear",
                "unpinAxis": false
              }
            ],
            "y_axis_combined": true,
            "y_axis_gridlines": true,
            "y_axis_reversed": false,
            "y_axis_scale_mode": "linear",
            "y_axis_tick_density": "default",
            "y_axis_tick_density_custom": 5,
            "y_axis_zoom": true
          }}

          A single record visualization -
          {{
            "defaults_version": 1,
            "show_view_names": false,
            "type": "looker_single_record"
          }}

          A single value visualization -
          {{
            "comparison_reverse_colors": false,
            "comparison_type": "value",                                                                                                                                            "conditional_formatting_include_nulls": false,                                                                                                                         "conditional_formatting_include_totals": false,
            "custom_color": "#1A73E8",
            "custom_color_enabled": true,
            "defaults_version": 1,
            "enable_conditional_formatting": false,
            "series_types": {},
            "show_comparison": false,
            "show_comparison_label": true,
            "show_single_value_title": true,
            "single_value_title": "Total Clicks",
            "type": "single_value"
          }}

          A Pie chart -
          {{
            "defaults_version": 1,
            "label_density": 25,
            "label_type": "labPer",
            "legend_position": "center",
            "limit_displayed_rows": false,
            "ordering": "none",
            "plot_size_by_field": false,
            "point_style": "none",
            "series_types": {},
            "show_null_labels": false,
            "show_silhouette": false,
            "show_totals_labels": false,
            "show_value_labels": false,
            "show_view_names": false,
            "show_x_axis_label": true,
            "show_x_axis_ticks": true,
            "show_y_axis_labels": true,
            "show_y_axis_ticks": true,
            "stacking": "",
            "totals_color": "#808080",
            "trellis": "",
            "type": "looker_pie",
            "value_labels": "legend",
            "x_axis_gridlines": false,
            "x_axis_reversed": false,
            "x_axis_scale": "auto",
            "y_axis_combined": true,
            "y_axis_gridlines": true,
            "y_axis_reversed": false,
            "y_axis_scale_mode": "linear",
            "y_axis_tick_density": "default",
            "y_axis_tick_density_custom": 5
          }}

          The result is a JSON object with the id, slug, the url, and
          the long_url.
```

## Reference

| **field**   |                  **type**                  | **required** | **description**                                                                                  |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind        |                   string                   |     true     | Must be "looker-query-url"                                                                       |
| source      |                   string                   |     true     | Name of the source the query should execute on.                                                  |
| description |                   string                   |     true     | Description of the tool that is passed to the LLM.                                               |

```

--------------------------------------------------------------------------------
/internal/server/static/js/toolDisplay.js:
--------------------------------------------------------------------------------

```javascript
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import { handleRunTool, displayResults } from './runTool.js';
import { createGoogleAuthMethodItem } from './auth.js'

/**
 * Builds the form row for a single tool parameter: a label, a value input
 * (textarea, checkbox, or typed <input> depending on `param.type`), and an
 * "include this parameter" checkbox that enables/disables the value input.
 * @param {!Object} param The parameter definition (name, type, label,
 *     required, authServices).
 * @param {string} toolId The unique identifier for the tool; used to build
 *     stable element ids of the form `param-<toolId>-<name>`.
 * @return {!HTMLDivElement} The fully wired `.param-item` element.
 */
function createParamInput(param, toolId) {
    const paramItem = document.createElement('div');
    paramItem.className = 'param-item';

    const label = document.createElement('label');
    const INPUT_ID = `param-${toolId}-${param.name}`;
    const NAME_TEXT = document.createTextNode(param.name);
    label.setAttribute('for', INPUT_ID);
    label.appendChild(NAME_TEXT);

    // Annotate the label for auth-backed and optional parameters.
    const IS_AUTH_PARAM = param.authServices && param.authServices.length > 0;
    let additionalLabelText = '';
    if (IS_AUTH_PARAM) {
        additionalLabelText += ' (auth)';
    }
    if (!param.required) {
        additionalLabelText += ' (optional)';
    }

    if (additionalLabelText) {
        const additionalSpan = document.createElement('span');
        additionalSpan.textContent = additionalLabelText;
        additionalSpan.classList.add('param-label-extras');
        label.appendChild(additionalSpan);
    }
    paramItem.appendChild(label);

    const inputCheckboxWrapper = document.createElement('div');
    const inputContainer = document.createElement('div');
    inputCheckboxWrapper.className = 'input-checkbox-wrapper';
    inputContainer.className = 'param-input-element-container';

    // Build parameter's value input box.
    const PLACEHOLDER_LABEL = param.label;
    let inputElement;
    let boolValueLabel = null;

    if (param.type === 'textarea') {
        inputElement = document.createElement('textarea');
        inputElement.rows = 3;
        inputContainer.appendChild(inputElement);
    } else if (param.type === 'checkbox') {
        inputElement = document.createElement('input');
        inputElement.type = 'checkbox';
        inputElement.title = PLACEHOLDER_LABEL;
        inputElement.checked = false;

        // Mirror the checkbox state as a " true"/" false" text label.
        boolValueLabel = document.createElement('span');
        boolValueLabel.className = 'checkbox-bool-label';
        boolValueLabel.textContent = inputElement.checked ? ' true' : ' false';

        inputContainer.appendChild(inputElement);
        inputContainer.appendChild(boolValueLabel);

        inputElement.addEventListener('change', () => {
            boolValueLabel.textContent = inputElement.checked ? ' true' : ' false';
        });
    } else {
        inputElement = document.createElement('input');
        inputElement.type = param.type;
        inputContainer.appendChild(inputElement);
    }

    inputElement.id = INPUT_ID;
    inputElement.name = param.name;
    inputElement.classList.add('param-input-element');

    if (IS_AUTH_PARAM) {
        // Auth parameter values come from request headers, not manual entry.
        inputElement.disabled = true;
        inputElement.classList.add('auth-param-input');
        if (param.type !== 'checkbox') {
            // Join explicitly; assigning the array directly relied on
            // implicit comma-joining with no spaces.
            inputElement.placeholder = param.authServices.join(', ');
        }
    } else if (param.type !== 'checkbox') {
        inputElement.placeholder = PLACEHOLDER_LABEL ? PLACEHOLDER_LABEL.trim() : '';
    }
    inputCheckboxWrapper.appendChild(inputContainer);

    // create the "Include Param" checkbox
    const INCLUDE_CHECKBOX_ID = `include-${INPUT_ID}`;
    const includeContainer = document.createElement('div');
    const includeCheckbox = document.createElement('input');

    includeContainer.className = 'include-param-container';
    includeCheckbox.type = 'checkbox';
    includeCheckbox.id = INCLUDE_CHECKBOX_ID;
    includeCheckbox.name = `include-${param.name}`;
    includeCheckbox.title = 'Include this parameter'; // Add a tooltip

    // default to checked, unless it's an optional parameter
    includeCheckbox.checked = param.required;

    includeContainer.appendChild(includeCheckbox);
    inputCheckboxWrapper.appendChild(includeContainer);

    paramItem.appendChild(inputCheckboxWrapper);

    // Grey out the row and disable the value input when excluded; auth
    // inputs stay disabled even when included.
    const updateParamIncludedState = () => {
        const isIncluded = includeCheckbox.checked;
        if (isIncluded) {
            paramItem.classList.remove('disabled-param');
            if (!IS_AUTH_PARAM) {
                 inputElement.disabled = false;
            }
            if (boolValueLabel) {
                boolValueLabel.classList.remove('disabled');
            }
        } else {
            paramItem.classList.add('disabled-param');
            inputElement.disabled = true;
            if (boolValueLabel) {
                boolValueLabel.classList.add('disabled');
            }
        }
    };

    // add event listener to the include checkbox
    includeCheckbox.addEventListener('change', updateParamIncludedState);
    updateParamIncludedState();

    return paramItem;
}

/**
 * Function to create the header editor popup modal.
 * @param {string} toolId The unique identifier for the tool.
 * @param {!Object<string, string>} currentHeaders The current headers.
 * @param {!Array<!Object>} toolParameters The tool's parameter definitions;
 *     used to discover auth services tied to individual parameters.
 * @param {(Array<string>|undefined)} authRequired Auth services required for
 *     invoking the tool at all (authorized invocations); may be undefined.
 * @param {function(!Object<string, string>): void} saveCallback A function to be
 *     called when the "Save" button is clicked and the headers are successfully
 *     parsed. The function receives the updated headers object as its argument.
 * @return {!HTMLDivElement} The outermost div element of the created modal.
 */
function createHeaderEditorModal(toolId, currentHeaders, toolParameters, authRequired, saveCallback) {
    const MODAL_ID = `header-modal-${toolId}`;
    let modal = document.getElementById(MODAL_ID);

    // Replace any stale modal so the textarea reflects the latest headers.
    if (modal) {
        modal.remove();
    }

    modal = document.createElement('div');
    modal.id = MODAL_ID;
    modal.className = 'header-modal';

    const modalContent = document.createElement('div');
    const modalHeader = document.createElement('h5');
    const headersTextarea = document.createElement('textarea');

    modalContent.className = 'header-modal-content';
    modalHeader.textContent = 'Edit Request Headers';
    headersTextarea.id = `headers-textarea-${toolId}`;
    headersTextarea.className = 'headers-textarea';
    headersTextarea.rows = 10;
    headersTextarea.value = JSON.stringify(currentHeaders, null, 2);

    // Collect auth service names from parameter-level auth...
    const authProfileNames = new Set();
    toolParameters.forEach(param => {
        const isAuthParam = param.authServices && param.authServices.length > 0;
        if (isAuthParam) {
             param.authServices.forEach(name => authProfileNames.add(name));
        }
    });

    // ...and from tool-level authorized invocations.
    if (authRequired && authRequired.length > 0) {
        authRequired.forEach(name => authProfileNames.add(name));
    }

    modalContent.appendChild(modalHeader);
    modalContent.appendChild(headersTextarea);

    // The set already contains the authRequired entries, so checking it alone
    // is sufficient — and avoids a TypeError on `authRequired.length` when
    // authRequired is undefined.
    if (authProfileNames.size > 0) {
        const authHelperSection = document.createElement('div');
        authHelperSection.className = 'auth-helper-section';
        const authList = document.createElement('div');
        authList.className = 'auth-method-list';

        authProfileNames.forEach(profileName => {
            const authItem = createGoogleAuthMethodItem(toolId, profileName);
            authList.appendChild(authItem);
        });
        authHelperSection.appendChild(authList);
        modalContent.appendChild(authHelperSection);
    }

    const modalActions = document.createElement('div');
    const closeButton = document.createElement('button');
    const saveButton = document.createElement('button');
    const authTokenDropdown = createAuthTokenInfoDropdown();

    modalActions.className = 'header-modal-actions';
    closeButton.textContent = 'Close';
    closeButton.className = 'btn btn--closeHeaders';
    closeButton.addEventListener('click', () => closeHeaderEditor(toolId));
    saveButton.textContent = 'Save';
    saveButton.className = 'btn btn--saveHeaders';
    saveButton.addEventListener('click', () => {
        try {
            const updatedHeaders = JSON.parse(headersTextarea.value);
            saveCallback(updatedHeaders);
            closeHeaderEditor(toolId);
        } catch (e) {
            alert('Invalid JSON format for headers.');
            console.error("Header JSON parse error:", e);
        }
    });

    modalActions.appendChild(closeButton);
    modalActions.appendChild(saveButton);
    modalContent.appendChild(modalActions);
    modalContent.appendChild(authTokenDropdown);
    modal.appendChild(modalContent);

    return modal;
}

/**
 * Shows the header editor modal for the given tool, if it exists in the DOM.
 * @param {string} toolId The unique identifier for the tool.
 */
function openHeaderEditor(toolId) {
    const dialog = document.getElementById(`header-modal-${toolId}`);
    if (!dialog) {
        return;
    }
    dialog.style.display = 'block';
}

/**
 * Hides the header editor modal for the given tool, if it exists in the DOM.
 * @param {string} toolId The unique identifier for the tool.
 */
function closeHeaderEditor(toolId) {
    const dialog = document.getElementById(`header-modal-${toolId}`);
    if (!dialog) {
        return;
    }
    dialog.style.display = 'none';
}

/**
 * Creates a dropdown element showing information on how to extract Google
 * auth tokens, with one tab per account type (standard / service account).
 * @return {HTMLDetailsElement} The details element representing the dropdown.
 */
function createAuthTokenInfoDropdown() {
    const details = document.createElement('details');
    details.className = 'auth-token-details';

    const summary = document.createElement('summary');
    summary.textContent = 'How to extract Google OAuth ID Token manually';
    details.appendChild(summary);

    const content = document.createElement('div');
    content.className = 'auth-token-content';

    // Tab definitions: [key, button label, instruction HTML, default-active].
    const TAB_DEFS = [
        ['standard', 'With Standard Account', AUTH_TOKEN_INSTRUCTIONS_STANDARD, true],
        ['service', 'With Service Account', AUTH_TOKEN_INSTRUCTIONS_SERVICE_ACCOUNT, false],
    ];

    const tabBar = document.createElement('div');
    tabBar.className = 'auth-tab-group';

    const panelHolder = document.createElement('div');
    const buttons = [];
    const panels = [];

    TAB_DEFS.forEach(([key, labelText, html, isDefault]) => {
        const btn = document.createElement('button');
        btn.className = isDefault ? 'auth-tab-picker active' : 'auth-tab-picker';
        btn.textContent = labelText;
        btn.setAttribute('data-tab', key);
        tabBar.appendChild(btn);
        buttons.push(btn);

        const panel = document.createElement('div');
        panel.id = `auth-tab-${key}`;
        panel.className = isDefault ? 'auth-tab-content active' : 'auth-tab-content';
        panel.innerHTML = html;
        panelHolder.appendChild(panel);
        panels.push(panel);
    });

    content.appendChild(tabBar);
    content.appendChild(panelHolder);

    // Clicking a tab deactivates every button/panel, then activates the
    // clicked button and its matching panel.
    buttons.forEach((btn) => {
        btn.addEventListener('click', () => {
            buttons.forEach((b) => b.classList.remove('active'));
            panels.forEach((p) => p.classList.remove('active'));

            btn.classList.add('active');

            const target = content.querySelector(`#auth-tab-${btn.getAttribute('data-tab')}`);
            if (target) {
                target.classList.add('active');
            }
        });
    });

    details.appendChild(content);
    return details;
}

/**
 * Renders the tool display area: name/description boxes, the parameter form,
 * Edit Headers / Run Tool buttons, the response area with a prettify toggle,
 * and the header editor modal.
 * @param {!Object} tool The tool definition (id, name, description,
 *     parameters, authRequired).
 * @param {!HTMLElement} containerElement The element to render into; any
 *     existing content is cleared first.
 */
export function renderToolInterface(tool, containerElement) {
    const TOOL_ID = tool.id;
    containerElement.innerHTML = '';

    let lastResults = null;
    let currentHeaders = {
        "Content-Type": "application/json"
    };

    // function to update lastResults so we can toggle json
    const updateLastResults = (newResults) => {
        lastResults = newResults;
    };
    // Rebuild the modal on save so its textarea shows the stored headers.
    const updateCurrentHeaders = (newHeaders) => {
        currentHeaders = newHeaders;
        const newModal = createHeaderEditorModal(TOOL_ID, currentHeaders, tool.parameters, tool.authRequired, updateCurrentHeaders);
        containerElement.appendChild(newModal);
    };

    const gridContainer = document.createElement('div');
    gridContainer.className = 'tool-details-grid';

    // Build "<h5>label</h5><p>value</p>" boxes with textContent rather than
    // innerHTML so tool names/descriptions cannot inject markup (XSS).
    const buildInfoBox = (className, labelText, valueText) => {
        const box = document.createElement('div');
        const heading = document.createElement('h5');
        const value = document.createElement('p');
        box.className = className;
        heading.textContent = labelText;
        value.textContent = valueText;
        box.appendChild(heading);
        box.appendChild(value);
        return box;
    };

    const toolInfoContainer = document.createElement('div');
    const nameBox = buildInfoBox('tool-box tool-name', 'Name:', tool.name);
    const descBox = buildInfoBox('tool-box tool-description', 'Description:', tool.description);

    toolInfoContainer.className = 'tool-info';
    toolInfoContainer.appendChild(nameBox);
    toolInfoContainer.appendChild(descBox);
    gridContainer.appendChild(toolInfoContainer);

    const DISCLAIMER_INFO = "*Checked parameters are sent with the value from their text field. Empty fields will be sent as an empty string. To exclude a parameter, uncheck it."
    const paramsContainer = document.createElement('div');
    const form = document.createElement('form');
    const paramsHeader = document.createElement('div');
    const disclaimerText = document.createElement('div');

    paramsContainer.className = 'tool-params tool-box';
    paramsContainer.innerHTML = '<h5>Parameters:</h5>';
    paramsHeader.className = 'params-header';
    paramsContainer.appendChild(paramsHeader);
    disclaimerText.textContent = DISCLAIMER_INFO;
    disclaimerText.className = 'params-disclaimer';
    paramsContainer.appendChild(disclaimerText);

    form.id = `tool-params-form-${TOOL_ID}`;

    tool.parameters.forEach(param => {
        form.appendChild(createParamInput(param, TOOL_ID));
    });
    paramsContainer.appendChild(form);
    gridContainer.appendChild(paramsContainer);

    containerElement.appendChild(gridContainer);

    const RESPONSE_AREA_ID = `tool-response-area-${TOOL_ID}`;
    const runButtonContainer = document.createElement('div');
    const editHeadersButton = document.createElement('button');
    const runButton = document.createElement('button');

    editHeadersButton.className = 'btn btn--editHeaders';
    editHeadersButton.textContent = 'Edit Headers';
    editHeadersButton.addEventListener('click', () => openHeaderEditor(TOOL_ID));
    runButtonContainer.className = 'run-button-container';
    runButtonContainer.appendChild(editHeadersButton);

    runButton.className = 'btn btn--run';
    runButton.textContent = 'Run Tool';
    runButtonContainer.appendChild(runButton);
    containerElement.appendChild(runButtonContainer);

    // response Area (bottom)
    const responseContainer = document.createElement('div');
    const responseHeaderControls = document.createElement('div');
    const responseHeader = document.createElement('h5');
    const responseArea = document.createElement('textarea');

    responseContainer.className = 'tool-response tool-box';
    responseHeaderControls.className = 'response-header-controls';
    responseHeader.textContent = 'Response:';
    responseHeaderControls.appendChild(responseHeader);

    // prettify box
    const PRETTIFY_ID = `prettify-${TOOL_ID}`;
    const prettifyDiv = document.createElement('div');
    const prettifyLabel = document.createElement('label');
    const prettifyCheckbox = document.createElement('input');

    prettifyDiv.className = 'prettify-container';
    prettifyLabel.setAttribute('for', PRETTIFY_ID);
    prettifyLabel.textContent = 'Prettify JSON';
    prettifyLabel.className = 'prettify-label';

    prettifyCheckbox.type = 'checkbox';
    prettifyCheckbox.id = PRETTIFY_ID;
    prettifyCheckbox.checked = true;
    prettifyCheckbox.className = 'prettify-checkbox';

    prettifyDiv.appendChild(prettifyLabel);
    prettifyDiv.appendChild(prettifyCheckbox);

    responseHeaderControls.appendChild(prettifyDiv);
    responseContainer.appendChild(responseHeaderControls);

    responseArea.id = RESPONSE_AREA_ID;
    responseArea.readOnly = true;
    responseArea.placeholder = 'Results will appear here...';
    responseArea.className = 'tool-response-area';
    responseArea.rows = 10;
    responseContainer.appendChild(responseArea);

    containerElement.appendChild(responseContainer);

    // create and append the header editor modal
    const headerModal = createHeaderEditorModal(TOOL_ID, currentHeaders, tool.parameters, tool.authRequired, updateCurrentHeaders);
    containerElement.appendChild(headerModal);

    // Re-render the last response when the prettify toggle flips.
    prettifyCheckbox.addEventListener('change', () => {
        if (lastResults) {
            displayResults(lastResults, responseArea, prettifyCheckbox.checked);
        }
    });

    runButton.addEventListener('click', (event) => {
        event.preventDefault();
        handleRunTool(TOOL_ID, form, responseArea, tool.parameters, prettifyCheckbox, updateLastResults, currentHeaders);
    });
}

/**
 * Checks if a specific parameter is marked as included for a given tool.
 * @param {string} toolId The ID of the tool.
 * @param {string} paramName The name of the parameter.
 * @return {boolean|null} True if the parameter's include checkbox is checked,
 *                         False if unchecked, Null if the checkbox element is not found.
 */
export function isParamIncluded(toolId, paramName) {
    const includeCheckboxId = `include-param-${toolId}-${paramName}`;
    const includeCheckbox = document.getElementById(includeCheckboxId);

    if (!includeCheckbox || includeCheckbox.type !== 'checkbox') {
        console.warn(`Include checkbox not found for ID: ${includeCheckboxId}`);
        return null;
    }

    return includeCheckbox.checked;
}

// Templates for inserting token retrieval instructions into edit header modal.
// These are raw HTML fragments rendered via innerHTML in
// createAuthTokenInfoDropdown().
const AUTH_TOKEN_INSTRUCTIONS_SERVICE_ACCOUNT = `
        <p>To obtain a Google OAuth ID token using a service account:</p>
        <ol>
            <li>Make sure you are on the intended SERVICE account (typically contain iam.gserviceaccount.com). Verify by running the command below.
                <pre><code>gcloud auth list</code></pre>
            </li>
            <li>Print an id token with the audience set to your clientID defined in tools file:
                <pre><code>gcloud auth print-identity-token --audiences=YOUR_CLIENT_ID_HERE</code></pre>
            </li>
            <li>Copy the output token.</li>
            <li>Paste this token into the header in JSON editor. The key should be the name of your auth service followed by <code>_token</code>
                <pre><code>{
  "Content-Type": "application/json",
  "my-google-auth_token": "YOUR_ID_TOKEN_HERE"
}               </code></pre>
            </li>
        </ol>
        <p>This token is typically short-lived.</p>`;

const AUTH_TOKEN_INSTRUCTIONS_STANDARD = `
        <p>To obtain a Google OAuth ID token using a standard account:</p>
        <ol>
            <li>Make sure you are on your intended standard account. Verify by running the command below.
                <pre><code>gcloud auth list</code></pre>
            </li>
            <li>Within your Cloud Console, add the following link to the "Authorized Redirect URIs".</li>
            <pre><code>https://developers.google.com/oauthplayground</code></pre>
            <li>Go to the Google OAuth Playground site: <a href="https://developers.google.com/oauthplayground/" target="_blank">https://developers.google.com/oauthplayground/</a></li>
            <li>In the top right settings menu, select "Use your own OAuth Credentials".</li>
            <li>Input your clientID (from tools file), along with the client secret from Cloud Console.</li>
            <li>Inside the Google OAuth Playground, select "Google OAuth2 API v2".</li>
            <ul>
                <li>Select "Authorize APIs".</li>
                <li>Select "Exchange Authorization codes for tokens"</li>
                <li>Copy the id_token field provided in the response.</li>
            </ul>
            <li>Paste this token into the header in JSON editor. The key should be the name of your auth service followed by <code>_token</code>
                <pre><code>{
  "Content-Type": "application/json",
  "my-google-auth_token": "YOUR_ID_TOKEN_HERE"
}               </code></pre>
            </li>
        </ol>
        <p>This token is typically short-lived.</p>`;
```
Page 25/35FirstPrevNextLast