This is page 6 of 33. Use http://codebase.md/googleapis/genai-toolbox?page={x} to view the full context.

# Directory Structure

```
├── .ci
│   ├── continuous.release.cloudbuild.yaml
│   ├── generate_release_table.sh
│   ├── integration.cloudbuild.yaml
│   ├── quickstart_test
│   │   ├── go.integration.cloudbuild.yaml
│   │   ├── js.integration.cloudbuild.yaml
│   │   ├── py.integration.cloudbuild.yaml
│   │   ├── run_go_tests.sh
│   │   ├── run_js_tests.sh
│   │   ├── run_py_tests.sh
│   │   └── setup_hotels_sample.sql
│   ├── test_with_coverage.sh
│   └── versioned.release.cloudbuild.yaml
├── .github
│   ├── auto-label.yaml
│   ├── blunderbuss.yml
│   ├── CODEOWNERS
│   ├── header-checker-lint.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.yml
│   │   ├── config.yml
│   │   ├── feature_request.yml
│   │   └── question.yml
│   ├── label-sync.yml
│   ├── labels.yaml
│   ├── PULL_REQUEST_TEMPLATE.md
│   ├── release-please.yml
│   ├── renovate.json5
│   ├── sync-repo-settings.yaml
│   └── workflows
│       ├── cloud_build_failure_reporter.yml
│       ├── deploy_dev_docs.yaml
│       ├── deploy_previous_version_docs.yaml
│       ├── deploy_versioned_docs.yaml
│       ├── docs_deploy.yaml
│       ├── docs_preview_clean.yaml
│       ├── docs_preview_deploy.yaml
│       ├── lint.yaml
│       ├── schedule_reporter.yml
│       ├── sync-labels.yaml
│       └── tests.yaml
├── .gitignore
├── .gitmodules
├── .golangci.yaml
├── .hugo
│   ├── archetypes
│   │   └── default.md
│   ├── assets
│   │   ├── icons
│   │   │   └── logo.svg
│   │   └── scss
│   │       ├── _styles_project.scss
│   │       └── _variables_project.scss
│   ├── go.mod
│   ├── go.sum
│   ├── hugo.toml
│   ├── layouts
│   │   ├── _default
│   │   │   └── home.releases.releases
│   │   ├── index.llms-full.txt
│   │   ├── index.llms.txt
│   │   ├── partials
│   │   │   ├── hooks
│   │   │   │   └── head-end.html
│   │   │   ├── navbar-version-selector.html
│   │   │   ├── page-meta-links.html
│   │   │   └── td
│   │   │       └── render-heading.html
│   │   ├── robot.txt
│   │   └── shortcodes
│   │       ├── include.html
│   │       ├── ipynb.html
│   │       └── regionInclude.html
│   ├── package-lock.json
│   ├── package.json
│   └── static
│       ├── favicons
│       │   ├── android-chrome-192x192.png
│       │   ├── android-chrome-512x512.png
│       │   ├── apple-touch-icon.png
│       │   ├── favicon-16x16.png
│       │   ├── favicon-32x32.png
│       │   └── favicon.ico
│       └── js
│           └── w3.js
├── CHANGELOG.md
├── cmd
│   ├── options_test.go
│   ├── options.go
│   ├── root_test.go
│   ├── root.go
│   └── version.txt
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── DEVELOPER.md
├── Dockerfile
├── docs
│   └── en
│       ├── _index.md
│       ├── about
│       │   ├── _index.md
│       │   └── faq.md
│       ├── concepts
│       │   ├── _index.md
│       │   └── telemetry
│       │       ├── index.md
│       │       ├── telemetry_flow.png
│       │       └── telemetry_traces.png
│       ├── getting-started
│       │   ├── _index.md
│       │   ├── colab_quickstart.ipynb
│       │   ├── configure.md
│       │   ├── introduction
│       │   │   ├── _index.md
│       │   │   └── architecture.png
│       │   ├── local_quickstart_go.md
│       │   ├── local_quickstart_js.md
│       │   ├── local_quickstart.md
│       │   ├── mcp_quickstart
│       │   │   ├── _index.md
│       │   │   ├── inspector_tools.png
│       │   │   └── inspector.png
│       │   └── quickstart
│       │       ├── go
│       │       │   ├── genAI
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── genkit
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── langchain
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── openAI
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   └── quickstart_test.go
│       │       ├── golden.txt
│       │       ├── js
│       │       │   ├── genAI
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── genkit
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── langchain
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── llamaindex
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   └── quickstart.test.js
│       │       ├── python
│       │       │   ├── __init__.py
│       │       │   ├── adk
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── core
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── langchain
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── llamaindex
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   └── quickstart_test.py
│       │       └── shared
│       │           ├── cloud_setup.md
│       │           ├── configure_toolbox.md
│       │           └── database_setup.md
│       ├── how-to
│       │   ├── _index.md
│       │   ├── connect_via_geminicli.md
│       │   ├── connect_via_mcp.md
│       │   ├── connect-ide
│       │   │   ├── _index.md
│       │   │   ├── alloydb_pg_admin_mcp.md
│       │   │   ├── alloydb_pg_mcp.md
│       │   │   ├── bigquery_mcp.md
│       │   │   ├── cloud_sql_mssql_admin_mcp.md
│       │   │   ├── cloud_sql_mssql_mcp.md
│       │   │   ├── cloud_sql_mysql_admin_mcp.md
│       │   │   ├── cloud_sql_mysql_mcp.md
│       │   │   ├── cloud_sql_pg_admin_mcp.md
│       │   │   ├── cloud_sql_pg_mcp.md
│       │   │   ├── firestore_mcp.md
│       │   │   ├── looker_mcp.md
│       │   │   ├── mssql_mcp.md
│       │   │   ├── mysql_mcp.md
│       │   │   ├── neo4j_mcp.md
│       │   │   ├── postgres_mcp.md
│       │   │   ├── spanner_mcp.md
│       │   │   └── sqlite_mcp.md
│       │   ├── deploy_docker.md
│       │   ├── deploy_gke.md
│       │   ├── deploy_toolbox.md
│       │   ├── export_telemetry.md
│       │   └── toolbox-ui
│       │       ├── edit-headers.gif
│       │       ├── edit-headers.png
│       │       ├── index.md
│       │       ├── optional-param-checked.png
│       │       ├── optional-param-unchecked.png
│       │       ├── run-tool.gif
│       │       ├── tools.png
│       │       └── toolsets.png
│       ├── reference
│       │   ├── _index.md
│       │   ├── cli.md
│       │   └── prebuilt-tools.md
│       ├── resources
│       │   ├── _index.md
│       │   ├── authServices
│       │   │   ├── _index.md
│       │   │   └── google.md
│       │   ├── sources
│       │   │   ├── _index.md
│       │   │   ├── alloydb-admin.md
│       │   │   ├── alloydb-pg.md
│       │   │   ├── bigquery.md
│       │   │   ├── bigtable.md
│       │   │   ├── cassandra.md
│       │   │   ├── clickhouse.md
│       │   │   ├── cloud-monitoring.md
│       │   │   ├── cloud-sql-admin.md
│       │   │   ├── cloud-sql-mssql.md
│       │   │   ├── cloud-sql-mysql.md
│       │   │   ├── cloud-sql-pg.md
│       │   │   ├── couchbase.md
│       │   │   ├── dataplex.md
│       │   │   ├── dgraph.md
│       │   │   ├── firebird.md
│       │   │   ├── firestore.md
│       │   │   ├── http.md
│       │   │   ├── looker.md
│       │   │   ├── mongodb.md
│       │   │   ├── mssql.md
│       │   │   ├── mysql.md
│       │   │   ├── neo4j.md
│       │   │   ├── oceanbase.md
│       │   │   ├── oracle.md
│       │   │   ├── postgres.md
│       │   │   ├── redis.md
│       │   │   ├── spanner.md
│       │   │   ├── sqlite.md
│       │   │   ├── tidb.md
│       │   │   ├── trino.md
│       │   │   ├── valkey.md
│       │   │   └── yugabytedb.md
│       │   └── tools
│       │       ├── _index.md
│       │       ├── alloydb
│       │       │   ├── _index.md
│       │       │   ├── alloydb-create-cluster.md
│       │       │   ├── alloydb-create-instance.md
│       │       │   ├── alloydb-create-user.md
│       │       │   ├── alloydb-get-cluster.md
│       │       │   ├── alloydb-get-instance.md
│       │       │   ├── alloydb-get-user.md
│       │       │   ├── alloydb-list-clusters.md
│       │       │   ├── alloydb-list-instances.md
│       │       │   ├── alloydb-list-users.md
│       │       │   └── alloydb-wait-for-operation.md
│       │       ├── alloydbainl
│       │       │   ├── _index.md
│       │       │   └── alloydb-ai-nl.md
│       │       ├── bigquery
│       │       │   ├── _index.md
│       │       │   ├── bigquery-analyze-contribution.md
│       │       │   ├── bigquery-conversational-analytics.md
│       │       │   ├── bigquery-execute-sql.md
│       │       │   ├── bigquery-forecast.md
│       │       │   ├── bigquery-get-dataset-info.md
│       │       │   ├── bigquery-get-table-info.md
│       │       │   ├── bigquery-list-dataset-ids.md
│       │       │   ├── bigquery-list-table-ids.md
│       │       │   ├── bigquery-search-catalog.md
│       │       │   └── bigquery-sql.md
│       │       ├── bigtable
│       │       │   ├── _index.md
│       │       │   └── bigtable-sql.md
│       │       ├── cassandra
│       │       │   ├── _index.md
│       │       │   └── cassandra-cql.md
│       │       ├── clickhouse
│       │       │   ├── _index.md
│       │       │   ├── clickhouse-execute-sql.md
│       │       │   ├── clickhouse-list-databases.md
│       │       │   ├── clickhouse-list-tables.md
│       │       │   └── clickhouse-sql.md
│       │       ├── cloudmonitoring
│       │       │   ├── _index.md
│       │       │   └── cloud-monitoring-query-prometheus.md
│       │       ├── cloudsql
│       │       │   ├── _index.md
│       │       │   ├── cloudsqlcreatedatabase.md
│       │       │   ├── cloudsqlcreateusers.md
│       │       │   ├── cloudsqlgetinstances.md
│       │       │   ├── cloudsqllistdatabases.md
│       │       │   ├── cloudsqllistinstances.md
│       │       │   ├── cloudsqlmssqlcreateinstance.md
│       │       │   ├── cloudsqlmysqlcreateinstance.md
│       │       │   ├── cloudsqlpgcreateinstances.md
│       │       │   └── cloudsqlwaitforoperation.md
│       │       ├── couchbase
│       │       │   ├── _index.md
│       │       │   └── couchbase-sql.md
│       │       ├── dataform
│       │       │   ├── _index.md
│       │       │   └── dataform-compile-local.md
│       │       ├── dataplex
│       │       │   ├── _index.md
│       │       │   ├── dataplex-lookup-entry.md
│       │       │   ├── dataplex-search-aspect-types.md
│       │       │   └── dataplex-search-entries.md
│       │       ├── dgraph
│       │       │   ├── _index.md
│       │       │   └── dgraph-dql.md
│       │       ├── firebird
│       │       │   ├── _index.md
│       │       │   ├── firebird-execute-sql.md
│       │       │   └── firebird-sql.md
│       │       ├── firestore
│       │       │   ├── _index.md
│       │       │   ├── firestore-add-documents.md
│       │       │   ├── firestore-delete-documents.md
│       │       │   ├── firestore-get-documents.md
│       │       │   ├── firestore-get-rules.md
│       │       │   ├── firestore-list-collections.md
│       │       │   ├── firestore-query-collection.md
│       │       │   ├── firestore-query.md
│       │       │   ├── firestore-update-document.md
│       │       │   └── firestore-validate-rules.md
│       │       ├── http
│       │       │   ├── _index.md
│       │       │   └── http.md
│       │       ├── looker
│       │       │   ├── _index.md
│       │       │   ├── looker-add-dashboard-element.md
│       │       │   ├── looker-conversational-analytics.md
│       │       │   ├── looker-get-dashboards.md
│       │       │   ├── looker-get-dimensions.md
│       │       │   ├── looker-get-explores.md
│       │       │   ├── looker-get-filters.md
│       │       │   ├── looker-get-looks.md
│       │       │   ├── looker-get-measures.md
│       │       │   ├── looker-get-models.md
│       │       │   ├── looker-get-parameters.md
│       │       │   ├── looker-health-analyze.md
│       │       │   ├── looker-health-pulse.md
│       │       │   ├── looker-health-vacuum.md
│       │       │   ├── looker-make-dashboard.md
│       │       │   ├── looker-make-look.md
│       │       │   ├── looker-query-sql.md
│       │       │   ├── looker-query-url.md
│       │       │   ├── looker-query.md
│       │       │   └── looker-run-look.md
│       │       ├── mongodb
│       │       │   ├── _index.md
│       │       │   ├── mongodb-aggregate.md
│       │       │   ├── mongodb-delete-many.md
│       │       │   ├── mongodb-delete-one.md
│       │       │   ├── mongodb-find-one.md
│       │       │   ├── mongodb-find.md
│       │       │   ├── mongodb-insert-many.md
│       │       │   ├── mongodb-insert-one.md
│       │       │   ├── mongodb-update-many.md
│       │       │   └── mongodb-update-one.md
│       │       ├── mssql
│       │       │   ├── _index.md
│       │       │   ├── mssql-execute-sql.md
│       │       │   ├── mssql-list-tables.md
│       │       │   └── mssql-sql.md
│       │       ├── mysql
│       │       │   ├── _index.md
│       │       │   ├── mysql-execute-sql.md
│       │       │   ├── mysql-list-active-queries.md
│       │       │   ├── mysql-list-table-fragmentation.md
│       │       │   ├── mysql-list-tables-missing-unique-indexes.md
│       │       │   ├── mysql-list-tables.md
│       │       │   └── mysql-sql.md
│       │       ├── neo4j
│       │       │   ├── _index.md
│       │       │   ├── neo4j-cypher.md
│       │       │   ├── neo4j-execute-cypher.md
│       │       │   └── neo4j-schema.md
│       │       ├── oceanbase
│       │       │   ├── _index.md
│       │       │   ├── oceanbase-execute-sql.md
│       │       │   └── oceanbase-sql.md
│       │       ├── oracle
│       │       │   ├── _index.md
│       │       │   ├── oracle-execute-sql.md
│       │       │   └── oracle-sql.md
│       │       ├── postgres
│       │       │   ├── _index.md
│       │       │   ├── postgres-execute-sql.md
│       │       │   ├── postgres-list-active-queries.md
│       │       │   ├── postgres-list-available-extensions.md
│       │       │   ├── postgres-list-installed-extensions.md
│       │       │   ├── postgres-list-tables.md
│       │       │   └── postgres-sql.md
│       │       ├── redis
│       │       │   ├── _index.md
│       │       │   └── redis.md
│       │       ├── spanner
│       │       │   ├── _index.md
│       │       │   ├── spanner-execute-sql.md
│       │       │   ├── spanner-list-tables.md
│       │       │   └── spanner-sql.md
│       │       ├── sqlite
│       │       │   ├── _index.md
│       │       │   ├── sqlite-execute-sql.md
│       │       │   └── sqlite-sql.md
│       │       ├── tidb
│       │       │   ├── _index.md
│       │       │   ├── tidb-execute-sql.md
│       │       │   └── tidb-sql.md
│       │       ├── trino
│       │       │   ├── _index.md
│       │       │   ├── trino-execute-sql.md
│       │       │   └── trino-sql.md
│       │       ├── utility
│       │       │   ├── _index.md
│       │       │   └── wait.md
│       │       ├── valkey
│       │       │   ├── _index.md
│       │       │   └── valkey.md
│       │       └── yuagbytedb
│       │           ├── _index.md
│       │           └── yugabytedb-sql.md
│       ├── samples
│       │   ├── _index.md
│       │   ├── alloydb
│       │   │   ├── _index.md
│       │   │   ├── ai-nl
│       │   │   │   ├── alloydb_ai_nl.ipynb
│       │   │   │   └── index.md
│       │   │   └── mcp_quickstart.md
│       │   ├── bigquery
│       │   │   ├── _index.md
│       │   │   ├── colab_quickstart_bigquery.ipynb
│       │   │   ├── local_quickstart.md
│       │   │   └── mcp_quickstart
│       │   │       ├── _index.md
│       │   │       ├── inspector_tools.png
│       │   │       └── inspector.png
│       │   └── looker
│       │       ├── _index.md
│       │       ├── looker_gemini_oauth
│       │       │   ├── _index.md
│       │       │   ├── authenticated.png
│       │       │   ├── authorize.png
│       │       │   └── registration.png
│       │       ├── looker_gemini.md
│       │       └── looker_mcp_inspector
│       │           ├── _index.md
│       │           ├── inspector_tools.png
│       │           └── inspector.png
│       └── sdks
│           ├── _index.md
│           ├── go-sdk.md
│           ├── js-sdk.md
│           └── python-sdk.md
├── gemini-extension.json
├── go.mod
├── go.sum
├── internal
│   ├── auth
│   │   ├── auth.go
│   │   └── google
│   │       └── google.go
│   ├── log
│   │   ├── handler.go
│   │   ├── log_test.go
│   │   ├── log.go
│   │   └── logger.go
│   ├── prebuiltconfigs
│   │   ├── prebuiltconfigs_test.go
│   │   ├── prebuiltconfigs.go
│   │   └── tools
│   │       ├── alloydb-postgres-admin.yaml
│   │       ├── alloydb-postgres-observability.yaml
│   │       ├── alloydb-postgres.yaml
│   │       ├── bigquery.yaml
│   │       ├── clickhouse.yaml
│   │       ├── cloud-sql-mssql-admin.yaml
│   │       ├── cloud-sql-mssql-observability.yaml
│   │       ├── cloud-sql-mssql.yaml
│   │       ├── cloud-sql-mysql-admin.yaml
│   │       ├── cloud-sql-mysql-observability.yaml
│   │       ├── cloud-sql-mysql.yaml
│   │       ├── cloud-sql-postgres-admin.yaml
│   │       ├── cloud-sql-postgres-observability.yaml
│   │       ├── cloud-sql-postgres.yaml
│   │       ├── dataplex.yaml
│   │       ├── firestore.yaml
│   │       ├── looker-conversational-analytics.yaml
│   │       ├── looker.yaml
│   │       ├── mssql.yaml
│   │       ├── mysql.yaml
│   │       ├── neo4j.yaml
│   │       ├── oceanbase.yaml
│   │       ├── postgres.yaml
│   │       ├── spanner-postgres.yaml
│   │       ├── spanner.yaml
│   │       └── sqlite.yaml
│   ├── server
│   │   ├── api_test.go
│   │   ├── api.go
│   │   ├── common_test.go
│   │   ├── config.go
│   │   ├── mcp
│   │   │   ├── jsonrpc
│   │   │   │   ├── jsonrpc_test.go
│   │   │   │   └── jsonrpc.go
│   │   │   ├── mcp.go
│   │   │   ├── util
│   │   │   │   └── lifecycle.go
│   │   │   ├── v20241105
│   │   │   │   ├── method.go
│   │   │   │   └── types.go
│   │   │   ├── v20250326
│   │   │   │   ├── method.go
│   │   │   │   └── types.go
│   │   │   └── v20250618
│   │   │       ├── method.go
│   │   │       └── types.go
│   │   ├── mcp_test.go
│   │   ├── mcp.go
│   │   ├── server_test.go
│   │   ├── server.go
│   │   ├── static
│   │   │   ├── assets
│   │   │   │   └── mcptoolboxlogo.png
│   │   │   ├── css
│   │   │   │   └── style.css
│   │   │   ├── index.html
│   │   │   ├── js
│   │   │   │   ├── auth.js
│   │   │   │   ├── loadTools.js
│   │   │   │   ├── mainContent.js
│   │   │   │   ├── navbar.js
│   │   │   │   ├── runTool.js
│   │   │   │   ├── toolDisplay.js
│   │   │   │   ├── tools.js
│   │   │   │   └── toolsets.js
│   │   │   ├── tools.html
│   │   │   └── toolsets.html
│   │   ├── web_test.go
│   │   └── web.go
│   ├── sources
│   │   ├── alloydbadmin
│   │   │   ├── alloydbadmin_test.go
│   │   │   └── alloydbadmin.go
│   │   ├── alloydbpg
│   │   │   ├── alloydb_pg_test.go
│   │   │   └── alloydb_pg.go
│   │   ├── bigquery
│   │   │   ├── bigquery_test.go
│   │   │   └── bigquery.go
│   │   ├── bigtable
│   │   │   ├── bigtable_test.go
│   │   │   └── bigtable.go
│   │   ├── cassandra
│   │   │   ├── cassandra_test.go
│   │   │   └── cassandra.go
│   │   ├── clickhouse
│   │   │   ├── clickhouse_test.go
│   │   │   └── clickhouse.go
│   │   ├── cloudmonitoring
│   │   │   ├── cloud_monitoring_test.go
│   │   │   └── cloud_monitoring.go
│   │   ├── cloudsqladmin
│   │   │   ├── cloud_sql_admin_test.go
│   │   │   └── cloud_sql_admin.go
│   │   ├── cloudsqlmssql
│   │   │   ├── cloud_sql_mssql_test.go
│   │   │   └── cloud_sql_mssql.go
│   │   ├── cloudsqlmysql
│   │   │   ├── cloud_sql_mysql_test.go
│   │   │   └── cloud_sql_mysql.go
│   │   ├── cloudsqlpg
│   │   │   ├── cloud_sql_pg_test.go
│   │   │   └── cloud_sql_pg.go
│   │   ├── couchbase
│   │   │   ├── couchbase_test.go
│   │   │   └── couchbase.go
│   │   ├── dataplex
│   │   │   ├── dataplex_test.go
│   │   │   └── dataplex.go
│   │   ├── dgraph
│   │   │   ├── dgraph_test.go
│   │   │   └── dgraph.go
│   │   ├── dialect.go
│   │   ├── firebird
│   │   │   ├── firebird_test.go
│   │   │   └── firebird.go
│   │   ├── firestore
│   │   │   ├── firestore_test.go
│   │   │   └── firestore.go
│   │   ├── http
│   │   │   ├── http_test.go
│   │   │   └── http.go
│   │   ├── ip_type.go
│   │   ├── looker
│   │   │   ├── looker_test.go
│   │   │   └── looker.go
│   │   ├── mongodb
│   │   │   ├── mongodb_test.go
│   │   │   └── mongodb.go
│   │   ├── mssql
│   │   │   ├── mssql_test.go
│   │   │   └── mssql.go
│   │   ├── mysql
│   │   │   ├── mysql_test.go
│   │   │   └── mysql.go
│   │   ├── neo4j
│   │   │   ├── neo4j_test.go
│   │   │   └── neo4j.go
│   │   ├── oceanbase
│   │   │   ├── oceanbase_test.go
│   │   │   └── oceanbase.go
│   │   ├── oracle
│   │   │   └── oracle.go
│   │   ├── postgres
│   │   │   ├── postgres_test.go
│   │   │   └── postgres.go
│   │   ├── redis
│   │   │   ├── redis_test.go
│   │   │   └── redis.go
│   │   ├── sources.go
│   │   ├── spanner
│   │   │   ├── spanner_test.go
│   │   │   └── spanner.go
│   │   ├── sqlite
│   │   │   ├── sqlite_test.go
│   │   │   └── sqlite.go
│   │   ├── tidb
│   │   │   ├── tidb_test.go
│   │   │   └── tidb.go
│   │   ├── trino
│   │   │   ├── trino_test.go
│   │   │   └── trino.go
│   │   ├── util.go
│   │   ├── valkey
│   │   │   ├── valkey_test.go
│   │   │   └── valkey.go
│   │   └── yugabytedb
│   │       ├── yugabytedb_test.go
│   │       └── yugabytedb.go
│   ├── telemetry
│   │   ├── instrumentation.go
│   │   └── telemetry.go
│   ├── testutils
│   │   └── testutils.go
│   ├── tools
│   │   ├── alloydb
│   │   │   ├── alloydbcreatecluster
│   │   │   │   ├── alloydbcreatecluster_test.go
│   │   │   │   └── alloydbcreatecluster.go
│   │   │   ├── alloydbcreateinstance
│   │   │   │   ├── alloydbcreateinstance_test.go
│   │   │   │   └── alloydbcreateinstance.go
│   │   │   ├── alloydbcreateuser
│   │   │   │   ├── alloydbcreateuser_test.go
│   │   │   │   └── alloydbcreateuser.go
│   │   │   ├── alloydbgetcluster
│   │   │   │   ├── alloydbgetcluster_test.go
│   │   │   │   └── alloydbgetcluster.go
│   │   │   ├── alloydbgetinstance
│   │   │   │   ├── alloydbgetinstance_test.go
│   │   │   │   └── alloydbgetinstance.go
│   │   │   ├── alloydbgetuser
│   │   │   │   ├── alloydbgetuser_test.go
│   │   │   │   └── alloydbgetuser.go
│   │   │   ├── alloydblistclusters
│   │   │   │   ├── alloydblistclusters_test.go
│   │   │   │   └── alloydblistclusters.go
│   │   │   ├── alloydblistinstances
│   │   │   │   ├── alloydblistinstances_test.go
│   │   │   │   └── alloydblistinstances.go
│   │   │   ├── alloydblistusers
│   │   │   │   ├── alloydblistusers_test.go
│   │   │   │   └── alloydblistusers.go
│   │   │   └── alloydbwaitforoperation
│   │   │       ├── alloydbwaitforoperation_test.go
│   │   │       └── alloydbwaitforoperation.go
│   │   ├── alloydbainl
│   │   │   ├── alloydbainl_test.go
│   │   │   └── alloydbainl.go
│   │   ├── bigquery
│   │   │   ├── bigqueryanalyzecontribution
│   │   │   │   ├── bigqueryanalyzecontribution_test.go
│   │   │   │   └── bigqueryanalyzecontribution.go
│   │   │   ├── bigquerycommon
│   │   │   │   ├── table_name_parser_test.go
│   │   │   │   ├── table_name_parser.go
│   │   │   │   └── util.go
│   │   │   ├── bigqueryconversationalanalytics
│   │   │   │   ├── bigqueryconversationalanalytics_test.go
│   │   │   │   └── bigqueryconversationalanalytics.go
│   │   │   ├── bigqueryexecutesql
│   │   │   │   ├── bigqueryexecutesql_test.go
│   │   │   │   └── bigqueryexecutesql.go
│   │   │   ├── bigqueryforecast
│   │   │   │   ├── bigqueryforecast_test.go
│   │   │   │   └── bigqueryforecast.go
│   │   │   ├── bigquerygetdatasetinfo
│   │   │   │   ├── bigquerygetdatasetinfo_test.go
│   │   │   │   └── bigquerygetdatasetinfo.go
│   │   │   ├── bigquerygettableinfo
│   │   │   │   ├── bigquerygettableinfo_test.go
│   │   │   │   └── bigquerygettableinfo.go
│   │   │   ├── bigquerylistdatasetids
│   │   │   │   ├── bigquerylistdatasetids_test.go
│   │   │   │   └── bigquerylistdatasetids.go
│   │   │   ├── bigquerylisttableids
│   │   │   │   ├── bigquerylisttableids_test.go
│   │   │   │   └── bigquerylisttableids.go
│   │   │   ├── bigquerysearchcatalog
│   │   │   │   ├── bigquerysearchcatalog_test.go
│   │   │   │   └── bigquerysearchcatalog.go
│   │   │   └── bigquerysql
│   │   │       ├── bigquerysql_test.go
│   │   │       └── bigquerysql.go
│   │   ├── bigtable
│   │   │   ├── bigtable_test.go
│   │   │   └── bigtable.go
│   │   ├── cassandra
│   │   │   └── cassandracql
│   │   │       ├── cassandracql_test.go
│   │   │       └── cassandracql.go
│   │   ├── clickhouse
│   │   │   ├── clickhouseexecutesql
│   │   │   │   ├── clickhouseexecutesql_test.go
│   │   │   │   └── clickhouseexecutesql.go
│   │   │   ├── clickhouselistdatabases
│   │   │   │   ├── clickhouselistdatabases_test.go
│   │   │   │   └── clickhouselistdatabases.go
│   │   │   ├── clickhouselisttables
│   │   │   │   ├── clickhouselisttables_test.go
│   │   │   │   └── clickhouselisttables.go
│   │   │   └── clickhousesql
│   │   │       ├── clickhousesql_test.go
│   │   │       └── clickhousesql.go
│   │   ├── cloudmonitoring
│   │   │   ├── cloudmonitoring_test.go
│   │   │   └── cloudmonitoring.go
│   │   ├── cloudsql
│   │   │   ├── cloudsqlcreatedatabase
│   │   │   │   ├── cloudsqlcreatedatabase_test.go
│   │   │   │   └── cloudsqlcreatedatabase.go
│   │   │   ├── cloudsqlcreateusers
│   │   │   │   ├── cloudsqlcreateusers_test.go
│   │   │   │   └── cloudsqlcreateusers.go
│   │   │   ├── cloudsqlgetinstances
│   │   │   │   ├── cloudsqlgetinstances_test.go
│   │   │   │   └── cloudsqlgetinstances.go
│   │   │   ├── cloudsqllistdatabases
│   │   │   │   ├── cloudsqllistdatabases_test.go
│   │   │   │   └── cloudsqllistdatabases.go
│   │   │   ├── cloudsqllistinstances
│   │   │   │   ├── cloudsqllistinstances_test.go
│   │   │   │   └── cloudsqllistinstances.go
│   │   │   └── cloudsqlwaitforoperation
│   │   │       ├── cloudsqlwaitforoperation_test.go
│   │   │       └── cloudsqlwaitforoperation.go
│   │   ├── cloudsqlmssql
│   │   │   └── cloudsqlmssqlcreateinstance
│   │   │       ├── cloudsqlmssqlcreateinstance_test.go
│   │   │       └── cloudsqlmssqlcreateinstance.go
│   │   ├── cloudsqlmysql
│   │   │   └── cloudsqlmysqlcreateinstance
│   │   │       ├── cloudsqlmysqlcreateinstance_test.go
│   │   │       └── cloudsqlmysqlcreateinstance.go
│   │   ├── cloudsqlpg
│   │   │   └── cloudsqlpgcreateinstances
│   │   │       ├── cloudsqlpgcreateinstances_test.go
│   │   │       └── cloudsqlpgcreateinstances.go
│   │   ├── common_test.go
│   │   ├── common.go
│   │   ├── couchbase
│   │   │   ├── couchbase_test.go
│   │   │   └── couchbase.go
│   │   ├── dataform
│   │   │   └── dataformcompilelocal
│   │   │       ├── dataformcompilelocal_test.go
│   │   │       └── dataformcompilelocal.go
│   │   ├── dataplex
│   │   │   ├── dataplexlookupentry
│   │   │   │   ├── dataplexlookupentry_test.go
│   │   │   │   └── dataplexlookupentry.go
│   │   │   ├── dataplexsearchaspecttypes
│   │   │   │   ├── dataplexsearchaspecttypes_test.go
│   │   │   │   └── dataplexsearchaspecttypes.go
│   │   │   └── dataplexsearchentries
│   │   │       ├── dataplexsearchentries_test.go
│   │   │       └── dataplexsearchentries.go
│   │   ├── dgraph
│   │   │   ├── dgraph_test.go
│   │   │   └── dgraph.go
│   │   ├── firebird
│   │   │   ├── firebirdexecutesql
│   │   │   │   ├── firebirdexecutesql_test.go
│   │   │   │   └── firebirdexecutesql.go
│   │   │   └── firebirdsql
│   │   │       ├── firebirdsql_test.go
│   │   │       └── firebirdsql.go
│   │   ├── firestore
│   │   │   ├── firestoreadddocuments
│   │   │   │   ├── firestoreadddocuments_test.go
│   │   │   │   └── firestoreadddocuments.go
│   │   │   ├── firestoredeletedocuments
│   │   │   │   ├── firestoredeletedocuments_test.go
│   │   │   │   └── firestoredeletedocuments.go
│   │   │   ├── firestoregetdocuments
│   │   │   │   ├── firestoregetdocuments_test.go
│   │   │   │   └── firestoregetdocuments.go
│   │   │   ├── firestoregetrules
│   │   │   │   ├── firestoregetrules_test.go
│   │   │   │   └── firestoregetrules.go
│   │   │   ├── firestorelistcollections
│   │   │   │   ├── firestorelistcollections_test.go
│   │   │   │   └── firestorelistcollections.go
│   │   │   ├── firestorequery
│   │   │   │   ├── firestorequery_test.go
│   │   │   │   └── firestorequery.go
│   │   │   ├── firestorequerycollection
│   │   │   │   ├── firestorequerycollection_test.go
│   │   │   │   └── firestorequerycollection.go
│   │   │   ├── firestoreupdatedocument
│   │   │   │   ├── firestoreupdatedocument_test.go
│   │   │   │   └── firestoreupdatedocument.go
│   │   │   ├── firestorevalidaterules
│   │   │   │   ├── firestorevalidaterules_test.go
│   │   │   │   └── firestorevalidaterules.go
│   │   │   └── util
│   │   │       ├── converter_test.go
│   │   │       ├── converter.go
│   │   │       ├── validator_test.go
│   │   │       └── validator.go
│   │   ├── http
│   │   │   ├── http_test.go
│   │   │   └── http.go
│   │   ├── http_method.go
│   │   ├── looker
│   │   │   ├── lookeradddashboardelement
│   │   │   │   ├── lookeradddashboardelement_test.go
│   │   │   │   └── lookeradddashboardelement.go
│   │   │   ├── lookercommon
│   │   │   │   ├── lookercommon_test.go
│   │   │   │   └── lookercommon.go
│   │   │   ├── lookerconversationalanalytics
│   │   │   │   ├── lookerconversationalanalytics_test.go
│   │   │   │   └── lookerconversationalanalytics.go
│   │   │   ├── lookergetdashboards
│   │   │   │   ├── lookergetdashboards_test.go
│   │   │   │   └── lookergetdashboards.go
│   │   │   ├── lookergetdimensions
│   │   │   │   ├── lookergetdimensions_test.go
│   │   │   │   └── lookergetdimensions.go
│   │   │   ├── lookergetexplores
│   │   │   │   ├── lookergetexplores_test.go
│   │   │   │   └── lookergetexplores.go
│   │   │   ├── lookergetfilters
│   │   │   │   ├── lookergetfilters_test.go
│   │   │   │   └── lookergetfilters.go
│   │   │   ├── lookergetlooks
│   │   │   │   ├── lookergetlooks_test.go
│   │   │   │   └── lookergetlooks.go
│   │   │   ├── lookergetmeasures
│   │   │   │   ├── lookergetmeasures_test.go
│   │   │   │   └── lookergetmeasures.go
│   │   │   ├── lookergetmodels
│   │   │   │   ├── lookergetmodels_test.go
│   │   │   │   └── lookergetmodels.go
│   │   │   ├── lookergetparameters
│   │   │   │   ├── lookergetparameters_test.go
│   │   │   │   └── lookergetparameters.go
│   │   │   ├── lookerhealthanalyze
│   │   │   │   ├── lookerhealthanalyze_test.go
│   │   │   │   └── lookerhealthanalyze.go
│   │   │   ├── lookerhealthpulse
│   │   │   │   ├── lookerhealthpulse_test.go
│   │   │   │   └── lookerhealthpulse.go
│   │   │   ├── lookerhealthvacuum
│   │   │   │   ├── lookerhealthvacuum_test.go
│   │   │   │   └── lookerhealthvacuum.go
│   │   │   ├── lookermakedashboard
│   │   │   │   ├── lookermakedashboard_test.go
│   │   │   │   └── lookermakedashboard.go
│   │   │   ├── lookermakelook
│   │   │   │   ├── lookermakelook_test.go
│   │   │   │   └── lookermakelook.go
│   │   │   ├── lookerquery
│   │   │   │   ├── lookerquery_test.go
│   │   │   │   └── lookerquery.go
│   │   │   ├── lookerquerysql
│   │   │   │   ├── lookerquerysql_test.go
│   │   │   │   └── lookerquerysql.go
│   │   │   ├── lookerqueryurl
│   │   │   │   ├── lookerqueryurl_test.go
│   │   │   │   └── lookerqueryurl.go
│   │   │   └── lookerrunlook
│   │   │       ├── lookerrunlook_test.go
│   │   │       └── lookerrunlook.go
│   │   ├── mongodb
│   │   │   ├── mongodbaggregate
│   │   │   │   ├── mongodbaggregate_test.go
│   │   │   │   └── mongodbaggregate.go
│   │   │   ├── mongodbdeletemany
│   │   │   │   ├── mongodbdeletemany_test.go
│   │   │   │   └── mongodbdeletemany.go
│   │   │   ├── mongodbdeleteone
│   │   │   │   ├── mongodbdeleteone_test.go
│   │   │   │   └── mongodbdeleteone.go
│   │   │   ├── mongodbfind
│   │   │   │   ├── mongodbfind_test.go
│   │   │   │   └── mongodbfind.go
│   │   │   ├── mongodbfindone
│   │   │   │   ├── mongodbfindone_test.go
│   │   │   │   └── mongodbfindone.go
│   │   │   ├── mongodbinsertmany
│   │   │   │   ├── mongodbinsertmany_test.go
│   │   │   │   └── mongodbinsertmany.go
│   │   │   ├── mongodbinsertone
│   │   │   │   ├── mongodbinsertone_test.go
│   │   │   │   └── mongodbinsertone.go
│   │   │   ├── mongodbupdatemany
│   │   │   │   ├── mongodbupdatemany_test.go
│   │   │   │   └── mongodbupdatemany.go
│   │   │   └── mongodbupdateone
│   │   │       ├── mongodbupdateone_test.go
│   │   │       └── mongodbupdateone.go
│   │   ├── mssql
│   │   │   ├── mssqlexecutesql
│   │   │   │   ├── mssqlexecutesql_test.go
│   │   │   │   └── mssqlexecutesql.go
│   │   │   ├── mssqllisttables
│   │   │   │   ├── mssqllisttables_test.go
│   │   │   │   └── mssqllisttables.go
│   │   │   └── mssqlsql
│   │   │       ├── mssqlsql_test.go
│   │   │       └── mssqlsql.go
│   │   ├── mysql
│   │   │   ├── mysqlcommon
│   │   │   │   └── mysqlcommon.go
│   │   │   ├── mysqlexecutesql
│   │   │   │   ├── mysqlexecutesql_test.go
│   │   │   │   └── mysqlexecutesql.go
│   │   │   ├── mysqllistactivequeries
│   │   │   │   ├── mysqllistactivequeries_test.go
│   │   │   │   └── mysqllistactivequeries.go
│   │   │   ├── mysqllisttablefragmentation
│   │   │   │   ├── mysqllisttablefragmentation_test.go
│   │   │   │   └── mysqllisttablefragmentation.go
│   │   │   ├── mysqllisttables
│   │   │   │   ├── mysqllisttables_test.go
│   │   │   │   └── mysqllisttables.go
│   │   │   ├── mysqllisttablesmissinguniqueindexes
│   │   │   │   ├── mysqllisttablesmissinguniqueindexes_test.go
│   │   │   │   └── mysqllisttablesmissinguniqueindexes.go
│   │   │   └── mysqlsql
│   │   │       ├── mysqlsql_test.go
│   │   │       └── mysqlsql.go
│   │   ├── neo4j
│   │   │   ├── neo4jcypher
│   │   │   │   ├── neo4jcypher_test.go
│   │   │   │   └── neo4jcypher.go
│   │   │   ├── neo4jexecutecypher
│   │   │   │   ├── classifier
│   │   │   │   │   ├── classifier_test.go
│   │   │   │   │   └── classifier.go
│   │   │   │   ├── neo4jexecutecypher_test.go
│   │   │   │   └── neo4jexecutecypher.go
│   │   │   └── neo4jschema
│   │   │       ├── cache
│   │   │       │   ├── cache_test.go
│   │   │       │   └── cache.go
│   │   │       ├── helpers
│   │   │       │   ├── helpers_test.go
│   │   │       │   └── helpers.go
│   │   │       ├── neo4jschema_test.go
│   │   │       ├── neo4jschema.go
│   │   │       └── types
│   │   │           └── types.go
│   │   ├── oceanbase
│   │   │   ├── oceanbaseexecutesql
│   │   │   │   ├── oceanbaseexecutesql_test.go
│   │   │   │   └── oceanbaseexecutesql.go
│   │   │   └── oceanbasesql
│   │   │       ├── oceanbasesql_test.go
│   │   │       └── oceanbasesql.go
│   │   ├── oracle
│   │   │   ├── oracleexecutesql
│   │   │   │   └── oracleexecutesql.go
│   │   │   └── oraclesql
│   │   │       └── oraclesql.go
│   │   ├── parameters_test.go
│   │   ├── parameters.go
│   │   ├── postgres
│   │   │   ├── postgresexecutesql
│   │   │   │   ├── postgresexecutesql_test.go
│   │   │   │   └── postgresexecutesql.go
│   │   │   ├── postgreslistactivequeries
│   │   │   │   ├── postgreslistactivequeries_test.go
│   │   │   │   └── postgreslistactivequeries.go
│   │   │   ├── postgreslistavailableextensions
│   │   │   │   ├── postgreslistavailableextensions_test.go
│   │   │   │   └── postgreslistavailableextensions.go
│   │   │   ├── postgreslistinstalledextensions
│   │   │   │   ├── postgreslistinstalledextensions_test.go
│   │   │   │   └── postgreslistinstalledextensions.go
│   │   │   ├── postgreslisttables
│   │   │   │   ├── postgreslisttables_test.go
│   │   │   │   └── postgreslisttables.go
│   │   │   └── postgressql
│   │   │       ├── postgressql_test.go
│   │   │       └── postgressql.go
│   │   ├── redis
│   │   │   ├── redis_test.go
│   │   │   └── redis.go
│   │   ├── spanner
│   │   │   ├── spannerexecutesql
│   │   │   │   ├── spannerexecutesql_test.go
│   │   │   │   └── spannerexecutesql.go
│   │   │   ├── spannerlisttables
│   │   │   │   ├── spannerlisttables_test.go
│   │   │   │   └── spannerlisttables.go
│   │   │   └── spannersql
│   │   │       ├── spanner_test.go
│   │   │       └── spannersql.go
│   │   ├── sqlite
│   │   │   ├── sqliteexecutesql
│   │   │   │   ├── sqliteexecutesql_test.go
│   │   │   │   └── sqliteexecutesql.go
│   │   │   └── sqlitesql
│   │   │       ├── sqlitesql_test.go
│   │   │       └── sqlitesql.go
│   │   ├── tidb
│   │   │   ├── tidbexecutesql
│   │   │   │   ├── tidbexecutesql_test.go
│   │   │   │   └── tidbexecutesql.go
│   │   │   └── tidbsql
│   │   │       ├── tidbsql_test.go
│   │   │       └── tidbsql.go
│   │   ├── tools_test.go
│   │   ├── tools.go
│   │   ├── toolsets.go
│   │   ├── trino
│   │   │   ├── trinoexecutesql
│   │   │   │   ├── trinoexecutesql_test.go
│   │   │   │   └── trinoexecutesql.go
│   │   │   └── trinosql
│   │   │       ├── trinosql_test.go
│   │   │       └── trinosql.go
│   │   ├── utility
│   │   │   └── wait
│   │   │       ├── wait_test.go
│   │   │       └── wait.go
│   │   ├── valkey
│   │   │   ├── valkey_test.go
│   │   │   └── valkey.go
│   │   └── yugabytedbsql
│   │       ├── yugabytedbsql_test.go
│   │       └── yugabytedbsql.go
│   └── util
│       └── util.go
├── LICENSE
├── logo.png
├── main.go
├── MCP-TOOLBOX-EXTENSION.md
├── README.md
└── tests
    ├── alloydb
    │   ├── alloydb_integration_test.go
    │   └── alloydb_wait_for_operation_test.go
    ├── alloydbainl
    │   └── alloydb_ai_nl_integration_test.go
    ├── alloydbpg
    │   └── alloydb_pg_integration_test.go
    ├── auth.go
    ├── bigquery
    │   └── bigquery_integration_test.go
    ├── bigtable
    │   └── bigtable_integration_test.go
    ├── cassandra
    │   └── cassandra_integration_test.go
    ├── clickhouse
    │   └── clickhouse_integration_test.go
    ├── cloudmonitoring
    │   └── cloud_monitoring_integration_test.go
    ├── cloudsql
    │   ├── cloud_sql_create_database_test.go
    │   ├── cloud_sql_create_users_test.go
    │   ├── cloud_sql_get_instances_test.go
    │   ├── cloud_sql_list_databases_test.go
    │   ├── cloudsql_list_instances_test.go
    │   └── cloudsql_wait_for_operation_test.go
    ├── cloudsqlmssql
    │   ├── cloud_sql_mssql_create_instance_integration_test.go
    │   └── cloud_sql_mssql_integration_test.go
    ├── cloudsqlmysql
    │   ├── cloud_sql_mysql_create_instance_integration_test.go
    │   └── cloud_sql_mysql_integration_test.go
    ├── cloudsqlpg
    │   ├── cloud_sql_pg_create_instances_test.go
    │   └── cloud_sql_pg_integration_test.go
    ├── common.go
    ├── couchbase
    │   └── couchbase_integration_test.go
    ├── dataform
    │   └── dataform_integration_test.go
    ├── dataplex
    │   └── dataplex_integration_test.go
    ├── dgraph
    │   └── dgraph_integration_test.go
    ├── firebird
    │   └── firebird_integration_test.go
    ├── firestore
    │   └── firestore_integration_test.go
    ├── http
    │   └── http_integration_test.go
    ├── looker
    │   └── looker_integration_test.go
    ├── mongodb
    │   └── mongodb_integration_test.go
    ├── mssql
    │   └── mssql_integration_test.go
    ├── mysql
    │   └── mysql_integration_test.go
    ├── neo4j
    │   └── neo4j_integration_test.go
    ├── oceanbase
    │   └── oceanbase_integration_test.go
    ├── option.go
    ├── oracle
    │   └── oracle_integration_test.go
    ├── postgres
    │   └── postgres_integration_test.go
    ├── redis
    │   └── redis_test.go
    ├── server.go
    ├── source.go
    ├── spanner
    │   └── spanner_integration_test.go
    ├── sqlite
    │   └── sqlite_integration_test.go
    ├── tidb
    │   └── tidb_integration_test.go
    ├── tool.go
    ├── trino
    │   └── trino_integration_test.go
    ├── utility
    │   └── wait_integration_test.go
    ├── valkey
    │   └── valkey_test.go
    └── yugabytedb
        └── yugabytedb_integration_test.go
```

# Files

--------------------------------------------------------------------------------
/docs/en/resources/tools/bigquery/bigquery-execute-sql.md:
--------------------------------------------------------------------------------

```markdown
---
title: "bigquery-execute-sql"
type: docs
weight: 1
description: >
  A "bigquery-execute-sql" tool executes a SQL statement against BigQuery.
aliases:
- /resources/tools/bigquery-execute-sql
---

## About

A `bigquery-execute-sql` tool executes a SQL statement against BigQuery.
It's compatible with the following sources:

- [bigquery](../../sources/bigquery.md)

`bigquery-execute-sql` accepts the following parameters:

- **`sql`** (required): The GoogleSQL statement to execute.
- **`dry_run`** (optional): If set to `true`, the query is validated but not run,
  returning information about the execution instead. Defaults to `false`.

The behavior of this tool is influenced by the `writeMode` setting on its `bigquery` source:

- **`allowed` (default):** All SQL statements are permitted.
- **`blocked`:** Only `SELECT` statements are allowed. Any other type of statement (e.g., `INSERT`, `UPDATE`, `CREATE`) will be rejected.
- **`protected`:** This mode enables session-based execution. `SELECT` statements can be used on all tables, while write operations are allowed only for the session's temporary dataset (e.g., `CREATE TEMP TABLE ...`). This prevents modifications to permanent datasets while allowing stateful, multi-step operations within a secure session.

The tool's behavior is also influenced by the `allowedDatasets` restriction on the
`bigquery` source. Like `writeMode`, this setting provides an additional layer of security by controlling which datasets can be accessed (see the configuration sketch after this list):

- **Without `allowedDatasets` restriction:** The tool can execute any valid GoogleSQL
  query.
- **With `allowedDatasets` restriction:** Before execution, the tool performs a dry run
  to analyze the query.
  It will reject the query if it attempts to access any table outside the
  allowed `datasets` list. To enforce this restriction, the following operations
  are also disallowed:
  - **Dataset-level operations** (e.g., `CREATE SCHEMA`, `ALTER SCHEMA`).
  - **Unanalyzable operations** where the accessed tables cannot be determined
    statically (e.g., `EXECUTE IMMEDIATE`, `CREATE PROCEDURE`, `CALL`).
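
For reference, a minimal sketch of a `bigquery` source configured with these
settings (the `writeMode` and `allowedDatasets` field names follow the description
above; the project and dataset names are hypothetical):

```yaml
sources:
  my-bigquery-source:
    kind: bigquery
    project: my-project-id      # hypothetical project ID
    writeMode: protected        # one of: allowed (default), blocked, protected
    allowedDatasets:            # only these datasets may be accessed
      - analytics
```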

> **Note:** This tool is intended for developer assistant workflows with human-in-the-loop and shouldn't be used for production agents.

## Example

```yaml
tools:
 execute_sql_tool:
    kind: bigquery-execute-sql
    source: my-bigquery-source
    description: Use this tool to execute SQL statements.
```

## Reference

| **field**   |                  **type**                  | **required** | **description**                                                                                  |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind        |                   string                   |     true     | Must be "bigquery-execute-sql".                                                                  |
| source      |                   string                   |     true     | Name of the source the SQL should execute on.                                                    |
| description |                   string                   |     true     | Description of the tool that is passed to the LLM.                                               |

```

--------------------------------------------------------------------------------
/.hugo/hugo.toml:
--------------------------------------------------------------------------------

```toml
title = 'MCP Toolbox for Databases'
relativeURLs = false

languageCode = 'en-us'
defaultContentLanguage = "en"
defaultContentLanguageInSubdir = false

enableGitInfo = true
enableRobotsTXT = true

ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quickstart/go"]

[languages]
  [languages.en]
    languageName ="English"
    weight = 1

[module]
  proxy = "direct"
  [module.hugoVersion]
    extended = true
    min = "0.146.0"
  [[module.mounts]]
    source = "../docs/en"
    target = 'content'
  [[module.imports]]
    path = "github.com/google/docsy"
    disable = false
  [[module.imports]]
    path = "github.com/martignoni/hugo-notice"

[params]
  description = "MCP Toolbox for Databases is an open source MCP server for databases. It enables you to develop tools easier, faster, and more securely by handling the complexities such as connection pooling, authentication, and more."
  copyright = "Google LLC"
  github_repo = "https://github.com/googleapis/genai-toolbox"
  github_project_repo = "https://github.com/googleapis/genai-toolbox"
  github_subdir = "docs"
  offlineSearch = true
  version_menu = "Releases"
  [params.ui]
    ul_show = 100
    showLightDarkModeMenu = true
    breadcrumb_disable = true
    sidebar_menu_foldable = true
    sidebar_menu_compact = false

[[params.versions]]
  version = "Dev"
  url = "https://googleapis.github.io/genai-toolbox/dev/"

# Add a new version block here before every release
# The order of versions in this file is mirrored into the dropdown

[[params.versions]]
  version = "v0.17.0"
  url = "https://googleapis.github.io/genai-toolbox/v0.17.0/"
  
[[params.versions]]
  version = "v0.16.0"
  url = "https://googleapis.github.io/genai-toolbox/v0.16.0/"

[[params.versions]]
  version = "v0.15.0"
  url = "https://googleapis.github.io/genai-toolbox/v0.15.0/"

[[params.versions]]
  version = "v0.14.0"
  url = "https://googleapis.github.io/genai-toolbox/v0.14.0/"

[[params.versions]]
  version = "v0.13.0"
  url = "https://googleapis.github.io/genai-toolbox/v0.13.0/"

[[params.versions]]
  version = "v0.12.0"
  url = "https://googleapis.github.io/genai-toolbox/v0.12.0/"

[[params.versions]]
  version = "v0.11.0"
  url = "https://googleapis.github.io/genai-toolbox/v0.11.0/"

[[params.versions]]
  version = "v0.10.0"
  url = "https://googleapis.github.io/genai-toolbox/v0.10.0/"

[[params.versions]]
  version = "v0.9.0"
  url = "https://googleapis.github.io/genai-toolbox/v0.9.0/"

[[params.versions]]
  version = "v0.8.0"
  url = "https://googleapis.github.io/genai-toolbox/v0.8.0/"


[[menu.main]]
  name = "GitHub"
  weight = 50
  url = "https://github.com/googleapis/genai-toolbox"
  pre = "<i class='fa-brands fa-github'></i>"

[markup.goldmark.renderer]
  unsafe= true

[markup.highlight]
  noClasses = false
  style = "tango"

[outputFormats]
  [outputFormats.LLMS]
    mediaType = "text/plain"
    baseName = "llms"
    isPlainText = true
    root = true
  [outputFormats.LLMS-FULL]
    mediaType = "text/plain"
    baseName = "llms-full"
    isPlainText = true
    root = true
  [outputFormats.releases]
    baseName = 'releases'
    isPlainText = true
    mediaType = 'text/releases'

[mediaTypes."text/releases"]
  suffixes = ["releases"]

[outputs]
  home = ["HTML", "RSS", "LLMS", "LLMS-FULL", "releases"]

```

--------------------------------------------------------------------------------
/docs/en/resources/sources/oceanbase.md:
--------------------------------------------------------------------------------

```markdown
---
title: "OceanBase"
type: docs
weight: 1
description: >
  OceanBase is a distributed relational database that provides high availability, scalability, and compatibility with MySQL.
---

## About

[OceanBase][oceanbase-docs] is a distributed relational database management
system (RDBMS) that provides high availability, scalability, and strong
consistency. It's designed to handle large-scale data processing and is
compatible with MySQL, making it easy for developers to migrate from MySQL to
OceanBase.

[oceanbase-docs]: https://www.oceanbase.com/

## Requirements

### Database User

This source only uses standard authentication. You will need to create an
OceanBase user to log in to the database. OceanBase supports MySQL-compatible
user management syntax.

### Network Connectivity

Ensure that your application can connect to the OceanBase cluster. OceanBase
typically runs on ports 2881 (for MySQL protocol) or 3881 (for MySQL protocol
with SSL).

## Example

```yaml
sources:
    my-oceanbase-source:
        kind: oceanbase
        host: 127.0.0.1
        port: 2881
        database: my_db
        user: ${USER_NAME}
        password: ${PASSWORD}
        queryTimeout: 30s # Optional: query timeout duration
```

{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}
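
As a usage sketch, here is a tool defined against this source. It assumes the
`oceanbase-sql` tool kind documented in this repository and a `statement` field
like the other SQL tools; the tool name, table, and query are illustrative.

```yaml
tools:
  list_recent_orders:                 # hypothetical tool name
    kind: oceanbase-sql
    source: my-oceanbase-source
    description: List the ten most recent orders.
    statement: SELECT * FROM orders ORDER BY created_at DESC LIMIT 10;
```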

## Reference

| **field**    | **type** | **required** | **description**                                                                                 |
| ------------ | :------: | :----------: |-------------------------------------------------------------------------------------------------|
| kind         |  string  |     true     | Must be "oceanbase".                                                                            |
| host         |  string  |     true     | IP address to connect to (e.g. "127.0.0.1").                                                    |
| port         |  string  |     true     | Port to connect to (e.g. "2881").                                                               |
| database     |  string  |     true     | Name of the OceanBase database to connect to (e.g. "my_db").                                    |
| user         |  string  |     true     | Name of the OceanBase user to connect as (e.g. "my-oceanbase-user").                            |
| password     |  string  |     true     | Password of the OceanBase user (e.g. "my-password").                                            |
| queryTimeout |  string  |    false     | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, no timeout is applied. |

## Features

### MySQL Compatibility

OceanBase is highly compatible with MySQL, supporting most MySQL SQL syntax,
data types, and functions. This makes it easy to migrate existing MySQL
applications to OceanBase.

### High Availability

OceanBase provides automatic failover and data replication across multiple
nodes, ensuring high availability and data durability.

### Scalability

OceanBase can scale horizontally by adding more nodes to the cluster, making it
suitable for large-scale applications.

### Strong Consistency

OceanBase provides strong consistency guarantees, ensuring that all transactions
are ACID compliant.
```

--------------------------------------------------------------------------------
/docs/en/resources/sources/clickhouse.md:
--------------------------------------------------------------------------------

```markdown
---
title: "ClickHouse"
type: docs
weight: 1
description: >
  ClickHouse is an open-source, column-oriented OLAP database.

---

## About

[ClickHouse][clickhouse-docs] is a fast, open-source, column-oriented database.

[clickhouse-docs]: https://clickhouse.com/docs

## Available Tools

- [`clickhouse-execute-sql`](../tools/clickhouse/clickhouse-execute-sql.md)  
  Execute parameterized SQL queries in ClickHouse with query logging.

- [`clickhouse-sql`](../tools/clickhouse/clickhouse-sql.md)  
  Execute SQL queries as prepared statements in ClickHouse.


## Requirements

### Database User

This source uses standard ClickHouse authentication. You will need to [create a
ClickHouse user][clickhouse-users] (or use [ClickHouse
Cloud][clickhouse-cloud]) to connect to the database. The user should have
appropriate permissions for the operations you plan to perform.

[clickhouse-cloud]:
    https://clickhouse.com/docs/getting-started/quick-start/cloud#connect-with-your-app
[clickhouse-users]: https://clickhouse.com/docs/en/sql-reference/statements/create/user

### Network Access

ClickHouse supports multiple protocols:

- **HTTPS protocol** (default port 8443): secure, TLS-encrypted access (the default)
- **HTTP protocol** (default port 8123): unencrypted HTTP access

## Example

### Secure Connection Example

```yaml
sources:
    secure-clickhouse-source:
        kind: clickhouse
        host: clickhouse.example.com
        port: "8443"
        database: analytics
        user: ${CLICKHOUSE_USER}
        password: ${CLICKHOUSE_PASSWORD}
        protocol: https
        secure: true
```

### HTTP Protocol Example

```yaml
sources:
    http-clickhouse-source:
        kind: clickhouse
        host: localhost
        port: "8123"
        database: logs
        user: ${CLICKHOUSE_USER}
        password: ${CLICKHOUSE_PASSWORD}
        protocol: http
        secure: false
```

{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}
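
As a usage sketch, here is a tool that queries one of the sources above. It
assumes the `clickhouse-sql` tool kind listed under Available Tools and a
`statement` field like the other SQL tools; the tool name, table, and query are
illustrative.

```yaml
tools:
  count_daily_events:                 # hypothetical tool name
    kind: clickhouse-sql
    source: secure-clickhouse-source
    description: Count events per day over the last week.
    statement: SELECT toDate(ts) AS day, count() AS events FROM events WHERE ts > now() - INTERVAL 7 DAY GROUP BY day ORDER BY day;
```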

## Reference

| **field** | **type** | **required** | **description**                                                                     |
|-----------|:--------:|:------------:|-------------------------------------------------------------------------------------|
| kind      |  string  |     true     | Must be "clickhouse".                                                               |
| host      |  string  |     true     | IP address or hostname to connect to (e.g. "127.0.0.1" or "clickhouse.example.com") |
| port      |  string  |     true     | Port to connect to (e.g. "8443" for HTTPS, "8123" for HTTP)                         |
| database  |  string  |     true     | Name of the ClickHouse database to connect to (e.g. "my_database").                 |
| user      |  string  |     true     | Name of the ClickHouse user to connect as (e.g. "analytics_user").                  |
| password  |  string  |    false     | Password of the ClickHouse user (e.g. "my-password").                               |
| protocol  |  string  |    false     | Connection protocol: "https" (default) or "http".                                   |
| secure    | boolean  |    false     | Whether to use a secure connection (TLS). Default: false.                           |

```

--------------------------------------------------------------------------------
/internal/tools/utility/wait/wait.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package wait

import (
	"context"
	"fmt"
	"time"

	yaml "github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/googleapis/genai-toolbox/internal/tools"
)

const kind string = "wait"

func init() {
	if !tools.Register(kind, newConfig) {
		panic(fmt.Sprintf("tool kind %q already registered", kind))
	}
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
	actual := Config{Name: name}
	if err := decoder.DecodeContext(ctx, &actual); err != nil {
		return nil, err
	}
	return actual, nil
}

type Config struct {
	Name         string   `yaml:"name" validate:"required"`
	Kind         string   `yaml:"kind" validate:"required"`
	Description  string   `yaml:"description" validate:"required"`
	Timeout      string   `yaml:"timeout" validate:"required"`
	AuthRequired []string `yaml:"authRequired"`
}

var _ tools.ToolConfig = Config{}

func (cfg Config) ToolConfigKind() string {
	return kind
}

func (cfg Config) Initialize(_ map[string]sources.Source) (tools.Tool, error) {
	durationParameter := tools.NewStringParameter("duration", "The duration to wait for, specified as a string (e.g., '10s', '2m', '1h').")
	parameters := tools.Parameters{durationParameter}

	mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, parameters)

	t := Tool{
		Name:        cfg.Name,
		Kind:        kind,
		Parameters:  parameters,
		manifest:    tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
		mcpManifest: mcpManifest,
	}
	return t, nil
}

// validate interface
var _ tools.Tool = Tool{}

type Tool struct {
	Name        string
	Kind        string
	Parameters  tools.Parameters
	manifest    tools.Manifest
	mcpManifest tools.McpManifest
}

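// Invoke parses the "duration" parameter (a Go duration string such as "10s"
// or "2m"), blocks until that duration has elapsed, and then reports success.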
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
	paramsMap := params.AsMap()

	durationStr, ok := paramsMap["duration"].(string)
	if !ok {
		return nil, fmt.Errorf("duration parameter is not a string")
	}

	totalDuration, err := time.ParseDuration(durationStr)
	if err != nil {
		return nil, fmt.Errorf("invalid duration format: %w", err)
	}

	time.Sleep(totalDuration)

	return fmt.Sprintf("Wait for %v completed successfully.", totalDuration), nil
}

func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
	return tools.ParseParams(t.Parameters, data, claims)
}

func (t Tool) Manifest() tools.Manifest {
	return t.manifest
}

func (t Tool) McpManifest() tools.McpManifest {
	return t.mcpManifest
}

func (t Tool) Authorized(verifiedAuthServices []string) bool {
	return true
}

func (t Tool) RequiresClientAuthorization() bool {
	return false
}

```

--------------------------------------------------------------------------------
/docs/en/resources/sources/cassandra.md:
--------------------------------------------------------------------------------

```markdown
---
title: "Cassandra"
type: docs
weight: 1
description: >
  Apache Cassandra is a NoSQL distributed database known for its horizontal scalability, distributed architecture, and flexible schema definition.
---

## About

[Apache Cassandra][cassandra-docs] is a NoSQL distributed database. By design, NoSQL databases are lightweight, open-source, non-relational, and largely distributed. Counted among their strengths are horizontal scalability, distributed architectures, and a flexible approach to schema definition.

[cassandra-docs]: https://cassandra.apache.org/

## Available Tools

- [`cassandra-cql`](../tools/cassandra/cassandra-cql.md)  
  Run parameterized CQL queries in Cassandra.


## Example

```yaml
sources:
    my-cassandra-source:
        kind: cassandra
        hosts:
            - 127.0.0.1
        keyspace: my_keyspace
        protoVersion: 4
        username: ${USER_NAME}
        password: ${PASSWORD}
        caPath: /path/to/ca.crt # Optional: path to CA certificate
        certPath: /path/to/client.crt # Optional: path to client certificate
        keyPath: /path/to/client.key # Optional: path to client key
        enableHostVerification: true # Optional: enable host verification
```

{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}

## Reference

| **field**              | **type**  | **required** | **description**                                                                                       |
|------------------------|:---------:|:------------:|-------------------------------------------------------------------------------------------------------|
| kind                   |  string   |     true     | Must be "cassandra".                                                                                  |
| hosts                  |  string[] |     true     | List of IP addresses to connect to (e.g., ["192.168.1.1:9042", "192.168.1.2:9042","192.168.1.3:9042"]). The default port is 9042 if not specified.                                    |
| keyspace               |  string   |     true     | Name of the Cassandra keyspace to connect to (e.g., "my_keyspace").                                   |
| protoVersion           |  integer  |    false     | Protocol version for the Cassandra connection (e.g., 4).                                              |
| username               |  string   |    false     | Name of the Cassandra user to connect as (e.g., "my-cassandra-user").                                 |
| password               |  string   |    false     | Password of the Cassandra user (e.g., "my-password").                                                 |
| caPath                 |  string   |    false     | Path to the CA certificate for SSL/TLS (e.g., "/path/to/ca.crt").                                     |
| certPath               |  string   |    false     | Path to the client certificate for SSL/TLS (e.g., "/path/to/client.crt").                             |
| keyPath                |  string   |    false     | Path to the client key for SSL/TLS (e.g., "/path/to/client.key").                                     |
| enableHostVerification |  boolean  |    false     | Enable host verification for SSL/TLS (e.g., true). By default, host verification is disabled.         |

```

--------------------------------------------------------------------------------
/docs/en/resources/tools/cloudmonitoring/cloud-monitoring-query-prometheus.md:
--------------------------------------------------------------------------------

```markdown
---
title: cloud-monitoring-query-prometheus
type: docs
weight: 1
description: The "cloud-monitoring-query-prometheus" tool fetches time series metrics for a project using a given prometheus query.
---

The `cloud-monitoring-query-prometheus` tool fetches timeseries metrics data
from Google Cloud Monitoring for a project using a given prometheus query.

## About

The `cloud-monitoring-query-prometheus` tool allows you to query all metrics
available in Google Cloud Monitoring using the Prometheus Query Language
(PromQL).
It's compatible with any of the following sources:

- [cloud-monitoring](../../sources/cloud-monitoring.md)

## Prerequisites

To use this tool, you need to have the following IAM role on your Google Cloud
project:

- `roles/monitoring.viewer`

## Arguments

| Name        | Type   | Description                      |
|-------------|--------|----------------------------------|
| `projectId` | string | The Google Cloud project ID.     |
| `query`     | string | The Prometheus query to execute. |

## Use Cases

- **Ad-hoc analysis:** Quickly investigate performance issues by executing
  direct PromQL queries against a database instance.
- **Prebuilt Configs:** Use the prebuilt tools described in
  [prebuilt-tools](../../../reference/prebuilt-tools.md) to query database system- and query-level metrics.

Here are some common use cases for the `cloud-monitoring-query-prometheus` tool:

- **Monitoring resource utilization:** Track CPU, memory, and disk usage for
  your database instance (Can use the [prebuilt
  tools](../../../reference/prebuilt-tools.md)).
- **Monitoring query performance:** Monitor latency, execution time, and wait
  time for a database instance, or even for individual queries (Can use the
  [prebuilt tools](../../../reference/prebuilt-tools.md)).
- **System Health:** Get the overall system health for the database instance
  (Can use the [prebuilt tools](../../../reference/prebuilt-tools.md)).

## Examples

Here are some examples of how to use the `cloud-monitoring-query-prometheus`
tool.


```yaml
tools:
 get_wait_time_metrics:
    kind: cloud-monitoring-query-prometheus
    source: cloud-monitoring-source
    description: |
      This tool fetches system wait time information for AlloyDB cluster, instance. Get the `projectID`, `clusterID` and `instanceID` from the user intent. To use this tool, you must provide the Google Cloud `projectId` and a PromQL `query`.
      Generate `query` using these metric details:
      metric: `alloydb.googleapis.com/instance/postgresql/wait_time`,  monitored_resource: `alloydb.googleapis.com/Instance`. labels: `cluster_id`, `instance_id`, `wait_event_type`, `wait_event_name`.
      Basic time series example promql query: `avg_over_time({"__name__"="alloydb.googleapis.com/instance/postgresql/wait_time","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance"}[5m])`
```
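
The description above asks the model to assemble a PromQL query from a metric
name, a monitored resource, and label filters. The Go sketch below is
illustrative only and not part of the tool itself; it shows how such a query
string could be built for the wait-time example, with the metric, resource,
and label values taken from the example description.

```go
package main

import "fmt"

// buildWaitTimePromQL assembles the example PromQL query from the tool
// description: an avg_over_time over the AlloyDB wait_time metric, filtered
// by monitored resource and instance ID.
func buildWaitTimePromQL(instanceID, window string) string {
	return fmt.Sprintf(
		`avg_over_time({"__name__"="alloydb.googleapis.com/instance/postgresql/wait_time",`+
			`"monitored_resource"="alloydb.googleapis.com/Instance",`+
			`"instance_id"=%q}[%s])`,
		instanceID, window,
	)
}

func main() {
	// Prints the same shape of query shown in the example description above.
	fmt.Println(buildWaitTimePromQL("alloydb-instance", "5m"))
}
```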

## Reference

| **field**   | **type** | **required** | **description**                                      |
|-------------|:--------:|:------------:|------------------------------------------------------|
| kind        |  string  |     true     | Must be "cloud-monitoring-query-prometheus".         |
| source      |  string  |     true     | The name of a `cloud-monitoring` source.              |
| description |  string  |     true     | Description of the tool that is passed to the agent. |

```

--------------------------------------------------------------------------------
/internal/sources/valkey/valkey.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//	http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package valkey

import (
	"context"
	"fmt"
	"log"

	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/valkey-io/valkey-go"
	"go.opentelemetry.io/otel/trace"
)

const SourceKind string = "valkey"

// validate interface
var _ sources.SourceConfig = Config{}

func init() {
	if !sources.Register(SourceKind, newConfig) {
		panic(fmt.Sprintf("source kind %q already registered", SourceKind))
	}
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources.SourceConfig, error) {
	actual := Config{Name: name}
	if err := decoder.DecodeContext(ctx, &actual); err != nil {
		return nil, err
	}
	return actual, nil
}

type Config struct {
	Name         string   `yaml:"name" validate:"required"`
	Kind         string   `yaml:"kind" validate:"required"`
	Address      []string `yaml:"address" validate:"required"`
	Username     string   `yaml:"username"`
	Password     string   `yaml:"password"`
	Database     int      `yaml:"database"`
	UseGCPIAM    bool     `yaml:"useGCPIAM"`
	DisableCache bool     `yaml:"disableCache"`
}

func (r Config) SourceConfigKind() string {
	return SourceKind
}

func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {

	client, err := initValkeyClient(ctx, r)
	if err != nil {
		return nil, fmt.Errorf("error initializing Valkey client: %s", err)
	}
	s := &Source{
		Name:   r.Name,
		Kind:   SourceKind,
		Client: client,
	}
	return s, nil
}

func initValkeyClient(ctx context.Context, r Config) (valkey.Client, error) {
	var authFn func(valkey.AuthCredentialsContext) (valkey.AuthCredentials, error)
	if r.UseGCPIAM {
		// Pass in an access token getter fn for IAM auth
		authFn = func(valkey.AuthCredentialsContext) (valkey.AuthCredentials, error) {
			token, err := sources.GetIAMAccessToken(ctx)
			creds := valkey.AuthCredentials{Username: "default", Password: token}
			if err != nil {
				return creds, err
			}
			return creds, nil
		}
	}

	client, err := valkey.NewClient(valkey.ClientOption{
		InitAddress:       r.Address,
		SelectDB:          r.Database,
		Username:          r.Username,
		Password:          r.Password,
		AuthCredentialsFn: authFn,
		DisableCache:      r.DisableCache,
	})

	if err != nil {
		return nil, fmt.Errorf("error creating Valkey client: %w", err)
	}

	// Ping the server to check connectivity
	pingCmd := client.B().Ping().Build()
	_, err = client.Do(ctx, pingCmd).ToString()
	if err != nil {
		return nil, fmt.Errorf("failed to execute PING command: %w", err)
	}
	return client, nil
}

var _ sources.Source = &Source{}

type Source struct {
	Name   string `yaml:"name"`
	Kind   string `yaml:"kind"`
	Client valkey.Client
}

func (s *Source) SourceKind() string {
	return SourceKind
}

func (s *Source) ValkeyClient() valkey.Client {
	return s.Client
}

```

--------------------------------------------------------------------------------
/.github/labels.yaml:
--------------------------------------------------------------------------------

```yaml
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

- name: duplicate
  color: ededed
  description: ""

- name: 'type: bug'
  color: db4437
  description: Error or flaw in code with unintended results or allowing sub-optimal
    usage patterns.
- name: 'type: cleanup'
  color: c5def5
  description: An internal cleanup or hygiene concern.
- name: 'type: docs'
  color: 0000A0
  description: Improvement to the documentation for an API.
- name: 'type: feature request'
  color: c5def5
  description: ‘Nice-to-have’ improvement, new feature or different behavior or design.
- name: 'type: process'
  color: c5def5
  description: A process-related concern. May include testing, release, or the like.
- name: 'type: question'
  color: c5def5
  description: Request for information or clarification.

- name: 'priority: p0'
  color: b60205
  description: Highest priority. Critical issue. P0 implies highest priority.
- name: 'priority: p1'
  color: ffa03e
  description: Important issue which blocks shipping the next release. Will be fixed
    prior to next release.
- name: 'priority: p2'
  color: fef2c0
  description: Moderately-important priority. Fix may not be included in next release.
- name: 'priority: p3'
  color: ffffc7
  description: Desirable enhancement or fix. May not be included in next release.

- name: 'do not merge'
  color: d93f0b
  description: Indicates a pull request not ready for merge, due to either quality
    or timing.

- name: 'autorelease: pending'
  color: ededed
  description: Release please needs to do its work on this.
- name: 'autorelease: triggered'
  color: ededed
  description: Release please has triggered a release for this.
- name: 'autorelease: tagged'
  color: ededed
  description: Release please has completed a release for this.

- name: 'blunderbuss: assign'
  color: 3DED97
  description: Have blunderbuss assign this to someone new.

- name: 'tests: run'
  color: 3DED97
  description: Label to trigger GitHub Actions tests.

- name: 'docs: deploy-preview'
  color: BFDADC
  description: Label to trigger GitHub Actions docs preview.

- name: 'status: help wanted'
  color: 8befd7
  description: 'Status: Unplanned work open to contributions from the community.'
- name: 'status: feedback wanted'
  color: 8befd7
  description: 'Status: waiting for feedback from community or issue author.'

- name: 'status: waiting for response'
  color: 8befd7
  description: 'Status: reviewer is awaiting feedback or responses from the author before proceeding.'

- name: 'release candidate'
  color: 32CD32
  description: 'Use label to signal PR should be included in the next release.'

# Product Labels
- name: 'product: bigquery'
  color: 5065c7
  description: 'Product: Assigned to the BigQuery team.'
- name: 'product: looker'
  color: 5065c7
  description: 'Product: Assigned to the Looker team.'

```

--------------------------------------------------------------------------------
/docs/en/resources/tools/neo4j/neo4j-cypher.md:
--------------------------------------------------------------------------------

```markdown
---
title: "neo4j-cypher"
type: docs
weight: 1
description: >
  A "neo4j-cypher" tool executes a pre-defined cypher statement against a Neo4j
  database.
aliases:
- /resources/tools/neo4j-cypher
---

## About

A `neo4j-cypher` tool executes a pre-defined Cypher statement against a Neo4j
database. It's compatible with any of the following sources:

- [neo4j](../../sources/neo4j.md)

The specified Cypher statement is executed as a [parameterized
statement][neo4j-parameters], and specified parameters will be used according to
their name: e.g. `$id`.

> **Note:** This tool uses parameterized queries to prevent injection attacks.
> Query parameters can be used as substitutes for arbitrary expressions.
> Parameters cannot be used as substitutes for identifiers, column names, table
> names, or other parts of the query.

[neo4j-parameters]:
    https://neo4j.com/docs/cypher-manual/current/syntax/parameters/

## Example

```yaml
tools:
 search_movies_by_actor:
    kind: neo4j-cypher
    source: my-neo4j-movies-instance
    statement: |
      MATCH (m:Movie)<-[:ACTED_IN]-(p:Person)
      WHERE p.name = $name AND m.year > $year
      RETURN m.title, m.year
      LIMIT 10
    description: |
      Use this tool to get a list of movies for a specific actor and a given minimum release year.
      Takes a full actor name, e.g. "Tom Hanks", and a year, e.g. 1993, and returns a list of movie titles and release years.
      Do NOT use this tool with a movie title. Do NOT guess an actor name. Do NOT guess a year.
      An actor name is a full name with first and last name separated by a space.
      For example, if given "Hanks, Tom" the actor name is "Tom Hanks".
      If the tool returns more than one option choose the most recent movies.
      Example:
      {{
          "name": "Meg Ryan",
          "year": 1993
      }}
      Example:
      {{
          "name": "Clint Eastwood",
          "year": 2000
      }}
    parameters:
      - name: name
        type: string
        description: Full actor name, "firstname lastname"
      - name: year
        type: integer
        description: 4 digit number starting in 1900 up to the current year
```
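
Once configured, the tool can be invoked with values for its declared
parameters. The Go program below is a minimal sketch that posts the `name` and
`year` arguments to a locally running Toolbox server; the port and the
`/api/tool/<name>/invoke` endpoint path are assumptions here and should be
adjusted to match your deployment.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	// Parameter values matching the tool's declared parameters.
	body, err := json.Marshal(map[string]any{
		"name": "Tom Hanks",
		"year": 1993,
	})
	if err != nil {
		log.Fatal(err)
	}

	// Assumed local Toolbox endpoint for invoking the tool by name.
	url := "http://127.0.0.1:5000/api/tool/search_movies_by_actor/invoke"
	resp, err := http.Post(url, "application/json", bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	out, err := io.ReadAll(resp.Body)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(out))
}
```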

## Reference

| **field**   |                  **type**                  | **required** | **description**                                                                                 |
|-------------|:------------------------------------------:|:------------:|-------------------------------------------------------------------------------------------------|
| kind        |                   string                   |     true     | Must be "neo4j-cypher".                                                                         |
| source      |                   string                   |     true     | Name of the source the Cypher query should execute on.                                          |
| description |                   string                   |     true     | Description of the tool that is passed to the LLM.                                              |
| statement   |                   string                   |     true     | Cypher statement to execute.                                                                    |
| parameters  | [parameters](../#specifying-parameters) |    false     | List of [parameters](../#specifying-parameters) that will be used with the Cypher statement. |

```

--------------------------------------------------------------------------------
/internal/sources/cloudsqladmin/cloud_sql_admin_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cloudsqladmin_test

import (
	"testing"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/server"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/googleapis/genai-toolbox/internal/sources/cloudsqladmin"
	"github.com/googleapis/genai-toolbox/internal/testutils"
)

func TestParseFromYamlCloudSQLAdmin(t *testing.T) {
	t.Parallel()
	tcs := []struct {
		desc string
		in   string
		want server.SourceConfigs
	}{
		{
			desc: "basic example",
			in: `
			sources:
				my-cloud-sql-admin-instance:
					kind: cloud-sql-admin
			`,
			want: map[string]sources.SourceConfig{
				"my-cloud-sql-admin-instance": cloudsqladmin.Config{
					Name:           "my-cloud-sql-admin-instance",
					Kind:           cloudsqladmin.SourceKind,
					UseClientOAuth: false,
				},
			},
		},
		{
			desc: "use client auth example",
			in: `
			sources:
				my-cloud-sql-admin-instance:
					kind: cloud-sql-admin
					useClientOAuth: true
			`,
			want: map[string]sources.SourceConfig{
				"my-cloud-sql-admin-instance": cloudsqladmin.Config{
					Name:           "my-cloud-sql-admin-instance",
					Kind:           cloudsqladmin.SourceKind,
					UseClientOAuth: true,
				},
			},
		},
	}
	for _, tc := range tcs {
		tc := tc
		t.Run(tc.desc, func(t *testing.T) {
			t.Parallel()
			got := struct {
				Sources server.SourceConfigs `yaml:"sources"`
			}{}
			// Parse contents
			err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
			if err != nil {
				t.Fatalf("unable to unmarshal: %s", err)
			}
			if !cmp.Equal(tc.want, got.Sources) {
				t.Fatalf("incorrect parse: want %v, got %v", tc.want, got.Sources)
			}
		})
	}
}

func TestFailParseFromYaml(t *testing.T) {
	t.Parallel()
	tcs := []struct {
		desc string
		in   string
		err  string
	}{
		{
			desc: "extra field",
			in: `
			sources:
				my-cloud-sql-admin-instance:
					kind: cloud-sql-admin
					project: test-project
			`,
			err: `unable to parse source "my-cloud-sql-admin-instance" as "cloud-sql-admin": [2:1] unknown field "project"
   1 | kind: cloud-sql-admin
>  2 | project: test-project
       ^
`,
		},
		{
			desc: "missing required field",
			in: `
			sources:
				my-cloud-sql-admin-instance:
					useClientOAuth: true
			`,
			err: "missing 'kind' field for source \"my-cloud-sql-admin-instance\"",
		},
	}
	for _, tc := range tcs {
		tc := tc
		t.Run(tc.desc, func(t *testing.T) {
			t.Parallel()
			got := struct {
				Sources server.SourceConfigs `yaml:"sources"`
			}{}
			// Parse contents
			err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
			if err == nil {
				t.Fatalf("expect parsing to fail")
			}
			errStr := err.Error()
			if errStr != tc.err {
				t.Fatalf("unexpected error: got %q, want %q", errStr, tc.err)
			}
		})
	}
}

```

--------------------------------------------------------------------------------
/docs/en/resources/sources/bigtable.md:
--------------------------------------------------------------------------------

```markdown
---
title: "Bigtable"
type: docs
weight: 1
description: >
  Bigtable is a low-latency NoSQL database service for machine learning, operational analytics, and user-facing operations. It's a wide-column, key-value store that can scale to billions of rows and thousands of columns. With Bigtable, you can replicate your data to regions across the world for high availability and data resiliency.

---

# Bigtable Source

[Bigtable][bigtable-docs] is a low-latency NoSQL database service for machine
learning, operational analytics, and user-facing operations. It's a wide-column,
key-value store that can scale to billions of rows and thousands of columns.
With Bigtable, you can replicate your data to regions across the world for high
availability and data resiliency.

If you are new to Bigtable, you can try to [create an instance and write data
with the cbt CLI][bigtable-quickstart-with-cli].

You can use [GoogleSQL statements][bigtable-googlesql] to query your Bigtable
data. GoogleSQL is an ANSI-compliant structured query language (SQL) that is
also implemented for other Google Cloud services. SQL queries are handled by
cluster nodes in the same way as NoSQL data requests. Therefore, the same best
practices apply when creating SQL queries to run against your Bigtable data,
such as avoiding full table scans or complex filters.

[bigtable-docs]: https://cloud.google.com/bigtable/docs
[bigtable-quickstart-with-cli]:
    https://cloud.google.com/bigtable/docs/create-instance-write-data-cbt-cli

[bigtable-googlesql]:
    https://cloud.google.com/bigtable/docs/googlesql-overview

## Available Tools

- [`bigtable-sql`](../tools/bigtable/bigtable-sql.md)
  Run SQL-like queries over Bigtable rows.

## Requirements

### IAM Permissions

Bigtable uses [Identity and Access Management (IAM)][iam-overview] to control
user and group access to Bigtable resources at the project, instance, table, and
backup level. Toolbox will use your [Application Default Credentials (ADC)][adc]
to authorize and authenticate when interacting with [Bigtable][bigtable-docs].

In addition to [setting the ADC for your server][set-adc], you need to ensure
the IAM identity has been given the correct IAM permissions for the query
provided. See [Apply IAM roles][grant-permissions] for more information on
applying IAM permissions and roles to an identity.

[iam-overview]: https://cloud.google.com/bigtable/docs/access-control
[adc]: https://cloud.google.com/docs/authentication#adc
[set-adc]: https://cloud.google.com/docs/authentication/provide-credentials-adc
[grant-permissions]: https://cloud.google.com/bigtable/docs/access-control#iam-management-instance

## Example

```yaml
sources:
  my-bigtable-source:
    kind: "bigtable"
    project: "my-project-id"
    instance: "test-instance"
```

## Reference

| **field** | **type** | **required** | **description**                                                               |
|-----------|:--------:|:------------:|-------------------------------------------------------------------------------|
| kind      |  string  |     true     | Must be "bigtable".                                                           |
| project   |  string  |     true     | Id of the GCP project that the instance was created in (e.g. "my-project-id"). |
| instance  |  string  |     true     | Name of the Bigtable instance.                                                |

```

--------------------------------------------------------------------------------
/docs/en/resources/sources/mysql.md:
--------------------------------------------------------------------------------

```markdown
---
title: "MySQL"
type: docs
weight: 1
description: >
  MySQL is a relational database management system that stores and manages data.
---

## About

[MySQL][mysql-docs] is a relational database management system (RDBMS) that
stores and manages data. It's a popular choice for developers because of its
reliability, performance, and ease of use.

[mysql-docs]: https://www.mysql.com/

## Available Tools

- [`mysql-sql`](../tools/mysql/mysql-sql.md)
  Execute pre-defined prepared SQL queries in MySQL.

- [`mysql-execute-sql`](../tools/mysql/mysql-execute-sql.md)
  Run parameterized SQL queries in MySQL.

- [`mysql-list-active-queries`](../tools/mysql/mysql-list-active-queries.md)
  List active queries in MySQL.

- [`mysql-list-tables`](../tools/mysql/mysql-list-tables.md)
  List tables in a MySQL database.

- [`mysql-list-tables-missing-unique-indexes`](../tools/mysql/mysql-list-tables-missing-unique-indexes.md)
  List tables in a MySQL database that do not have primary or unique indices.

- [`mysql-list-table-fragmentation`](../tools/mysql/mysql-list-table-fragmentation.md)
  List table fragmentation in MySQL tables.

## Requirements

### Database User

This source only uses standard authentication. You will need to [create a
MySQL user][mysql-users] to login to the database with.

[mysql-users]: https://dev.mysql.com/doc/refman/8.4/en/user-names.html

## Example

```yaml
sources:
    my-mysql-source:
        kind: mysql
        host: 127.0.0.1
        port: 3306
        database: my_db
        user: ${USER_NAME}
        password: ${PASSWORD}
        # Optional TLS and other driver parameters. For example, enable preferred TLS:
        # queryParams:
        #     tls: preferred
        queryTimeout: 30s # Optional: query timeout duration
```
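
For context on what `queryParams` does: entries are passed through to the
database driver as connection (DSN) options. The Go sketch below is an
illustration assuming the common `go-sql-driver/mysql` DSN format, not the
source's actual code; it shows how the configuration above could translate
into a connection string.

```go
package main

import (
	"fmt"
	"net/url"
)

// buildDSN sketches how host, port, database, and queryParams could combine
// into a go-sql-driver/mysql style DSN.
func buildDSN(user, pass, host, port, db string, queryParams map[string]string) string {
	params := url.Values{}
	for k, v := range queryParams {
		params.Set(k, v)
	}
	dsn := fmt.Sprintf("%s:%s@tcp(%s:%s)/%s", user, pass, host, port, db)
	if len(params) > 0 {
		dsn += "?" + params.Encode()
	}
	return dsn
}

func main() {
	// Example output: my-user:my-password@tcp(127.0.0.1:3306)/my_db?charset=utf8mb4&tls=preferred
	fmt.Println(buildDSN("my-user", "my-password", "127.0.0.1", "3306", "my_db",
		map[string]string{"tls": "preferred", "charset": "utf8mb4"}))
}
```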

{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}

## Reference

| **field**    | **type** | **required** | **description**                                                                                 |
| ------------ | :------: | :----------: | ----------------------------------------------------------------------------------------------- |
| kind         |  string  |     true     | Must be "mysql".                                                                                |
| host         |  string  |     true     | IP address to connect to (e.g. "127.0.0.1").                                                    |
| port         |  string  |     true     | Port to connect to (e.g. "3306").                                                               |
| database     |  string  |     true     | Name of the MySQL database to connect to (e.g. "my_db").                                        |
| user         |  string  |     true     | Name of the MySQL user to connect as (e.g. "my-mysql-user").                                    |
| password     |  string  |     true     | Password of the MySQL user (e.g. "my-password").                                                |
| queryTimeout |  string  |    false     | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, no timeout is applied. |
| queryParams | map<string,string> | false | Arbitrary DSN parameters passed to the driver (e.g. `tls: preferred`, `charset: utf8mb4`). Useful for enabling TLS or other connection options. |

```

--------------------------------------------------------------------------------
/internal/tools/dataplex/dataplexlookupentry/dataplexlookupentry_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package dataplexlookupentry_test

import (
	"testing"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/server"
	"github.com/googleapis/genai-toolbox/internal/testutils"
	"github.com/googleapis/genai-toolbox/internal/tools"
	"github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexlookupentry"
)

func TestParseFromYamlDataplexLookupEntry(t *testing.T) {
	ctx, err := testutils.ContextWithNewLogger()
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	tcs := []struct {
		desc string
		in   string
		want server.ToolConfigs
	}{
		{
			desc: "basic example",
			in: `
			tools:
				example_tool:
					kind: dataplex-lookup-entry
					source: my-instance
					description: some description
			`,
			want: server.ToolConfigs{
				"example_tool": dataplexlookupentry.Config{
					Name:         "example_tool",
					Kind:         "dataplex-lookup-entry",
					Source:       "my-instance",
					Description:  "some description",
					AuthRequired: []string{},
				},
			},
		},
		{
			desc: "advanced example",
			in: `
			tools:
				example_tool:
					kind: dataplex-lookup-entry
					source: my-instance
					description: some description
					parameters:
						- name: name
							type: string
							description: some name description
						- name: view
							type: string
							description: some view description
						- name: aspectTypes
							type: array
							description: some aspect types description
							default: []
							items: 
								name: aspectType
								type: string
								description: some aspect type description
						- name: entry
							type: string
							description: some entry description
			`,
			want: server.ToolConfigs{
				"example_tool": dataplexlookupentry.Config{
					Name:         "example_tool",
					Kind:         "dataplex-lookup-entry",
					Source:       "my-instance",
					Description:  "some description",
					AuthRequired: []string{},
					Parameters: []tools.Parameter{
						tools.NewStringParameter("name", "some name description"),
						tools.NewStringParameter("view", "some view description"),
						tools.NewArrayParameterWithDefault("aspectTypes", []any{}, "some aspect types description", tools.NewStringParameter("aspectType", "some aspect type description")),
						tools.NewStringParameter("entry", "some entry description"),
					},
				},
			},
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Tools server.ToolConfigs `yaml:"tools"`
			}{}
			// Parse contents
			err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
			if err != nil {
				t.Fatalf("unable to unmarshal: %s", err)
			}
			if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
				t.Fatalf("incorrect parse: diff %v", diff)
			}
		})
	}

}

```

--------------------------------------------------------------------------------
/internal/sources/cloudmonitoring/cloud_monitoring_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cloudmonitoring_test

import (
	"testing"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/server"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/googleapis/genai-toolbox/internal/sources/cloudmonitoring"
	"github.com/googleapis/genai-toolbox/internal/testutils"
)

func TestParseFromYamlCloudMonitoring(t *testing.T) {
	t.Parallel()
	tcs := []struct {
		desc string
		in   string
		want server.SourceConfigs
	}{
		{
			desc: "basic example",
			in: `
			sources:
				my-cloud-monitoring-instance:
					kind: cloud-monitoring
			`,
			want: map[string]sources.SourceConfig{
				"my-cloud-monitoring-instance": cloudmonitoring.Config{
					Name:           "my-cloud-monitoring-instance",
					Kind:           cloudmonitoring.SourceKind,
					UseClientOAuth: false,
				},
			},
		},
		{
			desc: "use client auth example",
			in: `
			sources:
				my-cloud-monitoring-instance:
					kind: cloud-monitoring
					useClientOAuth: true
			`,
			want: map[string]sources.SourceConfig{
				"my-cloud-monitoring-instance": cloudmonitoring.Config{
					Name:           "my-cloud-monitoring-instance",
					Kind:           cloudmonitoring.SourceKind,
					UseClientOAuth: true,
				},
			},
		},
	}
	for _, tc := range tcs {
		tc := tc
		t.Run(tc.desc, func(t *testing.T) {
			t.Parallel()
			got := struct {
				Sources server.SourceConfigs `yaml:"sources"`
			}{}
			// Parse contents
			err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
			if err != nil {
				t.Fatalf("unable to unmarshal: %s", err)
			}
			if !cmp.Equal(tc.want, got.Sources) {
				t.Fatalf("incorrect parse: want %v, got %v", tc.want, got.Sources)
			}
		})
	}
}

func TestFailParseFromYaml(t *testing.T) {
	t.Parallel()
	tcs := []struct {
		desc string
		in   string
		err  string
	}{
		{
			desc: "extra field",
			in: `
			sources:
				my-cloud-monitoring-instance:
					kind: cloud-monitoring
					project: test-project
			`,
			err: `unable to parse source "my-cloud-monitoring-instance" as "cloud-monitoring": [2:1] unknown field "project"
   1 | kind: cloud-monitoring
>  2 | project: test-project
       ^
`,
		},
		{
			desc: "missing required field",
			in: `
			sources:
				my-cloud-monitoring-instance:
					useClientOAuth: true
			`,
			err: "missing 'kind' field for source \"my-cloud-monitoring-instance\"",
		},
	}
	for _, tc := range tcs {
		tc := tc
		t.Run(tc.desc, func(t *testing.T) {
			t.Parallel()
			got := struct {
				Sources server.SourceConfigs `yaml:"sources"`
			}{}
			// Parse contents
			err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
			if err == nil {
				t.Fatalf("expect parsing to fail")
			}
			errStr := err.Error()
			if errStr != tc.err {
				t.Fatalf("unexpected error: got %q, want %q", errStr, tc.err)
			}
		})
	}
}

```

--------------------------------------------------------------------------------
/internal/sources/dgraph/dgraph_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package dgraph_test

import (
	"testing"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/server"
	"github.com/googleapis/genai-toolbox/internal/sources/dgraph"
	"github.com/googleapis/genai-toolbox/internal/testutils"
)

func TestParseFromYamlDgraph(t *testing.T) {
	tcs := []struct {
		desc string
		in   string
		want server.SourceConfigs
	}{
		{
			desc: "basic example",
			in: `
			sources:
				my-dgraph-instance:
					kind: dgraph
					dgraphUrl: https://localhost:8080
					apiKey: abc123
					password: pass@123
					namespace: 0
					user: user123
			`,
			want: server.SourceConfigs{
				"my-dgraph-instance": dgraph.Config{
					Name:      "my-dgraph-instance",
					Kind:      dgraph.SourceKind,
					DgraphUrl: "https://localhost:8080",
					ApiKey:    "abc123",
					Password:  "pass@123",
					Namespace: 0,
					User:      "user123",
				},
			},
		},
		{
			desc: "basic example minimal field",
			in: `
			sources:
				my-dgraph-instance:
					kind: dgraph
					dgraphUrl: https://localhost:8080
			`,
			want: server.SourceConfigs{
				"my-dgraph-instance": dgraph.Config{
					Name:      "my-dgraph-instance",
					Kind:      dgraph.SourceKind,
					DgraphUrl: "https://localhost:8080",
				},
			},
		},
	}

	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Sources server.SourceConfigs `yaml:"sources"`
			}{}
			// Parse contents
			err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
			if err != nil {
				t.Fatalf("unable to unmarshal: %s", err)
			}

			if diff := cmp.Diff(tc.want, got.Sources); diff != "" {
				t.Fatalf("incorrect parse: diff %v", diff)
			}
		})
	}

}

func TestFailParseFromYaml(t *testing.T) {
	tcs := []struct {
		desc string
		in   string
		err  string
	}{
		{
			desc: "extra field",
			in: `
			sources:
				my-dgraph-instance:
					kind: dgraph
					dgraphUrl: https://localhost:8080
					foo: bar
			`,
			err: "unable to parse source \"my-dgraph-instance\" as \"dgraph\": [2:1] unknown field \"foo\"\n   1 | dgraphUrl: https://localhost:8080\n>  2 | foo: bar\n       ^\n   3 | kind: dgraph",
		},
		{
			desc: "missing required field",
			in: `
			sources:
				my-dgraph-instance:
					kind: dgraph
			`,
			err: "unable to parse source \"my-dgraph-instance\" as \"dgraph\": Key: 'Config.DgraphUrl' Error:Field validation for 'DgraphUrl' failed on the 'required' tag",
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Sources server.SourceConfigs `yaml:"sources"`
			}{}
			// Parse contents
			err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
			if err == nil {
				t.Fatalf("expect parsing to fail")
			}
			errStr := err.Error()
			if errStr != tc.err {
				t.Fatalf("unexpected error: got %q, want %q", errStr, tc.err)
			}
		})
	}
}

```

--------------------------------------------------------------------------------
/docs/en/resources/sources/spanner.md:
--------------------------------------------------------------------------------

```markdown
---
title: "Spanner"
type: docs
weight: 1
description: >
  Spanner is a fully managed database service from Google Cloud that combines 
  relational, key-value, graph, and search capabilities.

---

# Spanner Source

[Spanner][spanner-docs] is a fully managed, mission-critical database service
that brings together relational, graph, key-value, and search. It offers
transactional consistency at global scale, automatic, synchronous replication
for high availability, and support for two SQL dialects: GoogleSQL (ANSI 2011
with extensions) and PostgreSQL.

If you are new to Spanner, you can try to [create and query a database using
the Google Cloud console][spanner-quickstart].

[spanner-docs]: https://cloud.google.com/spanner/docs
[spanner-quickstart]:
    https://cloud.google.com/spanner/docs/create-query-database-console

## Available Tools

- [`spanner-sql`](../tools/spanner/spanner-sql.md)  
  Execute SQL on Google Cloud Spanner.

- [`spanner-execute-sql`](../tools/spanner/spanner-execute-sql.md)  
  Run structured and parameterized queries on Spanner.

### Pre-built Configurations

- [Spanner using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/spanner_mcp/)  
Connect your IDE to Spanner using Toolbox.

## Requirements

### IAM Permissions

Spanner uses [Identity and Access Management (IAM)][iam-overview] to control
user and group access to Spanner resources at the project, Spanner instance, and
Spanner database levels. Toolbox will use your [Application Default Credentials
(ADC)][adc] to authorize and authenticate when interacting with Spanner.

In addition to [setting the ADC for your server][set-adc], you need to ensure
the IAM identity has been given the correct IAM permissions for the query
provided. See [Apply IAM roles][grant-permissions] for more information on
applying IAM permissions and roles to an identity.

[iam-overview]: https://cloud.google.com/spanner/docs/iam
[adc]: https://cloud.google.com/docs/authentication#adc
[set-adc]: https://cloud.google.com/docs/authentication/provide-credentials-adc
[grant-permissions]: https://cloud.google.com/spanner/docs/grant-permissions

## Example

```yaml
sources:
    my-spanner-source:
        kind: "spanner"
        project: "my-project-id"
        instance: "my-instance"
        database: "my_db"
        # dialect: "googlesql"
```

## Reference

| **field** | **type** | **required** | **description**                                                                                                     |
|-----------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------|
| kind      |  string  |     true     | Must be "spanner".                                                                                                  |
| project   |  string  |     true     | Id of the GCP project that the instance was created in (e.g. "my-project-id").                                      |
| instance  |  string  |     true     | Name of the Spanner instance.                                                                                       |
| database  |  string  |     true     | Name of the database on the Spanner instance.                                                                       |
| dialect   |  string  |    false     | Name of the dialect type of the Spanner database, must be either `googlesql` or `postgresql`. Default: `googlesql`. |

```

--------------------------------------------------------------------------------
/docs/en/resources/sources/firestore.md:
--------------------------------------------------------------------------------

```markdown
---
title: "Firestore"
type: docs
weight: 1
description: >
  Firestore is a NoSQL document database built for automatic scaling, high performance, and ease of application development. It's a fully managed, serverless database that supports mobile, web, and server development.

---

# Firestore Source

[Firestore][firestore-docs] is a NoSQL document database built for automatic
scaling, high performance, and ease of application development. While the
Firestore interface has many of the same features as traditional databases,
as a NoSQL database it differs from them in the way it describes relationships
between data objects.

If you are new to Firestore, you can [create a database and learn the
basics][firestore-quickstart].

[firestore-docs]: https://cloud.google.com/firestore/docs
[firestore-quickstart]: https://cloud.google.com/firestore/docs/quickstart-servers

## Requirements

### IAM Permissions

Firestore uses [Identity and Access Management (IAM)][iam-overview] to control
user and group access to Firestore resources. Toolbox will use your [Application
Default Credentials (ADC)][adc] to authorize and authenticate when interacting
with [Firestore][firestore-docs].

In addition to [setting the ADC for your server][set-adc], you need to ensure
the IAM identity has been given the correct IAM permissions for accessing
Firestore. Common roles include:

- `roles/datastore.user` - Read and write access to Firestore
- `roles/datastore.viewer` - Read-only access to Firestore
- `roles/firebaserules.admin` - Full management of Firebase Security Rules for
  Firestore. This role is required for operations that involve creating,
  updating, or managing Firestore security rules (see [Firebase Security Rules
  roles][firebaserules-roles])

See [Firestore access control][firestore-iam] for more information on
applying IAM permissions and roles to an identity.

[iam-overview]: https://cloud.google.com/firestore/docs/security/iam
[adc]: https://cloud.google.com/docs/authentication#adc
[set-adc]: https://cloud.google.com/docs/authentication/provide-credentials-adc
[firestore-iam]: https://cloud.google.com/firestore/docs/security/iam
[firebaserules-roles]:
    https://cloud.google.com/iam/docs/roles-permissions/firebaserules

### Database Selection

Firestore allows you to create multiple databases within a single project. Each
database is isolated from the others and has its own set of documents and
collections. If you don't specify a database in your configuration, the default
database named `(default)` will be used.

## Example

```yaml
sources:
  my-firestore-source:
    kind: "firestore"
    project: "my-project-id"
    # database: "my-database"  # Optional, defaults to "(default)"
```

## Reference

| **field** | **type** | **required** | **description**                                                                                          |
|-----------|:--------:|:------------:|----------------------------------------------------------------------------------------------------------|
| kind      |  string  |     true     | Must be "firestore".                                                                                     |
| project   |  string  |     true     | Id of the GCP project that contains the Firestore database (e.g. "my-project-id").                       |
| database  |  string  |     false    | Name of the Firestore database to connect to. Defaults to "(default)" if not specified.                  |

```

--------------------------------------------------------------------------------
/docs/en/resources/tools/bigquery/bigquery-forecast.md:
--------------------------------------------------------------------------------

```markdown
---
title: "bigquery-forecast"
type: docs
weight: 1
description: >
  A "bigquery-forecast" tool forecasts time series data in BigQuery.
aliases:
- /resources/tools/bigquery-forecast
---

## About

A `bigquery-forecast` tool forecasts time series data in BigQuery.
It's compatible with the following sources:

- [bigquery](../../sources/bigquery.md)

`bigquery-forecast` constructs and executes a `SELECT * FROM AI.FORECAST(...)`
query based on the provided parameters:

- **history_data** (string, required): This specifies the source of the
  historical time series data. It can be either a fully qualified BigQuery table
  ID (e.g., my-project.my_dataset.my_table) or a SQL query that returns the
  data.
- **timestamp_col** (string, required): The name of the column in your
  history_data that contains the timestamps.
- **data_col** (string, required): The name of the column in your history_data
  that contains the numeric values to be forecasted.
- **id_cols** (array of strings, optional): If you are forecasting multiple time
  series at once (e.g., sales for different products), this parameter takes an
  array of column names that uniquely identify each series. It defaults to an
  empty array if not provided.
- **horizon** (integer, optional): The number of future time steps you want to
  predict. It defaults to 10 if not specified.
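
To make the parameter-to-query mapping concrete, here is a minimal Go sketch.
It illustrates how these arguments could be assembled into the `AI.FORECAST`
call; it is not the tool's actual implementation, which also performs
validation (and, with `allowedDatasets`, a dry-run check).

```go
package main

import (
	"fmt"
	"strings"
)

// buildForecastQuery sketches how the tool's parameters could map onto an
// AI.FORECAST call. A table ID is wrapped as a TABLE argument; a query is
// wrapped as a subquery.
func buildForecastQuery(historyData, timestampCol, dataCol string, idCols []string, horizon int) string {
	source := fmt.Sprintf("TABLE `%s`", historyData)
	if strings.HasPrefix(strings.ToUpper(strings.TrimSpace(historyData)), "SELECT") {
		source = fmt.Sprintf("(%s)", historyData)
	}
	query := fmt.Sprintf(
		"SELECT * FROM AI.FORECAST(%s, data_col => '%s', timestamp_col => '%s', horizon => %d",
		source, dataCol, timestampCol, horizon)
	if len(idCols) > 0 {
		query += fmt.Sprintf(", id_cols => ['%s']", strings.Join(idCols, "', '"))
	}
	return query + ")"
}

func main() {
	fmt.Println(buildForecastQuery(
		"my-project.my_dataset.my_table", "ts", "total_visits", []string{"product_id"}, 30))
}
```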

The behavior of this tool is influenced by the `writeMode` setting on its `bigquery` source:

- **`allowed` (default) and `blocked`:** These modes do not impose any special restrictions on the `bigquery-forecast` tool.
- **`protected`:** This mode enables session-based execution. The tool will operate within the same BigQuery session as other
  tools using the same source. This allows the `history_data` parameter to be a query that references temporary resources (e.g., 
  `TEMP` tables) created within that session.

The tool's behavior is also influenced by the `allowedDatasets` restriction on the `bigquery` source:

- **Without `allowedDatasets` restriction:** The tool can use any table or query for the `history_data` parameter.
- **With `allowedDatasets` restriction:** The tool verifies that the `history_data` parameter only accesses tables within the allowed datasets.
  - If `history_data` is a table ID, the tool checks if the table's dataset is in the allowed list.
  - If `history_data` is a query, the tool performs a dry run to analyze the query and rejects it if it accesses any table outside the allowed list.

## Example

```yaml
tools:
 forecast_tool:
    kind: bigquery-forecast
    source: my-bigquery-source
    description: Use this tool to forecast time series data in BigQuery.
```

## Sample Prompt

You can use the following sample prompts to call this tool:

- Can you forecast the historical time series data in the BigQuery table `bqml_tutorial.google_analytic`? Use project_id `myproject`.
- What are the future `total_visits` in the BigQuery table `bqml_tutorial.google_analytic`?


## Reference

| **field**   | **type** | **required** | **description**                                         |
|-------------|:--------:|:------------:|---------------------------------------------------------|
| kind        |  string  |     true     | Must be "bigquery-forecast".                            |
| source      |  string  |     true     | Name of the source the forecast tool should execute on. |
| description |  string  |     true     | Description of the tool that is passed to the LLM.      |

```

--------------------------------------------------------------------------------
/internal/prebuiltconfigs/tools/cloud-sql-mysql.yaml:
--------------------------------------------------------------------------------

```yaml
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

sources:
  cloud-sql-mysql-source:
    kind: cloud-sql-mysql
    project: ${CLOUD_SQL_MYSQL_PROJECT}
    region: ${CLOUD_SQL_MYSQL_REGION}
    instance: ${CLOUD_SQL_MYSQL_INSTANCE}
    database: ${CLOUD_SQL_MYSQL_DATABASE}
    user: ${CLOUD_SQL_MYSQL_USER}
    password: ${CLOUD_SQL_MYSQL_PASSWORD}
    ipType: ${CLOUD_SQL_MYSQL_IP_TYPE:PUBLIC}
tools:
  execute_sql:
    kind: mysql-execute-sql
    source: cloud-sql-mysql-source
    description: Use this tool to execute SQL.
  list_active_queries:
    kind: mysql-list-active-queries
    source: cloud-sql-mysql-source
    description: Lists top N (default 10) ongoing queries from processlist and innodb_trx, ordered by execution time in descending order. Returns detailed information of those queries in json format, including process id, query, transaction duration, transaction wait duration, process time, transaction state, process state, username with host, transaction rows locked, transaction rows modified, and db schema.
  get_query_plan:
    kind: mysql-sql
    source: cloud-sql-mysql-source
    description: "Provide information about how MySQL executes a SQL statement. Common use cases include: 1) analyze query plan to improve its performance, and 2) determine effectiveness of existing indexes and evalueate new ones."
    statement: |
      EXPLAIN FORMAT=JSON {{.sql_statement}};
    templateParameters:
      - name: sql_statement
        type: string
        description: "the SQL statement to explain"
        required: true
  list_tables:
    kind: mysql-list-tables
    source: cloud-sql-mysql-source
    description: "Lists detailed schema information (object type, columns, constraints, indexes, triggers, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
  list_tables_missing_unique_indexes:
    kind: mysql-list-tables-missing-unique-indexes
    source: cloud-sql-mysql-source
    description: "Find tables that do not have primary or unique key constraint. A primary key or unique key is the only mechanism that guaranttes a row is unique. Without them, the database-level protection against data integrity issues will be missing."
  list_table_fragmentation:
    kind: mysql-list-table-fragmentation
    source: cloud-sql-mysql-source
    description: List table fragmentation in MySQL, by calculating the size of the data and index files and free space allocated to each table. The query calculates fragmentation percentage which represents the proportion of free space relative to the total data and index size. Storage can be reclaimed for tables with high fragmentation using OPTIMIZE TABLE.

toolsets:
  cloud_sql_mysql_database_tools:
    - execute_sql
    - list_tables
    - get_query_plan
    - list_active_queries
    - list_tables_missing_unique_indexes
    - list_table_fragmentation

```

--------------------------------------------------------------------------------
/docs/en/getting-started/quickstart/go/genkit/quickstart.go:
--------------------------------------------------------------------------------

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/googleapis/mcp-toolbox-sdk-go/core"
	"github.com/googleapis/mcp-toolbox-sdk-go/tbgenkit"

	"github.com/firebase/genkit/go/ai"
	"github.com/firebase/genkit/go/genkit"
	"github.com/firebase/genkit/go/plugins/googlegenai"
)

const systemPrompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`

var queries = []string{
	"Find hotels in Basel with Basel in its name.",
	"Can you book the hotel Hilton Basel for me?",
	"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
	"My check in dates would be from April 10, 2024 to April 19, 2024.",
}

func main() {
	ctx := context.Background()

	// Create Toolbox Client
	toolboxClient, err := core.NewToolboxClient("http://127.0.0.1:5000")
	if err != nil {
		log.Fatalf("Failed to create Toolbox client: %v", err)
	}

	// Load the tools using the MCP Toolbox SDK.
	tools, err := toolboxClient.LoadToolset("my-toolset", ctx)
	if err != nil {
		log.Fatalf("Failed to load tools: %v\nMake sure your Toolbox server is running and the tool is configured.", err)
	}

	// Initialize Genkit
	g := genkit.Init(ctx,
		genkit.WithPlugins(&googlegenai.GoogleAI{}),
		genkit.WithDefaultModel("googleai/gemini-2.0-flash"),
	)

	// Create a conversation history
	conversationHistory := []*ai.Message{
		ai.NewSystemTextMessage(systemPrompt),
	}

	// Convert your tool to a Genkit tool.
	genkitTools := make([]ai.Tool, len(tools))
	for i, tool := range tools {
		newTool, err := tbgenkit.ToGenkitTool(tool, g)
		if err != nil {
			log.Fatalf("Failed to convert tool: %v\n", err)
		}
		genkitTools[i] = newTool
	}

	toolRefs := make([]ai.ToolRef, len(genkitTools))

	for i, tool := range genkitTools {
		toolRefs[i] = tool
	}

	for _, query := range queries {
		conversationHistory = append(conversationHistory, ai.NewUserTextMessage(query))
		response, err := genkit.Generate(ctx, g,
			ai.WithMessages(conversationHistory...),
			ai.WithTools(toolRefs...),
			ai.WithReturnToolRequests(true),
		)

		if err != nil {
			log.Fatalf("%v\n", err)
		}
		conversationHistory = append(conversationHistory, response.Message)

		parts := []*ai.Part{}

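		// Run each tool request returned by the model and collect the tool responses.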
		for _, req := range response.ToolRequests() {
			tool := genkit.LookupTool(g, req.Name)
			if tool == nil {
				log.Fatalf("tool %q not found", req.Name)
			}

			output, err := tool.RunRaw(ctx, req.Input)
			if err != nil {
				log.Fatalf("tool %q execution failed: %v", tool.Name(), err)
			}

			parts = append(parts,
				ai.NewToolResponsePart(&ai.ToolResponse{
					Name:   req.Name,
					Ref:    req.Ref,
					Output: output,
				}))

		}

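		// If any tools were called, feed their outputs back to the model to produce the final answer.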
		if len(parts) > 0 {
			resp, err := genkit.Generate(ctx, g,
				ai.WithMessages(append(response.History(), ai.NewMessage(ai.RoleTool, nil, parts...))...),
				ai.WithTools(toolRefs...),
			)
			if err != nil {
				log.Fatal(err)
			}
			fmt.Println("\n", resp.Text())
			conversationHistory = append(conversationHistory, resp.Message)
		} else {
			fmt.Println("\n", response.Text())
		}

	}

}

```

--------------------------------------------------------------------------------
/internal/sources/looker/looker_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package looker_test

import (
	"testing"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/server"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/googleapis/genai-toolbox/internal/sources/looker"
	"github.com/googleapis/genai-toolbox/internal/testutils"
)

func TestParseFromYamlLooker(t *testing.T) {
	tcs := []struct {
		desc string
		in   string
		want server.SourceConfigs
	}{
		{
			desc: "basic example",
			in: `
			sources:
				my-looker-instance:
					kind: looker
					base_url: http://example.looker.com/
					client_id: jasdl;k;tjl
					client_secret: sdakl;jgflkasdfkfg
			`,
			want: map[string]sources.SourceConfig{
				"my-looker-instance": looker.Config{
					Name:               "my-looker-instance",
					Kind:               looker.SourceKind,
					BaseURL:            "http://example.looker.com/",
					ClientId:           "jasdl;k;tjl",
					ClientSecret:       "sdakl;jgflkasdfkfg",
					Timeout:            "600s",
					SslVerification:    true,
					UseClientOAuth:     false,
					ShowHiddenModels:   true,
					ShowHiddenExplores: true,
					ShowHiddenFields:   true,
					Location:           "us",
				},
			},
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Sources server.SourceConfigs `yaml:"sources"`
			}{}
			// Parse contents
			err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
			if err != nil {
				t.Fatalf("unable to unmarshal: %s", err)
			}
			if !cmp.Equal(tc.want, got.Sources) {
				t.Fatalf("incorrect parse: want %v, got %v", tc.want, got.Sources)
			}
		})
	}
}

func TestFailParseFromYamlLooker(t *testing.T) {
	tcs := []struct {
		desc string
		in   string
		err  string
	}{
		{
			desc: "extra field",
			in: `
			sources:
				my-looker-instance:
					kind: looker
					base_url: http://example.looker.com/
					client_id: jasdl;k;tjl
					client_secret: sdakl;jgflkasdfkfg
					schema: test-schema
			`,
			err: "unable to parse source \"my-looker-instance\" as \"looker\": [5:1] unknown field \"schema\"\n   2 | client_id: jasdl;k;tjl\n   3 | client_secret: sdakl;jgflkasdfkfg\n   4 | kind: looker\n>  5 | schema: test-schema\n       ^\n",
		},
		{
			desc: "missing required field",
			in: `
			sources:
				my-looker-instance:
					kind: looker
					client_id: jasdl;k;tjl
			`,
			err: "unable to parse source \"my-looker-instance\" as \"looker\": Key: 'Config.BaseURL' Error:Field validation for 'BaseURL' failed on the 'required' tag",
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Sources server.SourceConfigs `yaml:"sources"`
			}{}
			// Parse contents
			err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
			if err == nil {
				t.Fatalf("expect parsing to fail")
			}
			errStr := err.Error()
			if errStr != tc.err {
				t.Fatalf("unexpected error: got %q, want %q", errStr, tc.err)
			}
		})
	}
}

```

--------------------------------------------------------------------------------
/internal/sources/util.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package sources

import (
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"strings"

	"cloud.google.com/go/cloudsqlconn"
	"golang.org/x/oauth2/google"
)

// GetCloudSQLOpts returns dial options with the right IP type and user agent for Cloud SQL
// databases.
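//
// Example usage (illustrative sketch; variable names are placeholders):
//
//	opts, err := GetCloudSQLOpts("public", userAgent, false)
//	if err != nil {
//		return err
//	}
//	dialer, err := cloudsqlconn.NewDialer(ctx, opts...)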
func GetCloudSQLOpts(ipType, userAgent string, useIAM bool) ([]cloudsqlconn.Option, error) {
	opts := []cloudsqlconn.Option{cloudsqlconn.WithUserAgent(userAgent)}
	switch strings.ToLower(ipType) {
	case "private":
		opts = append(opts, cloudsqlconn.WithDefaultDialOptions(cloudsqlconn.WithPrivateIP()))
	case "public":
		opts = append(opts, cloudsqlconn.WithDefaultDialOptions(cloudsqlconn.WithPublicIP()))
	case "psc":
		opts = append(opts, cloudsqlconn.WithDefaultDialOptions(cloudsqlconn.WithPSC()))
	default:
		return nil, fmt.Errorf("invalid ipType %s. Must be one of `public`, `private`, or `psc`", ipType)
	}

	if useIAM {
		opts = append(opts, cloudsqlconn.WithIAMAuthN())
	}
	return opts, nil
}

// GetIAMPrincipalEmailFromADC finds the email associated with ADC
func GetIAMPrincipalEmailFromADC(ctx context.Context) (string, error) {
	// Finds ADC and returns an HTTP client associated with it
	client, err := google.DefaultClient(ctx,
		"https://www.googleapis.com/auth/userinfo.email")
	if err != nil {
		return "", fmt.Errorf("failed to create Google default client: %w", err)
	}

	// Retrieve the email associated with the token
	resp, err := client.Get("https://oauth2.googleapis.com/tokeninfo")
	if err != nil {
		return "", fmt.Errorf("failed to call tokeninfo endpoint: %w", err)
	}
	defer resp.Body.Close()

	bodyBytes, err := io.ReadAll(resp.Body)
	if err != nil {
		return "", fmt.Errorf("error reading tokeninfo response body (status %d): %w", resp.StatusCode, err)
	}
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("tokeninfo endpoint returned non-OK status %d: %s", resp.StatusCode, string(bodyBytes))
	}

	// Unmarshal response body and get `email`
	var responseJSON map[string]any
	err = json.Unmarshal(bodyBytes, &responseJSON)
	if err != nil {
		return "", fmt.Errorf("error parsing JSON: %v", err)
	}

	emailValue, ok := responseJSON["email"]
	if !ok {
		return "", fmt.Errorf("email not found in tokeninfo response")
	}
	// IAM principal names for service accounts drop the ".gserviceaccount.com" suffix
	email := strings.TrimSuffix(emailValue.(string), ".gserviceaccount.com")
	return email, nil
}

func GetIAMAccessToken(ctx context.Context) (string, error) {
	creds, err := google.FindDefaultCredentials(ctx, "https://www.googleapis.com/auth/cloud-platform")
	if err != nil {
		return "", fmt.Errorf("failed to find default credentials (run 'gcloud auth application-default login'?): %w", err)
	}

	token, err := creds.TokenSource.Token() // This gets an oauth2.Token
	if err != nil {
		return "", fmt.Errorf("failed to get token from token source: %w", err)
	}

	if !token.Valid() {
		return "", fmt.Errorf("retrieved token is invalid or expired")
	}
	return token.AccessToken, nil
}

```

--------------------------------------------------------------------------------
/internal/sources/tidb/tidb.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package tidb

import (
	"context"
	"database/sql"
	"fmt"
	"regexp"

	_ "github.com/go-sql-driver/mysql"
	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"go.opentelemetry.io/otel/trace"
)

const SourceKind string = "tidb"
const TiDBCloudHostPattern string = `gateway\d{2}\.(.+)\.(prod|dev|staging)\.(.+)\.tidbcloud\.com`

// validate interface
var _ sources.SourceConfig = Config{}

func init() {
	if !sources.Register(SourceKind, newConfig) {
		panic(fmt.Sprintf("source kind %q already registered", SourceKind))
	}
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources.SourceConfig, error) {
	actual := Config{Name: name}
	if err := decoder.DecodeContext(ctx, &actual); err != nil {
		return nil, err
	}

	// If the host is a TiDB Cloud instance, force to use SSL
	if IsTiDBCloudHost(actual.Host) {
		actual.UseSSL = true
	}

	return actual, nil
}

type Config struct {
	Name     string `yaml:"name" validate:"required"`
	Kind     string `yaml:"kind" validate:"required"`
	Host     string `yaml:"host" validate:"required"`
	Port     string `yaml:"port" validate:"required"`
	User     string `yaml:"user" validate:"required"`
	Password string `yaml:"password" validate:"required"`
	Database string `yaml:"database" validate:"required"`
	UseSSL   bool   `yaml:"ssl"`
}

func (r Config) SourceConfigKind() string {
	return SourceKind
}

func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
	pool, err := initTiDBConnectionPool(ctx, tracer, r.Name, r.Host, r.Port, r.User, r.Password, r.Database, r.UseSSL)
	if err != nil {
		return nil, fmt.Errorf("unable to create pool: %w", err)
	}

	err = pool.PingContext(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to connect successfully: %w", err)
	}

	s := &Source{
		Name: r.Name,
		Kind: SourceKind,
		Pool: pool,
	}
	return s, nil
}

var _ sources.Source = &Source{}

type Source struct {
	Name string `yaml:"name"`
	Kind string `yaml:"kind"`
	Pool *sql.DB
}

func (s *Source) SourceKind() string {
	return SourceKind
}

func (s *Source) TiDBPool() *sql.DB {
	return s.Pool
}

func IsTiDBCloudHost(host string) bool {
	match, err := regexp.MatchString(TiDBCloudHostPattern, host)
	if err != nil {
		return false
	}
	return match
}

func initTiDBConnectionPool(ctx context.Context, tracer trace.Tracer, name, host, port, user, pass, dbname string, useSSL bool) (*sql.DB, error) {
	//nolint:all // Reassigned ctx
	ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
	defer span.End()

	// Configure the driver to connect to the database
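	// Resulting DSN looks like "user:pass@tcp(host:4000)/dbname?parseTime=true&charset=utf8mb4&tls=true" (port shown is illustrative).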
	dsn := fmt.Sprintf("%s:%s@tcp(%s:%s)/%s?parseTime=true&charset=utf8mb4&tls=%t", user, pass, host, port, dbname, useSSL)

	// Interact with the driver directly as you normally would
	pool, err := sql.Open("mysql", dsn)
	if err != nil {
		return nil, fmt.Errorf("sql.Open: %w", err)
	}
	return pool, nil
}

```

--------------------------------------------------------------------------------
/internal/tools/dataform/dataformcompilelocal/dataformcompilelocal.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package dataformcompilelocal

import (
	"context"
	"fmt"
	"os/exec"
	"strings"

	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/googleapis/genai-toolbox/internal/tools"
)

const kind string = "dataform-compile-local"

func init() {
	if !tools.Register(kind, newConfig) {
		panic(fmt.Sprintf("tool kind %q already registered", kind))
	}
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
	actual := Config{Name: name}
	if err := decoder.DecodeContext(ctx, &actual); err != nil {
		return nil, err
	}
	return actual, nil
}

type Config struct {
	Name         string   `yaml:"name" validate:"required"`
	Kind         string   `yaml:"kind" validate:"required"`
	Description  string   `yaml:"description" validate:"required"`
	AuthRequired []string `yaml:"authRequired"`
}

var _ tools.ToolConfig = Config{}

func (cfg Config) ToolConfigKind() string {
	return kind
}

func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
	allParameters := tools.Parameters{
		tools.NewStringParameter("project_dir", "The Dataform project directory."),
	}
	paramManifest := allParameters.Manifest()
	mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, allParameters)

	t := Tool{
		Name:         cfg.Name,
		Kind:         kind,
		AuthRequired: cfg.AuthRequired,
		Parameters:   allParameters,
		manifest:     tools.Manifest{Description: cfg.Description, Parameters: paramManifest, AuthRequired: cfg.AuthRequired},
		mcpManifest:  mcpManifest,
	}

	return t, nil
}

var _ tools.Tool = Tool{}

type Tool struct {
	Name         string           `yaml:"name"`
	Kind         string           `yaml:"kind"`
	AuthRequired []string         `yaml:"authRequired"`
	Parameters   tools.Parameters `yaml:"allParams"`
	manifest     tools.Manifest
	mcpManifest  tools.McpManifest
}

func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
	paramsMap := params.AsMap()

	projectDir, ok := paramsMap["project_dir"].(string)
	if !ok || projectDir == "" {
		return nil, fmt.Errorf("error casting 'project_dir' to string or invalid value")
	}

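	// Shell out to the Dataform CLI (requires the `dataform` binary on PATH).
	// CombinedOutput captures stdout and stderr so compile errors surface in the result.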
	cmd := exec.CommandContext(ctx, "dataform", "compile", projectDir, "--json")
	output, err := cmd.CombinedOutput()
	if err != nil {
		return nil, fmt.Errorf("error executing dataform compile: %w\nOutput: %s", err, string(output))
	}

	return strings.TrimSpace(string(output)), nil
}

func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
	return tools.ParseParams(t.Parameters, data, claims)
}

func (t Tool) Manifest() tools.Manifest {
	return t.manifest
}

func (t Tool) McpManifest() tools.McpManifest {
	return t.mcpManifest
}

func (t Tool) Authorized(verifiedAuthServices []string) bool {
	return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}

func (t Tool) RequiresClientAuthorization() bool {
	return false
}

```

--------------------------------------------------------------------------------
/docs/en/resources/sources/yugabytedb.md:
--------------------------------------------------------------------------------

```markdown
---
title: "YugabyteDB"
type: docs
weight: 1
description: >
  YugabyteDB is a high-performance, distributed SQL database. 
---

## About

[YugabyteDB][yugabytedb] is a high-performance, distributed SQL database
designed for global, internet-scale applications, with full PostgreSQL
compatibility.

[yugabytedb]: https://www.yugabyte.com/

## Example

```yaml
sources:
    my-yb-source:
        kind: yugabytedb
        host: 127.0.0.1
        port: 5433
        database: yugabyte
        user: ${USER_NAME}
        password: ${PASSWORD}
        loadBalance: true
        topologyKeys: cloud.region.zone1:1,cloud.region.zone2:2
```

## Reference

| **field**                    | **type** | **required** | **description**                                                                                                                                                       |
|------------------------------|:--------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind                         |  string  |     true     | Must be "yugabytedb".                                                                                                                                                 |
| host                         |  string  |     true     | IP address to connect to.                                                                                                                                             |
| port                         | integer  |     true     | Port to connect to. The default port is 5433.                                                                                                                         |
| database                     |  string  |     true     | Name of the YugabyteDB database to connect to. The default database name is yugabyte.                                                                                 |
| user                         |  string  |     true     | Name of the YugabyteDB user to connect as. The default user is yugabyte.                                                                                              |
| password                     |  string  |     true     | Password of the YugabyteDB user. The default password is yugabyte.                                                                                                    |
| loadBalance                  | boolean  |    false     | If true, enable uniform load balancing. The default loadBalance value is false.                                                                                       |
| topologyKeys                 |  string  |    false     | Comma-separated geo-locations in the form cloud.region.zone:priority to enable topology-aware load balancing. Ignored if loadBalance is false. It is null by default. |
| ybServersRefreshInterval     | integer  |    false     | The interval (in seconds) to refresh the servers list; ignored if loadBalance is false. The default value of ybServersRefreshInterval is 300.                         |
| fallbackToTopologyKeysOnly   | boolean  |    false     | If set to true and topologyKeys are specified, only connect to nodes specified in topologyKeys. By default, this is set to false.                                     |
| failedHostReconnectDelaySecs | integer  |    false     | Time (in seconds) to wait before trying to connect to failed nodes. The default value is 5.                                                                           |
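
For example, the optional load-balancing fields above can be combined as in the
following sketch (the values shown are illustrative, not defaults):

```yaml
sources:
    my-yb-source:
        kind: yugabytedb
        host: 127.0.0.1
        port: 5433
        database: yugabyte
        user: ${USER_NAME}
        password: ${PASSWORD}
        loadBalance: true
        topologyKeys: cloud.region.zone1:1,cloud.region.zone2:2
        fallbackToTopologyKeysOnly: true
        ybServersRefreshInterval: 600
        failedHostReconnectDelaySecs: 10
```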

```

--------------------------------------------------------------------------------
/internal/sources/spanner/spanner.go:
--------------------------------------------------------------------------------

```go
// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package spanner

import (
	"context"
	"fmt"

	"cloud.google.com/go/spanner"
	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/googleapis/genai-toolbox/internal/util"
	"go.opentelemetry.io/otel/trace"
)

const SourceKind string = "spanner"

// validate interface
var _ sources.SourceConfig = Config{}

func init() {
	if !sources.Register(SourceKind, newConfig) {
		panic(fmt.Sprintf("source kind %q already registered", SourceKind))
	}
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources.SourceConfig, error) {
	actual := Config{Name: name, Dialect: "googlesql"} // Default dialect
	if err := decoder.DecodeContext(ctx, &actual); err != nil {
		return nil, err
	}
	return actual, nil
}

type Config struct {
	Name     string          `yaml:"name" validate:"required"`
	Kind     string          `yaml:"kind" validate:"required"`
	Project  string          `yaml:"project" validate:"required"`
	Instance string          `yaml:"instance" validate:"required"`
	Dialect  sources.Dialect `yaml:"dialect" validate:"required"`
	Database string          `yaml:"database" validate:"required"`
}

func (r Config) SourceConfigKind() string {
	return SourceKind
}

func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
	client, err := initSpannerClient(ctx, tracer, r.Name, r.Project, r.Instance, r.Database)
	if err != nil {
		return nil, fmt.Errorf("unable to create client: %w", err)
	}

	s := &Source{
		Name:    r.Name,
		Kind:    SourceKind,
		Client:  client,
		Dialect: r.Dialect.String(),
	}
	return s, nil
}

var _ sources.Source = &Source{}

type Source struct {
	Name    string `yaml:"name"`
	Kind    string `yaml:"kind"`
	Client  *spanner.Client
	Dialect string
}

func (s *Source) SourceKind() string {
	return SourceKind
}

func (s *Source) SpannerClient() *spanner.Client {
	return s.Client
}

func (s *Source) DatabaseDialect() string {
	return s.Dialect
}

func initSpannerClient(ctx context.Context, tracer trace.Tracer, name, project, instance, dbname string) (*spanner.Client, error) {
	//nolint:all // Reassigned ctx
	ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
	defer span.End()

	// Configure the connection to the database
	db := fmt.Sprintf("projects/%s/instances/%s/databases/%s", project, instance, dbname)

	// Configure session pool to automatically clean inactive transactions
	sessionPoolConfig := spanner.SessionPoolConfig{
		TrackSessionHandles: true,
		InactiveTransactionRemovalOptions: spanner.InactiveTransactionRemovalOptions{
			ActionOnInactiveTransaction: spanner.WarnAndClose,
		},
	}

	// Create spanner client
	userAgent, err := util.UserAgentFromContext(ctx)
	if err != nil {
		return nil, err
	}
	client, err := spanner.NewClientWithConfig(ctx, db, spanner.ClientConfig{SessionPoolConfig: sessionPoolConfig, UserAgent: userAgent})
	if err != nil {
		return nil, fmt.Errorf("unable to create new client: %w", err)
	}

	return client, nil
}

```

--------------------------------------------------------------------------------
/internal/prebuiltconfigs/tools/mysql.yaml:
--------------------------------------------------------------------------------

```yaml
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

sources:
  mysql-source:
    kind: mysql
    host: ${MYSQL_HOST}
    port: ${MYSQL_PORT}
    database: ${MYSQL_DATABASE}
    user: ${MYSQL_USER}
    password: ${MYSQL_PASSWORD}
    # Optional: supply additional DSN parameters (e.g. TLS, charset) via env var.
    # Provide a YAML-encoded map in MYSQL_QUERY_PARAMS, for example:
    #   export MYSQL_QUERY_PARAMS="{tls: preferred, charset: utf8mb4}"
    # When the variable is empty/undefined, queryParams will be treated as nil.
    queryParams: ${MYSQL_QUERY_PARAMS:}
    queryTimeout: 30s # Optional
tools:
  execute_sql:
    kind: mysql-execute-sql
    source: mysql-source
    description: Use this tool to execute SQL.
  list_active_queries:
    kind: mysql-list-active-queries
    source: mysql-source
    description: Lists top N (default 10) ongoing queries from processlist and innodb_trx, ordered by execution time in descending order. Returns detailed information of those queries in json format, including process id, query, transaction duration, transaction wait duration, process time, transaction state, process state, username with host, transaction rows locked, transaction rows modified, and db schema.
  get_query_plan:
    kind: mysql-sql
    source: mysql-source
    description: "Provide information about how MySQL executes a SQL statement. Common use cases include: 1) analyze a query plan to improve its performance, and 2) determine the effectiveness of existing indexes and evaluate new ones."
    statement: |
      EXPLAIN FORMAT=JSON {{.sql_statement}};
    templateParameters:
      - name: sql_statement
        type: string
        description: "the SQL statement to explain"
        required: true
  list_tables:
    kind: mysql-list-tables
    source: mysql-source
    description: "Lists detailed schema information (object type, columns, constraints, indexes, triggers, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
  list_tables_missing_unique_indexes:
    kind: mysql-list-tables-missing-unique-indexes
    source: mysql-source
    description: "Find tables that do not have a primary or unique key constraint. A primary key or unique key is the only mechanism that guarantees a row is unique. Without them, database-level protection against data integrity issues is missing."
  list_table_fragmentation:
    kind: mysql-list-table-fragmentation
    source: mysql-source
    description: List table fragmentation in MySQL by calculating the size of the data and index files and the free space allocated to each table. The query calculates a fragmentation percentage, which represents the proportion of free space relative to the total data and index size. Storage can be reclaimed for tables with high fragmentation using OPTIMIZE TABLE.

toolsets:
  mysql_database_tools:
    - execute_sql
    - list_tables
    - get_query_plan
    - list_active_queries
    - list_tables_missing_unique_indexes
    - list_table_fragmentation

```

--------------------------------------------------------------------------------
/docs/en/resources/sources/mssql.md:
--------------------------------------------------------------------------------

```markdown
---
title: "SQL Server"
type: docs
weight: 1
description: >
  SQL Server is a relational database management system (RDBMS).

---

## About

[SQL Server][mssql-docs] is a relational database management system (RDBMS)
developed by Microsoft that allows users to store, retrieve, and manage large
amounts of data through a structured format.

[mssql-docs]: https://www.microsoft.com/en-us/sql-server

## Available Tools

- [`mssql-sql`](../tools/mssql/mssql-sql.md)  
  Execute pre-defined SQL Server queries with placeholder parameters.

- [`mssql-execute-sql`](../tools/mssql/mssql-execute-sql.md)  
  Run parameterized SQL queries against SQL Server.

- [`mssql-list-tables`](../tools/mssql/mssql-list-tables.md)  
  List tables in a SQL Server database.

## Requirements

### Database User

This source only uses standard authentication. You will need to [create a
SQL Server user][mssql-users] to log in to the database with.

[mssql-users]: https://learn.microsoft.com/en-us/sql/relational-databases/security/authentication-access/create-a-database-user?view=sql-server-ver16

## Example

```yaml
sources:
    my-mssql-source:
        kind: mssql
        host: 127.0.0.1
        port: 1433
        database: my_db
        user: ${USER_NAME}
        password: ${PASSWORD}
        # encrypt: strict
```

{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}

## Reference

| **field** | **type** | **required** | **description**                                                                                                                                                                            |
|-----------|:--------:|:------------:|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind      |  string  |     true     | Must be "mssql".                                                                                                                                                                           |
| host      |  string  |     true     | IP address to connect to (e.g. "127.0.0.1").                                                                                                                                               |
| port      |  string  |     true     | Port to connect to (e.g. "1433").                                                                                                                                                          |
| database  |  string  |     true     | Name of the SQL Server database to connect to (e.g. "my_db").                                                                                                                              |
| user      |  string  |     true     | Name of the SQL Server user to connect as (e.g. "my-user").                                                                                                                                |
| password  |  string  |     true     | Password of the SQL Server user (e.g. "my-password").                                                                                                                                      |
| encrypt   |  string  |    false     | Encryption level for data transmitted between the client and server (e.g., "strict"). If not specified, defaults to the [github.com/microsoft/go-mssqldb](https://github.com/microsoft/go-mssqldb?tab=readme-ov-file#common-parameters) package's default encrypt value. |

```

--------------------------------------------------------------------------------
/docs/en/resources/tools/bigquery/bigquery-analyze-contribution.md:
--------------------------------------------------------------------------------

```markdown
---
title: "bigquery-analyze-contribution"
type: docs
weight: 1
description: >
  A "bigquery-analyze-contribution" tool performs contribution analysis in BigQuery.
aliases:
- /resources/tools/bigquery-analyze-contribution
---

## About

A `bigquery-analyze-contribution` tool performs contribution analysis in
BigQuery by creating a temporary `CONTRIBUTION_ANALYSIS` model and then querying
it with `ML.GET_INSIGHTS` to find top contributors for a given metric.

It's compatible with the following sources:

- [bigquery](../../sources/bigquery.md)

`bigquery-analyze-contribution` takes the following parameters:

- **input_data** (string, required): The data that contains the test and control
  data to analyze. This can be a fully qualified BigQuery table ID (e.g.,
  `my-project.my_dataset.my_table`) or a SQL query that returns the data.
- **contribution_metric** (string, required): The name of the column that
  contains the metric to analyze. This can be SUM(metric_column_name),
  SUM(numerator_metric_column_name)/SUM(denominator_metric_column_name) or
  SUM(metric_sum_column_name)/COUNT(DISTINCT categorical_column_name) depending
  on the type of metric to analyze.
- **is_test_col** (string, required): The name of the column that identifies
  whether a row is in the test or control group. The column must contain boolean
  values.
- **dimension_id_cols** (array of strings, optional): An array of column names
  that uniquely identify each dimension.
- **top_k_insights_by_apriori_support** (integer, optional): The number of top
  insights to return, ranked by apriori support. Defaults to '30'.
- **pruning_method** (string, optional): The method to use for pruning redundant
  insights. Can be `'NO_PRUNING'` or `'PRUNE_REDUNDANT_INSIGHTS'`. Defaults to
  `'PRUNE_REDUNDANT_INSIGHTS'`.

The behavior of this tool is influenced by the `writeMode` setting on its `bigquery` source:

- **`allowed` (default) and `blocked`:** These modes do not impose any special restrictions on the `bigquery-analyze-contribution` tool.
- **`protected`:** This mode enables session-based execution. The tool will operate within the same BigQuery session as other
  tools using the same source. This allows the `input_data` parameter to be a query that references temporary resources (e.g., 
  `TEMP` tables) created within that session (see the source sketch below).

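For reference, a minimal source sketch that opts into the session-based
`protected` mode might look like the following (the source name and project
value are placeholders):

```yaml
sources:
  my-bigquery-source:
    kind: bigquery
    project: my-project
    writeMode: protected
```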

## Example

```yaml
tools:
  contribution_analyzer:
    kind: bigquery-analyze-contribution
    source: my-bigquery-source
    description: Use this tool to run contribution analysis on a dataset in BigQuery.
```

## Sample Prompt

You can prepare a sample table by following
https://cloud.google.com/bigquery/docs/get-contribution-analysis-insights,
and then use the following sample prompts to call this tool:

- What drives the changes in sales in the table
  `bqml_tutorial.iowa_liquor_sales_sum_data`? Use the project id myproject.
- Analyze the contribution for the `total_sales` metric in the table
  `bqml_tutorial.iowa_liquor_sales_sum_data`. The test group is identified by
  the `is_test` column. The dimensions are `store_name`, `city`, `vendor_name`,
  `category_name` and `item_description`.

## Reference

| **field**   | **type** | **required** | **description**                                            |
|-------------|:--------:|:------------:|------------------------------------------------------------|
| kind        |  string  |     true     | Must be "bigquery-analyze-contribution".                   |
| source      |  string  |     true     | Name of the source the tool should execute on.             |
| description |  string  |     true     | Description of the tool that is passed to the LLM.         |

```

--------------------------------------------------------------------------------
/internal/tools/mongodb/mongodbdeleteone/mongodbdeleteone_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package mongodbdeleteone_test

import (
	"strings"
	"testing"

	"github.com/googleapis/genai-toolbox/internal/tools"
	"github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeleteone"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/server"
	"github.com/googleapis/genai-toolbox/internal/testutils"
)

func TestParseFromYamlMongoQuery(t *testing.T) {
	ctx, err := testutils.ContextWithNewLogger()
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	tcs := []struct {
		desc string
		in   string
		want server.ToolConfigs
	}{
		{
			desc: "basic example",
			in: `
			tools:
				example_tool:
					kind: mongodb-delete-one
					source: my-instance
					description: some description
					database: test_db
					collection: test_coll
					filterPayload: |
					    { name: {{json .name}} }
					filterParams:
                        - name: name 
                          type: string
                          description: small description
			`,
			want: server.ToolConfigs{
				"example_tool": mongodbdeleteone.Config{
					Name:          "example_tool",
					Kind:          "mongodb-delete-one",
					Source:        "my-instance",
					AuthRequired:  []string{},
					Database:      "test_db",
					Collection:    "test_coll",
					Description:   "some description",
					FilterPayload: "{ name: {{json .name}} }\n",
					FilterParams: tools.Parameters{
						&tools.StringParameter{
							CommonParameter: tools.CommonParameter{
								Name: "name",
								Type: "string",
								Desc: "small description",
							},
						},
					},
				},
			},
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Tools server.ToolConfigs `yaml:"tools"`
			}{}
			// Parse contents
			err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
			if err != nil {
				t.Fatalf("unable to unmarshal: %s", err)
			}
			if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
				t.Fatalf("incorrect parse: diff %v", diff)
			}
		})
	}

}

func TestFailParseFromYamlMongoQuery(t *testing.T) {
	ctx, err := testutils.ContextWithNewLogger()
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	tcs := []struct {
		desc string
		in   string
		err  string
	}{
		{
			desc: "Invalid method",
			in: `
			tools:
				example_tool:
					kind: mongodb-delete-one
					source: my-instance
					description: some description
					collection: test_coll
					filterPayload: |
					  { name : {{json .name}} }
			`,
			err: `unable to parse tool "example_tool" as kind "mongodb-delete-one"`,
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Tools server.ToolConfigs `yaml:"tools"`
			}{}
			// Parse contents
			err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
			if err == nil {
				t.Fatalf("expect parsing to fail")
			}
			errStr := err.Error()
			if !strings.Contains(errStr, tc.err) {
				t.Fatalf("unexpected error string: got %q, want substring %q", errStr, tc.err)
			}
		})
	}

}

```

--------------------------------------------------------------------------------
/internal/tools/mongodb/mongodbdeletemany/mongodbdeletemany_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package mongodbdeletemany_test

import (
	"strings"
	"testing"

	"github.com/googleapis/genai-toolbox/internal/tools"
	"github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeletemany"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/server"
	"github.com/googleapis/genai-toolbox/internal/testutils"
)

func TestParseFromYamlMongoQuery(t *testing.T) {
	ctx, err := testutils.ContextWithNewLogger()
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	tcs := []struct {
		desc string
		in   string
		want server.ToolConfigs
	}{
		{
			desc: "basic example",
			in: `
			tools:
				example_tool:
					kind: mongodb-delete-many
					source: my-instance
					description: some description
					database: test_db
					collection: test_coll
					filterPayload: |
					    { name: {{json .name}} }
					filterParams:
                        - name: name 
                          type: string
                          description: small description
			`,
			want: server.ToolConfigs{
				"example_tool": mongodbdeletemany.Config{
					Name:          "example_tool",
					Kind:          "mongodb-delete-many",
					Source:        "my-instance",
					AuthRequired:  []string{},
					Database:      "test_db",
					Collection:    "test_coll",
					Description:   "some description",
					FilterPayload: "{ name: {{json .name}} }\n",
					FilterParams: tools.Parameters{
						&tools.StringParameter{
							CommonParameter: tools.CommonParameter{
								Name: "name",
								Type: "string",
								Desc: "small description",
							},
						},
					},
				},
			},
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Tools server.ToolConfigs `yaml:"tools"`
			}{}
			// Parse contents
			err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
			if err != nil {
				t.Fatalf("unable to unmarshal: %s", err)
			}
			if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
				t.Fatalf("incorrect parse: diff %v", diff)
			}
		})
	}

}

func TestFailParseFromYamlMongoQuery(t *testing.T) {
	ctx, err := testutils.ContextWithNewLogger()
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	tcs := []struct {
		desc string
		in   string
		err  string
	}{
		{
			desc: "Invalid method",
			in: `
			tools:
				example_tool:
					kind: mongodb-delete-many
					source: my-instance
					description: some description
					collection: test_coll
					filterPayload: |
					  { name : {{json .name}} }
			`,
			err: `unable to parse tool "example_tool" as kind "mongodb-delete-many"`,
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Tools server.ToolConfigs `yaml:"tools"`
			}{}
			// Parse contents
			err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
			if err == nil {
				t.Fatalf("expect parsing to fail")
			}
			errStr := err.Error()
			if !strings.Contains(errStr, tc.err) {
				t.Fatalf("unexpected error string: got %q, want substring %q", errStr, tc.err)
			}
		})
	}

}

```

--------------------------------------------------------------------------------
/internal/sources/mssql/mssql.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package mssql

import (
	"context"
	"database/sql"
	"fmt"
	"net/url"

	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/googleapis/genai-toolbox/internal/util"
	_ "github.com/microsoft/go-mssqldb"
	"go.opentelemetry.io/otel/trace"
)

const SourceKind string = "mssql"

// validate interface
var _ sources.SourceConfig = Config{}

func init() {
	if !sources.Register(SourceKind, newConfig) {
		panic(fmt.Sprintf("source kind %q already registered", SourceKind))
	}
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources.SourceConfig, error) {
	actual := Config{Name: name}
	if err := decoder.DecodeContext(ctx, &actual); err != nil {
		return nil, err
	}
	return actual, nil
}

type Config struct {
	// Cloud SQL MSSQL configs
	Name     string `yaml:"name" validate:"required"`
	Kind     string `yaml:"kind" validate:"required"`
	Host     string `yaml:"host" validate:"required"`
	Port     string `yaml:"port" validate:"required"`
	User     string `yaml:"user" validate:"required"`
	Password string `yaml:"password" validate:"required"`
	Database string `yaml:"database" validate:"required"`
	Encrypt  string `yaml:"encrypt"`
}

func (r Config) SourceConfigKind() string {
	// Returns Cloud SQL MSSQL source kind
	return SourceKind
}

func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
	// Initializes a MSSQL source
	db, err := initMssqlConnection(ctx, tracer, r.Name, r.Host, r.Port, r.User, r.Password, r.Database, r.Encrypt)
	if err != nil {
		return nil, fmt.Errorf("unable to create db connection: %w", err)
	}

	// Verify db connection
	err = db.PingContext(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to connect successfully: %w", err)
	}

	s := &Source{
		Name: r.Name,
		Kind: SourceKind,
		Db:   db,
	}
	return s, nil
}

var _ sources.Source = &Source{}

type Source struct {
	// Cloud SQL MSSQL struct with connection pool
	Name string `yaml:"name"`
	Kind string `yaml:"kind"`
	Db   *sql.DB
}

func (s *Source) SourceKind() string {
	// Returns Cloud SQL MSSQL source kind
	return SourceKind
}

func (s *Source) MSSQLDB() *sql.DB {
	// Returns a Cloud SQL MSSQL database connection pool
	return s.Db
}

func initMssqlConnection(
	ctx context.Context,
	tracer trace.Tracer,
	name, host, port, user, pass, dbname, encrypt string,
) (
	*sql.DB,
	error,
) {
	//nolint:all // Reassigned ctx
	ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
	defer span.End()

	userAgent, err := util.UserAgentFromContext(ctx)
	if err != nil {
		userAgent = "genai-toolbox"
	}
	// Create dsn
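	// Resulting connection string looks like sqlserver://user:pass@host:port?app+name=...&database=... (query keys are URL-encoded).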
	query := url.Values{}
	query.Add("app name", userAgent)
	query.Add("database", dbname)
	if encrypt != "" {
		query.Add("encrypt", encrypt)
	}

	url := &url.URL{
		Scheme:   "sqlserver",
		User:     url.UserPassword(user, pass),
		Host:     fmt.Sprintf("%s:%s", host, port),
		RawQuery: query.Encode(),
	}

	// Open database connection
	db, err := sql.Open("sqlserver", url.String())
	if err != nil {
		return nil, fmt.Errorf("sql.Open: %w", err)
	}
	return db, nil
}

```

--------------------------------------------------------------------------------
/docs/en/resources/tools/mongodb/mongodb-aggregate.md:
--------------------------------------------------------------------------------

```markdown
---
title: "mongodb-aggregate"
type: docs
weight: 1
description: > 
  A "mongodb-aggregate" tool executes a multi-stage aggregation pipeline against a MongoDB collection.
aliases:
- /resources/tools/mongodb-aggregate
---

## About

The `mongodb-aggregate` tool is the most powerful query tool for MongoDB,
allowing you to process data through a multi-stage pipeline. Each stage
transforms the documents as they pass through, enabling complex operations like
grouping, filtering, reshaping documents, and performing calculations.

The core of this tool is the `pipelinePayload`, which must be a string
containing a **JSON array of pipeline stage documents**. The tool returns a JSON
array of documents produced by the final stage of the pipeline.

A `readOnly` flag can be set to `true` as a safety measure to ensure the
pipeline does not contain any write stages (like `$out` or `$merge`).

This tool is compatible with the following source kind:

* [`mongodb`](../../sources/mongodb.md)

## Example

Here is an example that calculates the average price and total count of products
for each category, but only for products with an "active" status.

```yaml
tools:
  get_category_stats:
    kind: mongodb-aggregate
    source: my-mongo-source
    description: Calculates average price and count of products, grouped by category.
    database: ecommerce
    collection: products
    readOnly: true
    pipelinePayload: |
      [
        {
          "$match": {
            "status": {{json .status_filter}}
          }
        },
        {
          "$group": {
            "_id": "$category",
            "average_price": { "$avg": "$price" },
            "item_count": { "$sum": 1 }
          }
        },
        {
          "$sort": {
            "average_price": -1
          }
        }
      ]
    pipelineParams:
      - name: status_filter
        type: string
        description: The product status to filter by (e.g., "active").
```

## Reference

| **field**       | **type** | **required** | **description**                                                                                                |
|:----------------|:---------|:-------------|:---------------------------------------------------------------------------------------------------------------|
| kind            | string   | true         | Must be `mongodb-aggregate`.                                                                                   |
| source          | string   | true         | The name of the `mongodb` source to use.                                                                       |
| description     | string   | true         | A description of the tool that is passed to the LLM.                                                           |
| database        | string   | true         | The name of the MongoDB database containing the collection.                                                    |
| collection      | string   | true         | The name of the MongoDB collection to run the aggregation on.                                                  |
| pipelinePayload | string   | true         | A JSON array of aggregation stage documents, provided as a string. Uses `{{json .param_name}}` for templating. |
| pipelineParams  | list     | true         | A list of parameter objects that define the variables used in the `pipelinePayload`.                           |
| canonical       | bool     | false        | Determines if the pipeline string is parsed using MongoDB's Canonical or Relaxed Extended JSON format.         |
| readOnly        | bool     | false        | If `true`, the tool will fail if the pipeline contains write stages (`$out` or `$merge`). Defaults to `false`. |

```

--------------------------------------------------------------------------------
/.github/workflows/deploy_previous_version_docs.yaml:
--------------------------------------------------------------------------------

```yaml
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

name: "Deploy Previous Version Docs"

on:
  workflow_dispatch:
    inputs:
      version_tag:
        description: 'The old version tag to build docs for (e.g., v0.15.0)'
        required: true
        type: string

jobs:
  build_and_deploy:
    runs-on: ubuntu-latest
    permissions:
      contents: write

    steps:
      - name: Checkout main branch (for latest templates and theme)
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4
        with:
          ref: 'main'
          submodules: 'recursive'
          fetch-depth: 0

      - name: Checkout old content from tag into a temporary directory
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4
        with:
          ref: ${{ github.event.inputs.version_tag }}
          path: 'old_version_source' # Checkout into a temp subdir
          # Sparse checkout to only get the content directory
          sparse-checkout: |
            docs

      - name: Replace content with old version
        run: |
          # Remove the current content directory from the main branch checkout
          rm -rf docs/
          # Move the old content directory into place
          mv ./old_version_source/docs docs

      - name: Setup Hugo and Node
        uses: peaceiris/actions-hugo@75d2e84710de30f6ff7268e08f310b60ef14033f # v3
        with:
          hugo-version: "0.145.0"
          extended: true
      - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
        with:
          node-version: "22"

      - name: Install Dependencies
        run: npm ci
        working-directory: .hugo

      - name: Build Hugo Site for Archived Version
        run: hugo --minify
        working-directory: .hugo
        env:
          HUGO_BASEURL: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/${{ github.event.inputs.version_tag }}/
          HUGO_RELATIVEURLS: false

      - name: Deploy to gh-pages
        uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: .hugo/public
          publish_branch: versioned-gh-pages
          destination_dir: ./${{ github.event.inputs.version_tag }}
          keep_files: true
          allow_empty_commit: true
          commit_message: "docs(backport): deploy docs for ${{ github.event.inputs.version_tag }}"

      - name: Clean Build Directory
        run: rm -rf .hugo/public

      - name: Build Hugo Site
        run: hugo --minify
        working-directory: .hugo
        env:
          HUGO_BASEURL: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/
          HUGO_RELATIVEURLS: false

      - name: Deploy to root
        uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: .hugo/public
          publish_branch: versioned-gh-pages
          keep_files: true
          allow_empty_commit: true
          commit_message: "deploy: docs to root for ${{ github.event.inputs.version_tag }}"
```

--------------------------------------------------------------------------------
/internal/sources/http/http.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//	http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package http

import (
	"context"
	"crypto/tls"
	"fmt"
	"net/http"
	"net/url"
	"time"

	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/googleapis/genai-toolbox/internal/util"
	"go.opentelemetry.io/otel/trace"
)

const SourceKind string = "http"

// validate interface
var _ sources.SourceConfig = Config{}

func init() {
	if !sources.Register(SourceKind, newConfig) {
		panic(fmt.Sprintf("source kind %q already registered", SourceKind))
	}
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources.SourceConfig, error) {
	actual := Config{Name: name, Timeout: "30s"} // Default timeout
	if err := decoder.DecodeContext(ctx, &actual); err != nil {
		return nil, err
	}
	return actual, nil
}

type Config struct {
	Name                   string            `yaml:"name" validate:"required"`
	Kind                   string            `yaml:"kind" validate:"required"`
	BaseURL                string            `yaml:"baseUrl"`
	Timeout                string            `yaml:"timeout"`
	DefaultHeaders         map[string]string `yaml:"headers"`
	QueryParams            map[string]string `yaml:"queryParams"`
	DisableSslVerification bool              `yaml:"disableSslVerification"`
}

func (r Config) SourceConfigKind() string {
	return SourceKind
}

// Initialize initializes an HTTP Source instance.
func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
	duration, err := time.ParseDuration(r.Timeout)
	if err != nil {
		return nil, fmt.Errorf("unable to parse Timeout string as time.Duration: %s", err)
	}

	tr := &http.Transport{}

	logger, err := util.LoggerFromContext(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
	}

	if r.DisableSslVerification {
		tr.TLSClientConfig = &tls.Config{
			InsecureSkipVerify: true,
		}

		logger.WarnContext(ctx, "Insecure HTTP is enabled for HTTP source %s. TLS certificate verification is skipped.\n", r.Name)
	}

	client := http.Client{
		Timeout:   duration,
		Transport: tr,
	}

	// Validate BaseURL
	_, err = url.ParseRequestURI(r.BaseURL)
	if err != nil {
		return nil, fmt.Errorf("failed to parse BaseUrl %v", err)
	}

	ua, err := util.UserAgentFromContext(ctx)
	if err != nil {
		fmt.Printf("Error in User Agent retrieval: %s", err)
	}
	if r.DefaultHeaders == nil {
		r.DefaultHeaders = make(map[string]string)
	}
	if existingUA, ok := r.DefaultHeaders["User-Agent"]; ok {
		ua = ua + " " + existingUA
	}
	r.DefaultHeaders["User-Agent"] = ua

	s := &Source{
		Name:           r.Name,
		Kind:           SourceKind,
		BaseURL:        r.BaseURL,
		DefaultHeaders: r.DefaultHeaders,
		QueryParams:    r.QueryParams,
		Client:         &client,
	}
	return s, nil

}

var _ sources.Source = &Source{}

type Source struct {
	Name           string            `yaml:"name"`
	Kind           string            `yaml:"kind"`
	BaseURL        string            `yaml:"baseUrl"`
	DefaultHeaders map[string]string `yaml:"headers"`
	QueryParams    map[string]string `yaml:"queryParams"`
	Client         *http.Client
}

func (s *Source) SourceKind() string {
	return SourceKind
}

```
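
The `Source` above only carries connection defaults; the actual request construction lives in the HTTP tools elsewhere in the repository and is not shown on this page. The following is a minimal, hypothetical sketch of how a caller could combine `BaseURL`, `QueryParams`, `DefaultHeaders`, and `Client` into a single GET request, using only the standard library:

```go
// Hypothetical sketch: how a caller could combine the Source fields above
// into one GET request. The real HTTP tool code lives elsewhere in this
// repository and may differ.
package main

import (
	"context"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"time"
)

func get(ctx context.Context, client *http.Client, baseURL, path string,
	headers, queryParams map[string]string) (string, error) {
	u, err := url.Parse(baseURL + path)
	if err != nil {
		return "", fmt.Errorf("invalid URL: %w", err)
	}
	// Merge source-level query parameters into the request URL.
	q := u.Query()
	for k, v := range queryParams {
		q.Set(k, v)
	}
	u.RawQuery = q.Encode()

	req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil)
	if err != nil {
		return "", err
	}
	// Apply source-level default headers (e.g. the User-Agent set in Initialize).
	for k, v := range headers {
		req.Header.Set(k, v)
	}

	resp, err := client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	return string(body), err
}

func main() {
	client := &http.Client{Timeout: 30 * time.Second}
	out, err := get(context.Background(), client, "https://example.com", "/status",
		map[string]string{"User-Agent": "genai-toolbox-sketch"},
		map[string]string{"verbose": "true"})
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	fmt.Println(out)
}
```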

--------------------------------------------------------------------------------
/internal/sources/mssql/mssql_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package mssql_test

import (
	"testing"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/server"
	"github.com/googleapis/genai-toolbox/internal/sources/mssql"
	"github.com/googleapis/genai-toolbox/internal/testutils"
)

func TestParseFromYamlMssql(t *testing.T) {
	tcs := []struct {
		desc string
		in   string
		want server.SourceConfigs
	}{
		{
			desc: "basic example",
			in: `
			sources:
				my-mssql-instance:
					kind: mssql
					host: 0.0.0.0
					port: my-port
					database: my_db
					user: my_user
					password: my_pass
			`,
			want: server.SourceConfigs{
				"my-mssql-instance": mssql.Config{
					Name:     "my-mssql-instance",
					Kind:     mssql.SourceKind,
					Host:     "0.0.0.0",
					Port:     "my-port",
					Database: "my_db",
					User:     "my_user",
					Password: "my_pass",
				},
			},
		},
		{
			desc: "with encrypt field",
			in: `
			sources:
				my-mssql-instance:
					kind: mssql
					host: 0.0.0.0
					port: my-port
					database: my_db
					user: my_user
					password: my_pass
					encrypt: strict
			`,
			want: server.SourceConfigs{
				"my-mssql-instance": mssql.Config{
					Name:     "my-mssql-instance",
					Kind:     mssql.SourceKind,
					Host:     "0.0.0.0",
					Port:     "my-port",
					Database: "my_db",
					User:     "my_user",
					Password: "my_pass",
					Encrypt:  "strict",
				},
			},
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Sources server.SourceConfigs `yaml:"sources"`
			}{}
			// Parse contents
			err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
			if err != nil {
				t.Fatalf("unable to unmarshal: %s", err)
			}
			if !cmp.Equal(tc.want, got.Sources) {
				t.Fatalf("incorrect psarse: want %v, got %v", tc.want, got.Sources)
			}
		})
	}
}

func TestFailParseFromYaml(t *testing.T) {
	tcs := []struct {
		desc string
		in   string
		err  string
	}{
		{
			desc: "extra field",
			in: `
			sources:
				my-mssql-instance:
					kind: mssql
					host: 0.0.0.0
					port: my-port
					database: my_db
					user: my_user
					password: my_pass
					foo: bar
			`,
			err: "unable to parse source \"my-mssql-instance\" as \"mssql\": [2:1] unknown field \"foo\"\n   1 | database: my_db\n>  2 | foo: bar\n       ^\n   3 | host: 0.0.0.0\n   4 | kind: mssql\n   5 | password: my_pass\n   6 | ",
		},
		{
			desc: "missing required field",
			in: `
			sources:
				my-mssql-instance:
					kind: mssql
					host: 0.0.0.0
					port: my-port
					database: my_db
					user: my_user
			`,
			err: "unable to parse source \"my-mssql-instance\" as \"mssql\": Key: 'Config.Password' Error:Field validation for 'Password' failed on the 'required' tag",
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Sources server.SourceConfigs `yaml:"sources"`
			}{}
			// Parse contents
			err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
			if err == nil {
				t.Fatalf("expect parsing to fail")
			}
			errStr := err.Error()
			if errStr != tc.err {
				t.Fatalf("unexpected error: got %q, want %q", errStr, tc.err)
			}
		})
	}
}

```
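
The source tests on this page all follow the same pattern: a tab-indented YAML literal is normalized by `testutils.FormatYaml`, decoded into `server.SourceConfigs` (which dispatches on `kind` to the registered config type), and the result compared against the expected struct. As an illustration only (the repository's real `http` source test is not included on this page), a comparable sketch for the `http` source defined earlier might look like this, assuming the `30s` default timeout from `newConfig` is applied during decoding:

```go
// Hypothetical test sketch; field names follow the yaml tags in
// internal/sources/http/http.go, but this test is not part of this page.
package http_test

import (
	"testing"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/server"
	httpsrc "github.com/googleapis/genai-toolbox/internal/sources/http"
	"github.com/googleapis/genai-toolbox/internal/testutils"
)

func TestParseFromYamlHTTP(t *testing.T) {
	in := `
	sources:
		my-http-source:
			kind: http
			baseUrl: https://api.example.com
			headers:
				Authorization: Bearer my-token
			queryParams:
				verbose: "true"
	`
	want := server.SourceConfigs{
		"my-http-source": httpsrc.Config{
			Name:           "my-http-source",
			Kind:           httpsrc.SourceKind,
			BaseURL:        "https://api.example.com",
			Timeout:        "30s", // default injected by newConfig (assumption)
			DefaultHeaders: map[string]string{"Authorization": "Bearer my-token"},
			QueryParams:    map[string]string{"verbose": "true"},
		},
	}
	got := struct {
		Sources server.SourceConfigs `yaml:"sources"`
	}{}
	if err := yaml.Unmarshal(testutils.FormatYaml(in), &got); err != nil {
		t.Fatalf("unable to unmarshal: %s", err)
	}
	if diff := cmp.Diff(want, got.Sources); diff != "" {
		t.Fatalf("incorrect parse: diff %v", diff)
	}
}
```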

--------------------------------------------------------------------------------
/internal/tools/mongodb/mongodbaggregate/mongodbaggregate_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package mongodbaggregate_test

import (
	"strings"
	"testing"

	"github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbaggregate"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/server"
	"github.com/googleapis/genai-toolbox/internal/testutils"
	"github.com/googleapis/genai-toolbox/internal/tools"
)

func TestParseFromYamlMongoQuery(t *testing.T) {
	ctx, err := testutils.ContextWithNewLogger()
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	tcs := []struct {
		desc string
		in   string
		want server.ToolConfigs
	}{
		{
			desc: "basic example",
			in: `
			tools:
				example_tool:
					kind: mongodb-aggregate
					source: my-instance
					description: some description
					database: test_db
					collection: test_coll
					readOnly: true
					pipelinePayload: |
					    [{ $match: { name: {{json .name}} }}]
					pipelineParams:
                        - name: name 
                          type: string
                          description: small description
			`,
			want: server.ToolConfigs{
				"example_tool": mongodbaggregate.Config{
					Name:            "example_tool",
					Kind:            "mongodb-aggregate",
					Source:          "my-instance",
					AuthRequired:    []string{},
					Database:        "test_db",
					Collection:      "test_coll",
					Description:     "some description",
					PipelinePayload: "[{ $match: { name: {{json .name}} }}]\n",
					PipelineParams: tools.Parameters{
						&tools.StringParameter{
							CommonParameter: tools.CommonParameter{
								Name: "name",
								Type: "string",
								Desc: "small description",
							},
						},
					},
					ReadOnly: true,
				},
			},
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Tools server.ToolConfigs `yaml:"tools"`
			}{}
			// Parse contents
			err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
			if err != nil {
				t.Fatalf("unable to unmarshal: %s", err)
			}
			if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
				t.Fatalf("incorrect parse: diff %v", diff)
			}
		})
	}

}

func TestFailParseFromYamlMongoQuery(t *testing.T) {
	ctx, err := testutils.ContextWithNewLogger()
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	tcs := []struct {
		desc string
		in   string
		err  string
	}{
		{
			desc: "Invalid method",
			in: `
			tools:
				example_tool:
					kind: mongodb-aggregate
					source: my-instance
					description: some description
					collection: test_coll
					pipelinePayload: |
					  [{ $match: { name : {{json .name}} }}]
			`,
			err: `unable to parse tool "example_tool" as kind "mongodb-aggregate"`,
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Tools server.ToolConfigs `yaml:"tools"`
			}{}
			// Parse contents
			err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
			if err == nil {
				t.Fatalf("expect parsing to fail")
			}
			errStr := err.Error()
			if !strings.Contains(errStr, tc.err) {
				t.Fatalf("unexpected error string: got %q, want substring %q", errStr, tc.err)
			}
		})
	}

}

```
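
The `pipelinePayload` in the test above embeds `{{json .name}}`, which suggests the aggregation pipeline is produced by rendering a template with the declared `pipelineParams` before it is parsed. The tool's actual rendering code is not shown on this page; the sketch below only illustrates how such a payload could be expanded, using the standard `text/template` package with a hypothetical `json` helper:

```go
// Illustrative only: renders a pipeline-style template the way the test
// payload above could plausibly be expanded. The actual mongodb-aggregate
// implementation may differ.
package main

import (
	"encoding/json"
	"fmt"
	"os"
	"text/template"
)

func main() {
	const payload = `[{ $match: { name: {{json .name}} }}]`

	// A "json" helper that marshals a parameter value so strings are quoted
	// and special characters are escaped safely.
	funcs := template.FuncMap{
		"json": func(v any) (string, error) {
			b, err := json.Marshal(v)
			return string(b), err
		},
	}

	tmpl, err := template.New("pipeline").Funcs(funcs).Parse(payload)
	if err != nil {
		fmt.Fprintln(os.Stderr, "parse error:", err)
		return
	}

	params := map[string]any{"name": `Alice "A." Smith`}
	if err := tmpl.Execute(os.Stdout, params); err != nil {
		fmt.Fprintln(os.Stderr, "execute error:", err)
	}
	// Prints: [{ $match: { name: "Alice \"A.\" Smith" }}]
}
```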

--------------------------------------------------------------------------------
/internal/sources/redis/redis_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package redis_test

import (
	"strings"
	"testing"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/server"
	"github.com/googleapis/genai-toolbox/internal/sources/redis"
	"github.com/googleapis/genai-toolbox/internal/testutils"
)

func TestParseFromYamlRedis(t *testing.T) {
	tcs := []struct {
		desc string
		in   string
		want server.SourceConfigs
	}{
		{
			desc: "default setting",
			in: `
			sources:
				my-redis-instance:
					kind: redis
					address:
					  - 127.0.0.1
			`,
			want: server.SourceConfigs{
				"my-redis-instance": redis.Config{
					Name:           "my-redis-instance",
					Kind:           redis.SourceKind,
					Address:        []string{"127.0.0.1"},
					ClusterEnabled: false,
					UseGCPIAM:      false,
				},
			},
		},
		{
			desc: "advanced example",
			in: `
			sources:
				my-redis-instance:
					kind: redis
					address:
					  - 127.0.0.1
					password: my-pass
					database: 1
					useGCPIAM: true
					clusterEnabled: true
			`,
			want: server.SourceConfigs{
				"my-redis-instance": redis.Config{
					Name:           "my-redis-instance",
					Kind:           redis.SourceKind,
					Address:        []string{"127.0.0.1"},
					Password:       "my-pass",
					Database:       1,
					ClusterEnabled: true,
					UseGCPIAM:      true,
				},
			},
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Sources server.SourceConfigs `yaml:"sources"`
			}{}
			// Parse contents
			err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
			if err != nil {
				t.Fatalf("unable to unmarshal: %s", err)
			}
			if !cmp.Equal(tc.want, got.Sources) {
				t.Fatalf("incorrect parse: want %v, got %v", tc.want, got.Sources)
			}
		})
	}

}

func TestFailParseFromYaml(t *testing.T) {
	tcs := []struct {
		desc string
		in   string
		err  string
	}{
		{
			desc: "invalid database",
			in: `
			sources:
				my-redis-instance:
					kind: redis
					project: my-project
					address:
					  - 127.0.0.1
					password: my-pass
					database: data
			`,
			err: "cannot unmarshal string into Go struct field .Sources of type int",
		},
		{
			desc: "extra field",
			in: `
			sources:
				my-redis-instance:
					kind: redis
					project: my-project
					address:
					  - 127.0.0.1
					password: my-pass
					database: 1
			`,
			err: "unable to parse source \"my-redis-instance\" as \"redis\": [6:1] unknown field \"project\"",
		},
		{
			desc: "missing required field",
			in: `
			sources:
				my-redis-instance:
					kind: redis
			`,
			err: "unable to parse source \"my-redis-instance\" as \"redis\": Key: 'Config.Address' Error:Field validation for 'Address' failed on the 'required' tag",
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Sources server.SourceConfigs `yaml:"sources"`
			}{}
			// Parse contents
			err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
			if err == nil {
				t.Fatalf("expect parsing to fail")
			}
			errStr := err.Error()
			if !strings.Contains(errStr, tc.err) {
				t.Fatalf("unexpected error: got %q, want %q", errStr, tc.err)
			}
		})
	}
}

```

--------------------------------------------------------------------------------
/.github/workflows/docs_preview_deploy.yaml:
--------------------------------------------------------------------------------

```yaml
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

name: "docs"

permissions:
  contents: write
  pull-requests: write
  
# This workflow depends on 'github.event.number', so it is not
# compatible with branch or manual triggers.
on:
  pull_request:
    # Sync with github_actions_preview_fallback.yml on.pull_request.paths-ignore
    paths:
      - 'docs/**'
      - '.github/workflows/docs**'
      - '.hugo/**'
  pull_request_target:
    types: [labeled]
    paths:
      - 'docs/**'
      - '.github/workflows/docs**'
      - '.hugo/**'

jobs:
  preview:
    # run job on proper workflow event triggers (skip job for pull_request event
    # from forks and only run pull_request_target for "docs: deploy-preview"
    # label)
    if: "${{ (github.event.action != 'labeled' && github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) || github.event.label.name == 'docs: deploy-preview' }}"
    runs-on: ubuntu-24.04
    defaults:
      run:
        working-directory: .hugo
    concurrency:
      # Shared concurrency group with preview cleanup.
      group: "preview-${{ github.event.number }}"
      cancel-in-progress: true
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
        with:
          # Checkout the PR's HEAD commit (supports forks).
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod

      - name: Setup Hugo
        uses: peaceiris/actions-hugo@75d2e84710de30f6ff7268e08f310b60ef14033f # v3
        with:
          hugo-version: "0.145.0"
          extended: true

      - name: Setup Node
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5
        with:
          node-version: "22"

      - name: Cache dependencies
        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4
        with:
          path: ~/.npm
          key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
          restore-keys: |
            ${{ runner.os }}-node-

      - run: npm ci
      - run: hugo --minify
        env:
          HUGO_BASEURL: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/previews/PR-${{ github.event.number }}/
          HUGO_ENVIRONMENT: preview
          HUGO_RELATIVEURLS: false

      - name: Deploy
        uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: .hugo/public
          publish_branch: versioned-gh-pages
          destination_dir: ./previews/PR-${{ github.event.number }}
          commit_message: "stage: PR-${{ github.event.number }}: ${{ github.event.head_commit.message }}"

      - name: Comment
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
        with:
          script: |
            github.rest.issues.createComment({
              issue_number: context.payload.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: "🔎 Preview at https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/previews/PR-${{ github.event.number }}/"
            })

```