This is page 18 of 36. Use http://codebase.md/googleapis/genai-toolbox?page={x} to view the full context.

# Directory Structure

```
├── .ci
│   ├── continuous.release.cloudbuild.yaml
│   ├── generate_release_table.sh
│   ├── integration.cloudbuild.yaml
│   ├── quickstart_test
│   │   ├── go.integration.cloudbuild.yaml
│   │   ├── js.integration.cloudbuild.yaml
│   │   ├── py.integration.cloudbuild.yaml
│   │   ├── run_go_tests.sh
│   │   ├── run_js_tests.sh
│   │   ├── run_py_tests.sh
│   │   └── setup_hotels_sample.sql
│   ├── test_with_coverage.sh
│   └── versioned.release.cloudbuild.yaml
├── .github
│   ├── auto-label.yaml
│   ├── blunderbuss.yml
│   ├── CODEOWNERS
│   ├── header-checker-lint.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.yml
│   │   ├── config.yml
│   │   ├── feature_request.yml
│   │   └── question.yml
│   ├── label-sync.yml
│   ├── labels.yaml
│   ├── PULL_REQUEST_TEMPLATE.md
│   ├── release-please.yml
│   ├── renovate.json5
│   ├── sync-repo-settings.yaml
│   └── workflows
│       ├── cloud_build_failure_reporter.yml
│       ├── deploy_dev_docs.yaml
│       ├── deploy_previous_version_docs.yaml
│       ├── deploy_versioned_docs.yaml
│       ├── docs_deploy.yaml
│       ├── docs_preview_clean.yaml
│       ├── docs_preview_deploy.yaml
│       ├── lint.yaml
│       ├── schedule_reporter.yml
│       ├── sync-labels.yaml
│       └── tests.yaml
├── .gitignore
├── .gitmodules
├── .golangci.yaml
├── .hugo
│   ├── archetypes
│   │   └── default.md
│   ├── assets
│   │   ├── icons
│   │   │   └── logo.svg
│   │   └── scss
│   │       ├── _styles_project.scss
│   │       └── _variables_project.scss
│   ├── go.mod
│   ├── go.sum
│   ├── hugo.toml
│   ├── layouts
│   │   ├── _default
│   │   │   └── home.releases.releases
│   │   ├── index.llms-full.txt
│   │   ├── index.llms.txt
│   │   ├── partials
│   │   │   ├── hooks
│   │   │   │   └── head-end.html
│   │   │   ├── navbar-version-selector.html
│   │   │   ├── page-meta-links.html
│   │   │   └── td
│   │   │       └── render-heading.html
│   │   ├── robot.txt
│   │   └── shortcodes
│   │       ├── include.html
│   │       ├── ipynb.html
│   │       └── regionInclude.html
│   ├── package-lock.json
│   ├── package.json
│   └── static
│       ├── favicons
│       │   ├── android-chrome-192x192.png
│       │   ├── android-chrome-512x512.png
│       │   ├── apple-touch-icon.png
│       │   ├── favicon-16x16.png
│       │   ├── favicon-32x32.png
│       │   └── favicon.ico
│       └── js
│           └── w3.js
├── CHANGELOG.md
├── cmd
│   ├── options_test.go
│   ├── options.go
│   ├── root_test.go
│   ├── root.go
│   └── version.txt
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── DEVELOPER.md
├── Dockerfile
├── docs
│   └── en
│       ├── _index.md
│       ├── about
│       │   ├── _index.md
│       │   └── faq.md
│       ├── concepts
│       │   ├── _index.md
│       │   └── telemetry
│       │       ├── index.md
│       │       ├── telemetry_flow.png
│       │       └── telemetry_traces.png
│       ├── getting-started
│       │   ├── _index.md
│       │   ├── colab_quickstart.ipynb
│       │   ├── configure.md
│       │   ├── introduction
│       │   │   ├── _index.md
│       │   │   └── architecture.png
│       │   ├── local_quickstart_go.md
│       │   ├── local_quickstart_js.md
│       │   ├── local_quickstart.md
│       │   ├── mcp_quickstart
│       │   │   ├── _index.md
│       │   │   ├── inspector_tools.png
│       │   │   └── inspector.png
│       │   └── quickstart
│       │       ├── go
│       │       │   ├── genAI
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── genkit
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── langchain
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── openAI
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   └── quickstart_test.go
│       │       ├── golden.txt
│       │       ├── js
│       │       │   ├── genAI
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── genkit
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── langchain
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── llamaindex
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   └── quickstart.test.js
│       │       ├── python
│       │       │   ├── __init__.py
│       │       │   ├── adk
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── core
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── langchain
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── llamaindex
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   └── quickstart_test.py
│       │       └── shared
│       │           ├── cloud_setup.md
│       │           ├── configure_toolbox.md
│       │           └── database_setup.md
│       ├── how-to
│       │   ├── _index.md
│       │   ├── connect_via_geminicli.md
│       │   ├── connect_via_mcp.md
│       │   ├── connect-ide
│       │   │   ├── _index.md
│       │   │   ├── alloydb_pg_admin_mcp.md
│       │   │   ├── alloydb_pg_mcp.md
│       │   │   ├── bigquery_mcp.md
│       │   │   ├── cloud_sql_mssql_admin_mcp.md
│       │   │   ├── cloud_sql_mssql_mcp.md
│       │   │   ├── cloud_sql_mysql_admin_mcp.md
│       │   │   ├── cloud_sql_mysql_mcp.md
│       │   │   ├── cloud_sql_pg_admin_mcp.md
│       │   │   ├── cloud_sql_pg_mcp.md
│       │   │   ├── firestore_mcp.md
│       │   │   ├── looker_mcp.md
│       │   │   ├── mssql_mcp.md
│       │   │   ├── mysql_mcp.md
│       │   │   ├── neo4j_mcp.md
│       │   │   ├── postgres_mcp.md
│       │   │   ├── spanner_mcp.md
│       │   │   └── sqlite_mcp.md
│       │   ├── deploy_docker.md
│       │   ├── deploy_gke.md
│       │   ├── deploy_toolbox.md
│       │   ├── export_telemetry.md
│       │   └── toolbox-ui
│       │       ├── edit-headers.gif
│       │       ├── edit-headers.png
│       │       ├── index.md
│       │       ├── optional-param-checked.png
│       │       ├── optional-param-unchecked.png
│       │       ├── run-tool.gif
│       │       ├── tools.png
│       │       └── toolsets.png
│       ├── reference
│       │   ├── _index.md
│       │   ├── cli.md
│       │   └── prebuilt-tools.md
│       ├── resources
│       │   ├── _index.md
│       │   ├── authServices
│       │   │   ├── _index.md
│       │   │   └── google.md
│       │   ├── sources
│       │   │   ├── _index.md
│       │   │   ├── alloydb-admin.md
│       │   │   ├── alloydb-pg.md
│       │   │   ├── bigquery.md
│       │   │   ├── bigtable.md
│       │   │   ├── cassandra.md
│       │   │   ├── clickhouse.md
│       │   │   ├── cloud-monitoring.md
│       │   │   ├── cloud-sql-admin.md
│       │   │   ├── cloud-sql-mssql.md
│       │   │   ├── cloud-sql-mysql.md
│       │   │   ├── cloud-sql-pg.md
│       │   │   ├── couchbase.md
│       │   │   ├── dataplex.md
│       │   │   ├── dgraph.md
│       │   │   ├── firebird.md
│       │   │   ├── firestore.md
│       │   │   ├── http.md
│       │   │   ├── looker.md
│       │   │   ├── mongodb.md
│       │   │   ├── mssql.md
│       │   │   ├── mysql.md
│       │   │   ├── neo4j.md
│       │   │   ├── oceanbase.md
│       │   │   ├── oracle.md
│       │   │   ├── postgres.md
│       │   │   ├── redis.md
│       │   │   ├── serverless-spark.md
│       │   │   ├── spanner.md
│       │   │   ├── sqlite.md
│       │   │   ├── tidb.md
│       │   │   ├── trino.md
│       │   │   ├── valkey.md
│       │   │   └── yugabytedb.md
│       │   └── tools
│       │       ├── _index.md
│       │       ├── alloydb
│       │       │   ├── _index.md
│       │       │   ├── alloydb-create-cluster.md
│       │       │   ├── alloydb-create-instance.md
│       │       │   ├── alloydb-create-user.md
│       │       │   ├── alloydb-get-cluster.md
│       │       │   ├── alloydb-get-instance.md
│       │       │   ├── alloydb-get-user.md
│       │       │   ├── alloydb-list-clusters.md
│       │       │   ├── alloydb-list-instances.md
│       │       │   ├── alloydb-list-users.md
│       │       │   └── alloydb-wait-for-operation.md
│       │       ├── alloydbainl
│       │       │   ├── _index.md
│       │       │   └── alloydb-ai-nl.md
│       │       ├── bigquery
│       │       │   ├── _index.md
│       │       │   ├── bigquery-analyze-contribution.md
│       │       │   ├── bigquery-conversational-analytics.md
│       │       │   ├── bigquery-execute-sql.md
│       │       │   ├── bigquery-forecast.md
│       │       │   ├── bigquery-get-dataset-info.md
│       │       │   ├── bigquery-get-table-info.md
│       │       │   ├── bigquery-list-dataset-ids.md
│       │       │   ├── bigquery-list-table-ids.md
│       │       │   ├── bigquery-search-catalog.md
│       │       │   └── bigquery-sql.md
│       │       ├── bigtable
│       │       │   ├── _index.md
│       │       │   └── bigtable-sql.md
│       │       ├── cassandra
│       │       │   ├── _index.md
│       │       │   └── cassandra-cql.md
│       │       ├── clickhouse
│       │       │   ├── _index.md
│       │       │   ├── clickhouse-execute-sql.md
│       │       │   ├── clickhouse-list-databases.md
│       │       │   ├── clickhouse-list-tables.md
│       │       │   └── clickhouse-sql.md
│       │       ├── cloudmonitoring
│       │       │   ├── _index.md
│       │       │   └── cloud-monitoring-query-prometheus.md
│       │       ├── cloudsql
│       │       │   ├── _index.md
│       │       │   ├── cloudsqlcreatedatabase.md
│       │       │   ├── cloudsqlcreateusers.md
│       │       │   ├── cloudsqlgetinstances.md
│       │       │   ├── cloudsqllistdatabases.md
│       │       │   ├── cloudsqllistinstances.md
│       │       │   ├── cloudsqlmssqlcreateinstance.md
│       │       │   ├── cloudsqlmysqlcreateinstance.md
│       │       │   ├── cloudsqlpgcreateinstances.md
│       │       │   └── cloudsqlwaitforoperation.md
│       │       ├── couchbase
│       │       │   ├── _index.md
│       │       │   └── couchbase-sql.md
│       │       ├── dataform
│       │       │   ├── _index.md
│       │       │   └── dataform-compile-local.md
│       │       ├── dataplex
│       │       │   ├── _index.md
│       │       │   ├── dataplex-lookup-entry.md
│       │       │   ├── dataplex-search-aspect-types.md
│       │       │   └── dataplex-search-entries.md
│       │       ├── dgraph
│       │       │   ├── _index.md
│       │       │   └── dgraph-dql.md
│       │       ├── firebird
│       │       │   ├── _index.md
│       │       │   ├── firebird-execute-sql.md
│       │       │   └── firebird-sql.md
│       │       ├── firestore
│       │       │   ├── _index.md
│       │       │   ├── firestore-add-documents.md
│       │       │   ├── firestore-delete-documents.md
│       │       │   ├── firestore-get-documents.md
│       │       │   ├── firestore-get-rules.md
│       │       │   ├── firestore-list-collections.md
│       │       │   ├── firestore-query-collection.md
│       │       │   ├── firestore-query.md
│       │       │   ├── firestore-update-document.md
│       │       │   └── firestore-validate-rules.md
│       │       ├── http
│       │       │   ├── _index.md
│       │       │   └── http.md
│       │       ├── looker
│       │       │   ├── _index.md
│       │       │   ├── looker-add-dashboard-element.md
│       │       │   ├── looker-conversational-analytics.md
│       │       │   ├── looker-create-project-file.md
│       │       │   ├── looker-delete-project-file.md
│       │       │   ├── looker-dev-mode.md
│       │       │   ├── looker-get-dashboards.md
│       │       │   ├── looker-get-dimensions.md
│       │       │   ├── looker-get-explores.md
│       │       │   ├── looker-get-filters.md
│       │       │   ├── looker-get-looks.md
│       │       │   ├── looker-get-measures.md
│       │       │   ├── looker-get-models.md
│       │       │   ├── looker-get-parameters.md
│       │       │   ├── looker-get-project-file.md
│       │       │   ├── looker-get-project-files.md
│       │       │   ├── looker-get-projects.md
│       │       │   ├── looker-health-analyze.md
│       │       │   ├── looker-health-pulse.md
│       │       │   ├── looker-health-vacuum.md
│       │       │   ├── looker-make-dashboard.md
│       │       │   ├── looker-make-look.md
│       │       │   ├── looker-query-sql.md
│       │       │   ├── looker-query-url.md
│       │       │   ├── looker-query.md
│       │       │   ├── looker-run-look.md
│       │       │   └── looker-update-project-file.md
│       │       ├── mongodb
│       │       │   ├── _index.md
│       │       │   ├── mongodb-aggregate.md
│       │       │   ├── mongodb-delete-many.md
│       │       │   ├── mongodb-delete-one.md
│       │       │   ├── mongodb-find-one.md
│       │       │   ├── mongodb-find.md
│       │       │   ├── mongodb-insert-many.md
│       │       │   ├── mongodb-insert-one.md
│       │       │   ├── mongodb-update-many.md
│       │       │   └── mongodb-update-one.md
│       │       ├── mssql
│       │       │   ├── _index.md
│       │       │   ├── mssql-execute-sql.md
│       │       │   ├── mssql-list-tables.md
│       │       │   └── mssql-sql.md
│       │       ├── mysql
│       │       │   ├── _index.md
│       │       │   ├── mysql-execute-sql.md
│       │       │   ├── mysql-list-active-queries.md
│       │       │   ├── mysql-list-table-fragmentation.md
│       │       │   ├── mysql-list-tables-missing-unique-indexes.md
│       │       │   ├── mysql-list-tables.md
│       │       │   └── mysql-sql.md
│       │       ├── neo4j
│       │       │   ├── _index.md
│       │       │   ├── neo4j-cypher.md
│       │       │   ├── neo4j-execute-cypher.md
│       │       │   └── neo4j-schema.md
│       │       ├── oceanbase
│       │       │   ├── _index.md
│       │       │   ├── oceanbase-execute-sql.md
│       │       │   └── oceanbase-sql.md
│       │       ├── oracle
│       │       │   ├── _index.md
│       │       │   ├── oracle-execute-sql.md
│       │       │   └── oracle-sql.md
│       │       ├── postgres
│       │       │   ├── _index.md
│       │       │   ├── postgres-execute-sql.md
│       │       │   ├── postgres-list-active-queries.md
│       │       │   ├── postgres-list-available-extensions.md
│       │       │   ├── postgres-list-installed-extensions.md
│       │       │   ├── postgres-list-tables.md
│       │       │   └── postgres-sql.md
│       │       ├── redis
│       │       │   ├── _index.md
│       │       │   └── redis.md
│       │       ├── serverless-spark
│       │       │   ├── _index.md
│       │       │   ├── serverless-spark-get-batch.md
│       │       │   └── serverless-spark-list-batches.md
│       │       ├── spanner
│       │       │   ├── _index.md
│       │       │   ├── spanner-execute-sql.md
│       │       │   ├── spanner-list-tables.md
│       │       │   └── spanner-sql.md
│       │       ├── sqlite
│       │       │   ├── _index.md
│       │       │   ├── sqlite-execute-sql.md
│       │       │   └── sqlite-sql.md
│       │       ├── tidb
│       │       │   ├── _index.md
│       │       │   ├── tidb-execute-sql.md
│       │       │   └── tidb-sql.md
│       │       ├── trino
│       │       │   ├── _index.md
│       │       │   ├── trino-execute-sql.md
│       │       │   └── trino-sql.md
│       │       ├── utility
│       │       │   ├── _index.md
│       │       │   └── wait.md
│       │       ├── valkey
│       │       │   ├── _index.md
│       │       │   └── valkey.md
│       │       └── yuagbytedb
│       │           ├── _index.md
│       │           └── yugabytedb-sql.md
│       ├── samples
│       │   ├── _index.md
│       │   ├── alloydb
│       │   │   ├── _index.md
│       │   │   ├── ai-nl
│       │   │   │   ├── alloydb_ai_nl.ipynb
│       │   │   │   └── index.md
│       │   │   └── mcp_quickstart.md
│       │   ├── bigquery
│       │   │   ├── _index.md
│       │   │   ├── colab_quickstart_bigquery.ipynb
│       │   │   ├── local_quickstart.md
│       │   │   └── mcp_quickstart
│       │   │       ├── _index.md
│       │   │       ├── inspector_tools.png
│       │   │       └── inspector.png
│       │   └── looker
│       │       ├── _index.md
│       │       ├── looker_gemini_oauth
│       │       │   ├── _index.md
│       │       │   ├── authenticated.png
│       │       │   ├── authorize.png
│       │       │   └── registration.png
│       │       ├── looker_gemini.md
│       │       └── looker_mcp_inspector
│       │           ├── _index.md
│       │           ├── inspector_tools.png
│       │           └── inspector.png
│       └── sdks
│           ├── _index.md
│           ├── go-sdk.md
│           ├── js-sdk.md
│           └── python-sdk.md
├── gemini-extension.json
├── go.mod
├── go.sum
├── internal
│   ├── auth
│   │   ├── auth.go
│   │   └── google
│   │       └── google.go
│   ├── log
│   │   ├── handler.go
│   │   ├── log_test.go
│   │   ├── log.go
│   │   └── logger.go
│   ├── prebuiltconfigs
│   │   ├── prebuiltconfigs_test.go
│   │   ├── prebuiltconfigs.go
│   │   └── tools
│   │       ├── alloydb-postgres-admin.yaml
│   │       ├── alloydb-postgres-observability.yaml
│   │       ├── alloydb-postgres.yaml
│   │       ├── bigquery.yaml
│   │       ├── clickhouse.yaml
│   │       ├── cloud-sql-mssql-admin.yaml
│   │       ├── cloud-sql-mssql-observability.yaml
│   │       ├── cloud-sql-mssql.yaml
│   │       ├── cloud-sql-mysql-admin.yaml
│   │       ├── cloud-sql-mysql-observability.yaml
│   │       ├── cloud-sql-mysql.yaml
│   │       ├── cloud-sql-postgres-admin.yaml
│   │       ├── cloud-sql-postgres-observability.yaml
│   │       ├── cloud-sql-postgres.yaml
│   │       ├── dataplex.yaml
│   │       ├── firestore.yaml
│   │       ├── looker-conversational-analytics.yaml
│   │       ├── looker.yaml
│   │       ├── mssql.yaml
│   │       ├── mysql.yaml
│   │       ├── neo4j.yaml
│   │       ├── oceanbase.yaml
│   │       ├── postgres.yaml
│   │       ├── serverless-spark.yaml
│   │       ├── spanner-postgres.yaml
│   │       ├── spanner.yaml
│   │       └── sqlite.yaml
│   ├── server
│   │   ├── api_test.go
│   │   ├── api.go
│   │   ├── common_test.go
│   │   ├── config.go
│   │   ├── mcp
│   │   │   ├── jsonrpc
│   │   │   │   ├── jsonrpc_test.go
│   │   │   │   └── jsonrpc.go
│   │   │   ├── mcp.go
│   │   │   ├── util
│   │   │   │   └── lifecycle.go
│   │   │   ├── v20241105
│   │   │   │   ├── method.go
│   │   │   │   └── types.go
│   │   │   ├── v20250326
│   │   │   │   ├── method.go
│   │   │   │   └── types.go
│   │   │   └── v20250618
│   │   │       ├── method.go
│   │   │       └── types.go
│   │   ├── mcp_test.go
│   │   ├── mcp.go
│   │   ├── server_test.go
│   │   ├── server.go
│   │   ├── static
│   │   │   ├── assets
│   │   │   │   └── mcptoolboxlogo.png
│   │   │   ├── css
│   │   │   │   └── style.css
│   │   │   ├── index.html
│   │   │   ├── js
│   │   │   │   ├── auth.js
│   │   │   │   ├── loadTools.js
│   │   │   │   ├── mainContent.js
│   │   │   │   ├── navbar.js
│   │   │   │   ├── runTool.js
│   │   │   │   ├── toolDisplay.js
│   │   │   │   ├── tools.js
│   │   │   │   └── toolsets.js
│   │   │   ├── tools.html
│   │   │   └── toolsets.html
│   │   ├── web_test.go
│   │   └── web.go
│   ├── sources
│   │   ├── alloydbadmin
│   │   │   ├── alloydbadmin_test.go
│   │   │   └── alloydbadmin.go
│   │   ├── alloydbpg
│   │   │   ├── alloydb_pg_test.go
│   │   │   └── alloydb_pg.go
│   │   ├── bigquery
│   │   │   ├── bigquery_test.go
│   │   │   └── bigquery.go
│   │   ├── bigtable
│   │   │   ├── bigtable_test.go
│   │   │   └── bigtable.go
│   │   ├── cassandra
│   │   │   ├── cassandra_test.go
│   │   │   └── cassandra.go
│   │   ├── clickhouse
│   │   │   ├── clickhouse_test.go
│   │   │   └── clickhouse.go
│   │   ├── cloudmonitoring
│   │   │   ├── cloud_monitoring_test.go
│   │   │   └── cloud_monitoring.go
│   │   ├── cloudsqladmin
│   │   │   ├── cloud_sql_admin_test.go
│   │   │   └── cloud_sql_admin.go
│   │   ├── cloudsqlmssql
│   │   │   ├── cloud_sql_mssql_test.go
│   │   │   └── cloud_sql_mssql.go
│   │   ├── cloudsqlmysql
│   │   │   ├── cloud_sql_mysql_test.go
│   │   │   └── cloud_sql_mysql.go
│   │   ├── cloudsqlpg
│   │   │   ├── cloud_sql_pg_test.go
│   │   │   └── cloud_sql_pg.go
│   │   ├── couchbase
│   │   │   ├── couchbase_test.go
│   │   │   └── couchbase.go
│   │   ├── dataplex
│   │   │   ├── dataplex_test.go
│   │   │   └── dataplex.go
│   │   ├── dgraph
│   │   │   ├── dgraph_test.go
│   │   │   └── dgraph.go
│   │   ├── dialect.go
│   │   ├── firebird
│   │   │   ├── firebird_test.go
│   │   │   └── firebird.go
│   │   ├── firestore
│   │   │   ├── firestore_test.go
│   │   │   └── firestore.go
│   │   ├── http
│   │   │   ├── http_test.go
│   │   │   └── http.go
│   │   ├── ip_type.go
│   │   ├── looker
│   │   │   ├── looker_test.go
│   │   │   └── looker.go
│   │   ├── mongodb
│   │   │   ├── mongodb_test.go
│   │   │   └── mongodb.go
│   │   ├── mssql
│   │   │   ├── mssql_test.go
│   │   │   └── mssql.go
│   │   ├── mysql
│   │   │   ├── mysql_test.go
│   │   │   └── mysql.go
│   │   ├── neo4j
│   │   │   ├── neo4j_test.go
│   │   │   └── neo4j.go
│   │   ├── oceanbase
│   │   │   ├── oceanbase_test.go
│   │   │   └── oceanbase.go
│   │   ├── oracle
│   │   │   └── oracle.go
│   │   ├── postgres
│   │   │   ├── postgres_test.go
│   │   │   └── postgres.go
│   │   ├── redis
│   │   │   ├── redis_test.go
│   │   │   └── redis.go
│   │   ├── serverlessspark
│   │   │   ├── serverlessspark_test.go
│   │   │   └── serverlessspark.go
│   │   ├── sources.go
│   │   ├── spanner
│   │   │   ├── spanner_test.go
│   │   │   └── spanner.go
│   │   ├── sqlite
│   │   │   ├── sqlite_test.go
│   │   │   └── sqlite.go
│   │   ├── tidb
│   │   │   ├── tidb_test.go
│   │   │   └── tidb.go
│   │   ├── trino
│   │   │   ├── trino_test.go
│   │   │   └── trino.go
│   │   ├── util.go
│   │   ├── valkey
│   │   │   ├── valkey_test.go
│   │   │   └── valkey.go
│   │   └── yugabytedb
│   │       ├── yugabytedb_test.go
│   │       └── yugabytedb.go
│   ├── telemetry
│   │   ├── instrumentation.go
│   │   └── telemetry.go
│   ├── testutils
│   │   └── testutils.go
│   ├── tools
│   │   ├── alloydb
│   │   │   ├── alloydbcreatecluster
│   │   │   │   ├── alloydbcreatecluster_test.go
│   │   │   │   └── alloydbcreatecluster.go
│   │   │   ├── alloydbcreateinstance
│   │   │   │   ├── alloydbcreateinstance_test.go
│   │   │   │   └── alloydbcreateinstance.go
│   │   │   ├── alloydbcreateuser
│   │   │   │   ├── alloydbcreateuser_test.go
│   │   │   │   └── alloydbcreateuser.go
│   │   │   ├── alloydbgetcluster
│   │   │   │   ├── alloydbgetcluster_test.go
│   │   │   │   └── alloydbgetcluster.go
│   │   │   ├── alloydbgetinstance
│   │   │   │   ├── alloydbgetinstance_test.go
│   │   │   │   └── alloydbgetinstance.go
│   │   │   ├── alloydbgetuser
│   │   │   │   ├── alloydbgetuser_test.go
│   │   │   │   └── alloydbgetuser.go
│   │   │   ├── alloydblistclusters
│   │   │   │   ├── alloydblistclusters_test.go
│   │   │   │   └── alloydblistclusters.go
│   │   │   ├── alloydblistinstances
│   │   │   │   ├── alloydblistinstances_test.go
│   │   │   │   └── alloydblistinstances.go
│   │   │   ├── alloydblistusers
│   │   │   │   ├── alloydblistusers_test.go
│   │   │   │   └── alloydblistusers.go
│   │   │   └── alloydbwaitforoperation
│   │   │       ├── alloydbwaitforoperation_test.go
│   │   │       └── alloydbwaitforoperation.go
│   │   ├── alloydbainl
│   │   │   ├── alloydbainl_test.go
│   │   │   └── alloydbainl.go
│   │   ├── bigquery
│   │   │   ├── bigqueryanalyzecontribution
│   │   │   │   ├── bigqueryanalyzecontribution_test.go
│   │   │   │   └── bigqueryanalyzecontribution.go
│   │   │   ├── bigquerycommon
│   │   │   │   ├── table_name_parser_test.go
│   │   │   │   ├── table_name_parser.go
│   │   │   │   └── util.go
│   │   │   ├── bigqueryconversationalanalytics
│   │   │   │   ├── bigqueryconversationalanalytics_test.go
│   │   │   │   └── bigqueryconversationalanalytics.go
│   │   │   ├── bigqueryexecutesql
│   │   │   │   ├── bigqueryexecutesql_test.go
│   │   │   │   └── bigqueryexecutesql.go
│   │   │   ├── bigqueryforecast
│   │   │   │   ├── bigqueryforecast_test.go
│   │   │   │   └── bigqueryforecast.go
│   │   │   ├── bigquerygetdatasetinfo
│   │   │   │   ├── bigquerygetdatasetinfo_test.go
│   │   │   │   └── bigquerygetdatasetinfo.go
│   │   │   ├── bigquerygettableinfo
│   │   │   │   ├── bigquerygettableinfo_test.go
│   │   │   │   └── bigquerygettableinfo.go
│   │   │   ├── bigquerylistdatasetids
│   │   │   │   ├── bigquerylistdatasetids_test.go
│   │   │   │   └── bigquerylistdatasetids.go
│   │   │   ├── bigquerylisttableids
│   │   │   │   ├── bigquerylisttableids_test.go
│   │   │   │   └── bigquerylisttableids.go
│   │   │   ├── bigquerysearchcatalog
│   │   │   │   ├── bigquerysearchcatalog_test.go
│   │   │   │   └── bigquerysearchcatalog.go
│   │   │   └── bigquerysql
│   │   │       ├── bigquerysql_test.go
│   │   │       └── bigquerysql.go
│   │   ├── bigtable
│   │   │   ├── bigtable_test.go
│   │   │   └── bigtable.go
│   │   ├── cassandra
│   │   │   └── cassandracql
│   │   │       ├── cassandracql_test.go
│   │   │       └── cassandracql.go
│   │   ├── clickhouse
│   │   │   ├── clickhouseexecutesql
│   │   │   │   ├── clickhouseexecutesql_test.go
│   │   │   │   └── clickhouseexecutesql.go
│   │   │   ├── clickhouselistdatabases
│   │   │   │   ├── clickhouselistdatabases_test.go
│   │   │   │   └── clickhouselistdatabases.go
│   │   │   ├── clickhouselisttables
│   │   │   │   ├── clickhouselisttables_test.go
│   │   │   │   └── clickhouselisttables.go
│   │   │   └── clickhousesql
│   │   │       ├── clickhousesql_test.go
│   │   │       └── clickhousesql.go
│   │   ├── cloudmonitoring
│   │   │   ├── cloudmonitoring_test.go
│   │   │   └── cloudmonitoring.go
│   │   ├── cloudsql
│   │   │   ├── cloudsqlcreatedatabase
│   │   │   │   ├── cloudsqlcreatedatabase_test.go
│   │   │   │   └── cloudsqlcreatedatabase.go
│   │   │   ├── cloudsqlcreateusers
│   │   │   │   ├── cloudsqlcreateusers_test.go
│   │   │   │   └── cloudsqlcreateusers.go
│   │   │   ├── cloudsqlgetinstances
│   │   │   │   ├── cloudsqlgetinstances_test.go
│   │   │   │   └── cloudsqlgetinstances.go
│   │   │   ├── cloudsqllistdatabases
│   │   │   │   ├── cloudsqllistdatabases_test.go
│   │   │   │   └── cloudsqllistdatabases.go
│   │   │   ├── cloudsqllistinstances
│   │   │   │   ├── cloudsqllistinstances_test.go
│   │   │   │   └── cloudsqllistinstances.go
│   │   │   └── cloudsqlwaitforoperation
│   │   │       ├── cloudsqlwaitforoperation_test.go
│   │   │       └── cloudsqlwaitforoperation.go
│   │   ├── cloudsqlmssql
│   │   │   └── cloudsqlmssqlcreateinstance
│   │   │       ├── cloudsqlmssqlcreateinstance_test.go
│   │   │       └── cloudsqlmssqlcreateinstance.go
│   │   ├── cloudsqlmysql
│   │   │   └── cloudsqlmysqlcreateinstance
│   │   │       ├── cloudsqlmysqlcreateinstance_test.go
│   │   │       └── cloudsqlmysqlcreateinstance.go
│   │   ├── cloudsqlpg
│   │   │   └── cloudsqlpgcreateinstances
│   │   │       ├── cloudsqlpgcreateinstances_test.go
│   │   │       └── cloudsqlpgcreateinstances.go
│   │   ├── common_test.go
│   │   ├── common.go
│   │   ├── couchbase
│   │   │   ├── couchbase_test.go
│   │   │   └── couchbase.go
│   │   ├── dataform
│   │   │   └── dataformcompilelocal
│   │   │       ├── dataformcompilelocal_test.go
│   │   │       └── dataformcompilelocal.go
│   │   ├── dataplex
│   │   │   ├── dataplexlookupentry
│   │   │   │   ├── dataplexlookupentry_test.go
│   │   │   │   └── dataplexlookupentry.go
│   │   │   ├── dataplexsearchaspecttypes
│   │   │   │   ├── dataplexsearchaspecttypes_test.go
│   │   │   │   └── dataplexsearchaspecttypes.go
│   │   │   └── dataplexsearchentries
│   │   │       ├── dataplexsearchentries_test.go
│   │   │       └── dataplexsearchentries.go
│   │   ├── dgraph
│   │   │   ├── dgraph_test.go
│   │   │   └── dgraph.go
│   │   ├── firebird
│   │   │   ├── firebirdexecutesql
│   │   │   │   ├── firebirdexecutesql_test.go
│   │   │   │   └── firebirdexecutesql.go
│   │   │   └── firebirdsql
│   │   │       ├── firebirdsql_test.go
│   │   │       └── firebirdsql.go
│   │   ├── firestore
│   │   │   ├── firestoreadddocuments
│   │   │   │   ├── firestoreadddocuments_test.go
│   │   │   │   └── firestoreadddocuments.go
│   │   │   ├── firestoredeletedocuments
│   │   │   │   ├── firestoredeletedocuments_test.go
│   │   │   │   └── firestoredeletedocuments.go
│   │   │   ├── firestoregetdocuments
│   │   │   │   ├── firestoregetdocuments_test.go
│   │   │   │   └── firestoregetdocuments.go
│   │   │   ├── firestoregetrules
│   │   │   │   ├── firestoregetrules_test.go
│   │   │   │   └── firestoregetrules.go
│   │   │   ├── firestorelistcollections
│   │   │   │   ├── firestorelistcollections_test.go
│   │   │   │   └── firestorelistcollections.go
│   │   │   ├── firestorequery
│   │   │   │   ├── firestorequery_test.go
│   │   │   │   └── firestorequery.go
│   │   │   ├── firestorequerycollection
│   │   │   │   ├── firestorequerycollection_test.go
│   │   │   │   └── firestorequerycollection.go
│   │   │   ├── firestoreupdatedocument
│   │   │   │   ├── firestoreupdatedocument_test.go
│   │   │   │   └── firestoreupdatedocument.go
│   │   │   ├── firestorevalidaterules
│   │   │   │   ├── firestorevalidaterules_test.go
│   │   │   │   └── firestorevalidaterules.go
│   │   │   └── util
│   │   │       ├── converter_test.go
│   │   │       ├── converter.go
│   │   │       ├── validator_test.go
│   │   │       └── validator.go
│   │   ├── http
│   │   │   ├── http_test.go
│   │   │   └── http.go
│   │   ├── http_method.go
│   │   ├── looker
│   │   │   ├── lookeradddashboardelement
│   │   │   │   ├── lookeradddashboardelement_test.go
│   │   │   │   └── lookeradddashboardelement.go
│   │   │   ├── lookercommon
│   │   │   │   ├── lookercommon_test.go
│   │   │   │   └── lookercommon.go
│   │   │   ├── lookerconversationalanalytics
│   │   │   │   ├── lookerconversationalanalytics_test.go
│   │   │   │   └── lookerconversationalanalytics.go
│   │   │   ├── lookercreateprojectfile
│   │   │   │   ├── lookercreateprojectfile_test.go
│   │   │   │   └── lookercreateprojectfile.go
│   │   │   ├── lookerdeleteprojectfile
│   │   │   │   ├── lookerdeleteprojectfile_test.go
│   │   │   │   └── lookerdeleteprojectfile.go
│   │   │   ├── lookerdevmode
│   │   │   │   ├── lookerdevmode_test.go
│   │   │   │   └── lookerdevmode.go
│   │   │   ├── lookergetdashboards
│   │   │   │   ├── lookergetdashboards_test.go
│   │   │   │   └── lookergetdashboards.go
│   │   │   ├── lookergetdimensions
│   │   │   │   ├── lookergetdimensions_test.go
│   │   │   │   └── lookergetdimensions.go
│   │   │   ├── lookergetexplores
│   │   │   │   ├── lookergetexplores_test.go
│   │   │   │   └── lookergetexplores.go
│   │   │   ├── lookergetfilters
│   │   │   │   ├── lookergetfilters_test.go
│   │   │   │   └── lookergetfilters.go
│   │   │   ├── lookergetlooks
│   │   │   │   ├── lookergetlooks_test.go
│   │   │   │   └── lookergetlooks.go
│   │   │   ├── lookergetmeasures
│   │   │   │   ├── lookergetmeasures_test.go
│   │   │   │   └── lookergetmeasures.go
│   │   │   ├── lookergetmodels
│   │   │   │   ├── lookergetmodels_test.go
│   │   │   │   └── lookergetmodels.go
│   │   │   ├── lookergetparameters
│   │   │   │   ├── lookergetparameters_test.go
│   │   │   │   └── lookergetparameters.go
│   │   │   ├── lookergetprojectfile
│   │   │   │   ├── lookergetprojectfile_test.go
│   │   │   │   └── lookergetprojectfile.go
│   │   │   ├── lookergetprojectfiles
│   │   │   │   ├── lookergetprojectfiles_test.go
│   │   │   │   └── lookergetprojectfiles.go
│   │   │   ├── lookergetprojects
│   │   │   │   ├── lookergetprojects_test.go
│   │   │   │   └── lookergetprojects.go
│   │   │   ├── lookerhealthanalyze
│   │   │   │   ├── lookerhealthanalyze_test.go
│   │   │   │   └── lookerhealthanalyze.go
│   │   │   ├── lookerhealthpulse
│   │   │   │   ├── lookerhealthpulse_test.go
│   │   │   │   └── lookerhealthpulse.go
│   │   │   ├── lookerhealthvacuum
│   │   │   │   ├── lookerhealthvacuum_test.go
│   │   │   │   └── lookerhealthvacuum.go
│   │   │   ├── lookermakedashboard
│   │   │   │   ├── lookermakedashboard_test.go
│   │   │   │   └── lookermakedashboard.go
│   │   │   ├── lookermakelook
│   │   │   │   ├── lookermakelook_test.go
│   │   │   │   └── lookermakelook.go
│   │   │   ├── lookerquery
│   │   │   │   ├── lookerquery_test.go
│   │   │   │   └── lookerquery.go
│   │   │   ├── lookerquerysql
│   │   │   │   ├── lookerquerysql_test.go
│   │   │   │   └── lookerquerysql.go
│   │   │   ├── lookerqueryurl
│   │   │   │   ├── lookerqueryurl_test.go
│   │   │   │   └── lookerqueryurl.go
│   │   │   ├── lookerrunlook
│   │   │   │   ├── lookerrunlook_test.go
│   │   │   │   └── lookerrunlook.go
│   │   │   └── lookerupdateprojectfile
│   │   │       ├── lookerupdateprojectfile_test.go
│   │   │       └── lookerupdateprojectfile.go
│   │   ├── mongodb
│   │   │   ├── mongodbaggregate
│   │   │   │   ├── mongodbaggregate_test.go
│   │   │   │   └── mongodbaggregate.go
│   │   │   ├── mongodbdeletemany
│   │   │   │   ├── mongodbdeletemany_test.go
│   │   │   │   └── mongodbdeletemany.go
│   │   │   ├── mongodbdeleteone
│   │   │   │   ├── mongodbdeleteone_test.go
│   │   │   │   └── mongodbdeleteone.go
│   │   │   ├── mongodbfind
│   │   │   │   ├── mongodbfind_test.go
│   │   │   │   └── mongodbfind.go
│   │   │   ├── mongodbfindone
│   │   │   │   ├── mongodbfindone_test.go
│   │   │   │   └── mongodbfindone.go
│   │   │   ├── mongodbinsertmany
│   │   │   │   ├── mongodbinsertmany_test.go
│   │   │   │   └── mongodbinsertmany.go
│   │   │   ├── mongodbinsertone
│   │   │   │   ├── mongodbinsertone_test.go
│   │   │   │   └── mongodbinsertone.go
│   │   │   ├── mongodbupdatemany
│   │   │   │   ├── mongodbupdatemany_test.go
│   │   │   │   └── mongodbupdatemany.go
│   │   │   └── mongodbupdateone
│   │   │       ├── mongodbupdateone_test.go
│   │   │       └── mongodbupdateone.go
│   │   ├── mssql
│   │   │   ├── mssqlexecutesql
│   │   │   │   ├── mssqlexecutesql_test.go
│   │   │   │   └── mssqlexecutesql.go
│   │   │   ├── mssqllisttables
│   │   │   │   ├── mssqllisttables_test.go
│   │   │   │   └── mssqllisttables.go
│   │   │   └── mssqlsql
│   │   │       ├── mssqlsql_test.go
│   │   │       └── mssqlsql.go
│   │   ├── mysql
│   │   │   ├── mysqlcommon
│   │   │   │   └── mysqlcommon.go
│   │   │   ├── mysqlexecutesql
│   │   │   │   ├── mysqlexecutesql_test.go
│   │   │   │   └── mysqlexecutesql.go
│   │   │   ├── mysqllistactivequeries
│   │   │   │   ├── mysqllistactivequeries_test.go
│   │   │   │   └── mysqllistactivequeries.go
│   │   │   ├── mysqllisttablefragmentation
│   │   │   │   ├── mysqllisttablefragmentation_test.go
│   │   │   │   └── mysqllisttablefragmentation.go
│   │   │   ├── mysqllisttables
│   │   │   │   ├── mysqllisttables_test.go
│   │   │   │   └── mysqllisttables.go
│   │   │   ├── mysqllisttablesmissinguniqueindexes
│   │   │   │   ├── mysqllisttablesmissinguniqueindexes_test.go
│   │   │   │   └── mysqllisttablesmissinguniqueindexes.go
│   │   │   └── mysqlsql
│   │   │       ├── mysqlsql_test.go
│   │   │       └── mysqlsql.go
│   │   ├── neo4j
│   │   │   ├── neo4jcypher
│   │   │   │   ├── neo4jcypher_test.go
│   │   │   │   └── neo4jcypher.go
│   │   │   ├── neo4jexecutecypher
│   │   │   │   ├── classifier
│   │   │   │   │   ├── classifier_test.go
│   │   │   │   │   └── classifier.go
│   │   │   │   ├── neo4jexecutecypher_test.go
│   │   │   │   └── neo4jexecutecypher.go
│   │   │   └── neo4jschema
│   │   │       ├── cache
│   │   │       │   ├── cache_test.go
│   │   │       │   └── cache.go
│   │   │       ├── helpers
│   │   │       │   ├── helpers_test.go
│   │   │       │   └── helpers.go
│   │   │       ├── neo4jschema_test.go
│   │   │       ├── neo4jschema.go
│   │   │       └── types
│   │   │           └── types.go
│   │   ├── oceanbase
│   │   │   ├── oceanbaseexecutesql
│   │   │   │   ├── oceanbaseexecutesql_test.go
│   │   │   │   └── oceanbaseexecutesql.go
│   │   │   └── oceanbasesql
│   │   │       ├── oceanbasesql_test.go
│   │   │       └── oceanbasesql.go
│   │   ├── oracle
│   │   │   ├── oracleexecutesql
│   │   │   │   └── oracleexecutesql.go
│   │   │   └── oraclesql
│   │   │       └── oraclesql.go
│   │   ├── parameters_test.go
│   │   ├── parameters.go
│   │   ├── postgres
│   │   │   ├── postgresexecutesql
│   │   │   │   ├── postgresexecutesql_test.go
│   │   │   │   └── postgresexecutesql.go
│   │   │   ├── postgreslistactivequeries
│   │   │   │   ├── postgreslistactivequeries_test.go
│   │   │   │   └── postgreslistactivequeries.go
│   │   │   ├── postgreslistavailableextensions
│   │   │   │   ├── postgreslistavailableextensions_test.go
│   │   │   │   └── postgreslistavailableextensions.go
│   │   │   ├── postgreslistinstalledextensions
│   │   │   │   ├── postgreslistinstalledextensions_test.go
│   │   │   │   └── postgreslistinstalledextensions.go
│   │   │   ├── postgreslisttables
│   │   │   │   ├── postgreslisttables_test.go
│   │   │   │   └── postgreslisttables.go
│   │   │   └── postgressql
│   │   │       ├── postgressql_test.go
│   │   │       └── postgressql.go
│   │   ├── redis
│   │   │   ├── redis_test.go
│   │   │   └── redis.go
│   │   ├── serverlessspark
│   │   │   ├── serverlesssparkgetbatch
│   │   │   │   ├── serverlesssparkgetbatch_test.go
│   │   │   │   └── serverlesssparkgetbatch.go
│   │   │   └── serverlesssparklistbatches
│   │   │       ├── serverlesssparklistbatches_test.go
│   │   │       └── serverlesssparklistbatches.go
│   │   ├── spanner
│   │   │   ├── spannerexecutesql
│   │   │   │   ├── spannerexecutesql_test.go
│   │   │   │   └── spannerexecutesql.go
│   │   │   ├── spannerlisttables
│   │   │   │   ├── spannerlisttables_test.go
│   │   │   │   └── spannerlisttables.go
│   │   │   └── spannersql
│   │   │       ├── spanner_test.go
│   │   │       └── spannersql.go
│   │   ├── sqlite
│   │   │   ├── sqliteexecutesql
│   │   │   │   ├── sqliteexecutesql_test.go
│   │   │   │   └── sqliteexecutesql.go
│   │   │   └── sqlitesql
│   │   │       ├── sqlitesql_test.go
│   │   │       └── sqlitesql.go
│   │   ├── tidb
│   │   │   ├── tidbexecutesql
│   │   │   │   ├── tidbexecutesql_test.go
│   │   │   │   └── tidbexecutesql.go
│   │   │   └── tidbsql
│   │   │       ├── tidbsql_test.go
│   │   │       └── tidbsql.go
│   │   ├── tools_test.go
│   │   ├── tools.go
│   │   ├── toolsets.go
│   │   ├── trino
│   │   │   ├── trinoexecutesql
│   │   │   │   ├── trinoexecutesql_test.go
│   │   │   │   └── trinoexecutesql.go
│   │   │   └── trinosql
│   │   │       ├── trinosql_test.go
│   │   │       └── trinosql.go
│   │   ├── utility
│   │   │   └── wait
│   │   │       ├── wait_test.go
│   │   │       └── wait.go
│   │   ├── valkey
│   │   │   ├── valkey_test.go
│   │   │   └── valkey.go
│   │   └── yugabytedbsql
│   │       ├── yugabytedbsql_test.go
│   │       └── yugabytedbsql.go
│   └── util
│       └── util.go
├── LICENSE
├── logo.png
├── main.go
├── MCP-TOOLBOX-EXTENSION.md
├── README.md
└── tests
    ├── alloydb
    │   ├── alloydb_integration_test.go
    │   └── alloydb_wait_for_operation_test.go
    ├── alloydbainl
    │   └── alloydb_ai_nl_integration_test.go
    ├── alloydbpg
    │   └── alloydb_pg_integration_test.go
    ├── auth.go
    ├── bigquery
    │   └── bigquery_integration_test.go
    ├── bigtable
    │   └── bigtable_integration_test.go
    ├── cassandra
    │   └── cassandra_integration_test.go
    ├── clickhouse
    │   └── clickhouse_integration_test.go
    ├── cloudmonitoring
    │   └── cloud_monitoring_integration_test.go
    ├── cloudsql
    │   ├── cloud_sql_create_database_test.go
    │   ├── cloud_sql_create_users_test.go
    │   ├── cloud_sql_get_instances_test.go
    │   ├── cloud_sql_list_databases_test.go
    │   ├── cloudsql_list_instances_test.go
    │   └── cloudsql_wait_for_operation_test.go
    ├── cloudsqlmssql
    │   ├── cloud_sql_mssql_create_instance_integration_test.go
    │   └── cloud_sql_mssql_integration_test.go
    ├── cloudsqlmysql
    │   ├── cloud_sql_mysql_create_instance_integration_test.go
    │   └── cloud_sql_mysql_integration_test.go
    ├── cloudsqlpg
    │   ├── cloud_sql_pg_create_instances_test.go
    │   └── cloud_sql_pg_integration_test.go
    ├── common.go
    ├── couchbase
    │   └── couchbase_integration_test.go
    ├── dataform
    │   └── dataform_integration_test.go
    ├── dataplex
    │   └── dataplex_integration_test.go
    ├── dgraph
    │   └── dgraph_integration_test.go
    ├── firebird
    │   └── firebird_integration_test.go
    ├── firestore
    │   └── firestore_integration_test.go
    ├── http
    │   └── http_integration_test.go
    ├── looker
    │   └── looker_integration_test.go
    ├── mongodb
    │   └── mongodb_integration_test.go
    ├── mssql
    │   └── mssql_integration_test.go
    ├── mysql
    │   └── mysql_integration_test.go
    ├── neo4j
    │   └── neo4j_integration_test.go
    ├── oceanbase
    │   └── oceanbase_integration_test.go
    ├── option.go
    ├── oracle
    │   └── oracle_integration_test.go
    ├── postgres
    │   └── postgres_integration_test.go
    ├── redis
    │   └── redis_test.go
    ├── server.go
    ├── serverlessspark
    │   └── serverless_spark_integration_test.go
    ├── source.go
    ├── spanner
    │   └── spanner_integration_test.go
    ├── sqlite
    │   └── sqlite_integration_test.go
    ├── tidb
    │   └── tidb_integration_test.go
    ├── tool.go
    ├── trino
    │   └── trino_integration_test.go
    ├── utility
    │   └── wait_integration_test.go
    ├── valkey
    │   └── valkey_test.go
    └── yugabytedb
        └── yugabytedb_integration_test.go
```

# Files

--------------------------------------------------------------------------------
/internal/tools/sqlite/sqliteexecutesql/sqliteexecutesql_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package sqliteexecutesql_test

import (
	"context"
	"database/sql"
	"reflect"
	"testing"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/server"
	"github.com/googleapis/genai-toolbox/internal/testutils"
	"github.com/googleapis/genai-toolbox/internal/tools"
	"github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqliteexecutesql"
	_ "modernc.org/sqlite"
)

func TestParseFromYamlExecuteSql(t *testing.T) {
	ctx, err := testutils.ContextWithNewLogger()
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	tcs := []struct {
		desc string
		in   string
		want server.ToolConfigs
	}{
		{
			desc: "basic example",
			in: `
			tools:
				example_tool:
					kind: sqlite-execute-sql
					source: my-instance
					description: some description
					authRequired:
						- my-google-auth-service
						- other-auth-service
			`,
			want: server.ToolConfigs{
				"example_tool": sqliteexecutesql.Config{
					Name:         "example_tool",
					Kind:         "sqlite-execute-sql",
					Source:       "my-instance",
					Description:  "some description",
					AuthRequired: []string{"my-google-auth-service", "other-auth-service"},
				},
			},
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Tools server.ToolConfigs `yaml:"tools"`
			}{}
			// Parse contents
			err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
			if err != nil {
				t.Fatalf("unable to unmarshal: %s", err)
			}
			if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
				t.Fatalf("incorrect parse: diff %v", diff)
			}
		})
	}

}

func setupTestDB(t *testing.T) *sql.DB {
	db, err := sql.Open("sqlite", ":memory:")
	if err != nil {
		t.Fatalf("Failed to open in-memory database: %v", err)
	}
	return db
}

func TestTool_Invoke(t *testing.T) {
	ctx, err := testutils.ContextWithNewLogger()
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}

	type fields struct {
		Name         string
		Kind         string
		AuthRequired []string
		Parameters   tools.Parameters
		DB           *sql.DB
	}
	type args struct {
		ctx         context.Context
		params      tools.ParamValues
		accessToken tools.AccessToken
	}
	tests := []struct {
		name    string
		fields  fields
		args    args
		want    any
		wantErr bool
	}{
		{
			name: "create table",
			fields: fields{
				DB: setupTestDB(t),
			},
			args: args{
				ctx: ctx,
				params: []tools.ParamValue{
					{Name: "sql", Value: "CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, age INTEGER)"},
				},
			},
			want:    nil,
			wantErr: false,
		},
		{
			name: "insert data",
			fields: fields{
				DB: setupTestDB(t),
			},
			args: args{
				ctx: ctx,
				params: []tools.ParamValue{
					{Name: "sql", Value: "CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, age INTEGER); INSERT INTO users (id, name, age) VALUES (1, 'Alice', 30), (2, 'Bob', 25)"},
				},
			},
			want:    nil,
			wantErr: false,
		},
		{
			name: "select data",
			fields: fields{
				DB: func() *sql.DB {
					db := setupTestDB(t)
					if _, err := db.Exec("CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, age INTEGER); INSERT INTO users (id, name, age) VALUES (1, 'Alice', 30), (2, 'Bob', 25)"); err != nil {
						t.Fatalf("Failed to set up database for select: %v", err)
					}
					return db
				}(),
			},
			args: args{
				ctx: ctx,
				params: []tools.ParamValue{
					{Name: "sql", Value: "SELECT * FROM users"},
				},
			},
			want: []any{
				map[string]any{"id": int64(1), "name": "Alice", "age": int64(30)},
				map[string]any{"id": int64(2), "name": "Bob", "age": int64(25)},
			},
			wantErr: false,
		},
		{
			name: "drop table",
			fields: fields{
				DB: func() *sql.DB {
					db := setupTestDB(t)
					if _, err := db.Exec("CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, age INTEGER)"); err != nil {
						t.Fatalf("Failed to set up database for drop: %v", err)
					}
					return db
				}(),
			},
			args: args{
				ctx: ctx,
				params: []tools.ParamValue{
					{Name: "sql", Value: "DROP TABLE users"},
				},
			},
			want:    nil,
			wantErr: false,
		},
		{
			name: "invalid sql",
			fields: fields{
				DB: setupTestDB(t),
			},
			args: args{
				ctx: ctx,
				params: []tools.ParamValue{
					{Name: "sql", Value: "SELECT * FROM non_existent_table"},
				},
			},
			want:    nil,
			wantErr: true,
		},
		{
			name: "empty sql",
			fields: fields{
				DB: setupTestDB(t),
			},
			args: args{
				ctx: ctx,
				params: []tools.ParamValue{
					{Name: "sql", Value: ""},
				},
			},
			want:    nil,
			wantErr: true,
		},
		{
			name: "data types",
			fields: fields{
				DB: func() *sql.DB {
					db := setupTestDB(t)
					if _, err := db.Exec("CREATE TABLE data_types (id INTEGER PRIMARY KEY, null_col TEXT, blob_col BLOB)"); err != nil {
						t.Fatalf("Failed to set up database for data types: %v", err)
					}
					if _, err := db.Exec("INSERT INTO data_types (id, null_col, blob_col) VALUES (1, NULL, ?)", []byte{1, 2, 3}); err != nil {
						t.Fatalf("Failed to insert data for data types: %v", err)
					}
					return db
				}(),
			},
			args: args{
				ctx: ctx,
				params: []tools.ParamValue{
					{Name: "sql", Value: "SELECT * FROM data_types"},
				},
			},
			want: []any{
				map[string]any{"id": int64(1), "null_col": nil, "blob_col": []byte{1, 2, 3}},
			},
			wantErr: false,
		},
		{
			name: "join operation",
			fields: fields{
				DB: func() *sql.DB {
					db := setupTestDB(t)
					if _, err := db.Exec("CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, age INTEGER)"); err != nil {
						t.Fatalf("Failed to set up database for join: %v", err)
					}
					if _, err := db.Exec("INSERT INTO users (id, name, age) VALUES (1, 'Alice', 30), (2, 'Bob', 25)"); err != nil {
						t.Fatalf("Failed to insert data for join: %v", err)
					}
					if _, err := db.Exec("CREATE TABLE orders (id INTEGER PRIMARY KEY, user_id INTEGER, item TEXT)"); err != nil {
						t.Fatalf("Failed to set up database for join: %v", err)
					}
					if _, err := db.Exec("INSERT INTO orders (id, user_id, item) VALUES (1, 1, 'Laptop'), (2, 2, 'Keyboard')"); err != nil {
						t.Fatalf("Failed to insert data for join: %v", err)
					}
					return db
				}(),
			},
			args: args{
				ctx: ctx,
				params: []tools.ParamValue{
					{Name: "sql", Value: "SELECT u.name, o.item FROM users u JOIN orders o ON u.id = o.user_id"},
				},
			},
			want: []any{
				map[string]any{"name": "Alice", "item": "Laptop"},
				map[string]any{"name": "Bob", "item": "Keyboard"},
			},
			wantErr: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tr := &sqliteexecutesql.Tool{
				Name:         tt.fields.Name,
				Kind:         tt.fields.Kind,
				AuthRequired: tt.fields.AuthRequired,
				Parameters:   tt.fields.Parameters,
				DB:           tt.fields.DB,
			}
			got, err := tr.Invoke(tt.args.ctx, tt.args.params, tt.args.accessToken)
			if (err != nil) != tt.wantErr {
				t.Errorf("Tool.Invoke() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			isEqual := false
			if got != nil && len(got.([]any)) == 0 && len(tt.want.([]any)) == 0 {
				isEqual = true // Special case for empty slices, since DeepEqual returns false
			} else {
				isEqual = reflect.DeepEqual(got, tt.want)
			}

			if !isEqual {
				t.Errorf("Tool.Invoke() = %v, want %v", got, tt.want)
			}
		})
	}
}

```

--------------------------------------------------------------------------------
/docs/en/how-to/deploy_toolbox.md:
--------------------------------------------------------------------------------

```markdown
---
title: "Deploy to Cloud Run"
type: docs
weight: 3
description: >
  How to set up and configure Toolbox to run on Cloud Run.
---


## Before you begin

1. [Install](https://cloud.google.com/sdk/docs/install) the Google Cloud CLI.

1. Set the PROJECT_ID environment variable:

    ```bash
    export PROJECT_ID="my-project-id"
    ```

1. Initialize gcloud CLI:

    ```bash
    gcloud init
    gcloud config set project $PROJECT_ID
    ```

1. Make sure you've set up and initialized your database.

1. You must have the following APIs enabled:

    ```bash
    gcloud services enable run.googleapis.com \
                           cloudbuild.googleapis.com \
                           artifactregistry.googleapis.com \
                           iam.googleapis.com \
                           secretmanager.googleapis.com

    ```

1. To create a service account, you must have the following IAM role:
    - Service Account Creator role (roles/iam.serviceAccountCreator)

1. To create a secret, you must have the following roles:
    - Secret Manager Admin role (roles/secretmanager.admin)

1. To deploy to Cloud Run, you must have the following set of roles:
    - Cloud Run Developer (roles/run.developer)
    - Service Account User role (roles/iam.serviceAccountUser)

{{< notice note >}}
If you are using sources that require VPC access (such as
AlloyDB or Cloud SQL over private IP), make sure your Cloud Run service and the
database are in the same VPC network.
{{< /notice >}}

## Create a service account

1. Create a backend service account if you don't already have one:

    ```bash
    gcloud iam service-accounts create toolbox-identity
    ```

1. Grant permissions to use secret manager:

    ```bash
    gcloud projects add-iam-policy-binding $PROJECT_ID \
        --member serviceAccount:toolbox-identity@$PROJECT_ID.iam.gserviceaccount.com \
        --role roles/secretmanager.secretAccessor
    ```

1. Grant additional permissions to the service account that are specific to the
   source, e.g.:
    - [AlloyDB for PostgreSQL](../resources/sources/alloydb-pg.md#iam-permissions)
    - [Cloud SQL for PostgreSQL](../resources/sources/cloud-sql-pg.md#iam-permissions)
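
    For example, a minimal sketch (assuming Cloud SQL for PostgreSQL as the
    source) of granting the Cloud SQL Client role:

    ```bash
    gcloud projects add-iam-policy-binding $PROJECT_ID \
        --member serviceAccount:toolbox-identity@$PROJECT_ID.iam.gserviceaccount.com \
        --role roles/cloudsql.client
    ```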

## Configure `tools.yaml` file

Create a `tools.yaml` file that contains your configuration for Toolbox. For
details, see the
[configuration](../resources/sources/)
section.
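
The following is a minimal sketch of a `tools.yaml`, assuming a PostgreSQL
database with a `hotels` table. The source, tool, and credential values below
are placeholders; see the linked configuration section for the full schema.

```yaml
sources:
  my-pg-source:
    kind: cloud-sql-postgres
    project: my-project-id
    region: us-central1
    instance: my-instance
    database: my_db
    user: my-user
    password: my-password

tools:
  search-hotels-by-name:
    kind: postgres-sql
    source: my-pg-source
    description: Search for hotels based on name.
    parameters:
      - name: name
        type: string
        description: The name of the hotel.
    statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';

toolsets:
  my-toolset:
    - search-hotels-by-name
```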

## Deploy to Cloud Run

1. Upload `tools.yaml` as a secret:

    ```bash
    gcloud secrets create tools --data-file=tools.yaml
    ```

    If you already have a secret and want to update the secret version, execute
    the following:

    ```bash
    gcloud secrets versions add tools --data-file=tools.yaml
    ```

1. Set an environment variable to the container image that you want to use for
   Cloud Run:

    ```bash
    export IMAGE=us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:latest
    ```

{{< notice note >}}  
**The `$PORT` Environment Variable**  
Google Cloud Run dictates the port your application must listen on by setting
the `$PORT` environment variable inside your container. This value defaults to
**8080**. Your application's `--port` argument **must** be set to listen on this
port. If there is a mismatch, the container will fail to start and the
deployment will time out.  
{{< /notice >}}

1. Deploy Toolbox to Cloud Run using the following command:

    ```bash
    gcloud run deploy toolbox \
        --image $IMAGE \
        --service-account toolbox-identity \
        --region us-central1 \
        --set-secrets "/app/tools.yaml=tools:latest" \
        --args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080"
        # --allow-unauthenticated # https://cloud.google.com/run/docs/authenticating/public#gcloud
    ```

    If you are using a VPC network, use the command below:

    ```bash
    # TODO(dev): update --network and --subnet below to match your VPC if necessary
    gcloud run deploy toolbox \
        --image $IMAGE \
        --service-account toolbox-identity \
        --region us-central1 \
        --set-secrets "/app/tools.yaml=tools:latest" \
        --args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080" \
        --network default \
        --subnet default
        # --allow-unauthenticated # https://cloud.google.com/run/docs/authenticating/public#gcloud
    ```

## Connecting with Toolbox Client SDK

You can connect to Toolbox Cloud Run instances directly through the SDK.

1. [Set up `Cloud Run Invoker` role
   access](https://cloud.google.com/run/docs/securing/managing-access#service-add-principals)
   to your Cloud Run service.

1. (Only for local runs) Set up [Application Default
   Credentials](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment)
   for the principal to which you granted the `Cloud Run Invoker` role.

1. Run the following to retrieve the automatically generated URL of your Cloud Run service:

    ```bash
    gcloud run services describe toolbox --format 'value(status.url)'
    ```

1. Import and initialize the toolbox client with the URL retrieved above:

    {{< tabpane persist=header >}}
{{< tab header="Python" lang="python" >}}
from toolbox_core import ToolboxClient, auth_methods

# Replace with the Cloud Run service URL generated in the previous step.
URL = "https://cloud-run-url.app"

auth_token_provider = auth_methods.aget_google_id_token(URL) # can also use sync method

async with ToolboxClient(
    URL,
    client_headers={"Authorization": auth_token_provider},
) as toolbox:
    ...  # use the client here; see the usage sketch below
{{< /tab >}}
{{< tab header="Javascript" lang="javascript" >}}
import { ToolboxClient } from '@toolbox-sdk/core';
import { getGoogleIdToken } from '@toolbox-sdk/core/auth';

// Replace with the Cloud Run service URL generated in the previous step.
const URL = 'https://cloud-run-url.app';
const authTokenProvider = () => getGoogleIdToken(URL);

const client = new ToolboxClient(URL, null, {"Authorization": authTokenProvider});
{{< /tab >}}
{{< tab header="Go" lang="go" >}}
import (
    "context"
    "log"

    "github.com/googleapis/mcp-toolbox-sdk-go/core"
)

func main() {
    ctx := context.Background()

    // Replace with the Cloud Run service URL generated in the previous step.
    URL := "https://cloud-run-url.app"
    authTokenProvider, err := core.GetGoogleIDToken(ctx, URL)
    if err != nil {
        log.Fatalf("Failed to fetch ID token: %v", err)
    }
    toolboxClient, err := core.NewToolboxClient(
        URL,
        core.WithClientHeaderString("Authorization", authTokenProvider))
    if err != nil {
        log.Fatalf("Failed to create Toolbox client: %v", err)
    }
    // Use toolboxClient to load and invoke tools.
    _ = toolboxClient
}
{{< /tab >}}
{{< /tabpane >}}


Now, you can use this client to connect to the deployed Cloud Run instance!
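
If you prefer not to go through the SDK, you can also smoke-test the deployment by calling the Toolbox HTTP API on the service URL directly. The following is a minimal sketch, not part of the SDK: it assumes a hypothetical tool named `my-tool` defined in your `tools.yaml`, the Cloud Run URL retrieved above, and an ID token supplied through an `ID_TOKEN` environment variable (for example, the output of `gcloud auth print-identity-token` for a principal with the Cloud Run Invoker role).

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"log"
	"net/http"
	"os"
)

func main() {
	// Hypothetical tool name; replace with a tool from your tools.yaml and
	// the Cloud Run service URL retrieved in the previous step.
	url := "https://cloud-run-url.app/api/tool/my-tool/invoke"

	// Tool parameters go in the request body as a JSON object.
	req, err := http.NewRequest(http.MethodPost, url, bytes.NewBufferString("{}"))
	if err != nil {
		log.Fatalf("unable to create request: %v", err)
	}
	req.Header.Set("Content-Type", "application/json")
	// Cloud Run expects an ID token from a principal with roles/run.invoker.
	req.Header.Set("Authorization", "Bearer "+os.Getenv("ID_TOKEN"))

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatalf("unable to send request: %v", err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Printf("status %d: %s\n", resp.StatusCode, body)
}
```

For application code, prefer the SDK clients shown above; this raw HTTP call is mainly useful for verifying that authentication and the deployed `tools.yaml` are wired up correctly.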

## Troubleshooting

{{< notice note >}}  
For any deployment or runtime error, the best first step is to check the logs
for your service in the Google Cloud Console's Cloud Run section. They often
contain the specific error message needed to diagnose the problem.
{{< /notice >}}

* **Deployment Fails with "Container failed to start":** This is almost always
    caused by a port mismatch. Ensure your container's `--port` argument is set to
    `8080` to match the `$PORT` environment variable provided by Cloud Run.

* **Client Receives Permission Denied Error (401 or 403):** If your client
  application (e.g., your local SDK) gets a `401 Unauthorized` or `403
  Forbidden` error when trying to call your Cloud Run service, it means the
  client is not properly authenticated as an invoker.
    * Ensure the user or service account calling the service has the **Cloud Run
      Invoker** (`roles/run.invoker`) IAM role.
    * If running locally, make sure your Application Default Credentials are set
      up correctly by running `gcloud auth application-default login`.

* **Service Fails to Access Secrets (in logs):** If your application starts but
  the logs show errors like "permission denied" when trying to access Secret
  Manager, it means the Toolbox service account is missing permissions.
    * Ensure the `toolbox-identity` service account has the **Secret Manager
      Secret Accessor** (`roles/secretmanager.secretAccessor`) IAM role.

```

--------------------------------------------------------------------------------
/internal/tools/common_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package tools_test

import (
	"strings"
	"testing"
	"text/template"

	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/tools"
)

func TestPopulateTemplate(t *testing.T) {
	tcs := []struct {
		name           string
		templateName   string
		templateString string
		data           map[string]any
		want           string
		wantErr        bool
	}{
		{
			name:           "simple string substitution",
			templateName:   "test",
			templateString: "Hello {{.name}}!",
			data:           map[string]any{"name": "World"},
			want:           "Hello World!",
			wantErr:        false,
		},
		{
			name:           "multiple substitutions",
			templateName:   "test",
			templateString: "{{.greeting}} {{.name}}, you are {{.age}} years old",
			data:           map[string]any{"greeting": "Hello", "name": "Alice", "age": 30},
			want:           "Hello Alice, you are 30 years old",
			wantErr:        false,
		},
		{
			name:           "empty template",
			templateName:   "test",
			templateString: "",
			data:           map[string]any{},
			want:           "",
			wantErr:        false,
		},
		{
			name:           "no substitutions",
			templateName:   "test",
			templateString: "Plain text without templates",
			data:           map[string]any{},
			want:           "Plain text without templates",
			wantErr:        false,
		},
		{
			name:           "invalid template syntax",
			templateName:   "test",
			templateString: "{{.name",
			data:           map[string]any{"name": "World"},
			want:           "",
			wantErr:        true,
		},
		{
			name:           "missing field",
			templateName:   "test",
			templateString: "{{.missing}}",
			data:           map[string]any{"name": "World"},
			want:           "<no value>",
			wantErr:        false,
		},
		{
			name:           "invalid function call",
			templateName:   "test",
			templateString: "{{.name.invalid}}",
			data:           map[string]any{"name": "World"},
			want:           "",
			wantErr:        true,
		},
	}

	for _, tc := range tcs {
		t.Run(tc.name, func(t *testing.T) {
			got, err := tools.PopulateTemplate(tc.templateName, tc.templateString, tc.data)
			if tc.wantErr {
				if err == nil {
					t.Fatalf("expected error, got nil")
				}
				return
			}
			if err != nil {
				t.Fatalf("unexpected error: %s", err)
			}
			if diff := cmp.Diff(tc.want, got); diff != "" {
				t.Fatalf("incorrect result (-want +got):\n%s", diff)
			}
		})
	}
}

func TestPopulateTemplateWithFunc(t *testing.T) {
	// Custom function for testing
	customFuncs := template.FuncMap{
		"upper": strings.ToUpper,
		"add": func(a, b int) int {
			return a + b
		},
	}

	tcs := []struct {
		name           string
		templateName   string
		templateString string
		data           map[string]any
		funcMap        template.FuncMap
		want           string
		wantErr        bool
	}{
		{
			name:           "with custom upper function",
			templateName:   "test",
			templateString: "{{upper .text}}",
			data:           map[string]any{"text": "hello"},
			funcMap:        customFuncs,
			want:           "HELLO",
			wantErr:        false,
		},
		{
			name:           "with custom add function",
			templateName:   "test",
			templateString: "Result: {{add .x .y}}",
			data:           map[string]any{"x": 5, "y": 3},
			funcMap:        customFuncs,
			want:           "Result: 8",
			wantErr:        false,
		},
		{
			name:           "nil funcMap",
			templateName:   "test",
			templateString: "Hello {{.name}}",
			data:           map[string]any{"name": "World"},
			funcMap:        nil,
			want:           "Hello World",
			wantErr:        false,
		},
		{
			name:           "combine custom function with regular substitution",
			templateName:   "test",
			templateString: "{{upper .greeting}} {{.name}}!",
			data:           map[string]any{"greeting": "hello", "name": "Alice"},
			funcMap:        customFuncs,
			want:           "HELLO Alice!",
			wantErr:        false,
		},
		{
			name:           "undefined function",
			templateName:   "test",
			templateString: "{{undefined .text}}",
			data:           map[string]any{"text": "hello"},
			funcMap:        nil,
			want:           "",
			wantErr:        true,
		},
		{
			name:           "wrong number of arguments",
			templateName:   "test",
			templateString: "{{upper}}",
			data:           map[string]any{},
			funcMap:        template.FuncMap{"upper": strings.ToUpper},
			want:           "",
			wantErr:        true,
		},
	}

	for _, tc := range tcs {
		t.Run(tc.name, func(t *testing.T) {
			got, err := tools.PopulateTemplateWithFunc(tc.templateName, tc.templateString, tc.data, tc.funcMap)
			if tc.wantErr {
				if err == nil {
					t.Fatalf("expected error, got nil")
				}
				return
			}
			if err != nil {
				t.Fatalf("unexpected error: %s", err)
			}
			if diff := cmp.Diff(tc.want, got); diff != "" {
				t.Fatalf("incorrect result (-want +got):\n%s", diff)
			}
		})
	}
}

func TestPopulateTemplateWithJSON(t *testing.T) {
	tcs := []struct {
		name           string
		templateName   string
		templateString string
		data           map[string]any
		want           string
		wantErr        bool
	}{
		{
			name:           "json string",
			templateName:   "test",
			templateString: "Data: {{json .value}}",
			data:           map[string]any{"value": "hello"},
			want:           `Data: "hello"`,
			wantErr:        false,
		},
		{
			name:           "json number",
			templateName:   "test",
			templateString: "Number: {{json .num}}",
			data:           map[string]any{"num": 42},
			want:           "Number: 42",
			wantErr:        false,
		},
		{
			name:           "json boolean",
			templateName:   "test",
			templateString: "Bool: {{json .flag}}",
			data:           map[string]any{"flag": true},
			want:           "Bool: true",
			wantErr:        false,
		},
		{
			name:           "json array",
			templateName:   "test",
			templateString: "Array: {{json .items}}",
			data:           map[string]any{"items": []any{"a", "b", "c"}},
			want:           `Array: ["a","b","c"]`,
			wantErr:        false,
		},
		{
			name:           "json object",
			templateName:   "test",
			templateString: "Object: {{json .obj}}",
			data:           map[string]any{"obj": map[string]any{"name": "Alice", "age": 30}},
			want:           `Object: {"age":30,"name":"Alice"}`,
			wantErr:        false,
		},
		{
			name:           "json null",
			templateName:   "test",
			templateString: "Null: {{json .nullValue}}",
			data:           map[string]any{"nullValue": nil},
			want:           "Null: null",
			wantErr:        false,
		},
		{
			name:           "combine json with regular substitution",
			templateName:   "test",
			templateString: "User {{.name}} has data: {{json .data}}",
			data:           map[string]any{"name": "Bob", "data": map[string]any{"id": 123}},
			want:           `User Bob has data: {"id":123}`,
			wantErr:        false,
		},
		{
			name:           "missing field for json",
			templateName:   "test",
			templateString: "{{json .missing}}",
			data:           map[string]any{},
			want:           "null",
			wantErr:        false,
		},
	}

	for _, tc := range tcs {
		t.Run(tc.name, func(t *testing.T) {
			got, err := tools.PopulateTemplateWithJSON(tc.templateName, tc.templateString, tc.data)
			if tc.wantErr {
				if err == nil {
					t.Fatalf("expected error, got nil")
				}
				return
			}
			if err != nil {
				t.Fatalf("unexpected error: %s", err)
			}
			if diff := cmp.Diff(tc.want, got); diff != "" {
				t.Fatalf("incorrect result (-want +got):\n%s", diff)
			}
		})
	}
}

```

--------------------------------------------------------------------------------
/tests/cloudsqlmysql/cloud_sql_mysql_create_instance_integration_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cloudsqlmysql_test

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
	"net/url"
	"reflect"
	"regexp"
	"strings"
	"testing"
	"time"

	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/testutils"
	"github.com/googleapis/genai-toolbox/tests"
	"google.golang.org/api/sqladmin/v1"
)

var (
	createInstanceToolKind = "cloud-sql-mysql-create-instance"
)

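// createInstanceTransport redirects requests bound for the SQL Admin API
// (sqladmin.googleapis.com) to the local httptest server used by this test.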
type createInstanceTransport struct {
	transport http.RoundTripper
	url       *url.URL
}

func (t *createInstanceTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	if strings.HasPrefix(req.URL.String(), "https://sqladmin.googleapis.com") {
		req.URL.Scheme = t.url.Scheme
		req.URL.Host = t.url.Host
	}
	return t.transport.RoundTrip(req)
}

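// masterHandler fakes the SQL Admin "create instance" endpoint: it validates
// the decoded request body against the expected DatabaseInstance for each
// known instance name and returns a canned long-running operation response.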
type masterHandler struct {
	t *testing.T
}

func (h *masterHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	if !strings.Contains(r.UserAgent(), "genai-toolbox/") {
		h.t.Errorf("User-Agent header not found")
	}

	var body sqladmin.DatabaseInstance
	if err := json.NewDecoder(r.Body).Decode(&body); err != nil {
		h.t.Fatalf("failed to decode request body: %v", err)
	}

	instanceName := body.Name
	if instanceName == "" {
		http.Error(w, "missing instance name", http.StatusBadRequest)
		return
	}

	var expectedBody sqladmin.DatabaseInstance
	var response any
	var statusCode int

	switch instanceName {
	case "instance1":
		expectedBody = sqladmin.DatabaseInstance{
			Project:         "p1",
			Name:            "instance1",
			DatabaseVersion: "MYSQL_8_0",
			RootPassword:    "password123",
			Settings: &sqladmin.Settings{
				AvailabilityType: "REGIONAL",
				Edition:          "ENTERPRISE_PLUS",
				Tier:             "db-perf-optimized-N-8",
				DataDiskSizeGb:   250,
				DataDiskType:     "PD_SSD",
			},
		}
		response = map[string]any{"name": "op1", "status": "PENDING"}
		statusCode = http.StatusOK
	case "instance2":
		expectedBody = sqladmin.DatabaseInstance{
			Project:         "p2",
			Name:            "instance2",
			DatabaseVersion: "MYSQL_8_4",
			RootPassword:    "password456",
			Settings: &sqladmin.Settings{
				AvailabilityType: "ZONAL",
				Edition:          "ENTERPRISE_PLUS",
				Tier:             "db-perf-optimized-N-2",
				DataDiskSizeGb:   100,
				DataDiskType:     "PD_SSD",
			},
		}
		response = map[string]any{"name": "op2", "status": "RUNNING"}
		statusCode = http.StatusOK
	default:
		http.Error(w, fmt.Sprintf("unhandled instance name: %s", instanceName), http.StatusInternalServerError)
		return
	}

	if expectedBody.Project != body.Project {
		h.t.Errorf("unexpected project: got %q, want %q", body.Project, expectedBody.Project)
	}
	if expectedBody.Name != body.Name {
		h.t.Errorf("unexpected name: got %q, want %q", body.Name, expectedBody.Name)
	}
	if expectedBody.DatabaseVersion != body.DatabaseVersion {
		h.t.Errorf("unexpected databaseVersion: got %q, want %q", body.DatabaseVersion, expectedBody.DatabaseVersion)
	}
	if expectedBody.RootPassword != body.RootPassword {
		h.t.Errorf("unexpected rootPassword: got %q, want %q", body.RootPassword, expectedBody.RootPassword)
	}
	if diff := cmp.Diff(expectedBody.Settings, body.Settings); diff != "" {
		h.t.Errorf("unexpected request body settings (-want +got):\n%s", diff)
	}

	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(statusCode)
	if err := json.NewEncoder(w).Encode(response); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}

func TestCreateInstanceToolEndpoints(t *testing.T) {
	ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
	defer cancel()

	handler := &masterHandler{t: t}
	server := httptest.NewServer(handler)
	defer server.Close()

	serverURL, err := url.Parse(server.URL)
	if err != nil {
		t.Fatalf("failed to parse server URL: %v", err)
	}

	originalTransport := http.DefaultClient.Transport
	if originalTransport == nil {
		originalTransport = http.DefaultTransport
	}
	http.DefaultClient.Transport = &createInstanceTransport{
		transport: originalTransport,
		url:       serverURL,
	}
	t.Cleanup(func() {
		http.DefaultClient.Transport = originalTransport
	})

	var args []string
	toolsFile := getCreateInstanceToolsConfig()
	cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
	if err != nil {
		t.Fatalf("command initialization returned an error: %s", err)
	}
	defer cleanup()

	waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
	if err != nil {
		t.Logf("toolbox command logs: \n%s", out)
		t.Fatalf("toolbox didn't start successfully: %s", err)
	}

	tcs := []struct {
		name        string
		toolName    string
		body        string
		want        string
		expectError bool
		errorStatus int
	}{
		{
			name:     "successful creation - production",
			toolName: "create-instance-prod",
			body:     `{"project": "p1", "name": "instance1", "databaseVersion": "MYSQL_8_0", "rootPassword": "password123", "editionPreset": "Production"}`,
			want:     `{"name":"op1","status":"PENDING"}`,
		},
		{
			name:     "successful creation - development",
			toolName: "create-instance-dev",
			body:     `{"project": "p2", "name": "instance2", "rootPassword": "password456", "editionPreset": "Development"}`,
			want:     `{"name":"op2","status":"RUNNING"}`,
		},
		{
			name:        "missing required parameter",
			toolName:    "create-instance-prod",
			body:        `{"name": "instance1"}`,
			expectError: true,
			errorStatus: http.StatusBadRequest,
		},
	}

	for _, tc := range tcs {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			api := fmt.Sprintf("http://127.0.0.1:5000/api/tool/%s/invoke", tc.toolName)
			req, err := http.NewRequest(http.MethodPost, api, bytes.NewBufferString(tc.body))
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()

			if tc.expectError {
				if resp.StatusCode != tc.errorStatus {
					bodyBytes, _ := io.ReadAll(resp.Body)
					t.Fatalf("expected status %d but got %d: %s", tc.errorStatus, resp.StatusCode, string(bodyBytes))
				}
				return
			}

			if resp.StatusCode != http.StatusOK {
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
			}

			var result struct {
				Result string `json:"result"`
			}
			if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
				t.Fatalf("failed to decode response: %v", err)
			}

			var got, want map[string]any
			if err := json.Unmarshal([]byte(result.Result), &got); err != nil {
				t.Fatalf("failed to unmarshal result: %v", err)
			}
			if err := json.Unmarshal([]byte(tc.want), &want); err != nil {
				t.Fatalf("failed to unmarshal want: %v", err)
			}

			if !reflect.DeepEqual(got, want) {
				t.Fatalf("unexpected result: got %+v, want %+v", got, want)
			}
		})
	}
}

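// getCreateInstanceToolsConfig returns an in-memory tools configuration with a
// single cloud-sql-admin source and the two create-instance tools used above.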
func getCreateInstanceToolsConfig() map[string]any {
	return map[string]any{
		"sources": map[string]any{
			"my-cloud-sql-source": map[string]any{
				"kind": "cloud-sql-admin",
			},
		},
		"tools": map[string]any{
			"create-instance-prod": map[string]any{
				"kind":   createInstanceToolKind,
				"source": "my-cloud-sql-source",
			},
			"create-instance-dev": map[string]any{
				"kind":   createInstanceToolKind,
				"source": "my-cloud-sql-source",
			},
		},
	}
}

```

--------------------------------------------------------------------------------
/tests/cloudsqlmssql/cloud_sql_mssql_create_instance_integration_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cloudsqlmssql_test

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
	"net/url"
	"reflect"
	"regexp"
	"strings"
	"testing"
	"time"

	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/testutils"
	"github.com/googleapis/genai-toolbox/tests"
	"google.golang.org/api/sqladmin/v1"
)

var (
	createInstanceToolKind = "cloud-sql-mssql-create-instance"
)

type createInstanceTransport struct {
	transport http.RoundTripper
	url       *url.URL
}

func (t *createInstanceTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	if strings.HasPrefix(req.URL.String(), "https://sqladmin.googleapis.com") {
		req.URL.Scheme = t.url.Scheme
		req.URL.Host = t.url.Host
	}
	return t.transport.RoundTrip(req)
}

type masterHandler struct {
	t *testing.T
}

func (h *masterHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	if !strings.Contains(r.UserAgent(), "genai-toolbox/") {
		h.t.Errorf("User-Agent header not found")
	}
	var body sqladmin.DatabaseInstance
	if err := json.NewDecoder(r.Body).Decode(&body); err != nil {
		h.t.Fatalf("failed to decode request body: %v", err)
	}

	instanceName := body.Name
	if instanceName == "" {
		http.Error(w, "missing instance name", http.StatusBadRequest)
		return
	}

	var expectedBody sqladmin.DatabaseInstance
	var response any
	var statusCode int

	switch instanceName {
	case "instance1":
		expectedBody = sqladmin.DatabaseInstance{
			Project:         "p1",
			Name:            "instance1",
			DatabaseVersion: "SQLSERVER_2022_ENTERPRISE",
			RootPassword:    "password123",
			Settings: &sqladmin.Settings{
				AvailabilityType: "REGIONAL",
				Edition:          "ENTERPRISE",
				Tier:             "db-custom-4-26624",
				DataDiskSizeGb:   250,
				DataDiskType:     "PD_SSD",
			},
		}
		response = map[string]any{"name": "op1", "status": "PENDING"}
		statusCode = http.StatusOK
	case "instance2":
		expectedBody = sqladmin.DatabaseInstance{
			Project:         "p2",
			Name:            "instance2",
			DatabaseVersion: "SQLSERVER_2022_STANDARD",
			RootPassword:    "password456",
			Settings: &sqladmin.Settings{
				AvailabilityType: "ZONAL",
				Edition:          "ENTERPRISE",
				Tier:             "db-custom-2-8192",
				DataDiskSizeGb:   100,
				DataDiskType:     "PD_SSD",
			},
		}
		response = map[string]any{"name": "op2", "status": "RUNNING"}
		statusCode = http.StatusOK
	default:
		http.Error(w, fmt.Sprintf("unhandled instance name: %s", instanceName), http.StatusInternalServerError)
		return
	}

	if expectedBody.Project != body.Project {
		h.t.Errorf("unexpected project: got %q, want %q", body.Project, expectedBody.Project)
	}
	if expectedBody.Name != body.Name {
		h.t.Errorf("unexpected name: got %q, want %q", body.Name, expectedBody.Name)
	}
	if expectedBody.DatabaseVersion != body.DatabaseVersion {
		h.t.Errorf("unexpected databaseVersion: got %q, want %q", body.DatabaseVersion, expectedBody.DatabaseVersion)
	}
	if expectedBody.RootPassword != body.RootPassword {
		h.t.Errorf("unexpected rootPassword: got %q, want %q", body.RootPassword, expectedBody.RootPassword)
	}
	if diff := cmp.Diff(expectedBody.Settings, body.Settings); diff != "" {
		h.t.Errorf("unexpected request body settings (-want +got):\n%s", diff)
	}

	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(statusCode)
	if err := json.NewEncoder(w).Encode(response); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}

func TestCreateInstanceToolEndpoints(t *testing.T) {
	ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
	defer cancel()

	handler := &masterHandler{t: t}
	server := httptest.NewServer(handler)
	defer server.Close()

	serverURL, err := url.Parse(server.URL)
	if err != nil {
		t.Fatalf("failed to parse server URL: %v", err)
	}

	originalTransport := http.DefaultClient.Transport
	if originalTransport == nil {
		originalTransport = http.DefaultTransport
	}
	http.DefaultClient.Transport = &createInstanceTransport{
		transport: originalTransport,
		url:       serverURL,
	}
	t.Cleanup(func() {
		http.DefaultClient.Transport = originalTransport
	})

	var args []string
	toolsFile := getCreateInstanceToolsConfig()
	cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
	if err != nil {
		t.Fatalf("command initialization returned an error: %s", err)
	}
	defer cleanup()

	waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
	if err != nil {
		t.Logf("toolbox command logs: \n%s", out)
		t.Fatalf("toolbox didn't start successfully: %s", err)
	}

	tcs := []struct {
		name        string
		toolName    string
		body        string
		want        string
		expectError bool
		errorStatus int
	}{
		{
			name:     "successful creation - production",
			toolName: "create-instance-prod",
			body:     `{"project": "p1", "name": "instance1", "databaseVersion": "SQLSERVER_2022_ENTERPRISE", "rootPassword": "password123", "editionPreset": "Production"}`,
			want:     `{"name":"op1","status":"PENDING"}`,
		},
		{
			name:     "successful creation - development",
			toolName: "create-instance-dev",
			body:     `{"project": "p2", "name": "instance2", "rootPassword": "password456", "editionPreset": "Development"}`,
			want:     `{"name":"op2","status":"RUNNING"}`,
		},
		{
			name:        "missing required parameter",
			toolName:    "create-instance-prod",
			body:        `{"name": "instance1"}`,
			expectError: true,
			errorStatus: http.StatusBadRequest,
		},
	}

	for _, tc := range tcs {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			api := fmt.Sprintf("http://127.0.0.1:5000/api/tool/%s/invoke", tc.toolName)
			req, err := http.NewRequest(http.MethodPost, api, bytes.NewBufferString(tc.body))
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()

			if tc.expectError {
				if resp.StatusCode != tc.errorStatus {
					bodyBytes, _ := io.ReadAll(resp.Body)
					t.Fatalf("expected status %d but got %d: %s", tc.errorStatus, resp.StatusCode, string(bodyBytes))
				}
				return
			}

			if resp.StatusCode != http.StatusOK {
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
			}

			var result struct {
				Result string `json:"result"`
			}
			if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
				t.Fatalf("failed to decode response: %v", err)
			}

			var got, want map[string]any
			if err := json.Unmarshal([]byte(result.Result), &got); err != nil {
				t.Fatalf("failed to unmarshal result: %v", err)
			}
			if err := json.Unmarshal([]byte(tc.want), &want); err != nil {
				t.Fatalf("failed to unmarshal want: %v", err)
			}

			if !reflect.DeepEqual(got, want) {
				t.Fatalf("unexpected result: got %+v, want %+v", got, want)
			}
		})
	}
}

func getCreateInstanceToolsConfig() map[string]any {
	return map[string]any{
		"sources": map[string]any{
			"my-cloud-sql-source": map[string]any{
				"kind": "cloud-sql-admin",
			},
		},
		"tools": map[string]any{
			"create-instance-prod": map[string]any{
				"kind":   createInstanceToolKind,
				"source": "my-cloud-sql-source",
			},
			"create-instance-dev": map[string]any{
				"kind":   createInstanceToolKind,
				"source": "my-cloud-sql-source",
			},
		},
	}
}

```

--------------------------------------------------------------------------------
/tests/cloudsqlpg/cloud_sql_pg_create_instances_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cloudsqlpg

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
	"net/url"
	"reflect"
	"regexp"
	"strings"
	"testing"
	"time"

	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/testutils"
	"github.com/googleapis/genai-toolbox/tests"
	"google.golang.org/api/sqladmin/v1"
)

var (
	createInstanceToolKind = "cloud-sql-postgres-create-instance"
)

type createInstanceTransport struct {
	transport http.RoundTripper
	url       *url.URL
}

func (t *createInstanceTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	if strings.HasPrefix(req.URL.String(), "https://sqladmin.googleapis.com") {
		req.URL.Scheme = t.url.Scheme
		req.URL.Host = t.url.Host
	}
	return t.transport.RoundTrip(req)
}

type masterHandler struct {
	t *testing.T
}

func (h *masterHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	ua := r.Header.Get("User-Agent")
	if !strings.Contains(ua, "genai-toolbox/") {
		h.t.Errorf("User-Agent header not found in %q", ua)
	}

	var body sqladmin.DatabaseInstance
	if err := json.NewDecoder(r.Body).Decode(&body); err != nil {
		h.t.Fatalf("failed to decode request body: %v", err)
	}

	instanceName := body.Name
	if instanceName == "" {
		http.Error(w, "missing instance name", http.StatusBadRequest)
		return
	}

	var expectedBody sqladmin.DatabaseInstance
	var response any
	var statusCode int

	switch instanceName {
	case "instance1":
		expectedBody = sqladmin.DatabaseInstance{
			Project:         "p1",
			Name:            "instance1",
			DatabaseVersion: "POSTGRES_15",
			RootPassword:    "password123",
			Settings: &sqladmin.Settings{
				AvailabilityType: "REGIONAL",
				Edition:          "ENTERPRISE_PLUS",
				Tier:             "db-perf-optimized-N-8",
				DataDiskSizeGb:   250,
				DataDiskType:     "PD_SSD",
			},
		}
		response = map[string]any{"name": "op1", "status": "PENDING"}
		statusCode = http.StatusOK
	case "instance2":
		expectedBody = sqladmin.DatabaseInstance{
			Project:         "p2",
			Name:            "instance2",
			DatabaseVersion: "POSTGRES_17",
			RootPassword:    "password456",
			Settings: &sqladmin.Settings{
				AvailabilityType: "ZONAL",
				Edition:          "ENTERPRISE_PLUS",
				Tier:             "db-perf-optimized-N-2",
				DataDiskSizeGb:   100,
				DataDiskType:     "PD_SSD",
			},
		}
		response = map[string]any{"name": "op2", "status": "RUNNING"}
		statusCode = http.StatusOK
	default:
		http.Error(w, fmt.Sprintf("unhandled instance name: %s", instanceName), http.StatusInternalServerError)
		return
	}

	if expectedBody.Project != body.Project {
		h.t.Errorf("unexpected project: got %q, want %q", body.Project, expectedBody.Project)
	}
	if expectedBody.Name != body.Name {
		h.t.Errorf("unexpected name: got %q, want %q", body.Name, expectedBody.Name)
	}
	if expectedBody.DatabaseVersion != body.DatabaseVersion {
		h.t.Errorf("unexpected databaseVersion: got %q, want %q", body.DatabaseVersion, expectedBody.DatabaseVersion)
	}
	if expectedBody.RootPassword != body.RootPassword {
		h.t.Errorf("unexpected rootPassword: got %q, want %q", body.RootPassword, expectedBody.RootPassword)
	}
	if diff := cmp.Diff(expectedBody.Settings, body.Settings); diff != "" {
		h.t.Errorf("unexpected request body settings (-want +got):\n%s", diff)
	}

	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(statusCode)
	if err := json.NewEncoder(w).Encode(response); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}

func TestCreateInstanceToolEndpoints(t *testing.T) {
	ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
	defer cancel()

	handler := &masterHandler{t: t}
	server := httptest.NewServer(handler)
	defer server.Close()

	serverURL, err := url.Parse(server.URL)
	if err != nil {
		t.Fatalf("failed to parse server URL: %v", err)
	}

	originalTransport := http.DefaultClient.Transport
	if originalTransport == nil {
		originalTransport = http.DefaultTransport
	}
	http.DefaultClient.Transport = &createInstanceTransport{
		transport: originalTransport,
		url:       serverURL,
	}
	t.Cleanup(func() {
		http.DefaultClient.Transport = originalTransport
	})

	var args []string
	toolsFile := getCreateInstanceToolsConfig()
	cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
	if err != nil {
		t.Fatalf("command initialization returned an error: %s", err)
	}
	defer cleanup()

	waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
	if err != nil {
		t.Logf("toolbox command logs: \n%s", out)
		t.Fatalf("toolbox didn't start successfully: %s", err)
	}

	tcs := []struct {
		name        string
		toolName    string
		body        string
		want        string
		expectError bool
		errorStatus int
	}{
		{
			name:     "successful creation - production",
			toolName: "create-instance-prod",
			body:     `{"project": "p1", "name": "instance1", "databaseVersion": "POSTGRES_15", "rootPassword": "password123", "editionPreset": "Production"}`,
			want:     `{"name":"op1","status":"PENDING"}`,
		},
		{
			name:     "successful creation - development",
			toolName: "create-instance-dev",
			body:     `{"project": "p2", "name": "instance2", "rootPassword": "password456", "editionPreset": "Development"}`,
			want:     `{"name":"op2","status":"RUNNING"}`,
		},
		{
			name:        "missing required parameter",
			toolName:    "create-instance-prod",
			body:        `{"name": "instance1"}`,
			expectError: true,
			errorStatus: http.StatusBadRequest,
		},
	}

	for _, tc := range tcs {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			api := fmt.Sprintf("http://127.0.0.1:5000/api/tool/%s/invoke", tc.toolName)
			req, err := http.NewRequest(http.MethodPost, api, bytes.NewBufferString(tc.body))
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()

			if tc.expectError {
				if resp.StatusCode != tc.errorStatus {
					bodyBytes, _ := io.ReadAll(resp.Body)
					t.Fatalf("expected status %d but got %d: %s", tc.errorStatus, resp.StatusCode, string(bodyBytes))
				}
				return
			}

			if resp.StatusCode != http.StatusOK {
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
			}

			var result struct {
				Result string `json:"result"`
			}
			if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
				t.Fatalf("failed to decode response: %v", err)
			}

			var got, want map[string]any
			if err := json.Unmarshal([]byte(result.Result), &got); err != nil {
				t.Fatalf("failed to unmarshal result: %v", err)
			}
			if err := json.Unmarshal([]byte(tc.want), &want); err != nil {
				t.Fatalf("failed to unmarshal want: %v", err)
			}

			if !reflect.DeepEqual(got, want) {
				t.Fatalf("unexpected result: got %+v, want %+v", got, want)
			}
		})
	}
}

func getCreateInstanceToolsConfig() map[string]any {
	return map[string]any{
		"sources": map[string]any{
			"my-cloud-sql-source": map[string]any{
				"kind": "cloud-sql-admin",
			},
		},
		"tools": map[string]any{
			"create-instance-prod": map[string]any{
				"kind":   createInstanceToolKind,
				"source": "my-cloud-sql-source",
			},
			"create-instance-dev": map[string]any{
				"kind":   createInstanceToolKind,
				"source": "my-cloud-sql-source",
			},
		},
	}
}

```

--------------------------------------------------------------------------------
/docs/en/how-to/connect-ide/sqlite_mcp.md:
--------------------------------------------------------------------------------

```markdown
---
title: SQLite using MCP
type: docs
weight: 2
description: "Connect your IDE to SQLite using Toolbox."
---

[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is
an open protocol for connecting Large Language Models (LLMs) to data sources
like SQLite. This guide covers how to use [MCP Toolbox for Databases][toolbox]
to connect the following developer assistant tools to a SQLite database:

* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
* [Visual Studio Code][vscode] (Copilot)
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Gemini CLI][geminicli]
* [Gemini Code Assist][geminicodeassist]

[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[geminicli]: #configure-your-mcp-client
[geminicodeassist]: #configure-your-mcp-client

## Set up the database

1.  [Create or select a SQLite database file.](https://www.sqlite.org/download.html)

## Install MCP Toolbox

1. Download the latest version of Toolbox as a binary. Select the [correct
   binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
   to your OS and CPU architecture. You must use Toolbox version v0.10.0 or
   later:

   <!-- {x-release-please-start-version} -->
   {{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/linux/amd64/toolbox
{{< /tab >}}

{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/darwin/arm64/toolbox
{{< /tab >}}

{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/darwin/amd64/toolbox
{{< /tab >}}

{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
    <!-- {x-release-please-end} -->

1. Make the binary executable:

    ```bash
    chmod +x toolbox
    ```

1. Verify the installation:

    ```bash
    ./toolbox --version
    ```

## Configure your MCP Client

{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}

1.  Install [Claude
    Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1.  Create a `.mcp.json` file in your project root if it doesn't exist.
1.  Add the following configuration, replace the environment variables with your
    values, and save:

    ```json
    {
      "mcpServers": {
        "sqlite": {
          "command": "./PATH/TO/toolbox",
          "args": ["--prebuilt", "sqlite", "--stdio"],
          "env": {
            "SQLITE_DATABASE": "./sample.db"
          }
        }
      }
    }
    ```

1.  Restart Claude code to apply the new configuration.
{{% /tab %}}
{{% tab header="Claude desktop" lang="en" %}}

1.  Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1.  Under the Developer tab, tap Edit Config to open the configuration file.
1.  Add the following configuration, replace the environment variables with your
    values, and save:

    ```json
    {
      "mcpServers": {
        "sqlite": {
          "command": "./PATH/TO/toolbox",
          "args": ["--prebuilt", "sqlite", "--stdio"],
          "env": {
            "SQLITE_DATABASE": "./sample.db"
          }
        }
      }
    }
    ```

1.  Restart Claude desktop.
1.  From the new chat screen, you should see a hammer (MCP) icon appear with the
    new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}

1.  Open the [Cline](https://github.com/cline/cline) extension in VS Code and
    tap the **MCP Servers** icon.
1.  Tap Configure MCP Servers to open the configuration file.
1.  Add the following configuration, replace the environment variables with your
    values, and save:

    ```json
    {
      "mcpServers": {
        "sqlite": {
          "command": "./PATH/TO/toolbox",
          "args": ["--prebuilt", "sqlite", "--stdio"],
          "env": {
            "SQLITE_DATABASE": "./sample.db"
          }
        }
      }
    }
    ```

1.  You should see a green active status after the server is successfully
    connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}

1.  Create a `.cursor` directory in your project root if it doesn't exist.
1.  Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1.  Add the following configuration, replace the environment variables with your
    values, and save:

    ```json
    {
      "mcpServers": {
        "sqlite": {
          "command": "./PATH/TO/toolbox",
          "args": ["--prebuilt", "sqlite", "--stdio"],
          "env": {
            "SQLITE_DATABASE": "./sample.db"
          }
        }
      }
    }
    ```

1.  Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
    Settings > MCP**. You should see a green active status after the server is
    successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}

1.  Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
    create a `.vscode` directory in your project root if it doesn't exist.
1.  Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1.  Add the following configuration, replace the environment variables with your
    values, and save:

    ```json
    {
      "servers": {
        "sqlite": {
          "command": "./PATH/TO/toolbox",
          "args": ["--prebuilt","sqlite","--stdio"],
          "env": {
            "SQLITE_DATABASE": "./sample.db"
          }
        }
      }
    }
    ```
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}

1.  Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
    Cascade assistant.
1.  Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1.  Add the following configuration, replace the environment variables with your
    values, and save:

    ```json
    {
      "mcpServers": {
        "sqlite": {
          "command": "./PATH/TO/toolbox",
          "args": ["--prebuilt","sqlite","--stdio"],
          "env": {
            "SQLITE_DATABASE": "./sample.db"
          }
        }
      }
    }
    ```
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}

1.  Install the [Gemini
    CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1.  In your working directory, create a folder named `.gemini`. Within it,
    create a `settings.json` file.
1.  Add the following configuration, replace the environment variables with your
    values, and then save:

    ```json
    {
      "mcpServers": {
        "sqlite": {
          "command": "./PATH/TO/toolbox",
          "args": ["--prebuilt","sqlite","--stdio"],
          "env": {
            "SQLITE_DATABASE": "./sample.db"
          }
        }
      }
    }
    ```
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}

1.  Install the [Gemini Code
    Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist)
    extension in Visual Studio Code.
1.  Enable Agent Mode in Gemini Code Assist chat.
1.  In your working directory, create a folder named `.gemini`. Within it,
    create a `settings.json` file.
1.  Add the following configuration, replace the environment variables with your
    values, and then save:

    ```json
    {
      "mcpServers": {
        "sqlite": {
          "command": "./PATH/TO/toolbox",
          "args": ["--prebuilt","sqlite","--stdio"],
          "env": {
            "SQLITE_DATABASE": "./sample.db"
          }
        }
      }
    }
    ```
{{% /tab %}}
{{< /tabpane >}}

## Use Tools

Your AI tool is now connected to SQLite using MCP. Try asking your AI assistant
to list tables, create a table, or define and execute other SQL statements.

The following tools are available to the LLM:

1.  **list_tables**: lists tables and their descriptions
1.  **execute_sql**: executes any SQL statement

{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}

```

--------------------------------------------------------------------------------
/tests/oracle/oracle_integration_test.go:
--------------------------------------------------------------------------------

```go
// Copyright © 2025, Oracle and/or its affiliates.

package oracle

import (
	"context"
	"database/sql"
	"fmt"
	"os"
	"regexp"
	"strings"
	"testing"
	"time"

	"github.com/google/uuid"
	"github.com/googleapis/genai-toolbox/internal/testutils"
	"github.com/googleapis/genai-toolbox/tests"
)

var (
	OracleSourceKind = "oracle"
	OracleToolKind   = "oracle-sql"
	OracleHost       = os.Getenv("ORACLE_HOST")
	OracleUser       = os.Getenv("ORACLE_USER")
	OraclePass       = os.Getenv("ORACLE_PASS")
	OracleServerName = os.Getenv("ORACLE_SERVER_NAME")
	OracleConnStr    = fmt.Sprintf(
		"%s:%s/%s", OracleHost, "1521", OracleServerName)
)

func getOracleVars(t *testing.T) map[string]any {
	switch "" {
	case OracleHost:
		t.Fatal("'ORACLE_HOST' not set")
	case OracleUser:
		t.Fatal("'ORACLE_USER' not set")
	case OraclePass:
		t.Fatal("'ORACLE_PASS' not set")
	case OracleServerName:
		t.Fatal("'ORACLE_SERVER_NAME' not set")
	}

	return map[string]any{
		"kind":             OracleSourceKind,
		"connectionString": OracleConnStr,
		"user":             OracleUser,
		"password":         OraclePass,
	}
}

// Copied over from oracle.go
func initOracleConnection(ctx context.Context, user, pass, connStr string) (*sql.DB, error) {
	fullConnStr := fmt.Sprintf("oracle://%s:%s@%s", user, pass, connStr)

	db, err := sql.Open("oracle", fullConnStr)
	if err != nil {
		return nil, fmt.Errorf("unable to open Oracle connection: %w", err)
	}

	err = db.PingContext(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to ping Oracle connection: %w", err)
	}

	return db, nil
}

func TestOracleSimpleToolEndpoints(t *testing.T) {
	sourceConfig := getOracleVars(t)
	ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
	defer cancel()

	var args []string

	db, err := initOracleConnection(ctx, OracleUser, OraclePass, OracleConnStr)
	if err != nil {
		t.Fatalf("unable to create Oracle connection pool: %s", err)
	}

	dropAllUserTables(t, ctx, db)

	// create table name with UUID
	tableNameParam := "param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
	tableNameAuth := "auth_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
	tableNameTemplateParam := "template_param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")

	// set up data for param tool
	createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := getOracleParamToolInfo(tableNameParam)
	teardownTable1 := setupOracleTable(t, ctx, db, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
	defer teardownTable1(t)

	// set up data for auth tool
	createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := getOracleAuthToolInfo(tableNameAuth)
	teardownTable2 := setupOracleTable(t, ctx, db, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
	defer teardownTable2(t)

	// Write config into a file and pass it to command
	toolsFile := tests.GetToolsConfig(sourceConfig, OracleToolKind, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, authToolStmt)
	toolsFile = tests.AddExecuteSqlConfig(t, toolsFile, "oracle-execute-sql")
	tmplSelectCombined, tmplSelectFilterCombined := tests.GetMySQLTmplToolStatement()
	toolsFile = tests.AddTemplateParamConfig(t, toolsFile, OracleToolKind, tmplSelectCombined, tmplSelectFilterCombined, "")

	cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
	if err != nil {
		t.Fatalf("command initialization returned an error: %s", err)
	}
	defer cleanup()

	waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
	if err != nil {
		t.Logf("toolbox command logs: \n%s", out)
		t.Fatalf("toolbox didn't start successfully: %s", err)
	}

	// Get configs for tests
	select1Want := "[{\"1\":1}]"
	mcpMyFailToolWant := `{"jsonrpc":"2.0","id":"invoke-fail-tool","result":{"content":[{"type":"text","text":"unable to execute query: ORA-00900: invalid SQL statement\n error occur at position: 0"}],"isError":true}}`
	createTableStatement := `"CREATE TABLE t (id NUMBER GENERATED AS IDENTITY PRIMARY KEY, name VARCHAR2(255))"`
	mcpSelect1Want := `{"jsonrpc":"2.0","id":"invoke my-auth-required-tool","result":{"content":[{"type":"text","text":"{\"1\":1}"}]}}`

	// Run tests
	tests.RunToolGetTest(t)
	tests.RunToolInvokeTest(t, select1Want,
		tests.DisableArrayTest(),
	)
	tests.RunMCPToolCallMethod(t, mcpMyFailToolWant, mcpSelect1Want)
	tests.RunExecuteSqlToolInvokeTest(t, createTableStatement, select1Want)
	tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam)
}

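// setupOracleTable creates the given table, seeds it with test data, and
// returns a teardown function that drops the table.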
func setupOracleTable(t *testing.T, ctx context.Context, pool *sql.DB, createStatement, insertStatement, tableName string, params []any) func(*testing.T) {
	err := pool.PingContext(ctx)
	if err != nil {
		t.Fatalf("unable to connect to test database: %s", err)
	}

	// Create table
	_, err = pool.QueryContext(ctx, createStatement)
	if err != nil {
		t.Fatalf("unable to create test table %s: %s", tableName, err)
	}

	// Insert test data
	_, err = pool.QueryContext(ctx, insertStatement, params...)
	if err != nil {
		t.Fatalf("unable to insert test data: %s", err)
	}

	return func(t *testing.T) {
		// tear down test
		_, err = pool.ExecContext(ctx, fmt.Sprintf("DROP TABLE %s", tableName))
		if err != nil {
			t.Errorf("Teardown failed: %s", err)
		}
	}
}

func getOracleParamToolInfo(tableName string) (string, string, string, string, string, string, []any) {
	// Use GENERATED AS IDENTITY for auto-incrementing primary keys.
	// VARCHAR2 is the standard string type in Oracle.
	createStatement := fmt.Sprintf(`CREATE TABLE %s ("id" NUMBER GENERATED AS IDENTITY PRIMARY KEY, "name" VARCHAR2(255))`, tableName)

	// Use a PL/SQL block to perform multiple inserts in a single statement.
	insertStatement := fmt.Sprintf(`
		BEGIN
			INSERT INTO %s ("name") VALUES (:1);
			INSERT INTO %s ("name") VALUES (:2);
			INSERT INTO %s ("name") VALUES (:3);
			INSERT INTO %s ("name") VALUES (:4);
		END;`, tableName, tableName, tableName, tableName)

	toolStatement := fmt.Sprintf(`SELECT * FROM %s WHERE "id" = :1 OR "name" = :2`, tableName)
	idParamStatement := fmt.Sprintf(`SELECT * FROM %s WHERE "id" = :1`, tableName)
	nameParamStatement := fmt.Sprintf(`SELECT * FROM %s WHERE "name" = :1`, tableName)

	// Oracle's equivalent for array parameters is using the 'MEMBER OF' operator
	// with a collection type defined in the database schema.
	arrayToolStatement := fmt.Sprintf(`SELECT * FROM %s WHERE "id" MEMBER OF :1 AND "name" MEMBER OF :2`, tableName)

	params := []any{"Alice", "Jane", "Sid", nil}

	return createStatement, insertStatement, toolStatement, idParamStatement, nameParamStatement, arrayToolStatement, params
}

// getOracleAuthToolInfo returns statements and params for my-auth-tool for Oracle SQL
func getOracleAuthToolInfo(tableName string) (string, string, string, []any) {
	createStatement := fmt.Sprintf(`CREATE TABLE %s ("id" NUMBER GENERATED AS IDENTITY PRIMARY KEY, "name" VARCHAR2(255), "email" VARCHAR2(255))`, tableName)

	// Use a PL/SQL block to perform multiple inserts in a single statement.
	insertStatement := fmt.Sprintf(`
		BEGIN
			INSERT INTO %s ("name", "email") VALUES (:1, :2);
			INSERT INTO %s ("name", "email") VALUES (:3, :4);
		END;`, tableName, tableName)

	toolStatement := fmt.Sprintf(`SELECT "name" FROM %s WHERE "email" = :1`, tableName)

	params := []any{"Alice", tests.ServiceAccountEmail, "Jane", "[email protected]"}

	return createStatement, insertStatement, toolStatement, params
}

// dropAllUserTables finds and drops all tables owned by the current user.
func dropAllUserTables(t *testing.T, ctx context.Context, db *sql.DB) {
	// Query for only the tables we know are created by this test suite.
	const query = `
		SELECT table_name FROM user_tables
		WHERE table_name LIKE 'param_table_%'
		   OR table_name LIKE 'auth_table_%'
		   OR table_name LIKE 'template_param_table_%'`

	rows, err := db.QueryContext(ctx, query)
	if err != nil {
		t.Fatalf("failed to query for user tables: %v", err)
	}
	defer rows.Close()

	var tablesToDrop []string
	for rows.Next() {
		var tableName string
		if err := rows.Scan(&tableName); err != nil {
			t.Fatalf("failed to scan table name: %v", err)
		}
		tablesToDrop = append(tablesToDrop, tableName)
	}

	if err := rows.Err(); err != nil {
		t.Fatalf("error iterating over tables: %v", err)
	}

	for _, tableName := range tablesToDrop {
		_, err := db.ExecContext(ctx, fmt.Sprintf("DROP TABLE %s CASCADE CONSTRAINTS", tableName))
		if err != nil {
			t.Logf("failed to drop table %s: %v", tableName, err)
		}
	}
}

```

--------------------------------------------------------------------------------
/tests/cloudsqlpg/cloud_sql_pg_integration_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cloudsqlpg

import (
	"context"
	"fmt"
	"net"
	"os"
	"regexp"
	"strings"
	"testing"
	"time"

	"cloud.google.com/go/cloudsqlconn"
	"github.com/google/uuid"
	"github.com/googleapis/genai-toolbox/internal/testutils"
	"github.com/googleapis/genai-toolbox/tests"
	"github.com/jackc/pgx/v5/pgxpool"
)

var (
	CloudSQLPostgresSourceKind = "cloud-sql-postgres"
	CloudSQLPostgresToolKind   = "postgres-sql"
	CloudSQLPostgresProject    = os.Getenv("CLOUD_SQL_POSTGRES_PROJECT")
	CloudSQLPostgresRegion     = os.Getenv("CLOUD_SQL_POSTGRES_REGION")
	CloudSQLPostgresInstance   = os.Getenv("CLOUD_SQL_POSTGRES_INSTANCE")
	CloudSQLPostgresDatabase   = os.Getenv("CLOUD_SQL_POSTGRES_DATABASE")
	CloudSQLPostgresUser       = os.Getenv("CLOUD_SQL_POSTGRES_USER")
	CloudSQLPostgresPass       = os.Getenv("CLOUD_SQL_POSTGRES_PASS")
)

func getCloudSQLPgVars(t *testing.T) map[string]any {
	switch "" {
	case CloudSQLPostgresProject:
		t.Fatal("'CLOUD_SQL_POSTGRES_PROJECT' not set")
	case CloudSQLPostgresRegion:
		t.Fatal("'CLOUD_SQL_POSTGRES_REGION' not set")
	case CloudSQLPostgresInstance:
		t.Fatal("'CLOUD_SQL_POSTGRES_INSTANCE' not set")
	case CloudSQLPostgresDatabase:
		t.Fatal("'CLOUD_SQL_POSTGRES_DATABASE' not set")
	case CloudSQLPostgresUser:
		t.Fatal("'CLOUD_SQL_POSTGRES_USER' not set")
	case CloudSQLPostgresPass:
		t.Fatal("'CLOUD_SQL_POSTGRES_PASS' not set")
	}

	return map[string]any{
		"kind":     CloudSQLPostgresSourceKind,
		"project":  CloudSQLPostgresProject,
		"instance": CloudSQLPostgresInstance,
		"region":   CloudSQLPostgresRegion,
		"database": CloudSQLPostgresDatabase,
		"user":     CloudSQLPostgresUser,
		"password": CloudSQLPostgresPass,
	}
}

// Copied over from cloud_sql_pg.go
func initCloudSQLPgConnectionPool(project, region, instance, ip_type, user, pass, dbname string) (*pgxpool.Pool, error) {
	// Configure the driver to connect to the database
	dsn := fmt.Sprintf("user=%s password=%s dbname=%s sslmode=disable", user, pass, dbname)
	config, err := pgxpool.ParseConfig(dsn)
	if err != nil {
		return nil, fmt.Errorf("unable to parse connection uri: %w", err)
	}

	// Create a new dialer with options
	dialOpts, err := tests.GetCloudSQLDialOpts(ip_type)
	if err != nil {
		return nil, err
	}
	d, err := cloudsqlconn.NewDialer(context.Background(), cloudsqlconn.WithDefaultDialOptions(dialOpts...))
	if err != nil {
		return nil, fmt.Errorf("unable to create Cloud SQL dialer: %w", err)
	}

	// Tell the driver to use the Cloud SQL Go Connector to create connections
	i := fmt.Sprintf("%s:%s:%s", project, region, instance)
	config.ConnConfig.DialFunc = func(ctx context.Context, _ string, instance string) (net.Conn, error) {
		return d.Dial(ctx, i)
	}

	// Interact with the driver directly as you normally would
	pool, err := pgxpool.NewWithConfig(context.Background(), config)
	if err != nil {
		return nil, err
	}
	return pool, nil
}

func TestCloudSQLPgSimpleToolEndpoints(t *testing.T) {
	sourceConfig := getCloudSQLPgVars(t)
	ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
	defer cancel()

	var args []string

	pool, err := initCloudSQLPgConnectionPool(CloudSQLPostgresProject, CloudSQLPostgresRegion, CloudSQLPostgresInstance, "public", CloudSQLPostgresUser, CloudSQLPostgresPass, CloudSQLPostgresDatabase)
	if err != nil {
		t.Fatalf("unable to create Cloud SQL connection pool: %s", err)
	}

	// create table name with UUID
	tableNameParam := "param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
	tableNameAuth := "auth_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
	tableNameTemplateParam := "template_param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")

	// set up data for param tool
	createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := tests.GetPostgresSQLParamToolInfo(tableNameParam)
	teardownTable1 := tests.SetupPostgresSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
	defer teardownTable1(t)

	// set up data for auth tool
	createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := tests.GetPostgresSQLAuthToolInfo(tableNameAuth)
	teardownTable2 := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
	defer teardownTable2(t)

	// Write config into a file and pass it to command
	toolsFile := tests.GetToolsConfig(sourceConfig, CloudSQLPostgresToolKind, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, authToolStmt)
	toolsFile = tests.AddExecuteSqlConfig(t, toolsFile, "postgres-execute-sql")
	tmplSelectCombined, tmplSelectFilterCombined := tests.GetPostgresSQLTmplToolStatement()
	toolsFile = tests.AddTemplateParamConfig(t, toolsFile, CloudSQLPostgresToolKind, tmplSelectCombined, tmplSelectFilterCombined, "")

	cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
	if err != nil {
		t.Fatalf("command initialization returned an error: %s", err)
	}
	defer cleanup()

	waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
	if err != nil {
		t.Logf("toolbox command logs: \n%s", out)
		t.Fatalf("toolbox didn't start successfully: %s", err)
	}

	// Get configs for tests
	select1Want, mcpMyFailToolWant, createTableStatement, mcpSelect1Want := tests.GetPostgresWants()

	// Run tests
	tests.RunToolGetTest(t)
	tests.RunToolInvokeTest(t, select1Want)
	tests.RunMCPToolCallMethod(t, mcpMyFailToolWant, mcpSelect1Want)
	tests.RunExecuteSqlToolInvokeTest(t, createTableStatement, select1Want)
	tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam)
}

// Test connection with different IP type
func TestCloudSQLPgIpConnection(t *testing.T) {
	sourceConfig := getCloudSQLPgVars(t)

	tcs := []struct {
		name   string
		ipType string
	}{
		{
			name:   "public ip",
			ipType: "public",
		},
		{
			name:   "private ip",
			ipType: "private",
		},
	}
	for _, tc := range tcs {
		t.Run(tc.name, func(t *testing.T) {
			sourceConfig["ipType"] = tc.ipType
			err := tests.RunSourceConnectionTest(t, sourceConfig, CloudSQLPostgresToolKind)
			if err != nil {
				t.Fatalf("Connection test failure: %s", err)
			}
		})
	}
}

func TestCloudSQLPgIAMConnection(t *testing.T) {
	getCloudSQLPgVars(t)
	// the IAM database user is the service account email with the .gserviceaccount.com suffix trimmed
	serviceAccountEmail := strings.TrimSuffix(tests.ServiceAccountEmail, ".gserviceaccount.com")

	noPassSourceConfig := map[string]any{
		"kind":     CloudSQLPostgresSourceKind,
		"project":  CloudSQLPostgresProject,
		"instance": CloudSQLPostgresInstance,
		"region":   CloudSQLPostgresRegion,
		"database": CloudSQLPostgresDatabase,
		"user":     serviceAccountEmail,
	}

	noUserSourceConfig := map[string]any{
		"kind":     CloudSQLPostgresSourceKind,
		"project":  CloudSQLPostgresProject,
		"instance": CloudSQLPostgresInstance,
		"region":   CloudSQLPostgresRegion,
		"database": CloudSQLPostgresDatabase,
		"password": "random",
	}

	noUserNoPassSourceConfig := map[string]any{
		"kind":     CloudSQLPostgresSourceKind,
		"project":  CloudSQLPostgresProject,
		"instance": CloudSQLPostgresInstance,
		"region":   CloudSQLPostgresRegion,
		"database": CloudSQLPostgresDatabase,
	}
	tcs := []struct {
		name         string
		sourceConfig map[string]any
		isErr        bool
	}{
		{
			name:         "no user no pass",
			sourceConfig: noUserNoPassSourceConfig,
			isErr:        false,
		},
		{
			name:         "no password",
			sourceConfig: noPassSourceConfig,
			isErr:        false,
		},
		{
			name:         "no user",
			sourceConfig: noUserSourceConfig,
			isErr:        true,
		},
	}
	for _, tc := range tcs {
		t.Run(tc.name, func(t *testing.T) {
			err := tests.RunSourceConnectionTest(t, tc.sourceConfig, CloudSQLPostgresToolKind)
			if err != nil {
				if tc.isErr {
					return
				}
				t.Fatalf("Connection test failure: %s", err)
			}
			if tc.isErr {
				t.Fatalf("Expected error but test passed.")
			}
		})
	}
}

```

--------------------------------------------------------------------------------
/.ci/versioned.release.cloudbuild.yaml:
--------------------------------------------------------------------------------

```yaml
# Copyright 2024 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
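
# The steps below build the container image once, then cross-compile the CLI for each
# platform in a "build-*" step (the "-geminicli" variants differ only in the buildType
# ldflag) and upload each binary to the release bucket in a matching "store-*" step.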

steps:
  - id: "build-docker"
    name: "gcr.io/cloud-builders/docker"
    waitFor: ['-']
    script: |
        #!/usr/bin/env bash
        export VERSION=$(cat ./cmd/version.txt)
        docker buildx create --name container-builder --driver docker-container --bootstrap --use

        export TAGS="-t ${_DOCKER_URI}:$VERSION"
        if [[ $_PUSH_LATEST == 'true' ]]; then
          export TAGS="$TAGS -t ${_DOCKER_URI}:latest"
        fi
        docker buildx build --platform linux/amd64,linux/arm64 --build-arg BUILD_TYPE=container.release --build-arg COMMIT_SHA=$(git rev-parse --short HEAD) $TAGS --push .

  - id: "install-dependencies"
    name: golang:1
    waitFor: ['-']
    env: 
      - 'GOPATH=/gopath'
    volumes:
      - name: 'go'
        path: '/gopath'
    script: |
        go get -d ./...

  - id: "build-linux-amd64"
    name: golang:1
    waitFor: 
      - "install-dependencies"
    env: 
      - 'GOPATH=/gopath'
    volumes:
      - name: 'go'
        path: '/gopath'
    script: |
        #!/usr/bin/env bash
        export VERSION=$(cat ./cmd/version.txt)
        CGO_ENABLED=0 GOOS=linux GOARCH=amd64 \
          go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.linux.amd64

  - id: "store-linux-amd64"
    name: "gcr.io/cloud-builders/gcloud:latest"
    waitFor:
      - "build-linux-amd64"
    script: |
        #!/usr/bin/env bash
        export VERSION=v$(cat ./cmd/version.txt)
        gcloud storage cp toolbox.linux.amd64 gs://$_BUCKET_NAME/$VERSION/linux/amd64/toolbox

  - id: "build-linux-amd64-geminicli"
    name: golang:1
    waitFor: 
      - "install-dependencies"
    env: 
      - 'GOPATH=/gopath'
    volumes:
      - name: 'go'
        path: '/gopath'
    script: |
        #!/usr/bin/env bash
        export VERSION=$(cat ./cmd/version.txt)
        CGO_ENABLED=0 GOOS=linux GOARCH=amd64 \
          go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=geminicli.binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.linux.amd64

  - id: "store-linux-amd64-geminicli"
    name: "gcr.io/cloud-builders/gcloud:latest"
    waitFor:
      - "build-linux-amd64-geminicli"
    script: |
        #!/usr/bin/env bash
        export VERSION=v$(cat ./cmd/version.txt)
        gcloud storage cp toolbox.geminicli.linux.amd64 gs://$_BUCKET_NAME/geminicli/$VERSION/linux/amd64/toolbox

  - id: "build-darwin-arm64"
    name: golang:1
    waitFor: 
      - "install-dependencies"
    env: 
      - 'GOPATH=/gopath'
    volumes:
      - name: 'go'
        path: '/gopath'
    script: |
        #!/usr/bin/env bash
        export VERSION=$(cat ./cmd/version.txt)
        CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 \
          go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.darwin.arm64

  - id: "store-darwin-arm64"
    name: "gcr.io/cloud-builders/gcloud:latest"
    waitFor:
      - "build-darwin-arm64"
    script: |
        #!/usr/bin/env bash
        export VERSION=v$(cat ./cmd/version.txt)
        gcloud storage cp toolbox.darwin.arm64 gs://$_BUCKET_NAME/$VERSION/darwin/arm64/toolbox

  - id: "build-darwin-arm64-geminicli"
    name: golang:1
    waitFor: 
      - "install-dependencies"
    env: 
      - 'GOPATH=/gopath'
    volumes:
      - name: 'go'
        path: '/gopath'
    script: |
        #!/usr/bin/env bash
        export VERSION=$(cat ./cmd/version.txt)
        CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 \
          go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=geminicli.binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.darwin.arm64

  - id: "store-darwin-arm64-geminicli"
    name: "gcr.io/cloud-builders/gcloud:latest"
    waitFor:
      - "build-darwin-arm64-geminicli"
    script: |
        #!/usr/bin/env bash
        export VERSION=v$(cat ./cmd/version.txt)
        gcloud storage cp toolbox.geminicli.darwin.arm64 gs://$_BUCKET_NAME/geminicli/$VERSION/darwin/arm64/toolbox

  - id: "build-darwin-amd64"
    name: golang:1
    waitFor: 
      - "install-dependencies"
    env: 
      - 'GOPATH=/gopath'
    volumes:
      - name: 'go'
        path: '/gopath'
    script: |
        #!/usr/bin/env bash
        export VERSION=$(cat ./cmd/version.txt)
        CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 \
          go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.darwin.amd64

  - id: "store-darwin-amd64"
    name: "gcr.io/cloud-builders/gcloud:latest"
    waitFor:
      - "build-darwin-amd64"
    script: |
        #!/usr/bin/env bash
        export VERSION=v$(cat ./cmd/version.txt)
        gcloud storage cp toolbox.darwin.amd64 gs://$_BUCKET_NAME/$VERSION/darwin/amd64/toolbox

  - id: "build-darwin-amd64-geminicli"
    name: golang:1
    waitFor: 
      - "install-dependencies"
    env: 
      - 'GOPATH=/gopath'
    volumes:
      - name: 'go'
        path: '/gopath'
    script: |
        #!/usr/bin/env bash
        export VERSION=$(cat ./cmd/version.txt)
        CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 \
          go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=geminicli.binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.darwin.amd64

  - id: "store-darwin-amd64-geminicli"
    name: "gcr.io/cloud-builders/gcloud:latest"
    waitFor:
      - "build-darwin-amd64-geminicli"
    script: |
        #!/usr/bin/env bash
        export VERSION=v$(cat ./cmd/version.txt)
        gcloud storage cp toolbox.geminicli.darwin.amd64 gs://$_BUCKET_NAME/geminicli/$VERSION/darwin/amd64/toolbox

  - id: "build-windows-amd64"
    name: golang:1
    waitFor: 
      - "install-dependencies"
    env: 
      - 'GOPATH=/gopath'
    volumes:
      - name: 'go'
        path: '/gopath'
    script: |
        #!/usr/bin/env bash
        export VERSION=$(cat ./cmd/version.txt)
        CGO_ENABLED=0 GOOS=windows GOARCH=amd64 \
          go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.windows.amd64

  - id: "store-windows-amd64"
    name: "gcr.io/cloud-builders/gcloud:latest"
    waitFor:
      - "build-windows-amd64"
    script: |
        #!/usr/bin/env bash
        export VERSION=v$(cat ./cmd/version.txt)
        gcloud storage cp toolbox.windows.amd64 gs://$_BUCKET_NAME/$VERSION/windows/amd64/toolbox.exe

  - id: "build-windows-amd64-geminicli"
    name: golang:1
    waitFor: 
      - "install-dependencies"
    env: 
      - 'GOPATH=/gopath'
    volumes:
      - name: 'go'
        path: '/gopath'
    script: |
        #!/usr/bin/env bash
        export VERSION=$(cat ./cmd/version.txt)
        CGO_ENABLED=0 GOOS=windows GOARCH=amd64 \
          go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=geminicli.binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.windows.amd64

  - id: "store-windows-amd64-geminicli"
    name: "gcr.io/cloud-builders/gcloud:latest"
    waitFor:
      - "build-windows-amd64-geminicli"
    script: |
        #!/usr/bin/env bash
        export VERSION=v$(cat ./cmd/version.txt)
        gcloud storage cp toolbox.geminicli.windows.amd64 gs://$_BUCKET_NAME/geminicli/$VERSION/windows/amd64/toolbox.exe

options:
  automapSubstitutions: true
  dynamicSubstitutions: true
  logging: CLOUD_LOGGING_ONLY # Necessary for custom service account
  machineType: 'E2_HIGHCPU_32'

substitutions:
  _REGION: us-central1
  _AR_HOSTNAME: ${_REGION}-docker.pkg.dev
  _AR_REPO_NAME: toolbox
  _BUCKET_NAME: genai-toolbox
  _DOCKER_URI: ${_AR_HOSTNAME}/${PROJECT_ID}/${_AR_REPO_NAME}/toolbox
  _PUSH_LATEST: "true"

```

--------------------------------------------------------------------------------
/docs/en/resources/tools/firestore/firestore-add-documents.md:
--------------------------------------------------------------------------------

```markdown
---
title: "firestore-add-documents"
type: docs
weight: 1
description: >
  A "firestore-add-documents" tool adds documents to a given collection path.
aliases:
- /resources/tools/firestore-add-documents
---
## Description

The `firestore-add-documents` tool allows you to add new documents to a
Firestore collection. It supports all Firestore data types using Firestore's
native JSON format. The tool automatically generates a unique document ID for
each new document.

## Parameters

| Parameter        | Type    | Required | Description                                                                                                                                                                                                   |
|------------------|---------|----------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `collectionPath` | string  | Yes      | The path of the collection where the document will be added                                                                                                                                                   |
| `documentData`   | map     | Yes      | The data to be added as a document to the given collection. Must use [Firestore's native JSON format](https://cloud.google.com/firestore/docs/reference/rest/Shared.Types/ArrayValue#Value) with typed values |
| `returnData`     | boolean | No       | If set to true, the output will include the data of the created document. Defaults to false to help avoid overloading the context                                                                             |

## Output

The tool returns a map containing:

| Field          | Type   | Description                                                                                                                    |
|----------------|--------|--------------------------------------------------------------------------------------------------------------------------------|
| `documentPath` | string | The full resource name of the created document (e.g., `projects/{projectId}/databases/{databaseId}/documents/{document_path}`) |
| `createTime`   | string | The timestamp when the document was created                                                                                    |
| `documentData` | map    | The data that was added (only included when `returnData` is true)                                                              |
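
For example, a successful call might return something like the following (the project ID, database ID, and auto-generated document ID are illustrative):

```json
{
  "documentPath": "projects/my-project/databases/(default)/documents/companies/AbC123xYz45",
  "createTime": "2025-01-07T10:00:05Z"
}
```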

## Data Type Format

The tool requires Firestore's native JSON format for document data. Each field
must be wrapped with its type indicator:

### Basic Types
- **String**: `{"stringValue": "your string"}`
- **Integer**: `{"integerValue": "123"}` or `{"integerValue": 123}`
- **Double**: `{"doubleValue": 123.45}`
- **Boolean**: `{"booleanValue": true}`
- **Null**: `{"nullValue": null}`
- **Bytes**: `{"bytesValue": "base64EncodedString"}`
- **Timestamp**: `{"timestampValue": "2025-01-07T10:00:00Z"}` (RFC3339 format)

### Complex Types
- **GeoPoint**: `{"geoPointValue": {"latitude": 34.052235, "longitude": -118.243683}}`
- **Array**: `{"arrayValue": {"values": [{"stringValue": "item1"}, {"integerValue": "2"}]}}`
- **Map**: `{"mapValue": {"fields": {"key1": {"stringValue": "value1"}, "key2": {"booleanValue": true}}}}`
- **Reference**: `{"referenceValue": "collection/document"}`

## Examples

### Basic Document Creation

```yaml
tools:
  add-company-doc:
    kind: firestore-add-documents
    source: my-firestore
    description: Add a new company document
```

Usage:
```json
{
  "collectionPath": "companies",
  "documentData": {
    "name": {
      "stringValue": "Acme Corporation"
    },
    "establishmentDate": {
      "timestampValue": "2000-01-15T10:30:00Z"
    },
    "location": {
      "geoPointValue": {
        "latitude": 34.052235,
        "longitude": -118.243683
      }
    },
    "active": {
      "booleanValue": true
    },
    "employeeCount": {
      "integerValue": "1500"
    },
    "annualRevenue": {
      "doubleValue": 1234567.89
    }
  }
}
```

### With Nested Maps and Arrays

```json
{
  "collectionPath": "companies",
  "documentData": {
    "name": {
      "stringValue": "Tech Innovations Inc"
    },
    "contactInfo": {
      "mapValue": {
        "fields": {
          "email": {
            "stringValue": "[email protected]"
          },
          "phone": {
            "stringValue": "+1-555-123-4567"
          },
          "address": {
            "mapValue": {
              "fields": {
                "street": {
                  "stringValue": "123 Innovation Drive"
                },
                "city": {
                  "stringValue": "San Francisco"
                },
                "state": {
                  "stringValue": "CA"
                },
                "zipCode": {
                  "stringValue": "94105"
                }
              }
            }
          }
        }
      }
    },
    "products": {
      "arrayValue": {
        "values": [
          {
            "stringValue": "Product A"
          },
          {
            "stringValue": "Product B"
          },
          {
            "mapValue": {
              "fields": {
                "productName": {
                  "stringValue": "Product C Premium"
                },
                "version": {
                  "integerValue": "3"
                },
                "features": {
                  "arrayValue": {
                    "values": [
                      {
                        "stringValue": "Advanced Analytics"
                      },
                      {
                        "stringValue": "Real-time Sync"
                      }
                    ]
                  }
                }
              }
            }
          }
        ]
      }
    }
  },
  "returnData": true
}
```

### Complete Example with All Data Types

```json
{
  "collectionPath": "test-documents",
  "documentData": {
    "stringField": {
      "stringValue": "Hello World"
    },
    "integerField": {
      "integerValue": "42"
    },
    "doubleField": {
      "doubleValue": 3.14159
    },
    "booleanField": {
      "booleanValue": true
    },
    "nullField": {
      "nullValue": null
    },
    "timestampField": {
      "timestampValue": "2025-01-07T15:30:00Z"
    },
    "geoPointField": {
      "geoPointValue": {
        "latitude": 37.7749,
        "longitude": -122.4194
      }
    },
    "bytesField": {
      "bytesValue": "SGVsbG8gV29ybGQh"
    },
    "arrayField": {
      "arrayValue": {
        "values": [
          {
            "stringValue": "item1"
          },
          {
            "integerValue": "2"
          },
          {
            "booleanValue": false
          }
        ]
      }
    },
    "mapField": {
      "mapValue": {
        "fields": {
          "nestedString": {
            "stringValue": "nested value"
          },
          "nestedNumber": {
            "doubleValue": 99.99
          }
        }
      }
    }
  }
}
```

## Authentication

The tool can be configured to require authentication:

```yaml
tools:
  secure-add-docs:
    kind: firestore-add-documents
    source: prod-firestore
    description: Add documents with authentication required
    authRequired:
      - google-oauth
      - api-key
```

## Error Handling

Common errors include:

- Invalid collection path
- Missing or invalid document data
- Permission denied (if Firestore security rules block the operation)
- Invalid data type conversions

## Best Practices

1. **Always use typed values**: Every field must be wrapped with its appropriate
   type indicator (e.g., `{"stringValue": "text"}`)
2. **Integer values can be strings**: The tool accepts integer values as strings
   (e.g., `{"integerValue": "1500"}`)
3. **Use returnData sparingly**: Only set to true when you need to verify the
   exact data that was written
4. **Validate data before sending**: Ensure your data matches Firestore's native
   JSON format
5. **Handle timestamps properly**: Use RFC3339 format for timestamp strings
6. **Base64 encode binary data**: Binary data must be base64 encoded in the
   `bytesValue` field
7. **Consider security rules**: Ensure your Firestore security rules allow
   document creation in the target collection

## Related Tools

- [`firestore-get-documents`](firestore-get-documents.md) - Retrieve documents
  by their paths
- [`firestore-query-collection`](firestore-query-collection.md) - Query
  documents in a collection
- [`firestore-delete-documents`](firestore-delete-documents.md) - Delete
  documents from Firestore

```

--------------------------------------------------------------------------------
/internal/prebuiltconfigs/tools/oceanbase.yaml:
--------------------------------------------------------------------------------

```yaml
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

sources:
  oceanbase-source:
    kind: oceanbase
    host: ${OCEANBASE_HOST}
    port: ${OCEANBASE_PORT}
    database: ${OCEANBASE_DATABASE}
    user: ${OCEANBASE_USER}
    password: ${OCEANBASE_PASSWORD}
tools:
  execute_sql:
    kind: oceanbase-execute-sql
    source: oceanbase-source
    description: Use this tool to execute SQL.
  list_tables:
    kind: oceanbase-sql
    source: oceanbase-source
    description: "Lists detailed schema information (object type, columns, constraints, indexes, triggers, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
    statement: |
      SELECT
          T.TABLE_SCHEMA AS schema_name,
          T.TABLE_NAME AS object_name,
          CONVERT( JSON_OBJECT(
              'schema_name', T.TABLE_SCHEMA,
              'object_name', T.TABLE_NAME,
              'object_type', 'TABLE',
              'owner', (
                  SELECT
                      IFNULL(U.GRANTEE, 'N/A')
                  FROM
                      INFORMATION_SCHEMA.SCHEMA_PRIVILEGES U
                  WHERE
                      U.TABLE_SCHEMA = T.TABLE_SCHEMA
                  LIMIT 1
              ),
              'comment', IFNULL(T.TABLE_COMMENT, ''),
              'columns', (
                  SELECT
                      IFNULL(
                          JSON_ARRAYAGG(
                              JSON_OBJECT(
                                  'column_name', C.COLUMN_NAME,
                                  'data_type', C.COLUMN_TYPE,
                                  'ordinal_position', C.ORDINAL_POSITION,
                                  'is_not_nullable', IF(C.IS_NULLABLE = 'NO', TRUE, FALSE),
                                  'column_default', C.COLUMN_DEFAULT,
                                  'column_comment', IFNULL(C.COLUMN_COMMENT, '')
                              )
                          ),
                          JSON_ARRAY()
                      )
                  FROM
                      INFORMATION_SCHEMA.COLUMNS C
                  WHERE
                      C.TABLE_SCHEMA = T.TABLE_SCHEMA AND C.TABLE_NAME = T.TABLE_NAME
                  ORDER BY C.ORDINAL_POSITION
              ),
              'constraints', (
                  SELECT
                      IFNULL(
                          JSON_ARRAYAGG(
                              JSON_OBJECT(
                                  'constraint_name', TC.CONSTRAINT_NAME,
                                  'constraint_type',
                                      CASE TC.CONSTRAINT_TYPE
                                          WHEN 'PRIMARY KEY' THEN 'PRIMARY KEY'
                                          WHEN 'FOREIGN KEY' THEN 'FOREIGN KEY'
                                          WHEN 'UNIQUE' THEN 'UNIQUE'
                                          ELSE TC.CONSTRAINT_TYPE
                                      END,
                                  'constraint_definition', '',
                                  'constraint_columns', (
                                      SELECT
                                          IFNULL(JSON_ARRAYAGG(KCU.COLUMN_NAME), JSON_ARRAY())
                                      FROM
                                          INFORMATION_SCHEMA.KEY_COLUMN_USAGE KCU
                                      WHERE
                                          KCU.CONSTRAINT_SCHEMA = TC.CONSTRAINT_SCHEMA
                                          AND KCU.CONSTRAINT_NAME = TC.CONSTRAINT_NAME
                                          AND KCU.TABLE_NAME = TC.TABLE_NAME
                                      ORDER BY KCU.ORDINAL_POSITION
                                  ),
                                  'foreign_key_referenced_table', IF(TC.CONSTRAINT_TYPE = 'FOREIGN KEY', RC.REFERENCED_TABLE_NAME, NULL),
                                  'foreign_key_referenced_columns', IF(TC.CONSTRAINT_TYPE = 'FOREIGN KEY',
                                      (SELECT IFNULL(JSON_ARRAYAGG(FKCU.REFERENCED_COLUMN_NAME), JSON_ARRAY())
                                      FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE FKCU
                                      WHERE FKCU.CONSTRAINT_SCHEMA = TC.CONSTRAINT_SCHEMA
                                        AND FKCU.CONSTRAINT_NAME = TC.CONSTRAINT_NAME
                                        AND FKCU.TABLE_NAME = TC.TABLE_NAME
                                        AND FKCU.REFERENCED_TABLE_NAME IS NOT NULL
                                      ORDER BY FKCU.ORDINAL_POSITION),
                                      NULL
                                  )
                              )
                          ),
                          JSON_ARRAY()
                      )
                  FROM
                      INFORMATION_SCHEMA.TABLE_CONSTRAINTS TC
                  LEFT JOIN
                      INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS RC
                      ON TC.CONSTRAINT_SCHEMA = RC.CONSTRAINT_SCHEMA
                      AND TC.CONSTRAINT_NAME = RC.CONSTRAINT_NAME
                      AND TC.TABLE_NAME = RC.TABLE_NAME
                  WHERE
                      TC.TABLE_SCHEMA = T.TABLE_SCHEMA AND TC.TABLE_NAME = T.TABLE_NAME
              ),
              'indexes', (
                  SELECT
                      IFNULL(
                          JSON_ARRAYAGG(
                              JSON_OBJECT(
                                  'index_name', IndexData.INDEX_NAME,
                                  'is_unique', IF(IndexData.NON_UNIQUE = 0, TRUE, FALSE),
                                  'is_primary', IF(IndexData.INDEX_NAME = 'PRIMARY', TRUE, FALSE),
                                  'index_columns', IFNULL(IndexData.INDEX_COLUMNS_ARRAY, JSON_ARRAY())
                              )
                          ),
                          JSON_ARRAY()
                      )
                  FROM (
                      SELECT
                          S.TABLE_SCHEMA,
                          S.TABLE_NAME,
                          S.INDEX_NAME,
                          MIN(S.NON_UNIQUE) AS NON_UNIQUE, -- Aggregate NON_UNIQUE here to get unique status for the index
                          JSON_ARRAYAGG(S.COLUMN_NAME) AS INDEX_COLUMNS_ARRAY -- Aggregate columns into an array for this index
                      FROM
                          INFORMATION_SCHEMA.STATISTICS S
                      WHERE
                          S.TABLE_SCHEMA = T.TABLE_SCHEMA AND S.TABLE_NAME = T.TABLE_NAME
                      GROUP BY
                          S.TABLE_SCHEMA, S.TABLE_NAME, S.INDEX_NAME
                  ) AS IndexData
                  ORDER BY IndexData.INDEX_NAME
              ),
              'triggers', (
                  SELECT
                      IFNULL(
                          JSON_ARRAYAGG(
                              JSON_OBJECT(
                                  'trigger_name', TR.TRIGGER_NAME,
                                  'trigger_definition', TR.ACTION_STATEMENT
                              )
                          ),
                          JSON_ARRAY()
                      )
                  FROM
                      INFORMATION_SCHEMA.TRIGGERS TR
                  WHERE
                      TR.EVENT_OBJECT_SCHEMA = T.TABLE_SCHEMA AND TR.EVENT_OBJECT_TABLE = T.TABLE_NAME
                  ORDER BY TR.TRIGGER_NAME
              )
          ) USING utf8mb4) AS object_details
      FROM
          INFORMATION_SCHEMA.TABLES T
      WHERE
          T.TABLE_SCHEMA NOT IN ('mysql', 'information_schema', 'performance_schema', 'sys')
          AND (NULLIF(TRIM(?), '') IS NULL OR FIND_IN_SET(T.TABLE_NAME, ?))
          AND T.TABLE_TYPE = 'BASE TABLE'
      ORDER BY
          T.TABLE_SCHEMA, T.TABLE_NAME;
    parameters:
      - name: table_names
        type: string
        description: "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed."
toolsets:
  oceanbase_database_tools:
    - execute_sql
    - list_tables

```

--------------------------------------------------------------------------------
/docs/en/samples/bigquery/mcp_quickstart/_index.md:
--------------------------------------------------------------------------------

```markdown
---
title: "Quickstart (MCP with BigQuery)"
type: docs
weight: 2
description: >
  How to get started running Toolbox with MCP Inspector and BigQuery as the source.
---

## Overview

[Model Context Protocol](https://modelcontextprotocol.io) is an open protocol
that standardizes how applications provide context to LLMs. Check out this page
on how to [connect to Toolbox via MCP](../../../how-to/connect_via_mcp.md).

## Step 1: Set up your BigQuery Dataset and Table

In this section, we will create a BigQuery dataset and a table, then insert some
data that needs to be accessed by our agent.

1. Create a new BigQuery dataset (replace `YOUR_DATASET_NAME` with your desired
   dataset name, e.g., `toolbox_mcp_ds`, and optionally specify a location like
   `US` or `EU`):

    ```bash
    export BQ_DATASET_NAME="YOUR_DATASET_NAME"
    export BQ_LOCATION="US"

    bq --location=$BQ_LOCATION mk $BQ_DATASET_NAME
    ```

    You can also do this through the [Google Cloud
    Console](https://console.cloud.google.com/bigquery).

1. The `hotels` table needs to be defined in your new dataset. First, create a
   file named `create_hotels_table.sql` with the following content:

    ```sql
    CREATE TABLE IF NOT EXISTS `YOUR_PROJECT_ID.YOUR_DATASET_NAME.hotels` (
      id            INT64 NOT NULL,
      name          STRING NOT NULL,
      location      STRING NOT NULL,
      price_tier    STRING NOT NULL,
      checkin_date  DATE NOT NULL,
      checkout_date DATE NOT NULL,
      booked        BOOLEAN NOT NULL
    );
    ```

    > **Note:** Replace `YOUR_PROJECT_ID` and `YOUR_DATASET_NAME` in the SQL
    > with your actual project ID and dataset name.

    Then run the command below to execute the SQL query:

    ```bash
    bq query --project_id=$GOOGLE_CLOUD_PROJECT --dataset_id=$BQ_DATASET_NAME --use_legacy_sql=false < create_hotels_table.sql
    ```

1. Next, populate the `hotels` table with some initial data. To do this, create
   a file named `insert_hotels_data.sql` and add the following SQL INSERT
   statement to it.

    ```sql
    INSERT INTO `YOUR_PROJECT_ID.YOUR_DATASET_NAME.hotels` (id, name, location, price_tier, checkin_date, checkout_date, booked)
    VALUES
      (1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-20', '2024-04-22', FALSE),
      (2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', FALSE),
      (3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', FALSE),
      (4, 'Radisson Blu Lucerne', 'Lucerne', 'Midscale', '2024-04-05', '2024-04-24', FALSE),
      (5, 'Best Western Bern', 'Bern', 'Upper Midscale', '2024-04-01', '2024-04-23', FALSE),
      (6, 'InterContinental Geneva', 'Geneva', 'Luxury', '2024-04-23', '2024-04-28', FALSE),
      (7, 'Sheraton Zurich', 'Zurich', 'Upper Upscale', '2024-04-02', '2024-04-27', FALSE),
      (8, 'Holiday Inn Basel', 'Basel', 'Upper Midscale', '2024-04-09', '2024-04-24', FALSE),
      (9, 'Courtyard Zurich', 'Zurich', 'Upscale', '2024-04-03', '2024-04-13', FALSE),
      (10, 'Comfort Inn Bern', 'Bern', 'Midscale', '2024-04-04', '2024-04-16', FALSE);
    ```

    > **Note:** Replace `YOUR_PROJECT_ID` and `YOUR_DATASET_NAME` in the SQL
    > with your actual project ID and dataset name.

    Then run the command below to execute the SQL query:

    ```bash
    bq query --project_id=$GOOGLE_CLOUD_PROJECT --dataset_id=$BQ_DATASET_NAME --use_legacy_sql=false < insert_hotels_data.sql
    ```

## Step 2: Install and configure Toolbox

In this section, we will download Toolbox, configure our tools in a
`tools.yaml`, and then run the Toolbox server.

1. Download the latest version of Toolbox as a binary:

    {{< notice tip >}}
   Select the
   [correct binary](https://github.com/googleapis/genai-toolbox/releases)
   corresponding to your OS and CPU architecture.
    {{< /notice >}}
    <!-- {x-release-please-start-version} -->
    ```bash
    export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
    curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/$OS/toolbox
    ```
    <!-- {x-release-please-end} -->

1. Make the binary executable:

    ```bash
    chmod +x toolbox
    ```

1. Write the following into a `tools.yaml` file. You must replace the
   `YOUR_PROJECT_ID` and `YOUR_DATASET_NAME` placeholders in the config with your
   actual BigQuery project and dataset name. The `location` field is optional;
   if not specified, it defaults to 'us'. The table name `hotels` is used
   directly in the statements.

    {{< notice tip >}}
  Authentication with BigQuery is handled via Application Default Credentials
  (ADC). Ensure you have run `gcloud auth application-default login`.
    {{< /notice >}}

    ```yaml
    sources:
      my-bigquery-source:
        kind: bigquery
        project: YOUR_PROJECT_ID
        location: us
    tools:
      search-hotels-by-name:
        kind: bigquery-sql
        source: my-bigquery-source
        description: Search for hotels based on name.
        parameters:
          - name: name
            type: string
            description: The name of the hotel.
        statement: SELECT * FROM `YOUR_DATASET_NAME.hotels` WHERE LOWER(name) LIKE LOWER(CONCAT('%', @name, '%'));
      search-hotels-by-location:
        kind: bigquery-sql
        source: my-bigquery-source
        description: Search for hotels based on location.
        parameters:
          - name: location
            type: string
            description: The location of the hotel.
        statement: SELECT * FROM `YOUR_DATASET_NAME.hotels` WHERE LOWER(location) LIKE LOWER(CONCAT('%', @location, '%'));
      book-hotel:
        kind: bigquery-sql
        source: my-bigquery-source
        description: >-
           Book a hotel by its ID. Returns NULL if the hotel is successfully booked and raises an error if not.
        parameters:
          - name: hotel_id
            type: integer
            description: The ID of the hotel to book.
        statement: UPDATE `YOUR_DATASET_NAME.hotels` SET booked = TRUE WHERE id = @hotel_id;
      update-hotel:
        kind: bigquery-sql
        source: my-bigquery-source
        description: >-
          Update a hotel's check-in and check-out dates by its ID. Returns a message indicating whether the hotel was successfully updated or not.
        parameters:
          - name: checkin_date
            type: string
            description: The new check-in date of the hotel.
          - name: checkout_date
            type: string
            description: The new check-out date of the hotel.
          - name: hotel_id
            type: integer
            description: The ID of the hotel to update.
        statement: >-
          UPDATE `YOUR_DATASET_NAME.hotels` SET checkin_date = PARSE_DATE('%Y-%m-%d', @checkin_date), checkout_date = PARSE_DATE('%Y-%m-%d', @checkout_date) WHERE id = @hotel_id;
      cancel-hotel:
        kind: bigquery-sql
        source: my-bigquery-source
        description: Cancel a hotel by its ID.
        parameters:
          - name: hotel_id
            type: integer
            description: The ID of the hotel to cancel.
        statement: UPDATE `YOUR_DATASET_NAME.hotels` SET booked = FALSE WHERE id = @hotel_id;
    toolsets:
      my-toolset:
        - search-hotels-by-name
        - search-hotels-by-location
        - book-hotel
        - update-hotel
        - cancel-hotel
    ```

    For more info on tools, check out the
    [Tools](../../../resources/tools/) section.

1. Run the Toolbox server, pointing to the `tools.yaml` file created earlier:

    ```bash
    ./toolbox --tools-file "tools.yaml"
    ```
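
    You can optionally confirm that the server is serving your tools before continuing, for example (assuming Toolbox's default port of `5000` and its `/api/toolset` listing endpoint):

    ```bash
    curl http://127.0.0.1:5000/api/toolset
    ```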

## Step 3: Connect to MCP Inspector

1. Run the MCP Inspector:

    ```bash
    npx @modelcontextprotocol/inspector
    ```

1. Type `y` when it asks to install the inspector package.

1. It should show the following when the MCP Inspector is up and running (please
   take note of `<YOUR_SESSION_TOKEN>`):

    ```bash
    Starting MCP inspector...
    ⚙️ Proxy server listening on localhost:6277
    🔑 Session token: <YOUR_SESSION_TOKEN>
       Use this token to authenticate requests or set DANGEROUSLY_OMIT_AUTH=true to disable auth

    🚀 MCP Inspector is up and running at:
       http://localhost:6274/?MCP_PROXY_AUTH_TOKEN=<YOUR_SESSION_TOKEN>
    ```

1. Open the above link in your browser.

1. For `Transport Type`, select `Streamable HTTP`.

1. For `URL`, type in `http://127.0.0.1:5000/mcp`.

1. For `Configuration` -> `Proxy Session Token`, make sure
   `<YOUR_SESSION_TOKEN>` is present.

1. Click Connect.

    ![inspector](./inspector.png)

1. Select `List Tools` to see the list of tools configured in `tools.yaml`.

    ![inspector_tools](./inspector_tools.png)

1. Test out your tools here!

```

--------------------------------------------------------------------------------
/internal/tools/sqlite/sqlitesql/sqlitesql_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package sqlitesql_test

import (
	"context"
	"database/sql"
	"reflect"
	"testing"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/server"
	"github.com/googleapis/genai-toolbox/internal/testutils"
	"github.com/googleapis/genai-toolbox/internal/tools"
	"github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql"
	_ "modernc.org/sqlite"
)

func TestParseFromYamlSQLite(t *testing.T) {
	ctx, err := testutils.ContextWithNewLogger()
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	tcs := []struct {
		desc string
		in   string
		want server.ToolConfigs
	}{
		{
			desc: "basic example",
			in: `
			tools:
				example_tool:
					kind: sqlite-sql
					source: my-sqlite-instance
					description: some description
					statement: |
						SELECT * FROM SQL_STATEMENT;
					authRequired:
						- my-google-auth-service
						- other-auth-service
					parameters:
						- name: country
						  type: string
						  description: some description
						  authServices:
							- name: my-google-auth-service
							  field: user_id
							- name: other-auth-service
							  field: user_id
			`,
			want: server.ToolConfigs{
				"example_tool": sqlitesql.Config{
					Name:         "example_tool",
					Kind:         "sqlite-sql",
					Source:       "my-sqlite-instance",
					Description:  "some description",
					Statement:    "SELECT * FROM SQL_STATEMENT;\n",
					AuthRequired: []string{"my-google-auth-service", "other-auth-service"},
					Parameters: []tools.Parameter{
						tools.NewStringParameterWithAuth("country", "some description",
							[]tools.ParamAuthService{{Name: "my-google-auth-service", Field: "user_id"},
								{Name: "other-auth-service", Field: "user_id"}}),
					},
				},
			},
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Tools server.ToolConfigs `yaml:"tools"`
			}{}
			// Parse contents
			err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
			if err != nil {
				t.Fatalf("unable to unmarshal: %s", err)
			}
			if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
				t.Fatalf("incorrect parse: diff %v", diff)
			}
		})
	}

}

func TestParseFromYamlWithTemplateSqlite(t *testing.T) {
	ctx, err := testutils.ContextWithNewLogger()
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	tcs := []struct {
		desc string
		in   string
		want server.ToolConfigs
	}{
		{
			desc: "basic example",
			in: `
			tools:
				example_tool:
					kind: sqlite-sql
					source: my-sqlite-db
					description: some description
					statement: |
						SELECT * FROM SQL_STATEMENT;
					authRequired:
						- my-google-auth-service
						- other-auth-service
					parameters:
						- name: country
						  type: string
						  description: some description
						  authServices:
							- name: my-google-auth-service
							  field: user_id
							- name: other-auth-service
							  field: user_id
					templateParameters:
						- name: tableName
						  type: string
						  description: The table to select hotels from.
						- name: fieldArray
						  type: array
						  description: The columns to return for the query.
						  items: 
								name: column
								type: string
								description: A column name that will be returned from the query.
			`,
			want: server.ToolConfigs{
				"example_tool": sqlitesql.Config{
					Name:         "example_tool",
					Kind:         "sqlite-sql",
					Source:       "my-sqlite-db",
					Description:  "some description",
					Statement:    "SELECT * FROM SQL_STATEMENT;\n",
					AuthRequired: []string{"my-google-auth-service", "other-auth-service"},
					Parameters: []tools.Parameter{
						tools.NewStringParameterWithAuth("country", "some description",
							[]tools.ParamAuthService{{Name: "my-google-auth-service", Field: "user_id"},
								{Name: "other-auth-service", Field: "user_id"}}),
					},
					TemplateParameters: []tools.Parameter{
						tools.NewStringParameter("tableName", "The table to select hotels from."),
						tools.NewArrayParameter("fieldArray", "The columns to return for the query.", tools.NewStringParameter("column", "A column name that will be returned from the query.")),
					},
				},
			},
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Tools server.ToolConfigs `yaml:"tools"`
			}{}
			// Parse contents
			err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
			if err != nil {
				t.Fatalf("unable to unmarshal: %s", err)
			}
			if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
				t.Fatalf("incorrect parse: diff %v", diff)
			}
		})
	}
}

func setupTestDB(t *testing.T) *sql.DB {
	db, err := sql.Open("sqlite", ":memory:")
	if err != nil {
		t.Fatalf("Failed to open in-memory database: %v", err)
	}

	createTable := `
	CREATE TABLE users (
		id INTEGER PRIMARY KEY,
		name TEXT,
		age INTEGER
	);`
	if _, err := db.Exec(createTable); err != nil {
		t.Fatalf("Failed to create table: %v", err)
	}

	insertData := `
	INSERT INTO users (id, name, age) VALUES
	(1, 'Alice', 30),
	(2, 'Bob', 25);`
	if _, err := db.Exec(insertData); err != nil {
		t.Fatalf("Failed to insert data: %v", err)
	}

	return db
}

func TestTool_Invoke(t *testing.T) {
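	// Each case below runs against a fresh in-memory database from setupTestDB and
	// covers a plain SELECT, a bound (?) parameter, a template-expanded table name,
	// and error propagation for invalid SQL.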
	type fields struct {
		Name               string
		Kind               string
		AuthRequired       []string
		Parameters         tools.Parameters
		TemplateParameters tools.Parameters
		AllParams          tools.Parameters
		Db                 *sql.DB
		Statement          string
	}
	type args struct {
		ctx         context.Context
		params      tools.ParamValues
		accessToken tools.AccessToken
	}
	tests := []struct {
		name    string
		fields  fields
		args    args
		want    any
		wantErr bool
	}{
		{
			name: "simple select",
			fields: fields{
				Db:        setupTestDB(t),
				Statement: "SELECT * FROM users",
			},
			args: args{
				ctx: context.Background(),
			},
			want: []any{
				map[string]any{"id": int64(1), "name": "Alice", "age": int64(30)},
				map[string]any{"id": int64(2), "name": "Bob", "age": int64(25)},
			},
			wantErr: false,
		},
		{
			name: "select with parameter",
			fields: fields{
				Db:        setupTestDB(t),
				Statement: "SELECT * FROM users WHERE name = ?",
				Parameters: []tools.Parameter{
					tools.NewStringParameter("name", "user name"),
				},
			},
			args: args{
				ctx: context.Background(),
				params: []tools.ParamValue{
					{Name: "name", Value: "Alice"},
				},
			},
			want: []any{
				map[string]any{"id": int64(1), "name": "Alice", "age": int64(30)},
			},
			wantErr: false,
		},
		{
			name: "select with template parameter",
			fields: fields{
				Db:        setupTestDB(t),
				Statement: "SELECT * FROM {{.tableName}}",
				TemplateParameters: []tools.Parameter{
					tools.NewStringParameter("tableName", "table name"),
				},
			},
			args: args{
				ctx: context.Background(),
				params: []tools.ParamValue{
					{Name: "tableName", Value: "users"},
				},
			},
			want: []any{
				map[string]any{"id": int64(1), "name": "Alice", "age": int64(30)},
				map[string]any{"id": int64(2), "name": "Bob", "age": int64(25)},
			},
			wantErr: false,
		},
		{
			name: "invalid sql",
			fields: fields{
				Db:        setupTestDB(t),
				Statement: "SELECT * FROM non_existent_table",
			},
			args: args{
				ctx: context.Background(),
			},
			want:    nil,
			wantErr: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tr := sqlitesql.Tool{
				Name:               tt.fields.Name,
				Kind:               tt.fields.Kind,
				AuthRequired:       tt.fields.AuthRequired,
				Parameters:         tt.fields.Parameters,
				TemplateParameters: tt.fields.TemplateParameters,
				AllParams:          tt.fields.AllParams,
				Db:                 tt.fields.Db,
				Statement:          tt.fields.Statement,
			}
			got, err := tr.Invoke(tt.args.ctx, tt.args.params, tt.args.accessToken)
			if (err != nil) != tt.wantErr {
				t.Errorf("Tool.Invoke() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("Tool.Invoke() = %v, want %v", got, tt.want)
			}
		})
	}
}

```

--------------------------------------------------------------------------------
/internal/tools/mysql/mysqllistactivequeries/mysqllistactivequeries.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package mysqllistactivequeries

import (
	"context"
	"database/sql"
	"fmt"

	yaml "github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmysql"
	"github.com/googleapis/genai-toolbox/internal/sources/mysql"
	"github.com/googleapis/genai-toolbox/internal/tools"
	"github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlcommon"
	"github.com/googleapis/genai-toolbox/internal/util"
)

const kind string = "mysql-list-active-queries"

const listActiveQueriesStatementMySQL = `
	SELECT
		p.id AS processlist_id,
		substring(IFNULL(p.info, t.trx_query), 1, 100) AS query,
		t.trx_started AS trx_started,
		(UNIX_TIMESTAMP(UTC_TIMESTAMP()) - UNIX_TIMESTAMP(t.trx_started)) AS trx_duration_seconds,
		(UNIX_TIMESTAMP(UTC_TIMESTAMP()) - UNIX_TIMESTAMP(t.trx_wait_started)) AS trx_wait_duration_seconds,
		p.time AS query_time,
		t.trx_state AS trx_state,
		p.state AS process_state,
		IF(p.host IS NULL OR p.host = '', p.user, concat(p.user, '@', SUBSTRING_INDEX(p.host, ':', 1))) AS user,
		t.trx_rows_locked AS trx_rows_locked,
		t.trx_rows_modified AS trx_rows_modified,
		p.db AS db
	FROM
		information_schema.processlist p
		LEFT OUTER JOIN
		information_schema.innodb_trx t
		ON p.id = t.trx_mysql_thread_id
	WHERE
		(? IS NULL OR p.time >= ?)
		AND p.id != CONNECTION_ID()
		AND Command NOT IN ('Binlog Dump', 'Binlog Dump GTID', 'Connect', 'Connect Out', 'Register Slave')
		AND User NOT IN ('system user', 'event_scheduler')
		AND (t.trx_id is NOT NULL OR command != 'Sleep')
	ORDER BY
		t.trx_started
	LIMIT ?;
`
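
// The Cloud SQL variant of this statement (below) is identical except that it also
// filters out sessions whose client host is localhost/127.0.0.1/::1, which on a
// Cloud SQL instance are typically internal connections rather than user-issued queries.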

const listActiveQueriesStatementCloudSQLMySQL = `
	SELECT
		p.id AS processlist_id,
		substring(IFNULL(p.info, t.trx_query), 1, 100) AS query,
		t.trx_started AS trx_started,
		(UNIX_TIMESTAMP(UTC_TIMESTAMP()) - UNIX_TIMESTAMP(t.trx_started)) AS trx_duration_seconds,
		(UNIX_TIMESTAMP(UTC_TIMESTAMP()) - UNIX_TIMESTAMP(t.trx_wait_started)) AS trx_wait_duration_seconds,
		p.time AS query_time,
		t.trx_state AS trx_state,
		p.state AS process_state,
		IF(p.host IS NULL OR p.host = '', p.user, concat(p.user, '@', SUBSTRING_INDEX(p.host, ':', 1))) AS user,
		t.trx_rows_locked AS trx_rows_locked,
		t.trx_rows_modified AS trx_rows_modified,
		p.db AS db
	FROM
		information_schema.processlist p
		LEFT OUTER JOIN
		information_schema.innodb_trx t
		ON p.id = t.trx_mysql_thread_id
	WHERE
		(? IS NULL OR p.time >= ?)
		AND p.id != CONNECTION_ID()
		AND SUBSTRING_INDEX(IFNULL(p.host,''), ':', 1) NOT IN ('localhost', '127.0.0.1')
		AND IFNULL(p.host,'') NOT LIKE '::1%'
		AND Command NOT IN ('Binlog Dump', 'Binlog Dump GTID', 'Connect', 'Connect Out', 'Register Slave')
		AND User NOT IN ('system user', 'event_scheduler')
		AND (t.trx_id is NOT NULL OR command != 'sleep')
	ORDER BY
		t.trx_started
	LIMIT ?;
`

func init() {
	if !tools.Register(kind, newConfig) {
		panic(fmt.Sprintf("tool kind %q already registered", kind))
	}
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
	actual := Config{Name: name}
	if err := decoder.DecodeContext(ctx, &actual); err != nil {
		return nil, err
	}
	return actual, nil
}

type compatibleSource interface {
	MySQLPool() *sql.DB
}

// validate compatible sources are still compatible
var _ compatibleSource = &mysql.Source{}
var _ compatibleSource = &cloudsqlmysql.Source{}

var compatibleSources = [...]string{mysql.SourceKind, cloudsqlmysql.SourceKind}

type Config struct {
	Name         string   `yaml:"name" validate:"required"`
	Kind         string   `yaml:"kind" validate:"required"`
	Source       string   `yaml:"source" validate:"required"`
	Description  string   `yaml:"description" validate:"required"`
	AuthRequired []string `yaml:"authRequired"`
}

// validate interface
var _ tools.ToolConfig = Config{}

func (cfg Config) ToolConfigKind() string {
	return kind
}

func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
	// verify source exists
	rawS, ok := srcs[cfg.Source]
	if !ok {
		return nil, fmt.Errorf("no source named %q configured", cfg.Source)
	}

	// verify the source is compatible
	s, ok := rawS.(compatibleSource)
	if !ok {
		return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
	}

	allParameters := tools.Parameters{
		tools.NewIntParameterWithDefault("min_duration_secs", 0, "Optional: Only show queries running for at least this long in seconds"),
		tools.NewIntParameterWithDefault("limit", 100, "Optional: The maximum number of rows to return."),
	}
	mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, allParameters)

	var statement string
	sourceKind := rawS.SourceKind()

	switch sourceKind {
	case mysql.SourceKind:
		statement = listActiveQueriesStatementMySQL
	case cloudsqlmysql.SourceKind:
		statement = listActiveQueriesStatementCloudSQLMySQL
	default:
		return nil, fmt.Errorf("unsupported source kind: %q", sourceKind)
	}
	// finish tool setup
	t := Tool{
		Name:         cfg.Name,
		Kind:         kind,
		AuthRequired: cfg.AuthRequired,
		Pool:         s.MySQLPool(),
		allParams:    allParameters,
		manifest:     tools.Manifest{Description: cfg.Description, Parameters: allParameters.Manifest(), AuthRequired: cfg.AuthRequired},
		mcpManifest:  mcpManifest,
		statement:    statement,
	}
	return t, nil
}

// validate interface
var _ tools.Tool = Tool{}

type Tool struct {
	Name         string           `yaml:"name"`
	Kind         string           `yaml:"kind"`
	AuthRequired []string         `yaml:"authRequired"`
	allParams    tools.Parameters `yaml:"parameters"`
	Pool         *sql.DB
	manifest     tools.Manifest
	mcpManifest  tools.McpManifest
	statement    string
}

func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
	paramsMap := params.AsMap()

	duration, ok := paramsMap["min_duration_secs"].(int)
	if !ok {
		return nil, fmt.Errorf("invalid 'min_duration_secs' parameter; expected an integer")
	}
	limit, ok := paramsMap["limit"].(int)
	if !ok {
		return nil, fmt.Errorf("invalid 'limit' parameter; expected an integer")
	}

	// Log the query executed for debugging.
	logger, err := util.LoggerFromContext(ctx)
	if err != nil {
		return nil, fmt.Errorf("error getting logger: %s", err)
	}
	logger.DebugContext(ctx, "executing `%s` tool query: %s", kind, t.statement)

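	// The WHERE clause `(? IS NULL OR p.time >= ?)` consumes min_duration_secs
	// twice; the final placeholder supplies the LIMIT.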
	results, err := t.Pool.QueryContext(ctx, t.statement, duration, duration, limit)
	if err != nil {
		return nil, fmt.Errorf("unable to execute query: %w", err)
	}
	defer results.Close()

	cols, err := results.Columns()
	if err != nil {
		return nil, fmt.Errorf("unable to retrieve rows column name: %w", err)
	}

	// create an array of values for each column, which can be re-used to scan each row
	rawValues := make([]any, len(cols))
	values := make([]any, len(cols))
	for i := range rawValues {
		values[i] = &rawValues[i]
	}

	colTypes, err := results.ColumnTypes()
	if err != nil {
		return nil, fmt.Errorf("unable to get column types: %w", err)
	}

	var out []any
	for results.Next() {
		err := results.Scan(values...)
		if err != nil {
			return nil, fmt.Errorf("unable to parse row: %w", err)
		}
		vMap := make(map[string]any)
		for i, name := range cols {
			val := rawValues[i]
			if val == nil {
				vMap[name] = nil
				continue
			}

			vMap[name], err = mysqlcommon.ConvertToType(colTypes[i], val)
			if err != nil {
				return nil, fmt.Errorf("errors encountered when converting values: %w", err)
			}
		}
		out = append(out, vMap)
	}

	if err := results.Err(); err != nil {
		return nil, fmt.Errorf("errors encountered during row iteration: %w", err)
	}

	return out, nil
}

func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
	return tools.ParseParams(t.allParams, data, claims)
}

func (t Tool) Manifest() tools.Manifest {
	return t.manifest
}

func (t Tool) McpManifest() tools.McpManifest {
	return t.mcpManifest
}

func (t Tool) Authorized(verifiedAuthServices []string) bool {
	return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}

func (t Tool) RequiresClientAuthorization() bool {
	return false
}

```
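
The `Invoke` method above scans result sets without knowing the column list ahead of time: it allocates one `any` slot per column plus a parallel slice of pointers into those slots, so a single `Scan` fills every column. A minimal, self-contained sketch of that pattern follows (package and function names are hypothetical; the real tool additionally converts each value with `mysqlcommon.ConvertToType`):

```go
package rowscan

import (
	"database/sql"
	"fmt"
)

// rowsToMaps converts every row in rs into a map keyed by column name,
// mirroring the generic scanning pattern used in the tool's Invoke method.
func rowsToMaps(rs *sql.Rows) ([]map[string]any, error) {
	cols, err := rs.Columns()
	if err != nil {
		return nil, fmt.Errorf("unable to retrieve column names: %w", err)
	}

	// One reusable slot per column; values holds pointers into rawValues so
	// a single Scan call populates every slot.
	rawValues := make([]any, len(cols))
	values := make([]any, len(cols))
	for i := range rawValues {
		values[i] = &rawValues[i]
	}

	var out []map[string]any
	for rs.Next() {
		if err := rs.Scan(values...); err != nil {
			return nil, fmt.Errorf("unable to parse row: %w", err)
		}
		row := make(map[string]any, len(cols))
		for i, name := range cols {
			row[name] = rawValues[i]
		}
		out = append(out, row)
	}
	return out, rs.Err()
}
```

With the MySQL driver, text columns typically arrive as `[]byte`, which is why the tool runs each value through the column-type conversion before returning results.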

--------------------------------------------------------------------------------
/internal/tools/firestore/firestorevalidaterules/firestorevalidaterules.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package firestorevalidaterules

import (
	"context"
	"fmt"
	"strings"

	yaml "github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	firestoreds "github.com/googleapis/genai-toolbox/internal/sources/firestore"
	"github.com/googleapis/genai-toolbox/internal/tools"
	"google.golang.org/api/firebaserules/v1"
)

const kind string = "firestore-validate-rules"

// Parameter keys
const (
	sourceKey = "source"
)

func init() {
	if !tools.Register(kind, newConfig) {
		panic(fmt.Sprintf("tool kind %q already registered", kind))
	}
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
	actual := Config{Name: name}
	if err := decoder.DecodeContext(ctx, &actual); err != nil {
		return nil, err
	}
	return actual, nil
}

type compatibleSource interface {
	FirebaseRulesClient() *firebaserules.Service
	GetProjectId() string
}

// validate compatible sources are still compatible
var _ compatibleSource = &firestoreds.Source{}

var compatibleSources = [...]string{firestoreds.SourceKind}

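// Config is the YAML configuration for the firestore-validate-rules tool.
// An example tools.yaml entry (tool and source names here are hypothetical):
//
//	validate_rules:
//	  kind: firestore-validate-rules
//	  source: my-firestore-source
//	  description: Validate Firestore security rules before deployment.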
type Config struct {
	Name         string   `yaml:"name" validate:"required"`
	Kind         string   `yaml:"kind" validate:"required"`
	Source       string   `yaml:"source" validate:"required"`
	Description  string   `yaml:"description" validate:"required"`
	AuthRequired []string `yaml:"authRequired"`
}

// validate interface
var _ tools.ToolConfig = Config{}

func (cfg Config) ToolConfigKind() string {
	return kind
}

func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
	// verify source exists
	rawS, ok := srcs[cfg.Source]
	if !ok {
		return nil, fmt.Errorf("no source named %q configured", cfg.Source)
	}

	// verify the source is compatible
	s, ok := rawS.(compatibleSource)
	if !ok {
		return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
	}

	// Create parameters
	parameters := createParameters()
	mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, parameters)

	// finish tool setup
	t := Tool{
		Name:         cfg.Name,
		Kind:         kind,
		Parameters:   parameters,
		AuthRequired: cfg.AuthRequired,
		RulesClient:  s.FirebaseRulesClient(),
		ProjectId:    s.GetProjectId(),
		manifest:     tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
		mcpManifest:  mcpManifest,
	}
	return t, nil
}

// createParameters creates the parameter definitions for the tool
func createParameters() tools.Parameters {
	sourceParameter := tools.NewStringParameter(
		sourceKey,
		"The Firestore Rules source code to validate",
	)

	return tools.Parameters{sourceParameter}
}

// validate interface
var _ tools.Tool = Tool{}

type Tool struct {
	Name         string           `yaml:"name"`
	Kind         string           `yaml:"kind"`
	AuthRequired []string         `yaml:"authRequired"`
	Parameters   tools.Parameters `yaml:"parameters"`

	RulesClient *firebaserules.Service
	ProjectId   string
	manifest    tools.Manifest
	mcpManifest tools.McpManifest
}

// Issue represents a validation issue in the rules
type Issue struct {
	SourcePosition SourcePosition `json:"sourcePosition"`
	Description    string         `json:"description"`
	Severity       string         `json:"severity"`
}

// SourcePosition represents the location of an issue in the source
type SourcePosition struct {
	FileName      string `json:"fileName,omitempty"`
	Line          int64  `json:"line"`          // 1-based
	Column        int64  `json:"column"`        // 1-based
	CurrentOffset int64  `json:"currentOffset"` // 0-based, inclusive start
	EndOffset     int64  `json:"endOffset"`     // 0-based, exclusive end
}

// ValidationResult represents the result of rules validation
type ValidationResult struct {
	Valid           bool    `json:"valid"`
	IssueCount      int     `json:"issueCount"`
	FormattedIssues string  `json:"formattedIssues,omitempty"`
	RawIssues       []Issue `json:"rawIssues,omitempty"`
}

func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
	mapParams := params.AsMap()

	// Get source parameter
	source, ok := mapParams[sourceKey].(string)
	if !ok || source == "" {
		return nil, fmt.Errorf("invalid or missing '%s' parameter", sourceKey)
	}

	// Create test request
	testRequest := &firebaserules.TestRulesetRequest{
		Source: &firebaserules.Source{
			Files: []*firebaserules.File{
				{
					Name:    "firestore.rules",
					Content: source,
				},
			},
		},
		// We don't need test cases for validation only
		TestSuite: &firebaserules.TestSuite{
			TestCases: []*firebaserules.TestCase{},
		},
	}

	// Call the test API
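	// Projects.Test compiles the ruleset; syntactic and semantic problems
	// come back as Issues on the response.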
	projectName := fmt.Sprintf("projects/%s", t.ProjectId)
	response, err := t.RulesClient.Projects.Test(projectName, testRequest).Context(ctx).Do()
	if err != nil {
		return nil, fmt.Errorf("failed to validate rules: %w", err)
	}

	// Process the response
	result := t.processValidationResponse(response, source)

	return result, nil
}

func (t Tool) processValidationResponse(response *firebaserules.TestRulesetResponse, source string) ValidationResult {
	if len(response.Issues) == 0 {
		return ValidationResult{
			Valid:           true,
			IssueCount:      0,
			FormattedIssues: "✓ No errors detected. Rules are valid.",
		}
	}

	// Convert issues to our format
	issues := make([]Issue, len(response.Issues))
	for i, issue := range response.Issues {
		issues[i] = Issue{
			Description: issue.Description,
			Severity:    issue.Severity,
			SourcePosition: SourcePosition{
				FileName:      issue.SourcePosition.FileName,
				Line:          issue.SourcePosition.Line,
				Column:        issue.SourcePosition.Column,
				CurrentOffset: issue.SourcePosition.CurrentOffset,
				EndOffset:     issue.SourcePosition.EndOffset,
			},
		}
	}

	// Format issues
	formattedIssues := t.formatRulesetIssues(issues, source)

	return ValidationResult{
		Valid:           false,
		IssueCount:      len(issues),
		FormattedIssues: formattedIssues,
		RawIssues:       issues,
	}
}

// formatRulesetIssues formats validation issues into a human-readable string with code snippets
func (t Tool) formatRulesetIssues(issues []Issue, rulesSource string) string {
	sourceLines := strings.Split(rulesSource, "\n")
	var formattedOutput []string

	formattedOutput = append(formattedOutput, fmt.Sprintf("Found %d issue(s) in rules source:\n", len(issues)))

	for _, issue := range issues {
		issueString := fmt.Sprintf("%s: %s [Ln %d, Col %d]",
			issue.Severity,
			issue.Description,
			issue.SourcePosition.Line,
			issue.SourcePosition.Column)

		if issue.SourcePosition.Line > 0 {
			lineIndex := int(issue.SourcePosition.Line - 1) // 0-based index
			if lineIndex >= 0 && lineIndex < len(sourceLines) {
				errorLine := sourceLines[lineIndex]
				issueString += fmt.Sprintf("\n```\n%s", errorLine)

				// Add carets if we have column and offset information
				if issue.SourcePosition.Column > 0 &&
					issue.SourcePosition.CurrentOffset >= 0 &&
					issue.SourcePosition.EndOffset > issue.SourcePosition.CurrentOffset {

					startColumn := int(issue.SourcePosition.Column - 1) // 0-based
					errorTokenLength := int(issue.SourcePosition.EndOffset - issue.SourcePosition.CurrentOffset)

					if startColumn >= 0 && errorTokenLength > 0 && startColumn <= len(errorLine) {
						padding := strings.Repeat(" ", startColumn)
						carets := strings.Repeat("^", errorTokenLength)
						issueString += fmt.Sprintf("\n%s%s", padding, carets)
					}
				}
				issueString += "\n```"
			}
		}

		formattedOutput = append(formattedOutput, issueString)
	}

	return strings.Join(formattedOutput, "\n\n")
}

func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
	return tools.ParseParams(t.Parameters, data, claims)
}

func (t Tool) Manifest() tools.Manifest {
	return t.manifest
}

func (t Tool) McpManifest() tools.McpManifest {
	return t.mcpManifest
}

func (t Tool) Authorized(verifiedAuthServices []string) bool {
	return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}

func (t Tool) RequiresClientAuthorization() bool {
	return false
}

```
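
The caret alignment in `formatRulesetIssues` is straightforward column arithmetic: pad with spaces up to the 1-based column, then repeat `^` for the token width implied by the start and end offsets. A standalone sketch of just that step (package and function names are hypothetical; the real code takes these values from the Rules API's `SourcePosition`):

```go
package example

import (
	"fmt"
	"strings"
)

// underline appends a row of carets beneath the offending token on line,
// using the same bounds checks as formatRulesetIssues.
func underline(line string, column, currentOffset, endOffset int64) string {
	startColumn := int(column - 1)                // 1-based column -> 0-based index
	tokenLength := int(endOffset - currentOffset) // width of the offending token
	if startColumn < 0 || tokenLength <= 0 || startColumn > len(line) {
		return line
	}
	padding := strings.Repeat(" ", startColumn)
	carets := strings.Repeat("^", tokenLength)
	return fmt.Sprintf("%s\n%s%s", line, padding, carets)
}
```

For a one-line rules snippet such as `allow read: if tru;` reported at column 16 with offsets 15 and 18, the carets line up under `tru`.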
Page 18/36