This is page 20 of 37. Use http://codebase.md/googleapis/genai-toolbox?page={x} to view the full context.
# Directory Structure
```
├── .ci
│ ├── continuous.release.cloudbuild.yaml
│ ├── generate_release_table.sh
│ ├── integration.cloudbuild.yaml
│ ├── quickstart_test
│ │ ├── go.integration.cloudbuild.yaml
│ │ ├── js.integration.cloudbuild.yaml
│ │ ├── py.integration.cloudbuild.yaml
│ │ ├── run_go_tests.sh
│ │ ├── run_js_tests.sh
│ │ ├── run_py_tests.sh
│ │ └── setup_hotels_sample.sql
│ ├── test_with_coverage.sh
│ └── versioned.release.cloudbuild.yaml
├── .github
│ ├── auto-label.yaml
│ ├── blunderbuss.yml
│ ├── CODEOWNERS
│ ├── header-checker-lint.yml
│ ├── ISSUE_TEMPLATE
│ │ ├── bug_report.yml
│ │ ├── config.yml
│ │ ├── feature_request.yml
│ │ └── question.yml
│ ├── label-sync.yml
│ ├── labels.yaml
│ ├── PULL_REQUEST_TEMPLATE.md
│ ├── release-please.yml
│ ├── renovate.json5
│ ├── sync-repo-settings.yaml
│ └── workflows
│ ├── cloud_build_failure_reporter.yml
│ ├── deploy_dev_docs.yaml
│ ├── deploy_previous_version_docs.yaml
│ ├── deploy_versioned_docs.yaml
│ ├── docs_deploy.yaml
│ ├── docs_preview_clean.yaml
│ ├── docs_preview_deploy.yaml
│ ├── lint.yaml
│ ├── schedule_reporter.yml
│ ├── sync-labels.yaml
│ └── tests.yaml
├── .gitignore
├── .gitmodules
├── .golangci.yaml
├── .hugo
│ ├── archetypes
│ │ └── default.md
│ ├── assets
│ │ ├── icons
│ │ │ └── logo.svg
│ │ └── scss
│ │ ├── _styles_project.scss
│ │ └── _variables_project.scss
│ ├── go.mod
│ ├── go.sum
│ ├── hugo.toml
│ ├── layouts
│ │ ├── _default
│ │ │ └── home.releases.releases
│ │ ├── index.llms-full.txt
│ │ ├── index.llms.txt
│ │ ├── partials
│ │ │ ├── hooks
│ │ │ │ └── head-end.html
│ │ │ ├── navbar-version-selector.html
│ │ │ ├── page-meta-links.html
│ │ │ └── td
│ │ │ └── render-heading.html
│ │ ├── robot.txt
│ │ └── shortcodes
│ │ ├── include.html
│ │ ├── ipynb.html
│ │ └── regionInclude.html
│ ├── package-lock.json
│ ├── package.json
│ └── static
│ ├── favicons
│ │ ├── android-chrome-192x192.png
│ │ ├── android-chrome-512x512.png
│ │ ├── apple-touch-icon.png
│ │ ├── favicon-16x16.png
│ │ ├── favicon-32x32.png
│ │ └── favicon.ico
│ └── js
│ └── w3.js
├── CHANGELOG.md
├── cmd
│ ├── options_test.go
│ ├── options.go
│ ├── root_test.go
│ ├── root.go
│ └── version.txt
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── DEVELOPER.md
├── Dockerfile
├── docs
│ └── en
│ ├── _index.md
│ ├── about
│ │ ├── _index.md
│ │ └── faq.md
│ ├── concepts
│ │ ├── _index.md
│ │ └── telemetry
│ │ ├── index.md
│ │ ├── telemetry_flow.png
│ │ └── telemetry_traces.png
│ ├── getting-started
│ │ ├── _index.md
│ │ ├── colab_quickstart.ipynb
│ │ ├── configure.md
│ │ ├── introduction
│ │ │ ├── _index.md
│ │ │ └── architecture.png
│ │ ├── local_quickstart_go.md
│ │ ├── local_quickstart_js.md
│ │ ├── local_quickstart.md
│ │ ├── mcp_quickstart
│ │ │ ├── _index.md
│ │ │ ├── inspector_tools.png
│ │ │ └── inspector.png
│ │ └── quickstart
│ │ ├── go
│ │ │ ├── genAI
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── genkit
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── langchain
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── openAI
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ └── quickstart_test.go
│ │ ├── golden.txt
│ │ ├── js
│ │ │ ├── genAI
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── genkit
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── langchain
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── llamaindex
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ └── quickstart.test.js
│ │ ├── python
│ │ │ ├── __init__.py
│ │ │ ├── adk
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── core
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── langchain
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── llamaindex
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ └── quickstart_test.py
│ │ └── shared
│ │ ├── cloud_setup.md
│ │ ├── configure_toolbox.md
│ │ └── database_setup.md
│ ├── how-to
│ │ ├── _index.md
│ │ ├── connect_via_geminicli.md
│ │ ├── connect_via_mcp.md
│ │ ├── connect-ide
│ │ │ ├── _index.md
│ │ │ ├── alloydb_pg_admin_mcp.md
│ │ │ ├── alloydb_pg_mcp.md
│ │ │ ├── bigquery_mcp.md
│ │ │ ├── cloud_sql_mssql_admin_mcp.md
│ │ │ ├── cloud_sql_mssql_mcp.md
│ │ │ ├── cloud_sql_mysql_admin_mcp.md
│ │ │ ├── cloud_sql_mysql_mcp.md
│ │ │ ├── cloud_sql_pg_admin_mcp.md
│ │ │ ├── cloud_sql_pg_mcp.md
│ │ │ ├── firestore_mcp.md
│ │ │ ├── looker_mcp.md
│ │ │ ├── mssql_mcp.md
│ │ │ ├── mysql_mcp.md
│ │ │ ├── neo4j_mcp.md
│ │ │ ├── postgres_mcp.md
│ │ │ ├── spanner_mcp.md
│ │ │ └── sqlite_mcp.md
│ │ ├── deploy_docker.md
│ │ ├── deploy_gke.md
│ │ ├── deploy_toolbox.md
│ │ ├── export_telemetry.md
│ │ └── toolbox-ui
│ │ ├── edit-headers.gif
│ │ ├── edit-headers.png
│ │ ├── index.md
│ │ ├── optional-param-checked.png
│ │ ├── optional-param-unchecked.png
│ │ ├── run-tool.gif
│ │ ├── tools.png
│ │ └── toolsets.png
│ ├── reference
│ │ ├── _index.md
│ │ ├── cli.md
│ │ └── prebuilt-tools.md
│ ├── resources
│ │ ├── _index.md
│ │ ├── authServices
│ │ │ ├── _index.md
│ │ │ └── google.md
│ │ ├── sources
│ │ │ ├── _index.md
│ │ │ ├── alloydb-admin.md
│ │ │ ├── alloydb-pg.md
│ │ │ ├── bigquery.md
│ │ │ ├── bigtable.md
│ │ │ ├── cassandra.md
│ │ │ ├── clickhouse.md
│ │ │ ├── cloud-monitoring.md
│ │ │ ├── cloud-sql-admin.md
│ │ │ ├── cloud-sql-mssql.md
│ │ │ ├── cloud-sql-mysql.md
│ │ │ ├── cloud-sql-pg.md
│ │ │ ├── couchbase.md
│ │ │ ├── dataplex.md
│ │ │ ├── dgraph.md
│ │ │ ├── firebird.md
│ │ │ ├── firestore.md
│ │ │ ├── http.md
│ │ │ ├── looker.md
│ │ │ ├── mongodb.md
│ │ │ ├── mssql.md
│ │ │ ├── mysql.md
│ │ │ ├── neo4j.md
│ │ │ ├── oceanbase.md
│ │ │ ├── oracle.md
│ │ │ ├── postgres.md
│ │ │ ├── redis.md
│ │ │ ├── serverless-spark.md
│ │ │ ├── spanner.md
│ │ │ ├── sqlite.md
│ │ │ ├── tidb.md
│ │ │ ├── trino.md
│ │ │ ├── valkey.md
│ │ │ └── yugabytedb.md
│ │ └── tools
│ │ ├── _index.md
│ │ ├── alloydb
│ │ │ ├── _index.md
│ │ │ ├── alloydb-create-cluster.md
│ │ │ ├── alloydb-create-instance.md
│ │ │ ├── alloydb-create-user.md
│ │ │ ├── alloydb-get-cluster.md
│ │ │ ├── alloydb-get-instance.md
│ │ │ ├── alloydb-get-user.md
│ │ │ ├── alloydb-list-clusters.md
│ │ │ ├── alloydb-list-instances.md
│ │ │ ├── alloydb-list-users.md
│ │ │ └── alloydb-wait-for-operation.md
│ │ ├── alloydbainl
│ │ │ ├── _index.md
│ │ │ └── alloydb-ai-nl.md
│ │ ├── bigquery
│ │ │ ├── _index.md
│ │ │ ├── bigquery-analyze-contribution.md
│ │ │ ├── bigquery-conversational-analytics.md
│ │ │ ├── bigquery-execute-sql.md
│ │ │ ├── bigquery-forecast.md
│ │ │ ├── bigquery-get-dataset-info.md
│ │ │ ├── bigquery-get-table-info.md
│ │ │ ├── bigquery-list-dataset-ids.md
│ │ │ ├── bigquery-list-table-ids.md
│ │ │ ├── bigquery-search-catalog.md
│ │ │ └── bigquery-sql.md
│ │ ├── bigtable
│ │ │ ├── _index.md
│ │ │ └── bigtable-sql.md
│ │ ├── cassandra
│ │ │ ├── _index.md
│ │ │ └── cassandra-cql.md
│ │ ├── clickhouse
│ │ │ ├── _index.md
│ │ │ ├── clickhouse-execute-sql.md
│ │ │ ├── clickhouse-list-databases.md
│ │ │ ├── clickhouse-list-tables.md
│ │ │ └── clickhouse-sql.md
│ │ ├── cloudmonitoring
│ │ │ ├── _index.md
│ │ │ └── cloud-monitoring-query-prometheus.md
│ │ ├── cloudsql
│ │ │ ├── _index.md
│ │ │ ├── cloudsqlcreatedatabase.md
│ │ │ ├── cloudsqlcreateusers.md
│ │ │ ├── cloudsqlgetinstances.md
│ │ │ ├── cloudsqllistdatabases.md
│ │ │ ├── cloudsqllistinstances.md
│ │ │ ├── cloudsqlmssqlcreateinstance.md
│ │ │ ├── cloudsqlmysqlcreateinstance.md
│ │ │ ├── cloudsqlpgcreateinstances.md
│ │ │ └── cloudsqlwaitforoperation.md
│ │ ├── couchbase
│ │ │ ├── _index.md
│ │ │ └── couchbase-sql.md
│ │ ├── dataform
│ │ │ ├── _index.md
│ │ │ └── dataform-compile-local.md
│ │ ├── dataplex
│ │ │ ├── _index.md
│ │ │ ├── dataplex-lookup-entry.md
│ │ │ ├── dataplex-search-aspect-types.md
│ │ │ └── dataplex-search-entries.md
│ │ ├── dgraph
│ │ │ ├── _index.md
│ │ │ └── dgraph-dql.md
│ │ ├── firebird
│ │ │ ├── _index.md
│ │ │ ├── firebird-execute-sql.md
│ │ │ └── firebird-sql.md
│ │ ├── firestore
│ │ │ ├── _index.md
│ │ │ ├── firestore-add-documents.md
│ │ │ ├── firestore-delete-documents.md
│ │ │ ├── firestore-get-documents.md
│ │ │ ├── firestore-get-rules.md
│ │ │ ├── firestore-list-collections.md
│ │ │ ├── firestore-query-collection.md
│ │ │ ├── firestore-query.md
│ │ │ ├── firestore-update-document.md
│ │ │ └── firestore-validate-rules.md
│ │ ├── http
│ │ │ ├── _index.md
│ │ │ └── http.md
│ │ ├── looker
│ │ │ ├── _index.md
│ │ │ ├── looker-add-dashboard-element.md
│ │ │ ├── looker-conversational-analytics.md
│ │ │ ├── looker-create-project-file.md
│ │ │ ├── looker-delete-project-file.md
│ │ │ ├── looker-dev-mode.md
│ │ │ ├── looker-get-connection-databases.md
│ │ │ ├── looker-get-connection-schemas.md
│ │ │ ├── looker-get-connection-table-columns.md
│ │ │ ├── looker-get-connection-tables.md
│ │ │ ├── looker-get-connections.md
│ │ │ ├── looker-get-dashboards.md
│ │ │ ├── looker-get-dimensions.md
│ │ │ ├── looker-get-explores.md
│ │ │ ├── looker-get-filters.md
│ │ │ ├── looker-get-looks.md
│ │ │ ├── looker-get-measures.md
│ │ │ ├── looker-get-models.md
│ │ │ ├── looker-get-parameters.md
│ │ │ ├── looker-get-project-file.md
│ │ │ ├── looker-get-project-files.md
│ │ │ ├── looker-get-projects.md
│ │ │ ├── looker-health-analyze.md
│ │ │ ├── looker-health-pulse.md
│ │ │ ├── looker-health-vacuum.md
│ │ │ ├── looker-make-dashboard.md
│ │ │ ├── looker-make-look.md
│ │ │ ├── looker-query-sql.md
│ │ │ ├── looker-query-url.md
│ │ │ ├── looker-query.md
│ │ │ ├── looker-run-look.md
│ │ │ └── looker-update-project-file.md
│ │ ├── mongodb
│ │ │ ├── _index.md
│ │ │ ├── mongodb-aggregate.md
│ │ │ ├── mongodb-delete-many.md
│ │ │ ├── mongodb-delete-one.md
│ │ │ ├── mongodb-find-one.md
│ │ │ ├── mongodb-find.md
│ │ │ ├── mongodb-insert-many.md
│ │ │ ├── mongodb-insert-one.md
│ │ │ ├── mongodb-update-many.md
│ │ │ └── mongodb-update-one.md
│ │ ├── mssql
│ │ │ ├── _index.md
│ │ │ ├── mssql-execute-sql.md
│ │ │ ├── mssql-list-tables.md
│ │ │ └── mssql-sql.md
│ │ ├── mysql
│ │ │ ├── _index.md
│ │ │ ├── mysql-execute-sql.md
│ │ │ ├── mysql-list-active-queries.md
│ │ │ ├── mysql-list-table-fragmentation.md
│ │ │ ├── mysql-list-tables-missing-unique-indexes.md
│ │ │ ├── mysql-list-tables.md
│ │ │ └── mysql-sql.md
│ │ ├── neo4j
│ │ │ ├── _index.md
│ │ │ ├── neo4j-cypher.md
│ │ │ ├── neo4j-execute-cypher.md
│ │ │ └── neo4j-schema.md
│ │ ├── oceanbase
│ │ │ ├── _index.md
│ │ │ ├── oceanbase-execute-sql.md
│ │ │ └── oceanbase-sql.md
│ │ ├── oracle
│ │ │ ├── _index.md
│ │ │ ├── oracle-execute-sql.md
│ │ │ └── oracle-sql.md
│ │ ├── postgres
│ │ │ ├── _index.md
│ │ │ ├── postgres-execute-sql.md
│ │ │ ├── postgres-list-active-queries.md
│ │ │ ├── postgres-list-available-extensions.md
│ │ │ ├── postgres-list-installed-extensions.md
│ │ │ ├── postgres-list-tables.md
│ │ │ ├── postgres-list-views.md
│ │ │ └── postgres-sql.md
│ │ ├── redis
│ │ │ ├── _index.md
│ │ │ └── redis.md
│ │ ├── serverless-spark
│ │ │ ├── _index.md
│ │ │ ├── serverless-spark-get-batch.md
│ │ │ └── serverless-spark-list-batches.md
│ │ ├── spanner
│ │ │ ├── _index.md
│ │ │ ├── spanner-execute-sql.md
│ │ │ ├── spanner-list-tables.md
│ │ │ └── spanner-sql.md
│ │ ├── sqlite
│ │ │ ├── _index.md
│ │ │ ├── sqlite-execute-sql.md
│ │ │ └── sqlite-sql.md
│ │ ├── tidb
│ │ │ ├── _index.md
│ │ │ ├── tidb-execute-sql.md
│ │ │ └── tidb-sql.md
│ │ ├── trino
│ │ │ ├── _index.md
│ │ │ ├── trino-execute-sql.md
│ │ │ └── trino-sql.md
│ │ ├── utility
│ │ │ ├── _index.md
│ │ │ └── wait.md
│ │ ├── valkey
│ │ │ ├── _index.md
│ │ │ └── valkey.md
│ │ └── yuagbytedb
│ │ ├── _index.md
│ │ └── yugabytedb-sql.md
│ ├── samples
│ │ ├── _index.md
│ │ ├── alloydb
│ │ │ ├── _index.md
│ │ │ ├── ai-nl
│ │ │ │ ├── alloydb_ai_nl.ipynb
│ │ │ │ └── index.md
│ │ │ └── mcp_quickstart.md
│ │ ├── bigquery
│ │ │ ├── _index.md
│ │ │ ├── colab_quickstart_bigquery.ipynb
│ │ │ ├── local_quickstart.md
│ │ │ └── mcp_quickstart
│ │ │ ├── _index.md
│ │ │ ├── inspector_tools.png
│ │ │ └── inspector.png
│ │ └── looker
│ │ ├── _index.md
│ │ ├── looker_gemini_oauth
│ │ │ ├── _index.md
│ │ │ ├── authenticated.png
│ │ │ ├── authorize.png
│ │ │ └── registration.png
│ │ ├── looker_gemini.md
│ │ └── looker_mcp_inspector
│ │ ├── _index.md
│ │ ├── inspector_tools.png
│ │ └── inspector.png
│ └── sdks
│ ├── _index.md
│ ├── go-sdk.md
│ ├── js-sdk.md
│ └── python-sdk.md
├── gemini-extension.json
├── go.mod
├── go.sum
├── internal
│ ├── auth
│ │ ├── auth.go
│ │ └── google
│ │ └── google.go
│ ├── log
│ │ ├── handler.go
│ │ ├── log_test.go
│ │ ├── log.go
│ │ └── logger.go
│ ├── prebuiltconfigs
│ │ ├── prebuiltconfigs_test.go
│ │ ├── prebuiltconfigs.go
│ │ └── tools
│ │ ├── alloydb-postgres-admin.yaml
│ │ ├── alloydb-postgres-observability.yaml
│ │ ├── alloydb-postgres.yaml
│ │ ├── bigquery.yaml
│ │ ├── clickhouse.yaml
│ │ ├── cloud-sql-mssql-admin.yaml
│ │ ├── cloud-sql-mssql-observability.yaml
│ │ ├── cloud-sql-mssql.yaml
│ │ ├── cloud-sql-mysql-admin.yaml
│ │ ├── cloud-sql-mysql-observability.yaml
│ │ ├── cloud-sql-mysql.yaml
│ │ ├── cloud-sql-postgres-admin.yaml
│ │ ├── cloud-sql-postgres-observability.yaml
│ │ ├── cloud-sql-postgres.yaml
│ │ ├── dataplex.yaml
│ │ ├── firestore.yaml
│ │ ├── looker-conversational-analytics.yaml
│ │ ├── looker.yaml
│ │ ├── mssql.yaml
│ │ ├── mysql.yaml
│ │ ├── neo4j.yaml
│ │ ├── oceanbase.yaml
│ │ ├── postgres.yaml
│ │ ├── serverless-spark.yaml
│ │ ├── spanner-postgres.yaml
│ │ ├── spanner.yaml
│ │ └── sqlite.yaml
│ ├── server
│ │ ├── api_test.go
│ │ ├── api.go
│ │ ├── common_test.go
│ │ ├── config.go
│ │ ├── mcp
│ │ │ ├── jsonrpc
│ │ │ │ ├── jsonrpc_test.go
│ │ │ │ └── jsonrpc.go
│ │ │ ├── mcp.go
│ │ │ ├── util
│ │ │ │ └── lifecycle.go
│ │ │ ├── v20241105
│ │ │ │ ├── method.go
│ │ │ │ └── types.go
│ │ │ ├── v20250326
│ │ │ │ ├── method.go
│ │ │ │ └── types.go
│ │ │ └── v20250618
│ │ │ ├── method.go
│ │ │ └── types.go
│ │ ├── mcp_test.go
│ │ ├── mcp.go
│ │ ├── server_test.go
│ │ ├── server.go
│ │ ├── static
│ │ │ ├── assets
│ │ │ │ └── mcptoolboxlogo.png
│ │ │ ├── css
│ │ │ │ └── style.css
│ │ │ ├── index.html
│ │ │ ├── js
│ │ │ │ ├── auth.js
│ │ │ │ ├── loadTools.js
│ │ │ │ ├── mainContent.js
│ │ │ │ ├── navbar.js
│ │ │ │ ├── runTool.js
│ │ │ │ ├── toolDisplay.js
│ │ │ │ ├── tools.js
│ │ │ │ └── toolsets.js
│ │ │ ├── tools.html
│ │ │ └── toolsets.html
│ │ ├── web_test.go
│ │ └── web.go
│ ├── sources
│ │ ├── alloydbadmin
│ │ │ ├── alloydbadmin_test.go
│ │ │ └── alloydbadmin.go
│ │ ├── alloydbpg
│ │ │ ├── alloydb_pg_test.go
│ │ │ └── alloydb_pg.go
│ │ ├── bigquery
│ │ │ ├── bigquery_test.go
│ │ │ └── bigquery.go
│ │ ├── bigtable
│ │ │ ├── bigtable_test.go
│ │ │ └── bigtable.go
│ │ ├── cassandra
│ │ │ ├── cassandra_test.go
│ │ │ └── cassandra.go
│ │ ├── clickhouse
│ │ │ ├── clickhouse_test.go
│ │ │ └── clickhouse.go
│ │ ├── cloudmonitoring
│ │ │ ├── cloud_monitoring_test.go
│ │ │ └── cloud_monitoring.go
│ │ ├── cloudsqladmin
│ │ │ ├── cloud_sql_admin_test.go
│ │ │ └── cloud_sql_admin.go
│ │ ├── cloudsqlmssql
│ │ │ ├── cloud_sql_mssql_test.go
│ │ │ └── cloud_sql_mssql.go
│ │ ├── cloudsqlmysql
│ │ │ ├── cloud_sql_mysql_test.go
│ │ │ └── cloud_sql_mysql.go
│ │ ├── cloudsqlpg
│ │ │ ├── cloud_sql_pg_test.go
│ │ │ └── cloud_sql_pg.go
│ │ ├── couchbase
│ │ │ ├── couchbase_test.go
│ │ │ └── couchbase.go
│ │ ├── dataplex
│ │ │ ├── dataplex_test.go
│ │ │ └── dataplex.go
│ │ ├── dgraph
│ │ │ ├── dgraph_test.go
│ │ │ └── dgraph.go
│ │ ├── dialect.go
│ │ ├── firebird
│ │ │ ├── firebird_test.go
│ │ │ └── firebird.go
│ │ ├── firestore
│ │ │ ├── firestore_test.go
│ │ │ └── firestore.go
│ │ ├── http
│ │ │ ├── http_test.go
│ │ │ └── http.go
│ │ ├── ip_type.go
│ │ ├── looker
│ │ │ ├── looker_test.go
│ │ │ └── looker.go
│ │ ├── mongodb
│ │ │ ├── mongodb_test.go
│ │ │ └── mongodb.go
│ │ ├── mssql
│ │ │ ├── mssql_test.go
│ │ │ └── mssql.go
│ │ ├── mysql
│ │ │ ├── mysql_test.go
│ │ │ └── mysql.go
│ │ ├── neo4j
│ │ │ ├── neo4j_test.go
│ │ │ └── neo4j.go
│ │ ├── oceanbase
│ │ │ ├── oceanbase_test.go
│ │ │ └── oceanbase.go
│ │ ├── oracle
│ │ │ └── oracle.go
│ │ ├── postgres
│ │ │ ├── postgres_test.go
│ │ │ └── postgres.go
│ │ ├── redis
│ │ │ ├── redis_test.go
│ │ │ └── redis.go
│ │ ├── serverlessspark
│ │ │ ├── serverlessspark_test.go
│ │ │ └── serverlessspark.go
│ │ ├── sources.go
│ │ ├── spanner
│ │ │ ├── spanner_test.go
│ │ │ └── spanner.go
│ │ ├── sqlite
│ │ │ ├── sqlite_test.go
│ │ │ └── sqlite.go
│ │ ├── tidb
│ │ │ ├── tidb_test.go
│ │ │ └── tidb.go
│ │ ├── trino
│ │ │ ├── trino_test.go
│ │ │ └── trino.go
│ │ ├── util.go
│ │ ├── valkey
│ │ │ ├── valkey_test.go
│ │ │ └── valkey.go
│ │ └── yugabytedb
│ │ ├── yugabytedb_test.go
│ │ └── yugabytedb.go
│ ├── telemetry
│ │ ├── instrumentation.go
│ │ └── telemetry.go
│ ├── testutils
│ │ └── testutils.go
│ ├── tools
│ │ ├── alloydb
│ │ │ ├── alloydbcreatecluster
│ │ │ │ ├── alloydbcreatecluster_test.go
│ │ │ │ └── alloydbcreatecluster.go
│ │ │ ├── alloydbcreateinstance
│ │ │ │ ├── alloydbcreateinstance_test.go
│ │ │ │ └── alloydbcreateinstance.go
│ │ │ ├── alloydbcreateuser
│ │ │ │ ├── alloydbcreateuser_test.go
│ │ │ │ └── alloydbcreateuser.go
│ │ │ ├── alloydbgetcluster
│ │ │ │ ├── alloydbgetcluster_test.go
│ │ │ │ └── alloydbgetcluster.go
│ │ │ ├── alloydbgetinstance
│ │ │ │ ├── alloydbgetinstance_test.go
│ │ │ │ └── alloydbgetinstance.go
│ │ │ ├── alloydbgetuser
│ │ │ │ ├── alloydbgetuser_test.go
│ │ │ │ └── alloydbgetuser.go
│ │ │ ├── alloydblistclusters
│ │ │ │ ├── alloydblistclusters_test.go
│ │ │ │ └── alloydblistclusters.go
│ │ │ ├── alloydblistinstances
│ │ │ │ ├── alloydblistinstances_test.go
│ │ │ │ └── alloydblistinstances.go
│ │ │ ├── alloydblistusers
│ │ │ │ ├── alloydblistusers_test.go
│ │ │ │ └── alloydblistusers.go
│ │ │ └── alloydbwaitforoperation
│ │ │ ├── alloydbwaitforoperation_test.go
│ │ │ └── alloydbwaitforoperation.go
│ │ ├── alloydbainl
│ │ │ ├── alloydbainl_test.go
│ │ │ └── alloydbainl.go
│ │ ├── bigquery
│ │ │ ├── bigqueryanalyzecontribution
│ │ │ │ ├── bigqueryanalyzecontribution_test.go
│ │ │ │ └── bigqueryanalyzecontribution.go
│ │ │ ├── bigquerycommon
│ │ │ │ ├── table_name_parser_test.go
│ │ │ │ ├── table_name_parser.go
│ │ │ │ └── util.go
│ │ │ ├── bigqueryconversationalanalytics
│ │ │ │ ├── bigqueryconversationalanalytics_test.go
│ │ │ │ └── bigqueryconversationalanalytics.go
│ │ │ ├── bigqueryexecutesql
│ │ │ │ ├── bigqueryexecutesql_test.go
│ │ │ │ └── bigqueryexecutesql.go
│ │ │ ├── bigqueryforecast
│ │ │ │ ├── bigqueryforecast_test.go
│ │ │ │ └── bigqueryforecast.go
│ │ │ ├── bigquerygetdatasetinfo
│ │ │ │ ├── bigquerygetdatasetinfo_test.go
│ │ │ │ └── bigquerygetdatasetinfo.go
│ │ │ ├── bigquerygettableinfo
│ │ │ │ ├── bigquerygettableinfo_test.go
│ │ │ │ └── bigquerygettableinfo.go
│ │ │ ├── bigquerylistdatasetids
│ │ │ │ ├── bigquerylistdatasetids_test.go
│ │ │ │ └── bigquerylistdatasetids.go
│ │ │ ├── bigquerylisttableids
│ │ │ │ ├── bigquerylisttableids_test.go
│ │ │ │ └── bigquerylisttableids.go
│ │ │ ├── bigquerysearchcatalog
│ │ │ │ ├── bigquerysearchcatalog_test.go
│ │ │ │ └── bigquerysearchcatalog.go
│ │ │ └── bigquerysql
│ │ │ ├── bigquerysql_test.go
│ │ │ └── bigquerysql.go
│ │ ├── bigtable
│ │ │ ├── bigtable_test.go
│ │ │ └── bigtable.go
│ │ ├── cassandra
│ │ │ └── cassandracql
│ │ │ ├── cassandracql_test.go
│ │ │ └── cassandracql.go
│ │ ├── clickhouse
│ │ │ ├── clickhouseexecutesql
│ │ │ │ ├── clickhouseexecutesql_test.go
│ │ │ │ └── clickhouseexecutesql.go
│ │ │ ├── clickhouselistdatabases
│ │ │ │ ├── clickhouselistdatabases_test.go
│ │ │ │ └── clickhouselistdatabases.go
│ │ │ ├── clickhouselisttables
│ │ │ │ ├── clickhouselisttables_test.go
│ │ │ │ └── clickhouselisttables.go
│ │ │ └── clickhousesql
│ │ │ ├── clickhousesql_test.go
│ │ │ └── clickhousesql.go
│ │ ├── cloudmonitoring
│ │ │ ├── cloudmonitoring_test.go
│ │ │ └── cloudmonitoring.go
│ │ ├── cloudsql
│ │ │ ├── cloudsqlcreatedatabase
│ │ │ │ ├── cloudsqlcreatedatabase_test.go
│ │ │ │ └── cloudsqlcreatedatabase.go
│ │ │ ├── cloudsqlcreateusers
│ │ │ │ ├── cloudsqlcreateusers_test.go
│ │ │ │ └── cloudsqlcreateusers.go
│ │ │ ├── cloudsqlgetinstances
│ │ │ │ ├── cloudsqlgetinstances_test.go
│ │ │ │ └── cloudsqlgetinstances.go
│ │ │ ├── cloudsqllistdatabases
│ │ │ │ ├── cloudsqllistdatabases_test.go
│ │ │ │ └── cloudsqllistdatabases.go
│ │ │ ├── cloudsqllistinstances
│ │ │ │ ├── cloudsqllistinstances_test.go
│ │ │ │ └── cloudsqllistinstances.go
│ │ │ └── cloudsqlwaitforoperation
│ │ │ ├── cloudsqlwaitforoperation_test.go
│ │ │ └── cloudsqlwaitforoperation.go
│ │ ├── cloudsqlmssql
│ │ │ └── cloudsqlmssqlcreateinstance
│ │ │ ├── cloudsqlmssqlcreateinstance_test.go
│ │ │ └── cloudsqlmssqlcreateinstance.go
│ │ ├── cloudsqlmysql
│ │ │ └── cloudsqlmysqlcreateinstance
│ │ │ ├── cloudsqlmysqlcreateinstance_test.go
│ │ │ └── cloudsqlmysqlcreateinstance.go
│ │ ├── cloudsqlpg
│ │ │ └── cloudsqlpgcreateinstances
│ │ │ ├── cloudsqlpgcreateinstances_test.go
│ │ │ └── cloudsqlpgcreateinstances.go
│ │ ├── common_test.go
│ │ ├── common.go
│ │ ├── couchbase
│ │ │ ├── couchbase_test.go
│ │ │ └── couchbase.go
│ │ ├── dataform
│ │ │ └── dataformcompilelocal
│ │ │ ├── dataformcompilelocal_test.go
│ │ │ └── dataformcompilelocal.go
│ │ ├── dataplex
│ │ │ ├── dataplexlookupentry
│ │ │ │ ├── dataplexlookupentry_test.go
│ │ │ │ └── dataplexlookupentry.go
│ │ │ ├── dataplexsearchaspecttypes
│ │ │ │ ├── dataplexsearchaspecttypes_test.go
│ │ │ │ └── dataplexsearchaspecttypes.go
│ │ │ └── dataplexsearchentries
│ │ │ ├── dataplexsearchentries_test.go
│ │ │ └── dataplexsearchentries.go
│ │ ├── dgraph
│ │ │ ├── dgraph_test.go
│ │ │ └── dgraph.go
│ │ ├── firebird
│ │ │ ├── firebirdexecutesql
│ │ │ │ ├── firebirdexecutesql_test.go
│ │ │ │ └── firebirdexecutesql.go
│ │ │ └── firebirdsql
│ │ │ ├── firebirdsql_test.go
│ │ │ └── firebirdsql.go
│ │ ├── firestore
│ │ │ ├── firestoreadddocuments
│ │ │ │ ├── firestoreadddocuments_test.go
│ │ │ │ └── firestoreadddocuments.go
│ │ │ ├── firestoredeletedocuments
│ │ │ │ ├── firestoredeletedocuments_test.go
│ │ │ │ └── firestoredeletedocuments.go
│ │ │ ├── firestoregetdocuments
│ │ │ │ ├── firestoregetdocuments_test.go
│ │ │ │ └── firestoregetdocuments.go
│ │ │ ├── firestoregetrules
│ │ │ │ ├── firestoregetrules_test.go
│ │ │ │ └── firestoregetrules.go
│ │ │ ├── firestorelistcollections
│ │ │ │ ├── firestorelistcollections_test.go
│ │ │ │ └── firestorelistcollections.go
│ │ │ ├── firestorequery
│ │ │ │ ├── firestorequery_test.go
│ │ │ │ └── firestorequery.go
│ │ │ ├── firestorequerycollection
│ │ │ │ ├── firestorequerycollection_test.go
│ │ │ │ └── firestorequerycollection.go
│ │ │ ├── firestoreupdatedocument
│ │ │ │ ├── firestoreupdatedocument_test.go
│ │ │ │ └── firestoreupdatedocument.go
│ │ │ ├── firestorevalidaterules
│ │ │ │ ├── firestorevalidaterules_test.go
│ │ │ │ └── firestorevalidaterules.go
│ │ │ └── util
│ │ │ ├── converter_test.go
│ │ │ ├── converter.go
│ │ │ ├── validator_test.go
│ │ │ └── validator.go
│ │ ├── http
│ │ │ ├── http_test.go
│ │ │ └── http.go
│ │ ├── http_method.go
│ │ ├── looker
│ │ │ ├── lookeradddashboardelement
│ │ │ │ ├── lookeradddashboardelement_test.go
│ │ │ │ └── lookeradddashboardelement.go
│ │ │ ├── lookercommon
│ │ │ │ ├── lookercommon_test.go
│ │ │ │ └── lookercommon.go
│ │ │ ├── lookerconversationalanalytics
│ │ │ │ ├── lookerconversationalanalytics_test.go
│ │ │ │ └── lookerconversationalanalytics.go
│ │ │ ├── lookercreateprojectfile
│ │ │ │ ├── lookercreateprojectfile_test.go
│ │ │ │ └── lookercreateprojectfile.go
│ │ │ ├── lookerdeleteprojectfile
│ │ │ │ ├── lookerdeleteprojectfile_test.go
│ │ │ │ └── lookerdeleteprojectfile.go
│ │ │ ├── lookerdevmode
│ │ │ │ ├── lookerdevmode_test.go
│ │ │ │ └── lookerdevmode.go
│ │ │ ├── lookergetconnectiondatabases
│ │ │ │ ├── lookergetconnectiondatabases_test.go
│ │ │ │ └── lookergetconnectiondatabases.go
│ │ │ ├── lookergetconnections
│ │ │ │ ├── lookergetconnections_test.go
│ │ │ │ └── lookergetconnections.go
│ │ │ ├── lookergetconnectionschemas
│ │ │ │ ├── lookergetconnectionschemas_test.go
│ │ │ │ └── lookergetconnectionschemas.go
│ │ │ ├── lookergetconnectiontablecolumns
│ │ │ │ ├── lookergetconnectiontablecolumns_test.go
│ │ │ │ └── lookergetconnectiontablecolumns.go
│ │ │ ├── lookergetconnectiontables
│ │ │ │ ├── lookergetconnectiontables_test.go
│ │ │ │ └── lookergetconnectiontables.go
│ │ │ ├── lookergetdashboards
│ │ │ │ ├── lookergetdashboards_test.go
│ │ │ │ └── lookergetdashboards.go
│ │ │ ├── lookergetdimensions
│ │ │ │ ├── lookergetdimensions_test.go
│ │ │ │ └── lookergetdimensions.go
│ │ │ ├── lookergetexplores
│ │ │ │ ├── lookergetexplores_test.go
│ │ │ │ └── lookergetexplores.go
│ │ │ ├── lookergetfilters
│ │ │ │ ├── lookergetfilters_test.go
│ │ │ │ └── lookergetfilters.go
│ │ │ ├── lookergetlooks
│ │ │ │ ├── lookergetlooks_test.go
│ │ │ │ └── lookergetlooks.go
│ │ │ ├── lookergetmeasures
│ │ │ │ ├── lookergetmeasures_test.go
│ │ │ │ └── lookergetmeasures.go
│ │ │ ├── lookergetmodels
│ │ │ │ ├── lookergetmodels_test.go
│ │ │ │ └── lookergetmodels.go
│ │ │ ├── lookergetparameters
│ │ │ │ ├── lookergetparameters_test.go
│ │ │ │ └── lookergetparameters.go
│ │ │ ├── lookergetprojectfile
│ │ │ │ ├── lookergetprojectfile_test.go
│ │ │ │ └── lookergetprojectfile.go
│ │ │ ├── lookergetprojectfiles
│ │ │ │ ├── lookergetprojectfiles_test.go
│ │ │ │ └── lookergetprojectfiles.go
│ │ │ ├── lookergetprojects
│ │ │ │ ├── lookergetprojects_test.go
│ │ │ │ └── lookergetprojects.go
│ │ │ ├── lookerhealthanalyze
│ │ │ │ ├── lookerhealthanalyze_test.go
│ │ │ │ └── lookerhealthanalyze.go
│ │ │ ├── lookerhealthpulse
│ │ │ │ ├── lookerhealthpulse_test.go
│ │ │ │ └── lookerhealthpulse.go
│ │ │ ├── lookerhealthvacuum
│ │ │ │ ├── lookerhealthvacuum_test.go
│ │ │ │ └── lookerhealthvacuum.go
│ │ │ ├── lookermakedashboard
│ │ │ │ ├── lookermakedashboard_test.go
│ │ │ │ └── lookermakedashboard.go
│ │ │ ├── lookermakelook
│ │ │ │ ├── lookermakelook_test.go
│ │ │ │ └── lookermakelook.go
│ │ │ ├── lookerquery
│ │ │ │ ├── lookerquery_test.go
│ │ │ │ └── lookerquery.go
│ │ │ ├── lookerquerysql
│ │ │ │ ├── lookerquerysql_test.go
│ │ │ │ └── lookerquerysql.go
│ │ │ ├── lookerqueryurl
│ │ │ │ ├── lookerqueryurl_test.go
│ │ │ │ └── lookerqueryurl.go
│ │ │ ├── lookerrunlook
│ │ │ │ ├── lookerrunlook_test.go
│ │ │ │ └── lookerrunlook.go
│ │ │ └── lookerupdateprojectfile
│ │ │ ├── lookerupdateprojectfile_test.go
│ │ │ └── lookerupdateprojectfile.go
│ │ ├── mongodb
│ │ │ ├── mongodbaggregate
│ │ │ │ ├── mongodbaggregate_test.go
│ │ │ │ └── mongodbaggregate.go
│ │ │ ├── mongodbdeletemany
│ │ │ │ ├── mongodbdeletemany_test.go
│ │ │ │ └── mongodbdeletemany.go
│ │ │ ├── mongodbdeleteone
│ │ │ │ ├── mongodbdeleteone_test.go
│ │ │ │ └── mongodbdeleteone.go
│ │ │ ├── mongodbfind
│ │ │ │ ├── mongodbfind_test.go
│ │ │ │ └── mongodbfind.go
│ │ │ ├── mongodbfindone
│ │ │ │ ├── mongodbfindone_test.go
│ │ │ │ └── mongodbfindone.go
│ │ │ ├── mongodbinsertmany
│ │ │ │ ├── mongodbinsertmany_test.go
│ │ │ │ └── mongodbinsertmany.go
│ │ │ ├── mongodbinsertone
│ │ │ │ ├── mongodbinsertone_test.go
│ │ │ │ └── mongodbinsertone.go
│ │ │ ├── mongodbupdatemany
│ │ │ │ ├── mongodbupdatemany_test.go
│ │ │ │ └── mongodbupdatemany.go
│ │ │ └── mongodbupdateone
│ │ │ ├── mongodbupdateone_test.go
│ │ │ └── mongodbupdateone.go
│ │ ├── mssql
│ │ │ ├── mssqlexecutesql
│ │ │ │ ├── mssqlexecutesql_test.go
│ │ │ │ └── mssqlexecutesql.go
│ │ │ ├── mssqllisttables
│ │ │ │ ├── mssqllisttables_test.go
│ │ │ │ └── mssqllisttables.go
│ │ │ └── mssqlsql
│ │ │ ├── mssqlsql_test.go
│ │ │ └── mssqlsql.go
│ │ ├── mysql
│ │ │ ├── mysqlcommon
│ │ │ │ └── mysqlcommon.go
│ │ │ ├── mysqlexecutesql
│ │ │ │ ├── mysqlexecutesql_test.go
│ │ │ │ └── mysqlexecutesql.go
│ │ │ ├── mysqllistactivequeries
│ │ │ │ ├── mysqllistactivequeries_test.go
│ │ │ │ └── mysqllistactivequeries.go
│ │ │ ├── mysqllisttablefragmentation
│ │ │ │ ├── mysqllisttablefragmentation_test.go
│ │ │ │ └── mysqllisttablefragmentation.go
│ │ │ ├── mysqllisttables
│ │ │ │ ├── mysqllisttables_test.go
│ │ │ │ └── mysqllisttables.go
│ │ │ ├── mysqllisttablesmissinguniqueindexes
│ │ │ │ ├── mysqllisttablesmissinguniqueindexes_test.go
│ │ │ │ └── mysqllisttablesmissinguniqueindexes.go
│ │ │ └── mysqlsql
│ │ │ ├── mysqlsql_test.go
│ │ │ └── mysqlsql.go
│ │ ├── neo4j
│ │ │ ├── neo4jcypher
│ │ │ │ ├── neo4jcypher_test.go
│ │ │ │ └── neo4jcypher.go
│ │ │ ├── neo4jexecutecypher
│ │ │ │ ├── classifier
│ │ │ │ │ ├── classifier_test.go
│ │ │ │ │ └── classifier.go
│ │ │ │ ├── neo4jexecutecypher_test.go
│ │ │ │ └── neo4jexecutecypher.go
│ │ │ └── neo4jschema
│ │ │ ├── cache
│ │ │ │ ├── cache_test.go
│ │ │ │ └── cache.go
│ │ │ ├── helpers
│ │ │ │ ├── helpers_test.go
│ │ │ │ └── helpers.go
│ │ │ ├── neo4jschema_test.go
│ │ │ ├── neo4jschema.go
│ │ │ └── types
│ │ │ └── types.go
│ │ ├── oceanbase
│ │ │ ├── oceanbaseexecutesql
│ │ │ │ ├── oceanbaseexecutesql_test.go
│ │ │ │ └── oceanbaseexecutesql.go
│ │ │ └── oceanbasesql
│ │ │ ├── oceanbasesql_test.go
│ │ │ └── oceanbasesql.go
│ │ ├── oracle
│ │ │ ├── oracleexecutesql
│ │ │ │ └── oracleexecutesql.go
│ │ │ └── oraclesql
│ │ │ └── oraclesql.go
│ │ ├── parameters_test.go
│ │ ├── parameters.go
│ │ ├── postgres
│ │ │ ├── postgresexecutesql
│ │ │ │ ├── postgresexecutesql_test.go
│ │ │ │ └── postgresexecutesql.go
│ │ │ ├── postgreslistactivequeries
│ │ │ │ ├── postgreslistactivequeries_test.go
│ │ │ │ └── postgreslistactivequeries.go
│ │ │ ├── postgreslistavailableextensions
│ │ │ │ ├── postgreslistavailableextensions_test.go
│ │ │ │ └── postgreslistavailableextensions.go
│ │ │ ├── postgreslistinstalledextensions
│ │ │ │ ├── postgreslistinstalledextensions_test.go
│ │ │ │ └── postgreslistinstalledextensions.go
│ │ │ ├── postgreslisttables
│ │ │ │ ├── postgreslisttables_test.go
│ │ │ │ └── postgreslisttables.go
│ │ │ ├── postgreslistviews
│ │ │ │ ├── postgreslistviews_test.go
│ │ │ │ └── postgreslistviews.go
│ │ │ └── postgressql
│ │ │ ├── postgressql_test.go
│ │ │ └── postgressql.go
│ │ ├── redis
│ │ │ ├── redis_test.go
│ │ │ └── redis.go
│ │ ├── serverlessspark
│ │ │ ├── serverlesssparkgetbatch
│ │ │ │ ├── serverlesssparkgetbatch_test.go
│ │ │ │ └── serverlesssparkgetbatch.go
│ │ │ └── serverlesssparklistbatches
│ │ │ ├── serverlesssparklistbatches_test.go
│ │ │ └── serverlesssparklistbatches.go
│ │ ├── spanner
│ │ │ ├── spannerexecutesql
│ │ │ │ ├── spannerexecutesql_test.go
│ │ │ │ └── spannerexecutesql.go
│ │ │ ├── spannerlisttables
│ │ │ │ ├── spannerlisttables_test.go
│ │ │ │ └── spannerlisttables.go
│ │ │ └── spannersql
│ │ │ ├── spanner_test.go
│ │ │ └── spannersql.go
│ │ ├── sqlite
│ │ │ ├── sqliteexecutesql
│ │ │ │ ├── sqliteexecutesql_test.go
│ │ │ │ └── sqliteexecutesql.go
│ │ │ └── sqlitesql
│ │ │ ├── sqlitesql_test.go
│ │ │ └── sqlitesql.go
│ │ ├── tidb
│ │ │ ├── tidbexecutesql
│ │ │ │ ├── tidbexecutesql_test.go
│ │ │ │ └── tidbexecutesql.go
│ │ │ └── tidbsql
│ │ │ ├── tidbsql_test.go
│ │ │ └── tidbsql.go
│ │ ├── tools_test.go
│ │ ├── tools.go
│ │ ├── toolsets.go
│ │ ├── trino
│ │ │ ├── trinoexecutesql
│ │ │ │ ├── trinoexecutesql_test.go
│ │ │ │ └── trinoexecutesql.go
│ │ │ └── trinosql
│ │ │ ├── trinosql_test.go
│ │ │ └── trinosql.go
│ │ ├── utility
│ │ │ └── wait
│ │ │ ├── wait_test.go
│ │ │ └── wait.go
│ │ ├── valkey
│ │ │ ├── valkey_test.go
│ │ │ └── valkey.go
│ │ └── yugabytedbsql
│ │ ├── yugabytedbsql_test.go
│ │ └── yugabytedbsql.go
│ └── util
│ └── util.go
├── LICENSE
├── logo.png
├── main.go
├── MCP-TOOLBOX-EXTENSION.md
├── README.md
└── tests
├── alloydb
│ ├── alloydb_integration_test.go
│ └── alloydb_wait_for_operation_test.go
├── alloydbainl
│ └── alloydb_ai_nl_integration_test.go
├── alloydbpg
│ └── alloydb_pg_integration_test.go
├── auth.go
├── bigquery
│ └── bigquery_integration_test.go
├── bigtable
│ └── bigtable_integration_test.go
├── cassandra
│ └── cassandra_integration_test.go
├── clickhouse
│ └── clickhouse_integration_test.go
├── cloudmonitoring
│ └── cloud_monitoring_integration_test.go
├── cloudsql
│ ├── cloud_sql_create_database_test.go
│ ├── cloud_sql_create_users_test.go
│ ├── cloud_sql_get_instances_test.go
│ ├── cloud_sql_list_databases_test.go
│ ├── cloudsql_list_instances_test.go
│ └── cloudsql_wait_for_operation_test.go
├── cloudsqlmssql
│ ├── cloud_sql_mssql_create_instance_integration_test.go
│ └── cloud_sql_mssql_integration_test.go
├── cloudsqlmysql
│ ├── cloud_sql_mysql_create_instance_integration_test.go
│ └── cloud_sql_mysql_integration_test.go
├── cloudsqlpg
│ ├── cloud_sql_pg_create_instances_test.go
│ └── cloud_sql_pg_integration_test.go
├── common.go
├── couchbase
│ └── couchbase_integration_test.go
├── dataform
│ └── dataform_integration_test.go
├── dataplex
│ └── dataplex_integration_test.go
├── dgraph
│ └── dgraph_integration_test.go
├── firebird
│ └── firebird_integration_test.go
├── firestore
│ └── firestore_integration_test.go
├── http
│ └── http_integration_test.go
├── looker
│ └── looker_integration_test.go
├── mongodb
│ └── mongodb_integration_test.go
├── mssql
│ └── mssql_integration_test.go
├── mysql
│ └── mysql_integration_test.go
├── neo4j
│ └── neo4j_integration_test.go
├── oceanbase
│ └── oceanbase_integration_test.go
├── option.go
├── oracle
│ └── oracle_integration_test.go
├── postgres
│ └── postgres_integration_test.go
├── redis
│ └── redis_test.go
├── server.go
├── serverlessspark
│ └── serverless_spark_integration_test.go
├── source.go
├── spanner
│ └── spanner_integration_test.go
├── sqlite
│ └── sqlite_integration_test.go
├── tidb
│ └── tidb_integration_test.go
├── tool.go
├── trino
│ └── trino_integration_test.go
├── utility
│ └── wait_integration_test.go
├── valkey
│ └── valkey_test.go
└── yugabytedb
└── yugabytedb_integration_test.go
```
# Files
--------------------------------------------------------------------------------
/docs/en/resources/authServices/_index.md:
--------------------------------------------------------------------------------
```markdown
---
title: "AuthServices"
type: docs
weight: 1
description: >
AuthServices represent services that handle authentication and authorization.
---
AuthServices represent services that handle authentication and authorization. They
can primarily be used by [Tools](../tools/) in two different ways:
- [**Authorized Invocation**][auth-invoke] is when a tool
is validated by the auth service before the call can be invoked. Toolbox
will reject any calls that fail to validate or have an invalid token.
- [**Authenticated Parameters**][auth-params] replace the value of a parameter
with a field from an [OIDC][openid-claims] claim. Toolbox will automatically
resolve the ID token provided by the client and replace the parameter in the
tool call.
[openid-claims]: https://openid.net/specs/openid-connect-core-1_0.html#StandardClaims
[auth-invoke]: ../tools/#authorized-invocations
[auth-params]: ../tools/#authenticated-parameters
## Example
The following configurations are placed at the top level of a `tools.yaml` file.
{{< notice tip >}}
If you are accessing Toolbox with multiple applications, each
application should register its own Client ID even if they use the same
"kind" of auth provider.
{{< /notice >}}
```yaml
authServices:
  my_auth_app_1:
    kind: google
    clientId: ${YOUR_CLIENT_ID_1}
  my_auth_app_2:
    kind: google
    clientId: ${YOUR_CLIENT_ID_2}
```
{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}
After you've configured an `authService`, you'll need to reference it in the
configuration for each tool that should use it:
- **Authorized Invocations** for authorizing a tool call, [use the
`authRequired` field in a tool config][auth-invoke]
- **Authenticated Parameters** for using the value from an OIDC claim, [use the
  `authServices` field in a parameter config][auth-params] (see the sketch below)
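For example, here is a minimal sketch of a tool configuration that uses both fields
together. The `search_flights` tool, its `my-pg-source` source, and the `email` claim
are illustrative placeholders, not part of the configuration above:

```yaml
tools:
  search_flights:
    kind: postgres-sql
    source: my-pg-source
    description: Search flights for the signed-in user.
    # Authorized Invocation: only requests with a valid my_auth_app_1 token may call this tool.
    authRequired:
      - my_auth_app_1
    statement: SELECT * FROM flights WHERE user_email = $1;
    parameters:
      - name: user_email
        type: string
        description: Email of the signed-in user.
        # Authenticated Parameter: the value is taken from the "email" claim of the ID token.
        authServices:
          - name: my_auth_app_1
            field: email
```

See the [Tools](../tools/) documentation for the full set of fields each tool kind supports.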
## Specifying ID Tokens from Clients
After [configuring](#example) your `authServices` section, use a Toolbox SDK to
add your ID tokens to the header of a Tool invocation request. When specifying a
token, you provide a function that returns an ID token. This function is called
when the tool is invoked, which allows you to cache and refresh the ID token as
needed.
The primary method for providing these getters is via the `auth_token_getters`
parameter when loading tools, or the `add_auth_token_getter()` /
`add_auth_token_getters()` methods on a loaded tool object.
### Specifying tokens during load
#### Python
Use the [Python SDK](https://github.com/googleapis/mcp-toolbox-sdk-python/tree/main).
{{< tabpane persist=header >}}
{{< tab header="Core" lang="Python" >}}
import asyncio
from toolbox_core import ToolboxClient

async def get_auth_token():
    # ... Logic to retrieve ID token (e.g., from local storage, OAuth flow)
    # This example just returns a placeholder. Replace with your actual token retrieval.
    return "YOUR_ID_TOKEN"  # Placeholder

async def main():
    async with ToolboxClient("http://127.0.0.1:5000") as toolbox:
        auth_tool = await toolbox.load_tool(
            "get_sensitive_data",
            auth_token_getters={"my_auth_app_1": get_auth_token}
        )
        result = await auth_tool(param="value")
        print(result)

if __name__ == "__main__":
    asyncio.run(main())
{{< /tab >}}
{{< tab header="LangChain" lang="Python" >}}
import asyncio
from toolbox_langchain import ToolboxClient

async def get_auth_token():
    # ... Logic to retrieve ID token (e.g., from local storage, OAuth flow)
    # This example just returns a placeholder. Replace with your actual token retrieval.
    return "YOUR_ID_TOKEN"  # Placeholder

async def main():
    toolbox = ToolboxClient("http://127.0.0.1:5000")
    auth_tool = await toolbox.aload_tool(
        "get_sensitive_data",
        auth_token_getters={"my_auth_app_1": get_auth_token}
    )
    result = await auth_tool.ainvoke({"param": "value"})
    print(result)

if __name__ == "__main__":
    asyncio.run(main())
{{< /tab >}}
{{< tab header="Llamaindex" lang="Python" >}}
import asyncio
from toolbox_llamaindex import ToolboxClient

async def get_auth_token():
    # ... Logic to retrieve ID token (e.g., from local storage, OAuth flow)
    # This example just returns a placeholder. Replace with your actual token retrieval.
    return "YOUR_ID_TOKEN"  # Placeholder

async def main():
    toolbox = ToolboxClient("http://127.0.0.1:5000")
    auth_tool = await toolbox.aload_tool(
        "get_sensitive_data",
        auth_token_getters={"my_auth_app_1": get_auth_token}
    )
    # result = await auth_tool.acall(param="value")
    # print(result.content)

if __name__ == "__main__":
    asyncio.run(main())
{{< /tab >}}
{{< /tabpane >}}
#### JavaScript/TypeScript
Use the [JS SDK](https://github.com/googleapis/mcp-toolbox-sdk-js/tree/main).
```javascript
import { ToolboxClient } from '@toolbox-sdk/core';

async function getAuthToken() {
    // ... Logic to retrieve ID token (e.g., from local storage, OAuth flow)
    // This example just returns a placeholder. Replace with your actual token retrieval.
    return "YOUR_ID_TOKEN"; // Placeholder
}

const URL = 'http://127.0.0.1:5000';
let client = new ToolboxClient(URL);

const authTool = await client.loadTool("my-tool", {"my_auth_app_1": getAuthToken});
const result = await authTool({param: "value"});
console.log(result);
```
#### Go
Use the [Go SDK](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main).
```go
import "github.com/googleapis/mcp-toolbox-sdk-go/core"
import "fmt"
func getAuthToken() string {
// ... Logic to retrieve ID token (e.g., from local storage, OAuth flow)
// This example just returns a placeholder. Replace with your actual token retrieval.
return "YOUR_ID_TOKEN" // Placeholder
}
func main() {
URL := 'http://127.0.0.1:5000'
client, err := core.NewToolboxClient(URL)
if err != nil {
log.Fatalf("Failed to create Toolbox client: %v", err)
}
dynamicTokenSource := core.NewCustomTokenSource(getAuthToken)
authTool, err := client.LoadTool(
"my-tool",
ctx,
core.WithAuthTokenSource("my_auth_app_1", dynamicTokenSource))
if err != nil {
log.Fatalf("Failed to load tool: %v", err)
}
inputs := map[string]any{"param": "value"}
result, err := authTool.Invoke(ctx, inputs)
if err != nil {
log.Fatalf("Failed to invoke tool: %v", err)
}
fmt.Println(result)
}
```
### Specifying tokens for existing tools
#### Python
Use the [Python
SDK](https://github.com/googleapis/mcp-toolbox-sdk-python/tree/main).
{{< tabpane persist=header >}}
{{< tab header="Core" lang="Python" >}}
tools = await toolbox.load_toolset()
# for a single token
authorized_tool = tools[0].add_auth_token_getter("my_auth", get_auth_token)
# OR, if multiple tokens are needed
authorized_tool = tools[0].add_auth_token_getters({
    "my_auth1": get_auth1_token,
    "my_auth2": get_auth2_token,
})
{{< /tab >}}
{{< tab header="LangChain" lang="Python" >}}
tools = toolbox.load_toolset()
# for a single token
authorized_tool = tools[0].add_auth_token_getter("my_auth", get_auth_token)
# OR, if multiple tokens are needed
authorized_tool = tools[0].add_auth_token_getters({
    "my_auth1": get_auth1_token,
    "my_auth2": get_auth2_token,
})
{{< /tab >}}
{{< tab header="Llamaindex" lang="Python" >}}
tools = toolbox.load_toolset()
# for a single token
authorized_tool = tools[0].add_auth_token_getter("my_auth", get_auth_token)
# OR, if multiple tokens are needed
authorized_tool = tools[0].add_auth_token_getters({
    "my_auth1": get_auth1_token,
    "my_auth2": get_auth2_token,
})
{{< /tab >}}
{{< /tabpane >}}
#### JavaScript/TypeScript
Use the [JS SDK](https://github.com/googleapis/mcp-toolbox-sdk-js/tree/main).
```javascript
const URL = 'http://127.0.0.1:5000';
let client = new ToolboxClient(URL);
let tool = await client.loadTool("my-tool");

// for a single token
const authorizedTool = tool.addAuthTokenGetter("my_auth", getAuthToken);

// OR, if multiple tokens are needed
const multiAuthTool = tool.addAuthTokenGetters({
    "my_auth_1": getAuthToken1,
    "my_auth_2": getAuthToken2,
});
```
#### Go
Use the [Go SDK](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main).
```go
import "github.com/googleapis/mcp-toolbox-sdk-go/core"
func main() {
URL := 'http://127.0.0.1:5000'
client, err := core.NewToolboxClient(URL)
if err != nil {
log.Fatalf("Failed to create Toolbox client: %v", err)
}
tool, err := client.LoadTool("my-tool", ctx))
if err != nil {
log.Fatalf("Failed to load tool: %v", err)
}
dynamicTokenSource1 := core.NewCustomTokenSource(getAuthToken1)
dynamicTokenSource2 := core.NewCustomTokenSource(getAuthToken1)
// For a single token
authTool, err := tool.ToolFrom(
core.WithAuthTokenSource("my-auth", dynamicTokenSource),
)
// OR, if multiple tokens are needed
authTool, err := tool.ToolFrom(
core.WithAuthTokenSource("my-auth_1", dynamicTokenSource1),
core.WithAuthTokenSource("my-auth_2", dynamicTokenSource2),
)
}
```
## Kinds of Auth Services
```
--------------------------------------------------------------------------------
/docs/en/how-to/connect-ide/neo4j_mcp.md:
--------------------------------------------------------------------------------
```markdown
---
title: Neo4j using MCP
type: docs
weight: 2
description: "Connect your IDE to Neo4j using Toolbox."
---
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is
an open protocol for connecting Large Language Models (LLMs) to data sources
like Neo4j. This guide covers how to use [MCP Toolbox for Databases][toolbox] to
expose your developer assistant tools to a Neo4j instance:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
* [Visual Studio Code][vscode] (Copilot)
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Gemini CLI][geminicli]
* [Gemini Code Assist][geminicodeassist]
[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[geminicli]: #configure-your-mcp-client
[geminicodeassist]: #configure-your-mcp-client
## Set up the database
1. [Create or select a Neo4j
instance.](https://neo4j.com/cloud/platform/aura-graph-database/)
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
to your OS and CPU architecture. You are required to use Toolbox version
v0.15.0+:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Verify the installation:
```bash
./toolbox --version
```
## Configure your MCP Client
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"neo4j": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","neo4j","--stdio"],
"env": {
"NEO4J_URI": "",
"NEO4J_DATABASE": "",
"NEO4J_USERNAME": "",
"NEO4J_PASSWORD": ""
}
}
}
}
```
1. Restart Claude code to apply the new configuration.
{{% /tab %}}
{{% tab header="Claude desktop" lang="en" %}}
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"neo4j": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","neo4j","--stdio"],
"env": {
"NEO4J_URI": "",
"NEO4J_DATABASE": "",
"NEO4J_USERNAME": "",
"NEO4J_PASSWORD": ""
}
}
}
}
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and
tap the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"neo4j": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","neo4j","--stdio"],
"env": {
"NEO4J_URI": "",
"NEO4J_DATABASE": "",
"NEO4J_USERNAME": "",
"NEO4J_PASSWORD": ""
}
}
}
}
```
1. You should see a green active status after the server is successfully connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}
1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your values, and save:
```json
{
"mcpServers": {
"neo4j": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","neo4j","--stdio"],
"env": {
"NEO4J_URI": "",
"NEO4J_DATABASE": "",
"NEO4J_USERNAME": "",
"NEO4J_PASSWORD": ""
}
}
}
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
Settings > MCP**. You should see a green active status after the server is
successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcp" : {
"servers": {
"neo4j": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","neo4j","--stdio"],
"env": {
"NEO4J_URI": "",
"NEO4J_DATABASE": "",
"NEO4J_USERNAME": "",
"NEO4J_PASSWORD": ""
}
}
}
}
}
```
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"neo4j": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","neo4j","--stdio"],
"env": {
"NEO4J_URI": "",
"NEO4J_DATABASE": "",
"NEO4J_USERNAME": "",
"NEO4J_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini
CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your
values, and then save:
```json
{
"mcpServers": {
"neo4j": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","neo4j","--stdio"],
"env": {
"NEO4J_URI": "",
"NEO4J_DATABASE": "",
"NEO4J_USERNAME": "",
"NEO4J_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code
Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist)
extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your
values, and then save:
```json
{
"mcpServers": {
"neo4j": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","neo4j","--stdio"],
"env": {
"NEO4J_URI": "",
"NEO4J_DATABASE": "",
"NEO4J_USERNAME": "",
"NEO4J_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{< /tabpane >}}
## Use Tools
Your AI tool is now connected to Neo4j using MCP. Try asking your AI assistant
to get the graph schema or execute Cypher statements.
The following tools are available to the LLM:
1. **get_schema**: extracts the complete database schema, including details
about node labels, relationships, properties, constraints, and indexes.
1. **execute_cypher**: executes any arbitrary Cypher statement.
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}
```
--------------------------------------------------------------------------------
/docs/en/how-to/connect-ide/mysql_mcp.md:
--------------------------------------------------------------------------------
```markdown
---
title: MySQL using MCP
type: docs
weight: 2
description: "Connect your IDE to MySQL using Toolbox."
---
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is
an open protocol for connecting Large Language Models (LLMs) to data sources
like MySQL. This guide covers how to use [MCP Toolbox for Databases][toolbox] to
expose your developer assistant tools to a MySQL instance:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
* [Visual Studio Code][vscode] (Copilot)
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Gemini CLI][geminicli]
* [Gemini Code Assist][geminicodeassist]
[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[geminicli]: #configure-your-mcp-client
[geminicodeassist]: #configure-your-mcp-client
## Set up the database
1. [Create or select a MySQL instance.](https://dev.mysql.com/downloads/installer/)
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
to your OS and CPU architecture. You are required to use Toolbox version
v0.10.0+:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Verify the installation:
```bash
./toolbox --version
```
## Configure your MCP Client
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt", "mysql", "--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
1. Restart Claude code to apply the new configuration.
{{% /tab %}}
{{% tab header="Claude desktop" lang="en" %}}
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt", "mysql", "--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and
tap the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt", "mysql", "--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
1. You should see a green active status after the server is successfully
connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}
1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt", "mysql", "--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
Settings > MCP**. You should see a green active status after the server is
successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"servers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mysql","--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mysql","--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini
CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your
values, and then save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mysql","--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code
Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist)
extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your
values, and then save:
```json
{
"mcpServers": {
"mysql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mysql","--stdio"],
"env": {
"MYSQL_HOST": "",
"MYSQL_PORT": "",
"MYSQL_DATABASE": "",
"MYSQL_USER": "",
"MYSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{< /tabpane >}}
## Use Tools
Your AI tool is now connected to MySQL using MCP. Try asking your AI assistant
to list tables, create a table, or define and execute other SQL statements.
The following tools are available to the LLM:
1. **list_tables**: lists tables and descriptions
1. **execute_sql**: executes any SQL statement
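Under the hood, each of these is invoked through a standard MCP `tools/call`
request sent over stdio. As a rough sketch (the exact argument names may vary
between Toolbox versions, so treat the `sql` field below as an assumption), a
client asking the server to run a statement sends something like:
```json
{
  "jsonrpc": "2.0",
  "id": 1,
  "method": "tools/call",
  "params": {
    "name": "execute_sql",
    "arguments": { "sql": "SELECT 1" }
  }
}
```
Your IDE or assistant constructs these messages for you, so you normally never
write them by hand.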
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}
```
--------------------------------------------------------------------------------
/docs/en/how-to/connect-ide/mssql_mcp.md:
--------------------------------------------------------------------------------
```markdown
---
title: SQL Server using MCP
type: docs
weight: 2
description: "Connect your IDE to SQL Server using Toolbox."
---
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is
an open protocol for connecting Large Language Models (LLMs) to data sources
like SQL Server. This guide covers how to use [MCP Toolbox for
Databases][toolbox] to expose your developer assistant tools to a SQL Server
instance:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
* [Visual Studio Code][vscode] (Copilot)
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Gemini CLI][geminicli]
* [Gemini Code Assist][geminicodeassist]
[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[geminicli]: #configure-your-mcp-client
[geminicodeassist]: #configure-your-mcp-client
## Set up the database
1. [Create or select a SQL Server
instance.](https://www.microsoft.com/en-us/sql-server/sql-server-downloads)
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
to your OS and CPU architecture. You must use Toolbox version v0.10.0 or
later:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Verify the installation:
```bash
./toolbox --version
```
## Configure your MCP Client
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
1. Restart Claude code to apply the new configuration.
{{% /tab %}}
{{% tab header="Claude desktop" lang="en" %}}
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and
tap the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
1. You should see a green active status after the server is successfully
connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}
1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
Settings > MCP**. You should see a green active status after the server is
successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"servers": {
"mssql": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini
CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your
values, and then save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code
Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist)
extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your
values, and then save:
```json
{
"mcpServers": {
"sqlserver": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","mssql","--stdio"],
"env": {
"MSSQL_HOST": "",
"MSSQL_PORT": "",
"MSSQL_DATABASE": "",
"MSSQL_USER": "",
"MSSQL_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{< /tabpane >}}
## Use Tools
Your AI tool is now connected to SQL Server using MCP. Try asking your AI
assistant to list tables, create a table, or define and execute other SQL
statements.
The following tools are available to the LLM:
1. **list_tables**: lists tables and descriptions
1. **execute_sql**: executes any SQL statement
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}
```
--------------------------------------------------------------------------------
/tests/alloydbpg/alloydb_pg_integration_test.go:
--------------------------------------------------------------------------------
```go
// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package alloydbpg
import (
"context"
"fmt"
"net"
"os"
"regexp"
"strings"
"testing"
"time"
"cloud.google.com/go/alloydbconn"
"github.com/google/uuid"
"github.com/googleapis/genai-toolbox/internal/testutils"
"github.com/googleapis/genai-toolbox/tests"
"github.com/jackc/pgx/v5/pgxpool"
)
var (
AlloyDBPostgresSourceKind = "alloydb-postgres"
AlloyDBPostgresToolKind = "postgres-sql"
AlloyDBPostgresProject = os.Getenv("ALLOYDB_POSTGRES_PROJECT")
AlloyDBPostgresRegion = os.Getenv("ALLOYDB_POSTGRES_REGION")
AlloyDBPostgresCluster = os.Getenv("ALLOYDB_POSTGRES_CLUSTER")
AlloyDBPostgresInstance = os.Getenv("ALLOYDB_POSTGRES_INSTANCE")
AlloyDBPostgresDatabase = os.Getenv("ALLOYDB_POSTGRES_DATABASE")
AlloyDBPostgresUser = os.Getenv("ALLOYDB_POSTGRES_USER")
AlloyDBPostgresPass = os.Getenv("ALLOYDB_POSTGRES_PASS")
)
func getAlloyDBPgVars(t *testing.T) map[string]any {
switch "" {
case AlloyDBPostgresProject:
t.Fatal("'ALLOYDB_POSTGRES_PROJECT' not set")
case AlloyDBPostgresRegion:
t.Fatal("'ALLOYDB_POSTGRES_REGION' not set")
case AlloyDBPostgresCluster:
t.Fatal("'ALLOYDB_POSTGRES_CLUSTER' not set")
case AlloyDBPostgresInstance:
t.Fatal("'ALLOYDB_POSTGRES_INSTANCE' not set")
case AlloyDBPostgresDatabase:
t.Fatal("'ALLOYDB_POSTGRES_DATABASE' not set")
case AlloyDBPostgresUser:
t.Fatal("'ALLOYDB_POSTGRES_USER' not set")
case AlloyDBPostgresPass:
t.Fatal("'ALLOYDB_POSTGRES_PASS' not set")
}
return map[string]any{
"kind": AlloyDBPostgresSourceKind,
"project": AlloyDBPostgresProject,
"cluster": AlloyDBPostgresCluster,
"instance": AlloyDBPostgresInstance,
"region": AlloyDBPostgresRegion,
"database": AlloyDBPostgresDatabase,
"user": AlloyDBPostgresUser,
"password": AlloyDBPostgresPass,
}
}
// Copied over from alloydb_pg.go
func getAlloyDBDialOpts(ipType string) ([]alloydbconn.DialOption, error) {
switch strings.ToLower(ipType) {
case "private":
return []alloydbconn.DialOption{alloydbconn.WithPrivateIP()}, nil
case "public":
return []alloydbconn.DialOption{alloydbconn.WithPublicIP()}, nil
default:
return nil, fmt.Errorf("invalid ipType %s", ipType)
}
}
// Copied over from alloydb_pg.go
func initAlloyDBPgConnectionPool(project, region, cluster, instance, ipType, user, pass, dbname string) (*pgxpool.Pool, error) {
// Configure the driver to connect to the database
dsn := fmt.Sprintf("user=%s password=%s dbname=%s sslmode=disable", user, pass, dbname)
config, err := pgxpool.ParseConfig(dsn)
if err != nil {
return nil, fmt.Errorf("unable to parse connection uri: %w", err)
}
// Create a new dialer with options
dialOpts, err := getAlloyDBDialOpts(ipType)
if err != nil {
return nil, err
}
d, err := alloydbconn.NewDialer(context.Background(), alloydbconn.WithDefaultDialOptions(dialOpts...))
if err != nil {
return nil, fmt.Errorf("unable to parse connection uri: %w", err)
}
// Tell the driver to use the AlloyDB Go Connector to create connections
i := fmt.Sprintf("projects/%s/locations/%s/clusters/%s/instances/%s", project, region, cluster, instance)
config.ConnConfig.DialFunc = func(ctx context.Context, _ string, instance string) (net.Conn, error) {
return d.Dial(ctx, i)
}
// Interact with the driver directly as you normally would
pool, err := pgxpool.NewWithConfig(context.Background(), config)
if err != nil {
return nil, err
}
return pool, nil
}
func TestAlloyDBPgToolEndpoints(t *testing.T) {
sourceConfig := getAlloyDBPgVars(t)
ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
defer cancel()
var args []string
pool, err := initAlloyDBPgConnectionPool(AlloyDBPostgresProject, AlloyDBPostgresRegion, AlloyDBPostgresCluster, AlloyDBPostgresInstance, "public", AlloyDBPostgresUser, AlloyDBPostgresPass, AlloyDBPostgresDatabase)
if err != nil {
t.Fatalf("unable to create AlloyDB connection pool: %s", err)
}
// create table name with UUID
tableNameParam := "param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
tableNameAuth := "auth_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
tableNameTemplateParam := "template_param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
// set up data for param tool
createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := tests.GetPostgresSQLParamToolInfo(tableNameParam)
teardownTable1 := tests.SetupPostgresSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
defer teardownTable1(t)
// set up data for auth tool
createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := tests.GetPostgresSQLAuthToolInfo(tableNameAuth)
teardownTable2 := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
defer teardownTable2(t)
// Write config into a file and pass it to command
toolsFile := tests.GetToolsConfig(sourceConfig, AlloyDBPostgresToolKind, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, authToolStmt)
toolsFile = tests.AddExecuteSqlConfig(t, toolsFile, "postgres-execute-sql")
tmplSelectCombined, tmplSelectFilterCombined := tests.GetPostgresSQLTmplToolStatement()
toolsFile = tests.AddTemplateParamConfig(t, toolsFile, AlloyDBPostgresToolKind, tmplSelectCombined, tmplSelectFilterCombined, "")
cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
if err != nil {
t.Fatalf("command initialization returned an error: %s", err)
}
defer cleanup()
waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
defer cancel()
out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
if err != nil {
t.Logf("toolbox command logs: \n%s", out)
t.Fatalf("toolbox didn't start successfully: %s", err)
}
// Get configs for tests
select1Want, failInvocationWant, createTableStatement, mcpSelect1Want := tests.GetPostgresWants()
// Run tests
tests.RunToolGetTest(t)
tests.RunToolInvokeTest(t, select1Want)
tests.RunMCPToolCallMethod(t, failInvocationWant, mcpSelect1Want)
tests.RunExecuteSqlToolInvokeTest(t, createTableStatement, select1Want)
tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam)
}
// Test connection with different IP type
func TestAlloyDBPgIpConnection(t *testing.T) {
sourceConfig := getAlloyDBPgVars(t)
tcs := []struct {
name string
ipType string
}{
{
name: "public ip",
ipType: "public",
},
{
name: "private ip",
ipType: "private",
},
}
for _, tc := range tcs {
t.Run(tc.name, func(t *testing.T) {
sourceConfig["ipType"] = tc.ipType
err := tests.RunSourceConnectionTest(t, sourceConfig, AlloyDBPostgresToolKind)
if err != nil {
t.Fatalf("Connection test failure: %s", err)
}
})
}
}
// Test IAM connection
func TestAlloyDBPgIAMConnection(t *testing.T) {
getAlloyDBPgVars(t)
// service account email used for IAM should trim the suffix
serviceAccountEmail := strings.TrimSuffix(tests.ServiceAccountEmail, ".gserviceaccount.com")
noPassSourceConfig := map[string]any{
"kind": AlloyDBPostgresSourceKind,
"project": AlloyDBPostgresProject,
"cluster": AlloyDBPostgresCluster,
"instance": AlloyDBPostgresInstance,
"region": AlloyDBPostgresRegion,
"database": AlloyDBPostgresDatabase,
"user": serviceAccountEmail,
}
noUserSourceConfig := map[string]any{
"kind": AlloyDBPostgresSourceKind,
"project": AlloyDBPostgresProject,
"cluster": AlloyDBPostgresCluster,
"instance": AlloyDBPostgresInstance,
"region": AlloyDBPostgresRegion,
"database": AlloyDBPostgresDatabase,
"password": "random",
}
noUserNoPassSourceConfig := map[string]any{
"kind": AlloyDBPostgresSourceKind,
"project": AlloyDBPostgresProject,
"cluster": AlloyDBPostgresCluster,
"instance": AlloyDBPostgresInstance,
"region": AlloyDBPostgresRegion,
"database": AlloyDBPostgresDatabase,
}
tcs := []struct {
name string
sourceConfig map[string]any
isErr bool
}{
{
name: "no user no pass",
sourceConfig: noUserNoPassSourceConfig,
isErr: false,
},
{
name: "no password",
sourceConfig: noPassSourceConfig,
isErr: false,
},
{
name: "no user",
sourceConfig: noUserSourceConfig,
isErr: true,
},
}
for _, tc := range tcs {
t.Run(tc.name, func(t *testing.T) {
err := tests.RunSourceConnectionTest(t, tc.sourceConfig, AlloyDBPostgresToolKind)
if err != nil {
if tc.isErr {
return
}
t.Fatalf("Connection test failure: %s", err)
}
if tc.isErr {
t.Fatalf("Expected error but test passed.")
}
})
}
}
```
--------------------------------------------------------------------------------
/internal/tools/bigquery/bigquerysearchcatalog/bigquerysearchcatalog.go:
--------------------------------------------------------------------------------
```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package bigquerysearchcatalog
import (
"context"
"fmt"
"strings"
dataplexapi "cloud.google.com/go/dataplex/apiv1"
dataplexpb "cloud.google.com/go/dataplex/apiv1/dataplexpb"
"github.com/goccy/go-yaml"
"github.com/googleapis/genai-toolbox/internal/sources"
bigqueryds "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
"github.com/googleapis/genai-toolbox/internal/tools"
"google.golang.org/api/iterator"
)
const kind string = "bigquery-search-catalog"
func init() {
if !tools.Register(kind, newConfig) {
panic(fmt.Sprintf("tool kind %q already registered", kind))
}
}
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
actual := Config{Name: name}
if err := decoder.DecodeContext(ctx, &actual); err != nil {
return nil, err
}
return actual, nil
}
type compatibleSource interface {
MakeDataplexCatalogClient() func() (*dataplexapi.CatalogClient, bigqueryds.DataplexClientCreator, error)
BigQueryProject() string
UseClientAuthorization() bool
}
// validate compatible sources are still compatible
var _ compatibleSource = &bigqueryds.Source{}
var compatibleSources = [...]string{bigqueryds.SourceKind}
type Config struct {
Name string `yaml:"name" validate:"required"`
Kind string `yaml:"kind" validate:"required"`
Source string `yaml:"source" validate:"required"`
Description string `yaml:"description"`
AuthRequired []string `yaml:"authRequired"`
}
// validate interface
var _ tools.ToolConfig = Config{}
func (cfg Config) ToolConfigKind() string {
return kind
}
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
// Initialize the search configuration with the provided sources
rawS, ok := srcs[cfg.Source]
if !ok {
return nil, fmt.Errorf("no source named %q configured", cfg.Source)
}
// verify the source is compatible
s, ok := rawS.(compatibleSource)
if !ok {
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
}
// Get the Dataplex client using the method from the source
makeCatalogClient := s.MakeDataplexCatalogClient()
prompt := tools.NewStringParameter("prompt", "Prompt representing search intention. Do not rewrite the prompt.")
datasetIds := tools.NewArrayParameterWithDefault("datasetIds", []any{}, "Array of dataset IDs.", tools.NewStringParameter("datasetId", "The IDs of the bigquery dataset."))
projectIds := tools.NewArrayParameterWithDefault("projectIds", []any{}, "Array of project IDs.", tools.NewStringParameter("projectId", "The IDs of the bigquery project."))
types := tools.NewArrayParameterWithDefault("types", []any{}, "Array of data types to filter by.", tools.NewStringParameter("type", "The type of the data. Accepted values are: CONNECTION, POLICY, DATASET, MODEL, ROUTINE, TABLE, VIEW."))
pageSize := tools.NewIntParameterWithDefault("pageSize", 5, "Number of results in the search page.")
parameters := tools.Parameters{prompt, datasetIds, projectIds, types, pageSize}
description := "Use this tool to find tables, views, models, routines or connections."
if cfg.Description != "" {
description = cfg.Description
}
mcpManifest := tools.GetMcpManifest(cfg.Name, description, cfg.AuthRequired, parameters)
t := Tool{
Name: cfg.Name,
Kind: kind,
Parameters: parameters,
AuthRequired: cfg.AuthRequired,
UseClientOAuth: s.UseClientAuthorization(),
MakeCatalogClient: makeCatalogClient,
ProjectID: s.BigQueryProject(),
manifest: tools.Manifest{
Description: cfg.Description,
Parameters: parameters.Manifest(),
AuthRequired: cfg.AuthRequired,
},
mcpManifest: mcpManifest,
}
return t, nil
}
type Tool struct {
Name string
Kind string
Parameters tools.Parameters
AuthRequired []string
UseClientOAuth bool
MakeCatalogClient func() (*dataplexapi.CatalogClient, bigqueryds.DataplexClientCreator, error)
ProjectID string
manifest tools.Manifest
mcpManifest tools.McpManifest
}
func (t Tool) Authorized(verifiedAuthServices []string) bool {
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}
func (t Tool) RequiresClientAuthorization() bool {
return t.UseClientOAuth
}
// constructSearchQueryHelper builds a single filter clause by OR-ing the
// predicate over every item, e.g. ("projectid", "=", ["a", "b"]) yields
// "(projectid=a OR projectid=b)".
func constructSearchQueryHelper(predicate string, operator string, items []string) string {
if len(items) == 0 {
return ""
}
if len(items) == 1 {
return predicate + operator + items[0]
}
var builder strings.Builder
builder.WriteString("(")
for i, item := range items {
if i > 0 {
builder.WriteString(" OR ")
}
builder.WriteString(predicate)
builder.WriteString(operator)
builder.WriteString(item)
}
builder.WriteString(")")
return builder.String()
}
// constructSearchQuery joins the project, dataset, and type clauses with AND
// and always scopes the search to system=bigquery, e.g.
// "(projectid=a OR projectid=b) AND type=TABLE AND system=bigquery".
func constructSearchQuery(projectIds []string, datasetIds []string, types []string) string {
queryParts := []string{}
if clause := constructSearchQueryHelper("projectid", "=", projectIds); clause != "" {
queryParts = append(queryParts, clause)
}
if clause := constructSearchQueryHelper("parent", "=", datasetIds); clause != "" {
queryParts = append(queryParts, clause)
}
if clause := constructSearchQueryHelper("type", "=", types); clause != "" {
queryParts = append(queryParts, clause)
}
queryParts = append(queryParts, "system=bigquery")
return strings.Join(queryParts, " AND ")
}
type Response struct {
DisplayName string
Description string
Type string
Resource string
DataplexEntry string
}
var typeMap = map[string]string{
"bigquery-connection": "CONNECTION",
"bigquery-data-policy": "POLICY",
"bigquery-dataset": "DATASET",
"bigquery-model": "MODEL",
"bigquery-routine": "ROUTINE",
"bigquery-table": "TABLE",
"bigquery-view": "VIEW",
}
// ExtractType maps a Dataplex entry type suffix (for example "bigquery-table")
// to its short form (for example "TABLE").
func ExtractType(resourceString string) string {
lastIndex := strings.LastIndex(resourceString, "/")
if lastIndex == -1 {
// No "/" found, return the original string
return resourceString
}
return typeMap[resourceString[lastIndex+1:]]
}
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
paramsMap := params.AsMap()
pageSize := int32(paramsMap["pageSize"].(int))
prompt, _ := paramsMap["prompt"].(string)
projectIdSlice, err := tools.ConvertAnySliceToTyped(paramsMap["projectIds"].([]any), "string")
if err != nil {
return nil, fmt.Errorf("can't convert projectIds to array of strings: %s", err)
}
projectIds := projectIdSlice.([]string)
datasetIdSlice, err := tools.ConvertAnySliceToTyped(paramsMap["datasetIds"].([]any), "string")
if err != nil {
return nil, fmt.Errorf("can't convert datasetIds to array of strings: %s", err)
}
datasetIds := datasetIdSlice.([]string)
typesSlice, err := tools.ConvertAnySliceToTyped(paramsMap["types"].([]any), "string")
if err != nil {
return nil, fmt.Errorf("can't convert types to array of strings: %s", err)
}
types := typesSlice.([]string)
req := &dataplexpb.SearchEntriesRequest{
Query: fmt.Sprintf("%s %s", prompt, constructSearchQuery(projectIds, datasetIds, types)),
Name: fmt.Sprintf("projects/%s/locations/global", t.ProjectID),
PageSize: pageSize,
SemanticSearch: true,
}
catalogClient, dataplexClientCreator, _ := t.MakeCatalogClient()
if t.UseClientOAuth {
tokenStr, err := accessToken.ParseBearerToken()
if err != nil {
return nil, fmt.Errorf("error parsing access token: %w", err)
}
catalogClient, err = dataplexClientCreator(tokenStr)
if err != nil {
return nil, fmt.Errorf("error creating client from OAuth access token: %w", err)
}
}
it := catalogClient.SearchEntries(ctx, req)
if it == nil {
return nil, fmt.Errorf("failed to create search entries iterator for project %q", t.ProjectID)
}
var results []Response
for {
entry, err := it.Next()
if err == iterator.Done {
break
}
if err != nil {
break
}
entrySource := entry.DataplexEntry.GetEntrySource()
resp := Response{
DisplayName: entrySource.GetDisplayName(),
Description: entrySource.GetDescription(),
Type: ExtractType(entry.DataplexEntry.GetEntryType()),
Resource: entrySource.GetResource(),
DataplexEntry: entry.DataplexEntry.GetName(),
}
results = append(results, resp)
}
return results, nil
}
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
// Parse parameters from the provided data
return tools.ParseParams(t.Parameters, data, claims)
}
func (t Tool) Manifest() tools.Manifest {
// Returns the tool manifest
return t.manifest
}
func (t Tool) McpManifest() tools.McpManifest {
// Returns the tool MCP manifest
return t.mcpManifest
}
```
--------------------------------------------------------------------------------
/docs/en/resources/tools/http/http.md:
--------------------------------------------------------------------------------
```markdown
---
title: "http"
type: docs
weight: 1
description: >
A "http" tool sends out an HTTP request to an HTTP endpoint.
aliases:
- /resources/tools/http
---
## About
The `http` tool allows you to make HTTP requests to APIs to retrieve data.
An HTTP request is the method by which a client communicates with a server to
retrieve or manipulate resources.
Toolbox allows you to configure the request URL, method, headers, query
parameters, and the request body for an HTTP Tool.
### URL
An HTTP request URL identifies the target the client wants to access.
Toolbox composes the request URL from 3 places:
1. The HTTP Source's `baseUrl`.
2. The HTTP Tool's `path` field.
3. The HTTP Tool's `pathParams` for dynamic path segments composed during Tool
invocation.
For example, the following config allows you to reach different paths of the
same server using multiple Tools:
```yaml
sources:
my-http-source:
kind: http
baseUrl: https://api.example.com
tools:
my-post-tool:
kind: http
source: my-http-source
method: POST
path: /update
description: Tool to update information to the example API
my-get-tool:
kind: http
source: my-http-source
method: GET
path: /search
description: Tool to search information from the example API
my-dynamic-path-tool:
kind: http
source: my-http-source
method: GET
path: /{{.myPathParam}}/search
description: Tool to reach endpoint based on the input to `myPathParam`
pathParams:
- name: myPathParam
type: string
description: The dynamic path parameter
```
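With this configuration, `my-get-tool` sends `GET https://api.example.com/search`,
and invoking `my-dynamic-path-tool` with `myPathParam` set to `users` sends
`GET https://api.example.com/users/search`.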
### Headers
An HTTP request header is a key-value pair sent by a client to a server,
providing additional information about the request, such as the client's
preferences, the request body content type, and other metadata.
Headers specified by the HTTP Tool are combined with its HTTP Source headers for
the resulting HTTP request, and override the Source headers in case of conflict.
The HTTP Tool allows you to specify headers in two different ways:
- Static headers can be specified using the `headers` field, and will be the
same for every invocation:
```yaml
my-http-tool:
kind: http
source: my-http-source
method: GET
path: /search
description: Tool to search data from API
headers:
Authorization: API_KEY
Content-Type: application/json
```
- Dynamic headers can be specified as parameters in the `headerParams` field.
The `name` of the `headerParams` will be used as the header key, and the value
is determined by the LLM input upon Tool invocation:
```yaml
my-http-tool:
kind: http
source: my-http-source
method: GET
path: /search
description: some description
headerParams:
- name: Content-Type # Example LLM input: "application/json"
description: request content type
type: string
```
### Query parameters
Query parameters are key-value pairs appended to a URL after a question mark (?)
to provide additional information to the server for processing the request, like
filtering or sorting data.
- Static request query parameters should be specified in the `path` as part of
the URL itself:
```yaml
my-http-tool:
kind: http
source: my-http-source
method: GET
path: /search?language=en&id=1
description: Tool to search for item with ID 1 in English
```
- Dynamic request query parameters should be specified as parameters in the
`queryParams` section:
```yaml
my-http-tool:
kind: http
source: my-http-source
method: GET
path: /search
description: Tool to search for item with ID
queryParams:
- name: id
description: item ID
type: integer
```
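For example, if the LLM supplies an `id` of `1`, the request is sent to
`/search?id=1` on the configured source.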
### Request body
The request body payload is a string that supports parameter replacement
following [Go template][go-template-doc]'s annotations.
The parameter names in the `requestBody` should be preceded by "." and enclosed
by double curly brackets "{{}}". The values will be populated into the request
body payload upon Tool invocation.
Example:
```yaml
my-http-tool:
kind: http
source: my-http-source
method: GET
path: /search
description: Tool to search for person with name and age
requestBody: |
{
"age": {{.age}},
"name": "{{.name}}"
}
bodyParams:
- name: age
description: age number
type: integer
- name: name
description: name string
type: string
```
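At invocation time the placeholders are substituted with the LLM-supplied
values; for instance, an `age` of `18` and a `name` of `Alice` would render the
request body as:
```json
{
  "age": 18,
  "name": "Alice"
}
```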
#### Formatting Parameters
Some complex parameters (such as arrays) may require additional formatting to
match the expected output. For convenience, you can specify one of the following
pre-defined functions before the parameter name to format it:
##### JSON
The `json` keyword converts a parameter into JSON format.
{{< notice note >}}
Using `json` adds quotes around the rendered value for certain types (such as
strings), which can produce duplicate quotes if the placeholder is already
wrapped in quotes.
{{< /notice >}}
Example:
```yaml
requestBody: |
{
"age": {{json .age}},
"name": {{json .name}},
"nickname": "{{json .nickname}}",
"nameArray": {{json .nameArray}}
}
```
renders the following request body:
```yaml
{
"age": 18,
"name": "Katherine",
"nickname": ""Kat"", # Duplicate quotes
"nameArray": ["A", "B", "C"]
}
```
## Example
```yaml
my-http-tool:
kind: http
source: my-http-source
method: GET
path: /search
description: some description
authRequired:
- my-google-auth-service
- other-auth-service
queryParams:
- name: country
description: some description
type: string
requestBody: |
{
"age": {{.age}},
"city": "{{.city}}"
}
bodyParams:
- name: age
description: age number
type: integer
- name: city
description: city string
type: string
headers:
Authorization: API_KEY
Content-Type: application/json
headerParams:
- name: Language
description: language string
type: string
```
## Reference
| **field** | **type** | **required** | **description** |
|--------------|:------------------------------------------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "http". |
| source | string | true | Name of the source the HTTP request should be sent to. |
| description | string | true | Description of the tool that is passed to the LLM. |
| path | string | true | The path of the HTTP request. You can include static query parameters in the path string. |
| method | string | true | The HTTP method to use (e.g., GET, POST, PUT, DELETE). |
| headers | map[string]string | false | A map of headers to include in the HTTP request (overrides source headers). |
| requestBody | string | false | The request body payload. Use [go template][go-template-doc] with the parameter name as the placeholder (e.g., `{{.id}}` will be replaced with the value of the parameter that has name `id` in the `bodyParams` section). |
| queryParams | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted into the query string. |
| bodyParams | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted into the request body payload. |
| headerParams | [parameters](../#specifying-parameters) | false | List of [parameters](../#specifying-parameters) that will be inserted as the request headers. |
[go-template-doc]: <https://pkg.go.dev/text/template#pkg-overview>
```
--------------------------------------------------------------------------------
/docs/en/how-to/connect-ide/postgres_mcp.md:
--------------------------------------------------------------------------------
```markdown
---
title: "PostgreSQL using MCP"
type: docs
weight: 2
description: >
Connect your IDE to PostgreSQL using Toolbox.
---
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is
an open protocol for connecting Large Language Models (LLMs) to data sources
like Postgres. This guide covers how to use [MCP Toolbox for Databases][toolbox]
to expose your developer assistant tools to a Postgres instance:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
* [Visual Studio Code][vscode] (Copilot)
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Gemini CLI][geminicli]
* [Gemini Code Assist][geminicodeassist]
[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[geminicli]: #configure-your-mcp-client
[geminicodeassist]: #configure-your-mcp-client
{{< notice tip >}}
This guide can be used with [AlloyDB
Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
{{< /notice >}}
## Set up the database
1. Create or select a PostgreSQL instance.
* [Install PostgreSQL locally](https://www.postgresql.org/download/)
* [Install AlloyDB Omni](https://cloud.google.com/alloydb/omni/current/docs/quickstart)
1. Create or reuse [a database
user](https://cloud.google.com/alloydb/omni/current/docs/database-users/manage-users)
and have the username and password ready.
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
to your OS and CPU architecture. You must use Toolbox version v0.6.0 or
later:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.18.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Verify the installation:
```bash
./toolbox --version
```
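As an optional sanity check before configuring an IDE, you can export the
connection variables and launch the same command your MCP client will run.
This is only an illustrative smoke test; the values below are placeholders:
```bash
# Example values only - substitute your own connection details.
export POSTGRES_HOST="127.0.0.1"
export POSTGRES_PORT="5432"
export POSTGRES_DATABASE="mydb"
export POSTGRES_USER="myuser"
export POSTGRES_PASSWORD="mypassword"
./toolbox --prebuilt postgres --stdio
```
If the settings are correct, the process should start and wait for MCP
messages on stdin; stop it with Ctrl+C.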
## Configure your MCP Client
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"postgres": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","postgres","--stdio"],
"env": {
"POSTGRES_HOST": "",
"POSTGRES_PORT": "",
"POSTGRES_DATABASE": "",
"POSTGRES_USER": "",
"POSTGRES_PASSWORD": ""
}
}
}
}
```
1. Restart Claude code to apply the new configuration.
{{% /tab %}}
{{% tab header="Claude desktop" lang="en" %}}
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"postgres": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","postgres","--stdio"],
"env": {
"POSTGRES_HOST": "",
"POSTGRES_PORT": "",
"POSTGRES_DATABASE": "",
"POSTGRES_USER": "",
"POSTGRES_PASSWORD": ""
}
}
}
}
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap
the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"postgres": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","postgres","--stdio"],
"env": {
"POSTGRES_HOST": "",
"POSTGRES_PORT": "",
"POSTGRES_DATABASE": "",
"POSTGRES_USER": "",
"POSTGRES_PASSWORD": ""
}
}
}
}
```
1. You should see a green active status after the server is successfully
connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}
1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"postgres": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","postgres","--stdio"],
"env": {
"POSTGRES_HOST": "",
"POSTGRES_PORT": "",
"POSTGRES_DATABASE": "",
"POSTGRES_USER": "",
"POSTGRES_PASSWORD": ""
}
}
}
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
Settings > MCP**. You should see a green active status after the server is
successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"servers": {
"postgres": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","postgres","--stdio"],
"env": {
"POSTGRES_HOST": "",
"POSTGRES_PORT": "",
"POSTGRES_DATABASE": "",
"POSTGRES_USER": "",
"POSTGRES_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
"mcpServers": {
"postgres": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","postgres","--stdio"],
"env": {
"POSTGRES_HOST": "",
"POSTGRES_PORT": "",
"POSTGRES_DATABASE": "",
"POSTGRES_USER": "",
"POSTGRES_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:
```json
{
"mcpServers": {
"postgres": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","postgres","--stdio"],
"env": {
"POSTGRES_HOST": "",
"POSTGRES_PORT": "",
"POSTGRES_DATABASE": "",
"POSTGRES_USER": "",
"POSTGRES_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:
```json
{
"mcpServers": {
"postgres": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","postgres","--stdio"],
"env": {
"POSTGRES_HOST": "",
"POSTGRES_PORT": "",
"POSTGRES_DATABASE": "",
"POSTGRES_USER": "",
"POSTGRES_PASSWORD": ""
}
}
}
}
```
{{% /tab %}}
{{< /tabpane >}}
## Use Tools
Your AI tool is now connected to Postgres using MCP. Try asking your AI
assistant to list tables, create a table, or define and execute other SQL
statements.
The following tools are available to the LLM:
1. **list_tables**: lists tables and descriptions
1. **execute_sql**: executes any SQL statement
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}
```
--------------------------------------------------------------------------------
/tests/sqlite/sqlite_integration_test.go:
--------------------------------------------------------------------------------
```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package sqlite
import (
"context"
"database/sql"
"fmt"
"io"
"net/http"
"os"
"regexp"
"strings"
"testing"
"time"
"github.com/google/uuid"
"github.com/googleapis/genai-toolbox/internal/testutils"
"github.com/googleapis/genai-toolbox/tests"
)
var (
SQLiteSourceKind = "sqlite"
SQLiteToolKind = "sqlite-sql"
SQLiteDatabase = os.Getenv("SQLITE_DATABASE")
)
func getSQLiteVars(t *testing.T) map[string]any {
return map[string]any{
"kind": SQLiteSourceKind,
"database": SQLiteDatabase,
}
}
func initSQLiteDb(t *testing.T, sqliteDb string) (*sql.DB, func(t *testing.T), string, error) {
if sqliteDb == "" {
// Create a temporary database file
tmpFile, err := os.CreateTemp("", "test-*.db")
if err != nil {
return nil, nil, "", fmt.Errorf("failed to create temp file: %v", err)
}
sqliteDb = tmpFile.Name()
}
// Open database connection
db, err := sql.Open("sqlite", sqliteDb)
if err != nil {
return nil, nil, "", fmt.Errorf("failed to open database: %v", err)
}
cleanup := func(t *testing.T) {
if err := os.Remove(sqliteDb); err != nil {
t.Errorf("Failed to remove test database: %s", err)
}
}
return db, cleanup, sqliteDb, nil
}
// setupSQLiteTestDB creates a test table in the SQLite database and seeds it with data
func setupSQLiteTestDB(t *testing.T, ctx context.Context, db *sql.DB, createStatement string, insertStatement string, tableName string, params []any) {
// Create test table
_, err := db.ExecContext(ctx, createStatement)
if err != nil {
t.Fatalf("unable to connect to create test table %s: %s", tableName, err)
}
_, err = db.ExecContext(ctx, insertStatement, params...)
if err != nil {
t.Fatalf("unable to insert test data: %s", err)
}
}
func getSQLiteParamToolInfo(tableName string) (string, string, string, string, string, string, []any) {
createStatement := fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s (id INTEGER PRIMARY KEY, name TEXT);", tableName)
insertStatement := fmt.Sprintf("INSERT INTO %s (name) VALUES (?), (?), (?), (?);", tableName)
toolStatement := fmt.Sprintf("SELECT * FROM %s WHERE id = ? OR name = ?;", tableName)
idToolStatement := fmt.Sprintf("SELECT * FROM %s WHERE id = ?;", tableName)
nameToolStatement := fmt.Sprintf("SELECT * FROM %s WHERE name = ?;", tableName)
arrayToolStatement := fmt.Sprintf("SELECT * FROM %s WHERE id = ANY({{.idArray}}) AND name = ANY({{.nameArray}});", tableName)
params := []any{"Alice", "Jane", "Sid", nil}
return createStatement, insertStatement, toolStatement, idToolStatement, nameToolStatement, arrayToolStatement, params
}
func getSQLiteAuthToolInfo(tableName string) (string, string, string, []any) {
createStatement := fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s (id INTEGER PRIMARY KEY, name TEXT NOT NULL, email TEXT)", tableName)
insertStatement := fmt.Sprintf("INSERT INTO %s (name, email) VALUES (?, ?), (?,?) RETURNING id, name, email;", tableName)
toolStatement := fmt.Sprintf("SELECT name FROM %s WHERE email = ?;", tableName)
params := []any{"Alice", tests.ServiceAccountEmail, "Jane", "[email protected]"}
return createStatement, insertStatement, toolStatement, params
}
func getSQLiteTmplToolStatement() (string, string) {
tmplSelectCombined := "SELECT * FROM {{.tableName}} WHERE id = ?"
tmplSelectFilterCombined := "SELECT * FROM {{.tableName}} WHERE {{.columnFilter}} = ?"
return tmplSelectCombined, tmplSelectFilterCombined
}
func TestSQLiteToolEndpoint(t *testing.T) {
db, teardownDb, sqliteDb, err := initSQLiteDb(t, SQLiteDatabase)
if err != nil {
t.Fatal(err)
}
defer teardownDb(t)
defer db.Close()
sourceConfig := getSQLiteVars(t)
sourceConfig["database"] = sqliteDb
ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
defer cancel()
var args []string
// create table name with UUID
tableNameParam := "param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
tableNameAuth := "auth_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
tableNameTemplateParam := "template_param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
// set up data for param tool
createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := getSQLiteParamToolInfo(tableNameParam)
setupSQLiteTestDB(t, ctx, db, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
// set up data for auth tool
createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := getSQLiteAuthToolInfo(tableNameAuth)
setupSQLiteTestDB(t, ctx, db, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
// Write config into a file and pass it to command
toolsFile := tests.GetToolsConfig(sourceConfig, SQLiteToolKind, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, authToolStmt)
tmplSelectCombined, tmplSelectFilterCombined := getSQLiteTmplToolStatement()
toolsFile = tests.AddTemplateParamConfig(t, toolsFile, SQLiteToolKind, tmplSelectCombined, tmplSelectFilterCombined, "")
cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
if err != nil {
t.Fatalf("command initialization returned an error: %s", err)
}
defer cleanup()
waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
defer cancel()
out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
if err != nil {
t.Logf("toolbox command logs: \n%s", out)
t.Fatalf("toolbox didn't start successfully: %s", err)
}
// Get configs for tests
select1Want := "[{\"1\":1}]"
mcpMyFailToolWant := `{"jsonrpc":"2.0","id":"invoke-fail-tool","result":{"content":[{"type":"text","text":"unable to execute query: SQL logic error: near \"SELEC\": syntax error (1)"}],"isError":true}}`
mcpSelect1Want := `{"jsonrpc":"2.0","id":"invoke my-auth-required-tool","result":{"content":[{"type":"text","text":"{\"1\":1}"}]}}`
// Run tests
tests.RunToolGetTest(t)
tests.RunToolInvokeTest(t, select1Want, tests.DisableArrayTest())
tests.RunMCPToolCallMethod(t, mcpMyFailToolWant, mcpSelect1Want)
tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam)
}
func TestSQLiteExecuteSqlTool(t *testing.T) {
db, teardownDb, sqliteDb, err := initSQLiteDb(t, SQLiteDatabase)
if err != nil {
t.Fatal(err)
}
defer teardownDb(t)
defer db.Close()
sourceConfig := getSQLiteVars(t)
sourceConfig["database"] = sqliteDb
ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
defer cancel()
// Create a table and insert data
tableName := "exec_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
createStmt := fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s (id INTEGER PRIMARY KEY, name TEXT);", tableName)
insertStmt := fmt.Sprintf("INSERT INTO %s (name) VALUES (?);", tableName)
params := []any{"Bob"}
setupSQLiteTestDB(t, ctx, db, createStmt, insertStmt, tableName, params)
// Add sqlite-execute-sql tool config
toolConfig := map[string]any{
"tools": map[string]any{
"my-exec-sql-tool": map[string]any{
"kind": "sqlite-execute-sql",
"source": "my-instance",
"description": "Tool to execute SQL statements",
},
},
"sources": map[string]any{
"my-instance": sourceConfig,
},
}
cmd, cleanup, err := tests.StartCmd(ctx, toolConfig)
if err != nil {
t.Fatalf("command initialization returned an error: %s", err)
}
defer cleanup()
waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
defer cancel()
out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
if err != nil {
t.Logf("toolbox command logs: \n%s", out)
t.Fatalf("toolbox didn't start successfully: %s", err)
}
// Table-driven test cases
testCases := []struct {
name string
sql string
wantStatus int
wantBody string
}{
{
name: "select existing row",
sql: fmt.Sprintf("SELECT name FROM %s WHERE id = 1", tableName),
wantStatus: 200,
wantBody: "Bob",
},
{
name: "select no rows",
sql: fmt.Sprintf("SELECT name FROM %s WHERE id = 999", tableName),
wantStatus: 200,
wantBody: "null",
},
{
name: "invalid SQL",
sql: "SELEC name FROM not_a_table",
wantStatus: 400,
wantBody: "SQL logic error",
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
api := "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke"
reqBody := strings.NewReader(fmt.Sprintf(`{"sql":"%s"}`, tc.sql))
req, err := http.NewRequest("POST", api, reqBody)
if err != nil {
t.Fatalf("unable to create request: %s", err)
}
req.Header.Set("Content-Type", "application/json")
resp, err := http.DefaultClient.Do(req)
if err != nil {
t.Fatalf("unable to send request: %s", err)
}
defer resp.Body.Close()
bodyBytes, err := io.ReadAll(resp.Body)
if err != nil {
t.Fatalf("unable to read response: %s", err)
}
if resp.StatusCode != tc.wantStatus {
t.Fatalf("unexpected status: %d, body: %s", resp.StatusCode, string(bodyBytes))
}
if tc.wantBody != "" && !strings.Contains(string(bodyBytes), tc.wantBody) {
t.Fatalf("expected body to contain %q, got: %s", tc.wantBody, string(bodyBytes))
}
})
}
}
```
--------------------------------------------------------------------------------
/internal/prebuiltconfigs/tools/cloud-sql-mssql-observability.yaml:
--------------------------------------------------------------------------------
```yaml
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
sources:
cloud-monitoring-source:
kind: cloud-monitoring
tools:
get_system_metrics:
kind: cloud-monitoring-query-prometheus
source: cloud-monitoring-source
description: |
Fetches system level cloudmonitoring data (timeseries metrics) for a SqlServer instance using a PromQL query. Take projectId and instanceId from the user for which the metrics timeseries data needs to be fetched.
To use this tool, you must provide the Google Cloud `projectId` and a PromQL `query`.
Generate PromQL `query` for SqlServer system metrics. Use the provided metrics and rules to construct queries, Get the labels like `instance_id` from user intent.
Defaults:
1. Interval: Use a default interval of `5m` for `_over_time` aggregation functions unless a different window is specified by the user.
PromQL Query Examples:
1. Basic Time Series: `avg_over_time({"__name__"="cloudsql.googleapis.com/database/cpu/utilization","monitored_resource"="cloudsql_database","project_id"="my-projectId","database_id"="my-projectId:my-instanceId"}[5m])`
2. Top K: `topk(30, avg_over_time({"__name__"="cloudsql.googleapis.com/database/cpu/utilization","monitored_resource"="cloudsql_database","project_id"="my-projectId","database_id"="my-projectId:my-instanceId"}[5m]))`
3. Mean: `avg(avg_over_time({"__name__"="cloudsql.googleapis.com/database/cpu/utilization","monitored_resource"="cloudsql_database","project_id"="my-projectId","database_id"="my-projectId:my-instanceId"}[5m]))`
4. Minimum: `min(min_over_time({"__name__"="cloudsql.googleapis.com/database/cpu/utilization","monitored_resource"="cloudsql_database","project_id"="my-projectId","database_id"="my-projectId:my-instanceId"}[5m]))`
5. Maximum: `max(max_over_time({"__name__"="cloudsql.googleapis.com/database/cpu/utilization","monitored_resource"="cloudsql_database","project_id"="my-projectId","database_id"="my-projectId:my-instanceId"}[5m]))`
6. Sum: `sum(avg_over_time({"__name__"="cloudsql.googleapis.com/database/cpu/utilization","monitored_resource"="cloudsql_database","project_id"="my-projectId","database_id"="my-projectId:my-instanceId"}[5m]))`
7. Count streams: `count(avg_over_time({"__name__"="cloudsql.googleapis.com/database/cpu/utilization","monitored_resource"="cloudsql_database","project_id"="my-projectId","database_id"="my-projectId:my-instanceId"}[5m]))`
8. Percentile with groupby on database_id: `quantile by ("database_id")(0.99,avg_over_time({"__name__"="cloudsql.googleapis.com/database/cpu/utilization","monitored_resource"="cloudsql_database","project_id"="my-projectId","database_id"="my-projectId:my-instanceId"}[5m]))`
Available Metrics List (format: metric name. description. monitored resource. labels.). Note that `database_id` is actually the instance id, and its format is `project_id:instance_id`.
1. `cloudsql.googleapis.com/database/cpu/utilization`: Current CPU utilization as a percentage of the reserved CPU. `cloudsql_database`. `database`, `project_id`, `database_id`.
2. `cloudsql.googleapis.com/database/memory/usage`: RAM usage in bytes, excluding buffer/cache. `cloudsql_database`. `database`, `project_id`, `database_id`.
3. `cloudsql.googleapis.com/database/memory/total_usage`: Total RAM usage in bytes, including buffer/cache. `cloudsql_database`. `database`, `project_id`, `database_id`.
4. `cloudsql.googleapis.com/database/disk/bytes_used`: Data utilization in bytes. `cloudsql_database`. `database`, `project_id`, `database_id`.
5. `cloudsql.googleapis.com/database/disk/quota`: Maximum data disk size in bytes. `cloudsql_database`. `database`, `project_id`, `database_id`.
6. `cloudsql.googleapis.com/database/disk/read_ops_count`: Delta count of data disk read IO operations. `cloudsql_database`. `database`, `project_id`, `database_id`.
7. `cloudsql.googleapis.com/database/disk/write_ops_count`: Delta count of data disk write IO operations. `cloudsql_database`. `database`, `project_id`, `database_id`.
8. `cloudsql.googleapis.com/database/network/received_bytes_count`: Delta count of bytes received through the network. `cloudsql_database`. `database`, `project_id`, `database_id`.
9. `cloudsql.googleapis.com/database/network/sent_bytes_count`: Delta count of bytes sent through the network. `cloudsql_database`. `destination`, `database`, `project_id`, `database_id`.
10. `cloudsql.googleapis.com/database/sqlserver/memory/buffer_cache_hit_ratio`: Current percentage of pages found in the buffer cache without reading from disk. `cloudsql_database`. `database`, `project_id`, `database_id`.
11. `cloudsql.googleapis.com/database/sqlserver/memory/memory_grants_pending`: Current number of processes waiting for a workspace memory grant. `cloudsql_database`. `database`, `project_id`, `database_id`.
12. `cloudsql.googleapis.com/database/sqlserver/memory/free_list_stall_count`: Total number of requests that waited for a free page. `cloudsql_database`. `database`, `project_id`, `database_id`.
13. `cloudsql.googleapis.com/database/swap/pages_swapped_in_count`: Total count of pages swapped in from disk since the system was booted. `cloudsql_database`. `database`, `project_id`, `database_id`.
14. `cloudsql.googleapis.com/database/swap/pages_swapped_out_count`: Total count of pages swapped out to disk since the system was booted. `cloudsql_database`. `database`, `project_id`, `database_id`.
15. `cloudsql.googleapis.com/database/sqlserver/memory/checkpoint_page_count`: Total number of pages flushed to disk by a checkpoint. `cloudsql_database`. `database`, `project_id`, `database_id`.
16. `cloudsql.googleapis.com/database/sqlserver/memory/lazy_write_count`: Total number of buffers written by the buffer manager's lazy writer. `cloudsql_database`. `database`, `project_id`, `database_id`.
17. `cloudsql.googleapis.com/database/sqlserver/memory/page_life_expectancy`: Current number of seconds a page will stay in the buffer pool. `cloudsql_database`. `database`, `project_id`, `database_id`.
18. `cloudsql.googleapis.com/database/sqlserver/memory/page_operation_count`: Total number of physical database page reads or writes. `cloudsql_database`. `operation`, `database`, `project_id`, `database_id`.
19. `cloudsql.googleapis.com/database/sqlserver/transactions/page_split_count`: Total number of page splits from overflowing index pages. `cloudsql_database`. `database`, `project_id`, `database_id`.
20. `cloudsql.googleapis.com/database/sqlserver/transactions/deadlock_count`: Total number of lock requests that resulted in a deadlock. `cloudsql_database`. `locked_resource`, `database`, `project_id`, `database_id`.
21. `cloudsql.googleapis.com/database/sqlserver/transactions/transaction_count`: Total number of transactions started. `cloudsql_database`. `database`, `project_id`, `database_id`.
22. `cloudsql.googleapis.com/database/sqlserver/transactions/batch_request_count`: Total number of Transact-SQL command batches received. `cloudsql_database`. `database`, `project_id`, `database_id`.
23. `cloudsql.googleapis.com/database/sqlserver/transactions/sql_compilation_count`: Total number of SQL compilations. `cloudsql_database`. `database`, `project_id`, `database_id`.
24. `cloudsql.googleapis.com/database/sqlserver/transactions/sql_recompilation_count`: Total number of SQL recompilations. `cloudsql_database`. `database`, `project_id`, `database_id`.
25. `cloudsql.googleapis.com/database/sqlserver/connections/processes_blocked`: Current number of blocked processes. `cloudsql_database`. `database`, `project_id`, `database_id`.
26. `cloudsql.googleapis.com/database/sqlserver/transactions/lock_wait_time`: Total time lock requests were waiting for locks. `cloudsql_database`. `locked_resource`, `database`, `project_id`, `database_id`.
27. `cloudsql.googleapis.com/database/sqlserver/transactions/lock_wait_count`: Total number of lock requests that required the caller to wait. `cloudsql_database`. `locked_resource`, `database`, `project_id`, `database_id`.
28. `cloudsql.googleapis.com/database/network/connections`: Number of connections to databases on the instance. `cloudsql_database`. `database`, `project_id`, `database_id`.
29. `cloudsql.googleapis.com/database/sqlserver/connections/login_attempt_count`: Total number of login attempts since the last server restart. `cloudsql_database`. `database`, `project_id`, `database_id`.
30. `cloudsql.googleapis.com/database/sqlserver/connections/logout_count`: Total number of logout operations since the last server restart. `cloudsql_database`. `database`, `project_id`, `database_id`.
31. `cloudsql.googleapis.com/database/sqlserver/connections/connection_reset_count`: Total number of logins started from the connection pool since the last server restart. `cloudsql_database`. `database`, `project_id`, `database_id`.
32. `cloudsql.googleapis.com/database/sqlserver/transactions/full_scan_count`: Total number of unrestricted full scans (base-table or full-index). `cloudsql_database`. `database`, `project_id`, `database_id`.
toolsets:
cloud_sql_mssql_cloud_monitoring_tools:
- get_system_metrics
```
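The examples above follow a fixed pattern: a metric name, the `cloudsql_database` monitored resource, and a `database_id` of the form `project_id:instance_id`, wrapped in an `_over_time` aggregation with the default `5m` window. The Go sketch below is illustrative only (the `buildCPUQuery` helper and the identifiers are hypothetical, not part of this repository); it assembles the first documented example query from a project and instance ID before it would be passed as the tool's `query` parameter.
```go
package main

import "fmt"

// buildCPUQuery assembles the "Basic Time Series" PromQL example from the
// tool description above, substituting the caller's project and instance IDs.
// The metric name and label set mirror the documented example; the 5m window
// matches the tool's default interval for _over_time aggregations.
func buildCPUQuery(projectID, instanceID string) string {
	databaseID := fmt.Sprintf("%s:%s", projectID, instanceID) // database_id is project_id:instance_id
	return fmt.Sprintf(
		`avg_over_time({"__name__"="cloudsql.googleapis.com/database/cpu/utilization",`+
			`"monitored_resource"="cloudsql_database","project_id"=%q,"database_id"=%q}[5m])`,
		projectID, databaseID)
}

func main() {
	// Hypothetical identifiers, for illustration only.
	fmt.Println(buildCPUQuery("my-projectId", "my-instanceId"))
}
```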
--------------------------------------------------------------------------------
/internal/tools/http/http.go:
--------------------------------------------------------------------------------
```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package http
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"slices"
"strings"
"maps"
"text/template"
yaml "github.com/goccy/go-yaml"
"github.com/googleapis/genai-toolbox/internal/sources"
httpsrc "github.com/googleapis/genai-toolbox/internal/sources/http"
"github.com/googleapis/genai-toolbox/internal/tools"
)
const kind string = "http"
func init() {
if !tools.Register(kind, newConfig) {
panic(fmt.Sprintf("tool kind %q already registered", kind))
}
}
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
actual := Config{Name: name}
if err := decoder.DecodeContext(ctx, &actual); err != nil {
return nil, err
}
return actual, nil
}
type Config struct {
Name string `yaml:"name" validate:"required"`
Kind string `yaml:"kind" validate:"required"`
Source string `yaml:"source" validate:"required"`
Description string `yaml:"description" validate:"required"`
AuthRequired []string `yaml:"authRequired"`
Path string `yaml:"path" validate:"required"`
Method tools.HTTPMethod `yaml:"method" validate:"required"`
Headers map[string]string `yaml:"headers"`
RequestBody string `yaml:"requestBody"`
PathParams tools.Parameters `yaml:"pathParams"`
QueryParams tools.Parameters `yaml:"queryParams"`
BodyParams tools.Parameters `yaml:"bodyParams"`
HeaderParams tools.Parameters `yaml:"headerParams"`
}
// validate interface
var _ tools.ToolConfig = Config{}
func (cfg Config) ToolConfigKind() string {
return kind
}
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
// verify source exists
rawS, ok := srcs[cfg.Source]
if !ok {
return nil, fmt.Errorf("no source named %q configured", cfg.Source)
}
// verify the source is compatible
s, ok := rawS.(*httpsrc.Source)
if !ok {
return nil, fmt.Errorf("invalid source for %q tool: source kind must be `http`", kind)
}
// Combine Source and Tool headers.
// In case of conflict, Tool header overrides Source header
combinedHeaders := make(map[string]string)
maps.Copy(combinedHeaders, s.DefaultHeaders)
maps.Copy(combinedHeaders, cfg.Headers)
// Create a slice for all parameters
allParameters := slices.Concat(cfg.PathParams, cfg.BodyParams, cfg.HeaderParams, cfg.QueryParams)
// Verify no duplicate parameter names
err := tools.CheckDuplicateParameters(allParameters)
if err != nil {
return nil, err
}
// Create Toolbox manifest
paramManifest := allParameters.Manifest()
if paramManifest == nil {
paramManifest = make([]tools.ParameterManifest, 0)
}
// Create MCP manifest
mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, allParameters)
// finish tool setup
return Tool{
Name: cfg.Name,
Kind: kind,
BaseURL: s.BaseURL,
Path: cfg.Path,
Method: cfg.Method,
AuthRequired: cfg.AuthRequired,
RequestBody: cfg.RequestBody,
PathParams: cfg.PathParams,
QueryParams: cfg.QueryParams,
BodyParams: cfg.BodyParams,
HeaderParams: cfg.HeaderParams,
Headers: combinedHeaders,
DefaultQueryParams: s.QueryParams,
Client: s.Client,
AllParams: allParameters,
manifest: tools.Manifest{Description: cfg.Description, Parameters: paramManifest, AuthRequired: cfg.AuthRequired},
mcpManifest: mcpManifest,
}, nil
}
// validate interface
var _ tools.Tool = Tool{}
type Tool struct {
Name string `yaml:"name"`
Kind string `yaml:"kind"`
Description string `yaml:"description"`
AuthRequired []string `yaml:"authRequired"`
BaseURL string `yaml:"baseURL"`
Path string `yaml:"path"`
Method tools.HTTPMethod `yaml:"method"`
Headers map[string]string `yaml:"headers"`
DefaultQueryParams map[string]string `yaml:"defaultQueryParams"`
RequestBody string `yaml:"requestBody"`
PathParams tools.Parameters `yaml:"pathParams"`
QueryParams tools.Parameters `yaml:"queryParams"`
BodyParams tools.Parameters `yaml:"bodyParams"`
HeaderParams tools.Parameters `yaml:"headerParams"`
AllParams tools.Parameters `yaml:"allParams"`
Client *http.Client
manifest tools.Manifest
mcpManifest tools.McpManifest
}
// Helper function to generate the HTTP request body upon Tool invocation.
func getRequestBody(bodyParams tools.Parameters, requestBodyPayload string, paramsMap map[string]any) (string, error) {
bodyParamValues, err := tools.GetParams(bodyParams, paramsMap)
if err != nil {
return "", err
}
bodyParamsMap := bodyParamValues.AsMap()
requestBodyStr, err := tools.PopulateTemplateWithJSON("HTTPToolRequestBody", requestBodyPayload, bodyParamsMap)
if err != nil {
return "", err
}
return requestBodyStr, nil
}
// Helper function to generate the HTTP request URL upon Tool invocation.
func getURL(baseURL, path string, pathParams, queryParams tools.Parameters, defaultQueryParams map[string]string, paramsMap map[string]any) (string, error) {
// use Go template to replace path params
pathParamValues, err := tools.GetParams(pathParams, paramsMap)
if err != nil {
return "", err
}
pathParamsMap := pathParamValues.AsMap()
templ, err := template.New("url").Parse(path)
if err != nil {
return "", fmt.Errorf("error parsing URL: %s", err)
}
var templatedPath bytes.Buffer
err = templ.Execute(&templatedPath, pathParamsMap)
if err != nil {
return "", fmt.Errorf("error replacing pathParams: %s", err)
}
// Create URL based on BaseURL and Path
// Attach query parameters
parsedURL, err := url.Parse(baseURL + templatedPath.String())
if err != nil {
return "", fmt.Errorf("error parsing URL: %s", err)
}
// Get existing query parameters from the URL
queryParameters := parsedURL.Query()
for key, value := range defaultQueryParams {
queryParameters.Add(key, value)
}
parsedURL.RawQuery = queryParameters.Encode()
// Set dynamic query parameters
query := parsedURL.Query()
for _, p := range queryParams {
v, ok := paramsMap[p.GetName()]
if !ok || v == nil {
if !p.GetRequired() {
// If the param is not required AND
// not provided OR provided with a nil value,
// it is omitted from the URL.
continue
}
v = ""
}
query.Add(p.GetName(), fmt.Sprintf("%v", v))
}
parsedURL.RawQuery = query.Encode()
return parsedURL.String(), nil
}
// Helper function to generate the HTTP headers upon Tool invocation.
func getHeaders(headerParams tools.Parameters, defaultHeaders map[string]string, paramsMap map[string]any) (map[string]string, error) {
// Populate header params
allHeaders := make(map[string]string)
maps.Copy(allHeaders, defaultHeaders)
for _, p := range headerParams {
headerValue, ok := paramsMap[p.GetName()]
if ok {
if strValue, ok := headerValue.(string); ok {
allHeaders[p.GetName()] = strValue
} else {
return nil, fmt.Errorf("header param %s got value of type %t, not string", p.GetName(), headerValue)
}
}
}
return allHeaders, nil
}
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
paramsMap := params.AsMap()
// Calculate request body
requestBody, err := getRequestBody(t.BodyParams, t.RequestBody, paramsMap)
if err != nil {
return nil, fmt.Errorf("error populating request body: %s", err)
}
// Calculate URL
urlString, err := getURL(t.BaseURL, t.Path, t.PathParams, t.QueryParams, t.DefaultQueryParams, paramsMap)
if err != nil {
return nil, fmt.Errorf("error populating path parameters: %s", err)
}
req, err := http.NewRequest(string(t.Method), urlString, strings.NewReader(requestBody))
if err != nil {
return nil, fmt.Errorf("error creating HTTP request: %s", err)
}
// Calculate request headers
allHeaders, err := getHeaders(t.HeaderParams, t.Headers, paramsMap)
if err != nil {
return nil, fmt.Errorf("error populating request headers: %s", err)
}
// Set request headers
for k, v := range allHeaders {
req.Header.Set(k, v)
}
// Make request and fetch response
resp, err := t.Client.Do(req)
if err != nil {
return nil, fmt.Errorf("error making HTTP request: %s", err)
}
defer resp.Body.Close()
var body []byte
body, err = io.ReadAll(resp.Body)
if err != nil {
return nil, err
}
if resp.StatusCode < 200 || resp.StatusCode > 299 {
return nil, fmt.Errorf("unexpected status code: %d, response body: %s", resp.StatusCode, string(body))
}
var data any
if err = json.Unmarshal(body, &data); err != nil {
// if unable to unmarshal data, return result as string.
return string(body), nil
}
return data, nil
}
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
return tools.ParseParams(t.AllParams, data, claims)
}
func (t Tool) Manifest() tools.Manifest {
return t.manifest
}
func (t Tool) McpManifest() tools.McpManifest {
return t.mcpManifest
}
func (t Tool) Authorized(verifiedAuthServices []string) bool {
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}
func (t Tool) RequiresClientAuthorization() bool {
return false
}
```
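As a quick illustration of how `getURL` resolves a request URL, the standalone sketch below (names such as `baseURL` and `userId` are illustrative, not code from the tool itself) walks through the same two steps: path parameters are substituted with `text/template`, then default and dynamic query parameters are appended with `net/url`.
```go
package main

import (
	"bytes"
	"fmt"
	"net/url"
	"text/template"
)

func main() {
	// Illustrative source/tool configuration values.
	baseURL := "https://api.example.com"
	path := "/users/{{.userId}}/orders"

	// Step 1: replace path params via Go templates, as getURL does.
	templ := template.Must(template.New("url").Parse(path))
	var rendered bytes.Buffer
	if err := templ.Execute(&rendered, map[string]any{"userId": 42}); err != nil {
		panic(err)
	}

	// Step 2: parse the combined URL and attach query parameters.
	parsed, err := url.Parse(baseURL + rendered.String())
	if err != nil {
		panic(err)
	}
	q := parsed.Query()
	q.Add("status", "open") // stands in for a source-level default query param
	q.Add("limit", "10")    // stands in for a tool-level dynamic query param
	parsed.RawQuery = q.Encode()

	fmt.Println(parsed.String()) // https://api.example.com/users/42/orders?limit=10&status=open
}
```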
--------------------------------------------------------------------------------
/internal/sources/dgraph/dgraph.go:
--------------------------------------------------------------------------------
```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package dgraph
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"strings"
"github.com/goccy/go-yaml"
"github.com/googleapis/genai-toolbox/internal/sources"
"go.opentelemetry.io/otel/trace"
)
const SourceKind string = "dgraph"
// validate interface
var _ sources.SourceConfig = Config{}
func init() {
if !sources.Register(SourceKind, newConfig) {
panic(fmt.Sprintf("source kind %q already registered", SourceKind))
}
}
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources.SourceConfig, error) {
actual := Config{Name: name}
if err := decoder.DecodeContext(ctx, &actual); err != nil {
return nil, err
}
return actual, nil
}
// HttpToken stores credentials for making HTTP requests
type HttpToken struct {
UserId string
Password string
AccessJwt string
RefreshToken string
Namespace uint64
}
type DgraphClient struct {
httpClient *http.Client
*HttpToken
baseUrl string
apiKey string
}
type Config struct {
Name string `yaml:"name" validate:"required"`
Kind string `yaml:"kind" validate:"required"`
DgraphUrl string `yaml:"dgraphUrl" validate:"required"`
User string `yaml:"user"`
Password string `yaml:"password"`
Namespace uint64 `yaml:"namespace"`
ApiKey string `yaml:"apiKey"`
}
func (r Config) SourceConfigKind() string {
return SourceKind
}
func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
hc, err := initDgraphHttpClient(ctx, tracer, r)
if err != nil {
return nil, err
}
if err := hc.healthCheck(); err != nil {
return nil, err
}
s := &Source{
Name: r.Name,
Kind: SourceKind,
Client: hc,
}
return s, nil
}
var _ sources.Source = &Source{}
type Source struct {
Name string `yaml:"name"`
Kind string `yaml:"kind"`
Client *DgraphClient `yaml:"client"`
}
func (s *Source) SourceKind() string {
return SourceKind
}
func (s *Source) DgraphClient() *DgraphClient {
return s.Client
}
func initDgraphHttpClient(ctx context.Context, tracer trace.Tracer, r Config) (*DgraphClient, error) {
//nolint:all // Reassigned ctx
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, r.Name)
defer span.End()
if r.DgraphUrl == "" {
return nil, fmt.Errorf("dgraph url should not be empty")
}
hc := &DgraphClient{
httpClient: &http.Client{},
baseUrl: r.DgraphUrl,
HttpToken: &HttpToken{
UserId: r.User,
Namespace: r.Namespace,
Password: r.Password,
},
apiKey: r.ApiKey,
}
if r.User != "" || r.Password != "" {
if err := hc.loginWithCredentials(); err != nil {
return nil, err
}
}
return hc, nil
}
func (hc *DgraphClient) ExecuteQuery(query string, paramsMap map[string]interface{},
isQuery bool, timeout string) ([]byte, error) {
if isQuery {
return hc.postDqlQuery(query, paramsMap, timeout)
} else {
return hc.mutate(query, paramsMap)
}
}
// postDqlQuery sends a DQL query to the Dgraph server with query, parameters, and optional timeout.
// Returns the response body ([]byte) and an error, if any.
func (hc *DgraphClient) postDqlQuery(query string, paramsMap map[string]interface{}, timeout string) ([]byte, error) {
urlParams := url.Values{}
urlParams.Add("timeout", timeout)
url, err := getUrl(hc.baseUrl, "/query", urlParams)
if err != nil {
return nil, err
}
p := struct {
Query string `json:"query"`
Variables map[string]interface{} `json:"variables"`
}{
Query: query,
Variables: paramsMap,
}
body, err := json.Marshal(p)
if err != nil {
return nil, fmt.Errorf("error marshlling json: %v", err)
}
req, err := http.NewRequest(http.MethodPost, url, bytes.NewBuffer(body))
if err != nil {
return nil, fmt.Errorf("error building req for endpoint [%v] :%v", url, err)
}
req.Header.Add("Content-Type", "application/json")
return hc.doReq(req)
}
// mutate sends an RDF mutation to the Dgraph server with "commitNow: true", embedding parameters.
// Returns the server's response as a byte slice or an error if the mutation fails.
func (hc *DgraphClient) mutate(mutation string, paramsMap map[string]interface{}) ([]byte, error) {
mu := embedParamsIntoMutation(mutation, paramsMap)
params := url.Values{}
params.Add("commitNow", "true")
url, err := getUrl(hc.baseUrl, "/mutate", params)
if err != nil {
return nil, err
}
req, err := http.NewRequest(http.MethodPost, url, bytes.NewBufferString(mu))
if err != nil {
return nil, fmt.Errorf("error building req for endpoint [%v] :%v", url, err)
}
req.Header.Add("Content-Type", "application/rdf")
return hc.doReq(req)
}
func (hc *DgraphClient) doReq(req *http.Request) ([]byte, error) {
if hc.HttpToken != nil {
req.Header.Add("X-Dgraph-AccessToken", hc.AccessJwt)
}
if hc.apiKey != "" {
req.Header.Set("Dg-Auth", hc.apiKey)
}
resp, err := hc.httpClient.Do(req)
if err != nil && !strings.Contains(err.Error(), "Token is expired") {
return nil, fmt.Errorf("error performing HTTP request: %w", err)
} else if err != nil && strings.Contains(err.Error(), "Token is expired") {
if errLogin := hc.loginWithToken(); errLogin != nil {
return nil, errLogin
}
if hc.HttpToken != nil {
req.Header.Add("X-Dgraph-AccessToken", hc.AccessJwt)
}
resp, err = hc.httpClient.Do(req)
if err != nil {
return nil, err
}
}
defer resp.Body.Close()
respBody, err := io.ReadAll(resp.Body)
if err != nil {
return nil, fmt.Errorf("error reading response body: url: [%v], err: [%v]", req.URL, err)
}
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("got non 200 resp: %v", string(respBody))
}
return respBody, nil
}
func (hc *DgraphClient) loginWithCredentials() error {
credentials := map[string]interface{}{
"userid": hc.UserId,
"password": hc.Password,
"namespace": hc.Namespace,
}
return hc.doLogin(credentials)
}
func (hc *DgraphClient) loginWithToken() error {
credentials := map[string]interface{}{
"refreshJWT": hc.RefreshToken,
"namespace": hc.Namespace,
}
return hc.doLogin(credentials)
}
func (hc *DgraphClient) doLogin(creds map[string]interface{}) error {
url, err := getUrl(hc.baseUrl, "/login", nil)
if err != nil {
return err
}
payload, err := json.Marshal(creds)
if err != nil {
return fmt.Errorf("failed to marshal credentials: %v", err)
}
req, err := http.NewRequest(http.MethodPost, url, bytes.NewBuffer(payload))
if err != nil {
return fmt.Errorf("error building req for endpoint [%v] : %v", url, err)
}
req.Header.Add("Content-Type", "application/json")
if hc.apiKey != "" {
req.Header.Set("Dg-Auth", hc.apiKey)
}
resp, err := hc.doReq(req)
if err != nil {
if strings.Contains(err.Error(), "Token is expired") &&
!strings.Contains(err.Error(), "unable to authenticate the refresh token") {
return hc.loginWithToken()
}
return err
}
if err := CheckError(resp); err != nil {
return err
}
var r struct {
Data struct {
AccessJWT string `json:"accessJWT"`
RefreshJWT string `json:"refreshJWT"`
} `json:"data"`
}
if err := json.Unmarshal(resp, &r); err != nil {
return fmt.Errorf("failed to unmarshal response: %v", err)
}
if r.Data.AccessJWT == "" {
return fmt.Errorf("no access JWT found in the response")
}
if r.Data.RefreshJWT == "" {
return fmt.Errorf("no refresh JWT found in the response")
}
hc.AccessJwt = r.Data.AccessJWT
hc.RefreshToken = r.Data.RefreshJWT
return nil
}
func (hc *DgraphClient) healthCheck() error {
url, err := getUrl(hc.baseUrl, "/health", nil)
if err != nil {
return err
}
req, err := http.NewRequest(http.MethodGet, url, nil)
if err != nil {
return fmt.Errorf("error creating request: %w", err)
}
resp, err := hc.httpClient.Do(req)
if err != nil {
return fmt.Errorf("error performing request: %w", err)
}
defer resp.Body.Close()
data, err := io.ReadAll(resp.Body)
if err != nil {
return err
}
var result []struct {
Instance string `json:"instance"`
Address string `json:"address"`
Status string `json:"status"`
}
// Unmarshal response into the struct
if err := json.Unmarshal(data, &result); err != nil {
return fmt.Errorf("failed to unmarshal json: %v", err)
}
if len(result) == 0 {
return fmt.Errorf("health info should not empty for: %v", url)
}
var unhealthyErr error
for _, info := range result {
if info.Status != "healthy" {
unhealthyErr = fmt.Errorf("dgraph instance [%v] is not in healthy state, address is %v",
info.Instance, info.Address)
} else {
return nil
}
}
return unhealthyErr
}
func getUrl(baseUrl, resource string, params url.Values) (string, error) {
u, err := url.ParseRequestURI(baseUrl)
if err != nil {
return "", fmt.Errorf("failed to get url %v", err)
}
u.Path = resource
u.RawQuery = params.Encode()
return u.String(), nil
}
func CheckError(resp []byte) error {
var errResp struct {
Errors []struct {
Message string `json:"message"`
} `json:"errors"`
}
if err := json.Unmarshal(resp, &errResp); err != nil {
return fmt.Errorf("failed to unmarshal json: %v", err)
}
if len(errResp.Errors) > 0 {
return fmt.Errorf("error : %v", errResp.Errors)
}
return nil
}
func embedParamsIntoMutation(mutation string, paramsMap map[string]interface{}) string {
for key, value := range paramsMap {
mutation = strings.ReplaceAll(mutation, key, fmt.Sprintf(`"%v"`, value))
}
return mutation
}
```
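`embedParamsIntoMutation` is a plain keyword substitution: each parameter name is replaced everywhere it appears in the mutation text with its quoted value. The standalone sketch below reproduces that behavior with made-up placeholder names (`$name`, `$age`); because it is a simple `strings.ReplaceAll`, a key that is a prefix of another key would also be rewritten, so distinct placeholder names matter.
```go
package main

import (
	"fmt"
	"strings"
)

// embedParams mirrors embedParamsIntoMutation above: every occurrence of a
// parameter key in the mutation text is replaced by its value, double-quoted.
func embedParams(mutation string, params map[string]interface{}) string {
	for key, value := range params {
		mutation = strings.ReplaceAll(mutation, key, fmt.Sprintf(`"%v"`, value))
	}
	return mutation
}

func main() {
	// Hypothetical RDF mutation with placeholder keys.
	mu := `{ set { _:user <name> $name . _:user <age> $age . } }`
	fmt.Println(embedParams(mu, map[string]interface{}{"$name": "Alice", "$age": 30}))
	// Output: { set { _:user <name> "Alice" . _:user <age> "30" . } }
}
```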
--------------------------------------------------------------------------------
/tests/option.go:
--------------------------------------------------------------------------------
```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package tests
/* Configurations for RunToolInvokeTest() */
// InvokeTestConfig represents the various configuration options for RunToolInvokeTest()
type InvokeTestConfig struct {
myToolId3NameAliceWant string
myToolById4Want string
nullWant string
myArrayToolWant string
supportSelect1Want bool
supportOptionalNullParam bool
supportArrayParam bool
supportClientAuth bool
supportSelect1Auth bool
}
type InvokeTestOption func(*InvokeTestConfig)
// WithMyToolId3NameAliceWant represents the response value for my-tool with id=3 and name=Alice.
// e.g. tests.RunToolInvokeTest(t, select1Want, tests.WithMyToolId3NameAliceWant("custom"))
func WithMyToolId3NameAliceWant(s string) InvokeTestOption {
return func(c *InvokeTestConfig) {
c.myToolId3NameAliceWant = s
}
}
// WithMyArrayToolWant represents the response value for my-array-tool.
// e.g. tests.RunToolInvokeTest(t, select1Want, tests.WithMyArrayToolWant("custom"))
func WithMyArrayToolWant(s string) InvokeTestOption {
return func(c *InvokeTestConfig) {
c.myArrayToolWant = s
}
}
// WithMyToolById4Want represents the response value for my-tool-by-id with id=4.
// This response includes a null value column.
// e.g. tests.RunToolInvokeTest(t, select1Want, tests.WithMyToolById4Want("custom"))
func WithMyToolById4Want(s string) InvokeTestOption {
return func(c *InvokeTestConfig) {
c.myToolById4Want = s
}
}
// WithNullWant represents a response value of null string.
// e.g. tests.RunToolInvokeTest(t, select1Want, tests.WithNullWant("custom"))
func WithNullWant(s string) InvokeTestOption {
return func(c *InvokeTestConfig) {
c.nullWant = s
}
}
// DisableOptionalNullParamTest disables tests for optional null parameters.
// e.g. tests.RunToolInvokeTest(t, select1Want, tests.DisableOptionalNullParamTest())
func DisableOptionalNullParamTest() InvokeTestOption {
return func(c *InvokeTestConfig) {
c.supportOptionalNullParam = false
}
}
// DisableArrayTest disables tests for sources that do not support array.
// e.g. tests.RunToolInvokeTest(t, select1Want, tests.DisableArrayTest())
func DisableArrayTest() InvokeTestOption {
return func(c *InvokeTestConfig) {
c.supportArrayParam = false
}
}
// DisableSelect1Test disables tests for sources that do not support SELECT 1 query.
// e.g. tests.RunToolInvokeTest(t, "", tests.DisableSelect1Test())
func DisableSelect1Test() InvokeTestOption {
return func(c *InvokeTestConfig) {
c.supportSelect1Want = false
}
}
// DisableSelect1AuthTest disables auth tests for sources that do not support SELECT 1 query.
// e.g. tests.RunToolInvokeTest(t, "", tests.DisableSelect1AuthTest())
func DisableSelect1AuthTest() InvokeTestOption {
return func(c *InvokeTestConfig) {
c.supportSelect1Auth = false
}
}
// EnableClientAuthTest runs the client authorization tests.
// Only enable it if your source supports the `useClientOAuth` configuration.
// Currently, this should only be used with the BigQuery tests.
func EnableClientAuthTest() InvokeTestOption {
return func(c *InvokeTestConfig) {
c.supportClientAuth = true
}
}
/* Configurations for RunMCPToolCallMethod() */
// MCPTestConfig represents the various configuration options for mcp tool call tests.
type MCPTestConfig struct {
myToolId3NameAliceWant string
supportClientAuth bool
supportSelect1Auth bool
}
type McpTestOption func(*MCPTestConfig)
// WithMcpMyToolId3NameAliceWant represents the response value for my-tool with id=3 and name=Alice.
// e.g. tests.RunMCPToolCallMethod(t, mcpMyFailToolWant, tests.WithMcpMyToolId3NameAliceWant("custom"))
func WithMcpMyToolId3NameAliceWant(s string) McpTestOption {
return func(c *MCPTestConfig) {
c.myToolId3NameAliceWant = s
}
}
// EnableMcpClientAuthTest runs the client authorization tests.
// Only enable it if your source supports the `useClientOAuth` configuration.
// Currently, this should only be used with the BigQuery tests.
func EnableMcpClientAuthTest() McpTestOption {
return func(c *MCPTestConfig) {
c.supportClientAuth = true
}
}
// DisableMcpSelect1AuthTest disables the auth tool tests which use select 1.
func DisableMcpSelect1AuthTest() McpTestOption {
return func(c *MCPTestConfig) {
c.supportSelect1Auth = false
}
}
/* Configurations for RunExecuteSqlToolInvokeTest() */
// ExecuteSqlTestConfig represents the various configuration options for RunExecuteSqlToolInvokeTest()
type ExecuteSqlTestConfig struct {
select1Statement string
}
type ExecuteSqlOption func(*ExecuteSqlTestConfig)
// WithSelect1Statement represents the database's statement for `SELECT 1`.
// e.g. tests.RunExecuteSqlToolInvokeTest(t, createTableStatement, select1Want, tests.WithSelect1Statement("custom"))
func WithSelect1Statement(s string) ExecuteSqlOption {
return func(c *ExecuteSqlTestConfig) {
c.select1Statement = s
}
}
/* Configurations for RunToolInvokeWithTemplateParameters() */
// TemplateParameterTestConfig represents the various configuration options for template parameter tests.
type TemplateParameterTestConfig struct {
ddlWant string
selectAllWant string
selectId1Want string
selectNameWant string
selectEmptyWant string
insert1Want string
nameFieldArray string
nameColFilter string
createColArray string
supportDdl bool
supportInsert bool
supportSelectFields bool
}
type TemplateParamOption func(*TemplateParameterTestConfig)
// WithDdlWant represents the response value of ddl statements.
// e.g. tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam, tests.WithDdlWant("custom"))
func WithDdlWant(s string) TemplateParamOption {
return func(c *TemplateParameterTestConfig) {
c.ddlWant = s
}
}
// WithSelectAllWant represents the response value of select-templateParams-tool.
// e.g. tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam, tests.WithSelectAllWant("custom"))
func WithSelectAllWant(s string) TemplateParamOption {
return func(c *TemplateParameterTestConfig) {
c.selectAllWant = s
}
}
// WithTmplSelectId1Want represents the response value of select-templateParams-combined-tool with id=1.
// e.g. tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam, tests.WithTmplSelectId1Want("custom"))
func WithTmplSelectId1Want(s string) TemplateParamOption {
return func(c *TemplateParameterTestConfig) {
c.selectId1Want = s
}
}
// WithTmplSelectNameWant represents the response value of select-filter-templateParams-combined-tool with name.
// e.g. tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam, tests.WithTmplSelectNameWant("custom"))
func WithTmplSelectNameWant(s string) TemplateParamOption {
return func(c *TemplateParameterTestConfig) {
c.selectNameWant = s
}
}
// WithSelectEmptyWant represents the response value of select-templateParams-combined-tool with no results.
// e.g. tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam, tests.WithSelectEmptyWant("custom"))
func WithSelectEmptyWant(s string) TemplateParamOption {
return func(c *TemplateParameterTestConfig) {
c.selectEmptyWant = s
}
}
// WithInsert1Want represents the response value of insert-table-templateParams-tool.
// e.g. tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam, tests.WithInsert1Want("custom"))
func WithInsert1Want(s string) TemplateParamOption {
return func(c *TemplateParameterTestConfig) {
c.insert1Want = s
}
}
// WithNameFieldArray represents fields array parameter for select-fields-templateParams-tool.
// e.g. tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam, tests.WithNameFieldArray("custom"))
func WithNameFieldArray(s string) TemplateParamOption {
return func(c *TemplateParameterTestConfig) {
c.nameFieldArray = s
}
}
// WithNameColFilter represents the columnFilter parameter for select-filter-templateParams-combined-tool.
// e.g. tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam, tests.WithNameColFilter("custom"))
func WithNameColFilter(s string) TemplateParamOption {
return func(c *TemplateParameterTestConfig) {
c.nameColFilter = s
}
}
// WithCreateColArray represents the columns array parameter for create-table-templateParams-tool.
// e.g. tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam, tests.WithCreateColArray("custom"))
func WithCreateColArray(s string) TemplateParamOption {
return func(c *TemplateParameterTestConfig) {
c.createColArray = s
}
}
// DisableDdlTest disables tests for ddl statements for sources that do not support ddl.
// e.g. tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam, tests.DisableDdlTest())
func DisableDdlTest() TemplateParamOption {
return func(c *TemplateParameterTestConfig) {
c.supportDdl = false
}
}
// DisableInsertTest disables tests of insert statements for sources that do not support insert.
// e.g. tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam, tests.DisableInsertTest())
func DisableInsertTest() TemplateParamOption {
return func(c *TemplateParameterTestConfig) {
c.supportInsert = false
}
}
// DisableSelectFilterTest disables the select-fields-templateParams-tool test.
// e.g. tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam, tests.DisableSelectFilterTest())
func DisableSelectFilterTest() TemplateParamOption {
return func(c *TemplateParameterTestConfig) {
c.supportSelectFields = false
}
}
```
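These option constructors all follow Go's functional-options pattern: the test harness builds a config with its defaults, then applies each option in order. A minimal self-contained sketch of that pattern is shown below; the struct, option names, and default values are hypothetical and do not reflect the real defaults used by RunToolInvokeTest.
```go
package main

import "fmt"

// invokeConfig stands in for InvokeTestConfig; its fields and defaults are
// illustrative only.
type invokeConfig struct {
	select1Want    string
	supportSelect1 bool
	supportArrays  bool
}

type option func(*invokeConfig)

// Options mutate the config, mirroring WithMyToolId3NameAliceWant / DisableArrayTest.
func withSelect1Want(s string) option { return func(c *invokeConfig) { c.select1Want = s } }
func disableArrayTest() option        { return func(c *invokeConfig) { c.supportArrays = false } }

// newInvokeConfig sets defaults first, then applies each option in order.
func newInvokeConfig(opts ...option) invokeConfig {
	cfg := invokeConfig{select1Want: "[{\"1\":1}]", supportSelect1: true, supportArrays: true}
	for _, o := range opts {
		o(&cfg)
	}
	return cfg
}

func main() {
	cfg := newInvokeConfig(withSelect1Want("[{\"?column?\":1}]"), disableArrayTest())
	fmt.Printf("%+v\n", cfg)
}
```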
--------------------------------------------------------------------------------
/tests/cassandra/cassandra_integration_test.go:
--------------------------------------------------------------------------------
```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cassandra
import (
"context"
"fmt"
"log"
"os"
"regexp"
"strings"
"testing"
"time"
"github.com/gocql/gocql"
"github.com/google/uuid"
"github.com/googleapis/genai-toolbox/internal/testutils"
"github.com/googleapis/genai-toolbox/tests"
)
var (
CassandraSourceKind = "cassandra"
CassandraToolKind = "cassandra-cql"
Hosts = os.Getenv("CASSANDRA_HOST")
Keyspace = "example_keyspace"
Username = os.Getenv("CASSANDRA_USER")
Password = os.Getenv("CASSANDRA_PASS")
)
func getCassandraVars(t *testing.T) map[string]any {
switch "" {
case Hosts:
t.Fatal("'Hosts' not set")
case Username:
t.Fatal("'Username' not set")
case Password:
t.Fatal("'Password' not set")
}
return map[string]any{
"kind": CassandraSourceKind,
"hosts": strings.Split(Hosts, ","),
"keyspace": Keyspace,
"username": Username,
"password": Password,
}
}
func initCassandraSession() (*gocql.Session, error) {
hostStrings := strings.Split(Hosts, ",")
var hosts []string
for _, h := range hostStrings {
trimmedHost := strings.TrimSpace(h)
if trimmedHost != "" {
hosts = append(hosts, trimmedHost)
}
}
if len(hosts) == 0 {
return nil, fmt.Errorf("no valid hosts found in CASSANDRA_HOSTS env var")
}
// Configure cluster connection
cluster := gocql.NewCluster(hosts...)
cluster.Consistency = gocql.Quorum
cluster.ProtoVersion = 4
cluster.DisableInitialHostLookup = true
cluster.ConnectTimeout = 10 * time.Second
cluster.NumConns = 2
cluster.Authenticator = gocql.PasswordAuthenticator{
Username: Username,
Password: Password,
}
cluster.RetryPolicy = &gocql.ExponentialBackoffRetryPolicy{
NumRetries: 3,
Min: 200 * time.Millisecond,
Max: 2 * time.Second,
}
// Create session
session, err := cluster.CreateSession()
if err != nil {
return nil, fmt.Errorf("Failed to create session: %v", err)
}
// Create keyspace
err = session.Query(fmt.Sprintf(`
CREATE KEYSPACE IF NOT EXISTS %s
WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}
`, Keyspace)).Exec()
if err != nil {
return nil, fmt.Errorf("Failed to create keyspace: %v", err)
}
return session, nil
}
func initTable(tableName string, session *gocql.Session) error {
// Create table with additional columns
err := session.Query(fmt.Sprintf(`
CREATE TABLE IF NOT EXISTS %s.%s (
id int PRIMARY KEY,
name text,
email text,
age int,
is_active boolean,
created_at timestamp
)
`, Keyspace, tableName)).Exec()
if err != nil {
return fmt.Errorf("Failed to create table: %v", err)
}
// Use fixed timestamps for reproducibility
fixedTime, _ := time.Parse(time.RFC3339, "2025-07-25T12:00:00Z")
dayAgo := fixedTime.Add(-24 * time.Hour)
twelveHoursAgo := fixedTime.Add(-12 * time.Hour)
// Insert minimal diverse data with fixed time.Time for timestamps
err = session.Query(fmt.Sprintf(`
INSERT INTO %s.%s (id, name,email, age, is_active, created_at)
VALUES (?, ?, ?, ?, ?, ?)`, Keyspace, tableName),
3, "Alice", tests.ServiceAccountEmail, 25, true, dayAgo,
).Exec()
if err != nil {
return fmt.Errorf("Failed to insert user: %v", err)
}
err = session.Query(fmt.Sprintf(`
INSERT INTO %s.%s (id, name,email, age, is_active, created_at)
VALUES (?, ?, ?, ?, ?, ?)`, Keyspace, tableName),
2, "Alex", "[email protected]", 30, false, twelveHoursAgo,
).Exec()
if err != nil {
return fmt.Errorf("Failed to insert user: %v", err)
}
err = session.Query(fmt.Sprintf(`
INSERT INTO %s.%s (id, name,email, age, is_active, created_at)
VALUES (?, ?, ?, ?, ?, ?)`, Keyspace, tableName),
1, "Sid", "[email protected]", 10, true, fixedTime,
).Exec()
if err != nil {
return fmt.Errorf("Failed to insert user: %v", err)
}
err = session.Query(fmt.Sprintf(`
INSERT INTO %s.%s (id, name,email, age, is_active, created_at)
VALUES (?, ?, ?, ?, ?, ?)`, Keyspace, tableName),
4, nil, "[email protected]", 40, false, fixedTime,
).Exec()
if err != nil {
return fmt.Errorf("Failed to insert user: %v", err)
}
return nil
}
func dropTable(session *gocql.Session, tableName string) {
err := session.Query(fmt.Sprintf("drop table %s.%s", Keyspace, tableName)).Exec()
if err != nil {
log.Printf("Failed to drop table %s: %v", tableName, err)
}
}
func TestCassandra(t *testing.T) {
session, err := initCassandraSession()
if err != nil {
t.Fatal(err)
}
defer session.Close()
sourceConfig := getCassandraVars(t)
ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
defer cancel()
var args []string
paramTableName := "param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
tableNameAuth := "auth_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
tableNameTemplateParam := "template_param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
err = initTable(paramTableName, session)
if err != nil {
t.Fatal(err)
}
defer dropTable(session, paramTableName)
err = initTable(tableNameAuth, session)
if err != nil {
t.Fatal(err)
}
defer dropTable(session, tableNameAuth)
err = initTable(tableNameTemplateParam, session)
if err != nil {
t.Fatal(err)
}
defer dropTable(session, tableNameTemplateParam)
paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt := createParamToolInfo(paramTableName)
_, _, authToolStmt := getCassandraAuthToolInfo(tableNameAuth)
toolsFile := tests.GetToolsConfig(sourceConfig, CassandraToolKind, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, authToolStmt)
tmplSelectCombined, tmplSelectFilterCombined := getCassandraTmplToolInfo()
tmpSelectAll := "SELECT * FROM {{.tableName}} where id = 1"
toolsFile = tests.AddTemplateParamConfig(t, toolsFile, CassandraToolKind, tmplSelectCombined, tmplSelectFilterCombined, tmpSelectAll)
cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
if err != nil {
t.Fatalf("command initialization returned an error: %s", err)
}
defer cleanup()
waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
defer cancel()
out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
if err != nil {
t.Logf("toolbox command logs: \n%s", out)
t.Fatalf("toolbox didn't start successfully: %s", err)
}
selectIdNameWant, selectIdNullWant, selectArrayParamWant, mcpMyFailToolWant, mcpSelect1Want, mcpMyToolIdWant := getCassandraWants()
selectAllWant, selectIdWant, selectNameWant := getCassandraTmplWants()
tests.RunToolGetTest(t)
tests.RunToolInvokeTest(t, "", tests.DisableSelect1Test(),
tests.DisableOptionalNullParamTest(),
tests.WithMyToolId3NameAliceWant(selectIdNameWant),
tests.WithMyToolById4Want(selectIdNullWant),
tests.WithMyArrayToolWant(selectArrayParamWant),
tests.DisableSelect1AuthTest())
tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam,
tests.DisableSelectFilterTest(),
tests.WithSelectAllWant(selectAllWant),
tests.DisableDdlTest(), tests.DisableInsertTest(), tests.WithTmplSelectId1Want(selectIdWant), tests.WithTmplSelectNameWant(selectNameWant))
tests.RunMCPToolCallMethod(t, mcpMyFailToolWant, mcpSelect1Want,
tests.WithMcpMyToolId3NameAliceWant(mcpMyToolIdWant),
tests.DisableMcpSelect1AuthTest())
}
func createParamToolInfo(tableName string) (string, string, string, string) {
toolStatement := fmt.Sprintf("SELECT id, name FROM %s WHERE id = ? AND name = ? ALLOW FILTERING;", tableName)
idParamStatement := fmt.Sprintf("SELECT id,name FROM %s WHERE id = ?;", tableName)
nameParamStatement := fmt.Sprintf("SELECT id, name FROM %s WHERE name = ? ALLOW FILTERING;", tableName)
arrayToolStatement := fmt.Sprintf("SELECT id, name FROM %s WHERE id IN ? AND name IN ? ALLOW FILTERING;", tableName)
return toolStatement, idParamStatement, nameParamStatement, arrayToolStatement
}
func getCassandraAuthToolInfo(tableName string) (string, string, string) {
createStatement := fmt.Sprintf("CREATE TABLE %s (id UUID PRIMARY KEY, name TEXT, email TEXT);", tableName)
insertStatement := fmt.Sprintf("INSERT INTO %s (id, name, email) VALUES (uuid(), ?, ?), (uuid(), ?, ?);", tableName)
toolStatement := fmt.Sprintf("SELECT name FROM %s WHERE email = ? ALLOW FILTERING;", tableName)
return createStatement, insertStatement, toolStatement
}
func getCassandraTmplToolInfo() (string, string) {
selectAllTemplateStmt := "SELECT age, id, name FROM {{.tableName}} where id = ?;"
selectByIdTemplateStmt := "SELECT id, name FROM {{.tableName}} WHERE name = ? ALLOW FILTERING;"
return selectAllTemplateStmt, selectByIdTemplateStmt
}
func getCassandraWants() (string, string, string, string, string, string) {
selectIdNameWant := "[{\"id\":3,\"name\":\"Alice\"}]"
selectIdNullWant := "[{\"id\":4,\"name\":\"\"}]"
selectArrayParamWant := "[{\"id\":1,\"name\":\"Sid\"},{\"id\":3,\"name\":\"Alice\"}]"
mcpMyFailToolWant := "{\"jsonrpc\":\"2.0\",\"id\":\"invoke-fail-tool\",\"result\":{\"content\":[{\"type\":\"text\",\"text\":\"unable to parse rows: line 1:0 no viable alternative at input 'SELEC' ([SELEC]...)\"}],\"isError\":true}}"
mcpMyToolIdWant := "{\"jsonrpc\":\"2.0\",\"id\":\"my-tool\",\"result\":{\"content\":[{\"type\":\"text\",\"text\":\"[{\\\"id\\\":3,\\\"name\\\":\\\"Alice\\\"}]\"}]}}"
return selectIdNameWant, selectIdNullWant, selectArrayParamWant, mcpMyFailToolWant, "nil", mcpMyToolIdWant
}
func getCassandraTmplWants() (string, string, string) {
selectAllWant := "[{\"age\":10,\"created_at\":\"2025-07-25T12:00:00Z\",\"email\":\"[email protected]\",\"id\":1,\"is_active\":true,\"name\":\"Sid\"}]"
selectIdWant := "[{\"age\":10,\"id\":1,\"name\":\"Sid\"}]"
selectNameWant := "[{\"id\":2,\"name\":\"Alex\"}]"
return selectAllWant, selectIdWant, selectNameWant
}
```