This is page 58 of 76. Use http://codebase.md/googleapis/genai-toolbox?lines=true&page={x} to view the full context.
# Directory Structure
```
├── .ci
│ ├── continuous.release.cloudbuild.yaml
│ ├── generate_release_table.sh
│ ├── integration.cloudbuild.yaml
│ ├── quickstart_test
│ │ ├── go.integration.cloudbuild.yaml
│ │ ├── js.integration.cloudbuild.yaml
│ │ ├── py.integration.cloudbuild.yaml
│ │ ├── run_go_tests.sh
│ │ ├── run_js_tests.sh
│ │ ├── run_py_tests.sh
│ │ └── setup_hotels_sample.sql
│ ├── test_prompts_with_coverage.sh
│ ├── test_with_coverage.sh
│ └── versioned.release.cloudbuild.yaml
├── .gemini
│ └── config.yaml
├── .github
│ ├── auto-label.yaml
│ ├── blunderbuss.yml
│ ├── CODEOWNERS
│ ├── header-checker-lint.yml
│ ├── ISSUE_TEMPLATE
│ │ ├── bug_report.yml
│ │ ├── config.yml
│ │ ├── feature_request.yml
│ │ └── question.yml
│ ├── label-sync.yml
│ ├── labels.yaml
│ ├── PULL_REQUEST_TEMPLATE.md
│ ├── release-please.yml
│ ├── renovate.json5
│ ├── sync-repo-settings.yaml
│ ├── trusted-contribution.yml
│ └── workflows
│ ├── cloud_build_failure_reporter.yml
│ ├── deploy_dev_docs.yaml
│ ├── deploy_previous_version_docs.yaml
│ ├── deploy_versioned_docs.yaml
│ ├── docs_preview_clean.yaml
│ ├── docs_preview_deploy.yaml
│ ├── link_checker_workflow.yaml
│ ├── lint.yaml
│ ├── publish-mcp.yml
│ ├── schedule_reporter.yml
│ ├── sync-labels.yaml
│ └── tests.yaml
├── .gitignore
├── .gitmodules
├── .golangci.yaml
├── .hugo
│ ├── archetypes
│ │ └── default.md
│ ├── assets
│ │ ├── icons
│ │ │ └── logo.svg
│ │ └── scss
│ │ ├── _styles_project.scss
│ │ └── _variables_project.scss
│ ├── go.mod
│ ├── go.sum
│ ├── hugo.toml
│ ├── layouts
│ │ ├── _default
│ │ │ └── home.releases.releases
│ │ ├── index.llms-full.txt
│ │ ├── index.llms.txt
│ │ ├── partials
│ │ │ ├── hooks
│ │ │ │ └── head-end.html
│ │ │ ├── navbar-version-selector.html
│ │ │ ├── page-meta-links.html
│ │ │ └── td
│ │ │ └── render-heading.html
│ │ ├── robot.txt
│ │ └── shortcodes
│ │ ├── include.html
│ │ ├── ipynb.html
│ │ └── regionInclude.html
│ ├── package-lock.json
│ ├── package.json
│ └── static
│ ├── favicons
│ │ ├── android-chrome-192x192.png
│ │ ├── android-chrome-512x512.png
│ │ ├── apple-touch-icon.png
│ │ ├── favicon-16x16.png
│ │ ├── favicon-32x32.png
│ │ └── favicon.ico
│ └── js
│ └── w3.js
├── .lycheeignore
├── CHANGELOG.md
├── cmd
│ ├── options_test.go
│ ├── options.go
│ ├── root_test.go
│ ├── root.go
│ └── version.txt
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── DEVELOPER.md
├── Dockerfile
├── docs
│ ├── ALLOYDBADMIN_README.md
│ ├── ALLOYDBPG_README.md
│ ├── BIGQUERY_README.md
│ ├── CLOUDSQLMSSQL_README.md
│ ├── CLOUDSQLMSSQLADMIN_README.md
│ ├── CLOUDSQLMYSQL_README.md
│ ├── CLOUDSQLMYSQLADMIN_README.md
│ ├── CLOUDSQLPG_README.md
│ ├── CLOUDSQLPGADMIN_README.md
│ ├── DATAPLEX_README.md
│ ├── en
│ │ ├── _index.md
│ │ ├── about
│ │ │ ├── _index.md
│ │ │ └── faq.md
│ │ ├── blogs
│ │ │ └── _index.md
│ │ ├── concepts
│ │ │ ├── _index.md
│ │ │ └── telemetry
│ │ │ ├── index.md
│ │ │ ├── telemetry_flow.png
│ │ │ └── telemetry_traces.png
│ │ ├── getting-started
│ │ │ ├── _index.md
│ │ │ ├── colab_quickstart.ipynb
│ │ │ ├── configure.md
│ │ │ ├── introduction
│ │ │ │ ├── _index.md
│ │ │ │ └── architecture.png
│ │ │ ├── local_quickstart_go.md
│ │ │ ├── local_quickstart_js.md
│ │ │ ├── local_quickstart.md
│ │ │ ├── mcp_quickstart
│ │ │ │ ├── _index.md
│ │ │ │ ├── inspector_tools.png
│ │ │ │ └── inspector.png
│ │ │ ├── prompts_quickstart_gemini_cli.md
│ │ │ └── quickstart
│ │ │ ├── go
│ │ │ │ ├── adkgo
│ │ │ │ │ ├── go.mod
│ │ │ │ │ ├── go.sum
│ │ │ │ │ └── quickstart.go
│ │ │ │ ├── genAI
│ │ │ │ │ ├── go.mod
│ │ │ │ │ ├── go.sum
│ │ │ │ │ └── quickstart.go
│ │ │ │ ├── genkit
│ │ │ │ │ ├── go.mod
│ │ │ │ │ ├── go.sum
│ │ │ │ │ └── quickstart.go
│ │ │ │ ├── langchain
│ │ │ │ │ ├── go.mod
│ │ │ │ │ ├── go.sum
│ │ │ │ │ └── quickstart.go
│ │ │ │ ├── openAI
│ │ │ │ │ ├── go.mod
│ │ │ │ │ ├── go.sum
│ │ │ │ │ └── quickstart.go
│ │ │ │ └── quickstart_test.go
│ │ │ ├── golden.txt
│ │ │ ├── js
│ │ │ │ ├── adk
│ │ │ │ │ ├── package-lock.json
│ │ │ │ │ ├── package.json
│ │ │ │ │ └── quickstart.js
│ │ │ │ ├── genAI
│ │ │ │ │ ├── package-lock.json
│ │ │ │ │ ├── package.json
│ │ │ │ │ └── quickstart.js
│ │ │ │ ├── genkit
│ │ │ │ │ ├── package-lock.json
│ │ │ │ │ ├── package.json
│ │ │ │ │ └── quickstart.js
│ │ │ │ ├── langchain
│ │ │ │ │ ├── package-lock.json
│ │ │ │ │ ├── package.json
│ │ │ │ │ └── quickstart.js
│ │ │ │ ├── llamaindex
│ │ │ │ │ ├── package-lock.json
│ │ │ │ │ ├── package.json
│ │ │ │ │ └── quickstart.js
│ │ │ │ └── quickstart.test.js
│ │ │ ├── python
│ │ │ │ ├── __init__.py
│ │ │ │ ├── adk
│ │ │ │ │ ├── quickstart.py
│ │ │ │ │ └── requirements.txt
│ │ │ │ ├── core
│ │ │ │ │ ├── quickstart.py
│ │ │ │ │ └── requirements.txt
│ │ │ │ ├── langchain
│ │ │ │ │ ├── quickstart.py
│ │ │ │ │ └── requirements.txt
│ │ │ │ ├── llamaindex
│ │ │ │ │ ├── quickstart.py
│ │ │ │ │ └── requirements.txt
│ │ │ │ └── quickstart_test.py
│ │ │ └── shared
│ │ │ ├── cloud_setup.md
│ │ │ ├── configure_toolbox.md
│ │ │ └── database_setup.md
│ │ ├── how-to
│ │ │ ├── _index.md
│ │ │ ├── connect_via_geminicli.md
│ │ │ ├── connect_via_mcp.md
│ │ │ ├── connect-ide
│ │ │ │ ├── _index.md
│ │ │ │ ├── alloydb_pg_admin_mcp.md
│ │ │ │ ├── alloydb_pg_mcp.md
│ │ │ │ ├── bigquery_mcp.md
│ │ │ │ ├── cloud_sql_mssql_admin_mcp.md
│ │ │ │ ├── cloud_sql_mssql_mcp.md
│ │ │ │ ├── cloud_sql_mysql_admin_mcp.md
│ │ │ │ ├── cloud_sql_mysql_mcp.md
│ │ │ │ ├── cloud_sql_pg_admin_mcp.md
│ │ │ │ ├── cloud_sql_pg_mcp.md
│ │ │ │ ├── firestore_mcp.md
│ │ │ │ ├── looker_mcp.md
│ │ │ │ ├── mssql_mcp.md
│ │ │ │ ├── mysql_mcp.md
│ │ │ │ ├── neo4j_mcp.md
│ │ │ │ ├── postgres_mcp.md
│ │ │ │ ├── spanner_mcp.md
│ │ │ │ └── sqlite_mcp.md
│ │ │ ├── deploy_adk_agent.md
│ │ │ ├── deploy_docker.md
│ │ │ ├── deploy_gke.md
│ │ │ ├── deploy_toolbox.md
│ │ │ ├── export_telemetry.md
│ │ │ └── toolbox-ui
│ │ │ ├── edit-headers.gif
│ │ │ ├── edit-headers.png
│ │ │ ├── index.md
│ │ │ ├── optional-param-checked.png
│ │ │ ├── optional-param-unchecked.png
│ │ │ ├── run-tool.gif
│ │ │ ├── tools.png
│ │ │ └── toolsets.png
│ │ ├── reference
│ │ │ ├── _index.md
│ │ │ ├── cli.md
│ │ │ └── prebuilt-tools.md
│ │ ├── resources
│ │ │ ├── _index.md
│ │ │ ├── authServices
│ │ │ │ ├── _index.md
│ │ │ │ └── google.md
│ │ │ ├── embeddingModels
│ │ │ │ ├── _index.md
│ │ │ │ └── gemini.md
│ │ │ ├── prompts
│ │ │ │ ├── _index.md
│ │ │ │ └── custom
│ │ │ │ └── _index.md
│ │ │ ├── sources
│ │ │ │ ├── _index.md
│ │ │ │ ├── alloydb-admin.md
│ │ │ │ ├── alloydb-pg.md
│ │ │ │ ├── bigquery.md
│ │ │ │ ├── bigtable.md
│ │ │ │ ├── cassandra.md
│ │ │ │ ├── clickhouse.md
│ │ │ │ ├── cloud-gda.md
│ │ │ │ ├── cloud-healthcare.md
│ │ │ │ ├── cloud-monitoring.md
│ │ │ │ ├── cloud-sql-admin.md
│ │ │ │ ├── cloud-sql-mssql.md
│ │ │ │ ├── cloud-sql-mysql.md
│ │ │ │ ├── cloud-sql-pg.md
│ │ │ │ ├── couchbase.md
│ │ │ │ ├── dataplex.md
│ │ │ │ ├── dgraph.md
│ │ │ │ ├── elasticsearch.md
│ │ │ │ ├── firebird.md
│ │ │ │ ├── firestore.md
│ │ │ │ ├── http.md
│ │ │ │ ├── looker.md
│ │ │ │ ├── mariadb.md
│ │ │ │ ├── mindsdb.md
│ │ │ │ ├── mongodb.md
│ │ │ │ ├── mssql.md
│ │ │ │ ├── mysql.md
│ │ │ │ ├── neo4j.md
│ │ │ │ ├── oceanbase.md
│ │ │ │ ├── oracle.md
│ │ │ │ ├── postgres.md
│ │ │ │ ├── redis.md
│ │ │ │ ├── serverless-spark.md
│ │ │ │ ├── singlestore.md
│ │ │ │ ├── snowflake.md
│ │ │ │ ├── spanner.md
│ │ │ │ ├── sqlite.md
│ │ │ │ ├── tidb.md
│ │ │ │ ├── trino.md
│ │ │ │ ├── valkey.md
│ │ │ │ └── yugabytedb.md
│ │ │ └── tools
│ │ │ ├── _index.md
│ │ │ ├── alloydb
│ │ │ │ ├── _index.md
│ │ │ │ ├── alloydb-create-cluster.md
│ │ │ │ ├── alloydb-create-instance.md
│ │ │ │ ├── alloydb-create-user.md
│ │ │ │ ├── alloydb-get-cluster.md
│ │ │ │ ├── alloydb-get-instance.md
│ │ │ │ ├── alloydb-get-user.md
│ │ │ │ ├── alloydb-list-clusters.md
│ │ │ │ ├── alloydb-list-instances.md
│ │ │ │ ├── alloydb-list-users.md
│ │ │ │ └── alloydb-wait-for-operation.md
│ │ │ ├── alloydbainl
│ │ │ │ ├── _index.md
│ │ │ │ └── alloydb-ai-nl.md
│ │ │ ├── bigquery
│ │ │ │ ├── _index.md
│ │ │ │ ├── bigquery-analyze-contribution.md
│ │ │ │ ├── bigquery-conversational-analytics.md
│ │ │ │ ├── bigquery-execute-sql.md
│ │ │ │ ├── bigquery-forecast.md
│ │ │ │ ├── bigquery-get-dataset-info.md
│ │ │ │ ├── bigquery-get-table-info.md
│ │ │ │ ├── bigquery-list-dataset-ids.md
│ │ │ │ ├── bigquery-list-table-ids.md
│ │ │ │ ├── bigquery-search-catalog.md
│ │ │ │ └── bigquery-sql.md
│ │ │ ├── bigtable
│ │ │ │ ├── _index.md
│ │ │ │ └── bigtable-sql.md
│ │ │ ├── cassandra
│ │ │ │ ├── _index.md
│ │ │ │ └── cassandra-cql.md
│ │ │ ├── clickhouse
│ │ │ │ ├── _index.md
│ │ │ │ ├── clickhouse-execute-sql.md
│ │ │ │ ├── clickhouse-list-databases.md
│ │ │ │ ├── clickhouse-list-tables.md
│ │ │ │ └── clickhouse-sql.md
│ │ │ ├── cloudgda
│ │ │ │ ├── _index.md
│ │ │ │ └── cloud-gda-query.md
│ │ │ ├── cloudhealthcare
│ │ │ │ ├── _index.md
│ │ │ │ ├── cloud-healthcare-fhir-fetch-page.md
│ │ │ │ ├── cloud-healthcare-fhir-patient-everything.md
│ │ │ │ ├── cloud-healthcare-fhir-patient-search.md
│ │ │ │ ├── cloud-healthcare-get-dataset.md
│ │ │ │ ├── cloud-healthcare-get-dicom-store-metrics.md
│ │ │ │ ├── cloud-healthcare-get-dicom-store.md
│ │ │ │ ├── cloud-healthcare-get-fhir-resource.md
│ │ │ │ ├── cloud-healthcare-get-fhir-store-metrics.md
│ │ │ │ ├── cloud-healthcare-get-fhir-store.md
│ │ │ │ ├── cloud-healthcare-list-dicom-stores.md
│ │ │ │ ├── cloud-healthcare-list-fhir-stores.md
│ │ │ │ ├── cloud-healthcare-retrieve-rendered-dicom-instance.md
│ │ │ │ ├── cloud-healthcare-search-dicom-instances.md
│ │ │ │ ├── cloud-healthcare-search-dicom-series.md
│ │ │ │ └── cloud-healthcare-search-dicom-studies.md
│ │ │ ├── cloudmonitoring
│ │ │ │ ├── _index.md
│ │ │ │ └── cloud-monitoring-query-prometheus.md
│ │ │ ├── cloudsql
│ │ │ │ ├── _index.md
│ │ │ │ ├── cloudsqlcloneinstance.md
│ │ │ │ ├── cloudsqlcreatedatabase.md
│ │ │ │ ├── cloudsqlcreateusers.md
│ │ │ │ ├── cloudsqlgetinstances.md
│ │ │ │ ├── cloudsqllistdatabases.md
│ │ │ │ ├── cloudsqllistinstances.md
│ │ │ │ ├── cloudsqlmssqlcreateinstance.md
│ │ │ │ ├── cloudsqlmysqlcreateinstance.md
│ │ │ │ ├── cloudsqlpgcreateinstances.md
│ │ │ │ ├── cloudsqlpgupgradeprecheck.md
│ │ │ │ └── cloudsqlwaitforoperation.md
│ │ │ ├── couchbase
│ │ │ │ ├── _index.md
│ │ │ │ └── couchbase-sql.md
│ │ │ ├── dataform
│ │ │ │ ├── _index.md
│ │ │ │ └── dataform-compile-local.md
│ │ │ ├── dataplex
│ │ │ │ ├── _index.md
│ │ │ │ ├── dataplex-lookup-entry.md
│ │ │ │ ├── dataplex-search-aspect-types.md
│ │ │ │ └── dataplex-search-entries.md
│ │ │ ├── dgraph
│ │ │ │ ├── _index.md
│ │ │ │ └── dgraph-dql.md
│ │ │ ├── elasticsearch
│ │ │ │ ├── _index.md
│ │ │ │ └── elasticsearch-esql.md
│ │ │ ├── firebird
│ │ │ │ ├── _index.md
│ │ │ │ ├── firebird-execute-sql.md
│ │ │ │ └── firebird-sql.md
│ │ │ ├── firestore
│ │ │ │ ├── _index.md
│ │ │ │ ├── firestore-add-documents.md
│ │ │ │ ├── firestore-delete-documents.md
│ │ │ │ ├── firestore-get-documents.md
│ │ │ │ ├── firestore-get-rules.md
│ │ │ │ ├── firestore-list-collections.md
│ │ │ │ ├── firestore-query-collection.md
│ │ │ │ ├── firestore-query.md
│ │ │ │ ├── firestore-update-document.md
│ │ │ │ └── firestore-validate-rules.md
│ │ │ ├── http
│ │ │ │ ├── _index.md
│ │ │ │ └── http.md
│ │ │ ├── looker
│ │ │ │ ├── _index.md
│ │ │ │ ├── looker-add-dashboard-element.md
│ │ │ │ ├── looker-add-dashboard-filter.md
│ │ │ │ ├── looker-conversational-analytics.md
│ │ │ │ ├── looker-create-project-file.md
│ │ │ │ ├── looker-delete-project-file.md
│ │ │ │ ├── looker-dev-mode.md
│ │ │ │ ├── looker-generate-embed-url.md
│ │ │ │ ├── looker-get-connection-databases.md
│ │ │ │ ├── looker-get-connection-schemas.md
│ │ │ │ ├── looker-get-connection-table-columns.md
│ │ │ │ ├── looker-get-connection-tables.md
│ │ │ │ ├── looker-get-connections.md
│ │ │ │ ├── looker-get-dashboards.md
│ │ │ │ ├── looker-get-dimensions.md
│ │ │ │ ├── looker-get-explores.md
│ │ │ │ ├── looker-get-filters.md
│ │ │ │ ├── looker-get-looks.md
│ │ │ │ ├── looker-get-measures.md
│ │ │ │ ├── looker-get-models.md
│ │ │ │ ├── looker-get-parameters.md
│ │ │ │ ├── looker-get-project-file.md
│ │ │ │ ├── looker-get-project-files.md
│ │ │ │ ├── looker-get-projects.md
│ │ │ │ ├── looker-health-analyze.md
│ │ │ │ ├── looker-health-pulse.md
│ │ │ │ ├── looker-health-vacuum.md
│ │ │ │ ├── looker-make-dashboard.md
│ │ │ │ ├── looker-make-look.md
│ │ │ │ ├── looker-query-sql.md
│ │ │ │ ├── looker-query-url.md
│ │ │ │ ├── looker-query.md
│ │ │ │ ├── looker-run-dashboard.md
│ │ │ │ ├── looker-run-look.md
│ │ │ │ └── looker-update-project-file.md
│ │ │ ├── mindsdb
│ │ │ │ ├── _index.md
│ │ │ │ ├── mindsdb-execute-sql.md
│ │ │ │ └── mindsdb-sql.md
│ │ │ ├── mongodb
│ │ │ │ ├── _index.md
│ │ │ │ ├── mongodb-aggregate.md
│ │ │ │ ├── mongodb-delete-many.md
│ │ │ │ ├── mongodb-delete-one.md
│ │ │ │ ├── mongodb-find-one.md
│ │ │ │ ├── mongodb-find.md
│ │ │ │ ├── mongodb-insert-many.md
│ │ │ │ ├── mongodb-insert-one.md
│ │ │ │ ├── mongodb-update-many.md
│ │ │ │ └── mongodb-update-one.md
│ │ │ ├── mssql
│ │ │ │ ├── _index.md
│ │ │ │ ├── mssql-execute-sql.md
│ │ │ │ ├── mssql-list-tables.md
│ │ │ │ └── mssql-sql.md
│ │ │ ├── mysql
│ │ │ │ ├── _index.md
│ │ │ │ ├── mysql-execute-sql.md
│ │ │ │ ├── mysql-get-query-plan.md
│ │ │ │ ├── mysql-list-active-queries.md
│ │ │ │ ├── mysql-list-table-fragmentation.md
│ │ │ │ ├── mysql-list-tables-missing-unique-indexes.md
│ │ │ │ ├── mysql-list-tables.md
│ │ │ │ └── mysql-sql.md
│ │ │ ├── neo4j
│ │ │ │ ├── _index.md
│ │ │ │ ├── neo4j-cypher.md
│ │ │ │ ├── neo4j-execute-cypher.md
│ │ │ │ └── neo4j-schema.md
│ │ │ ├── oceanbase
│ │ │ │ ├── _index.md
│ │ │ │ ├── oceanbase-execute-sql.md
│ │ │ │ └── oceanbase-sql.md
│ │ │ ├── oracle
│ │ │ │ ├── _index.md
│ │ │ │ ├── oracle-execute-sql.md
│ │ │ │ └── oracle-sql.md
│ │ │ ├── postgres
│ │ │ │ ├── _index.md
│ │ │ │ ├── postgres-database-overview.md
│ │ │ │ ├── postgres-execute-sql.md
│ │ │ │ ├── postgres-get-column-cardinality.md
│ │ │ │ ├── postgres-list-active-queries.md
│ │ │ │ ├── postgres-list-available-extensions.md
│ │ │ │ ├── postgres-list-database-stats.md
│ │ │ │ ├── postgres-list-indexes.md
│ │ │ │ ├── postgres-list-installed-extensions.md
│ │ │ │ ├── postgres-list-locks.md
│ │ │ │ ├── postgres-list-pg-settings.md
│ │ │ │ ├── postgres-list-publication-tables.md
│ │ │ │ ├── postgres-list-query-stats.md
│ │ │ │ ├── postgres-list-roles.md
│ │ │ │ ├── postgres-list-schemas.md
│ │ │ │ ├── postgres-list-sequences.md
│ │ │ │ ├── postgres-list-stored-procedure.md
│ │ │ │ ├── postgres-list-table-stats.md
│ │ │ │ ├── postgres-list-tables.md
│ │ │ │ ├── postgres-list-tablespaces.md
│ │ │ │ ├── postgres-list-triggers.md
│ │ │ │ ├── postgres-list-views.md
│ │ │ │ ├── postgres-long-running-transactions.md
│ │ │ │ ├── postgres-replication-stats.md
│ │ │ │ └── postgres-sql.md
│ │ │ ├── redis
│ │ │ │ ├── _index.md
│ │ │ │ └── redis.md
│ │ │ ├── serverless-spark
│ │ │ │ ├── _index.md
│ │ │ │ ├── serverless-spark-cancel-batch.md
│ │ │ │ ├── serverless-spark-create-pyspark-batch.md
│ │ │ │ ├── serverless-spark-create-spark-batch.md
│ │ │ │ ├── serverless-spark-get-batch.md
│ │ │ │ └── serverless-spark-list-batches.md
│ │ │ ├── singlestore
│ │ │ │ ├── _index.md
│ │ │ │ ├── singlestore-execute-sql.md
│ │ │ │ └── singlestore-sql.md
│ │ │ ├── snowflake
│ │ │ │ ├── _index.md
│ │ │ │ ├── snowflake-execute-sql.md
│ │ │ │ └── snowflake-sql.md
│ │ │ ├── spanner
│ │ │ │ ├── _index.md
│ │ │ │ ├── spanner-execute-sql.md
│ │ │ │ ├── spanner-list-graphs.md
│ │ │ │ ├── spanner-list-tables.md
│ │ │ │ └── spanner-sql.md
│ │ │ ├── sqlite
│ │ │ │ ├── _index.md
│ │ │ │ ├── sqlite-execute-sql.md
│ │ │ │ └── sqlite-sql.md
│ │ │ ├── tidb
│ │ │ │ ├── _index.md
│ │ │ │ ├── tidb-execute-sql.md
│ │ │ │ └── tidb-sql.md
│ │ │ ├── trino
│ │ │ │ ├── _index.md
│ │ │ │ ├── trino-execute-sql.md
│ │ │ │ └── trino-sql.md
│ │ │ ├── utility
│ │ │ │ ├── _index.md
│ │ │ │ └── wait.md
│ │ │ ├── valkey
│ │ │ │ ├── _index.md
│ │ │ │ └── valkey.md
│ │ │ └── yuagbytedb
│ │ │ ├── _index.md
│ │ │ └── yugabytedb-sql.md
│ │ ├── samples
│ │ │ ├── _index.md
│ │ │ ├── alloydb
│ │ │ │ ├── _index.md
│ │ │ │ ├── ai-nl
│ │ │ │ │ ├── alloydb_ai_nl.ipynb
│ │ │ │ │ └── index.md
│ │ │ │ └── mcp_quickstart.md
│ │ │ ├── bigquery
│ │ │ │ ├── _index.md
│ │ │ │ ├── colab_quickstart_bigquery.ipynb
│ │ │ │ ├── local_quickstart.md
│ │ │ │ └── mcp_quickstart
│ │ │ │ ├── _index.md
│ │ │ │ ├── inspector_tools.png
│ │ │ │ └── inspector.png
│ │ │ ├── looker
│ │ │ │ ├── _index.md
│ │ │ │ ├── looker_gemini_oauth
│ │ │ │ │ ├── _index.md
│ │ │ │ │ ├── authenticated.png
│ │ │ │ │ ├── authorize.png
│ │ │ │ │ └── registration.png
│ │ │ │ ├── looker_gemini.md
│ │ │ │ └── looker_mcp_inspector
│ │ │ │ ├── _index.md
│ │ │ │ ├── inspector_tools.png
│ │ │ │ └── inspector.png
│ │ │ └── snowflake
│ │ │ ├── _index.md
│ │ │ ├── runme.py
│ │ │ ├── snowflake-config.yaml
│ │ │ ├── snowflake-env.sh
│ │ │ └── test-snowflake.sh
│ │ └── sdks
│ │ ├── _index.md
│ │ ├── go-sdk.md
│ │ ├── js-sdk.md
│ │ └── python-sdk.md
│ ├── LOOKER_README.md
│ ├── SPANNER_README.md
│ └── TOOLBOX_README.md
├── gemini-extension.json
├── go.mod
├── go.sum
├── internal
│ ├── auth
│ │ ├── auth.go
│ │ └── google
│ │ └── google.go
│ ├── embeddingmodels
│ │ ├── embeddingmodels.go
│ │ └── gemini
│ │ ├── gemini_test.go
│ │ └── gemini.go
│ ├── log
│ │ ├── handler.go
│ │ ├── log_test.go
│ │ ├── log.go
│ │ └── logger.go
│ ├── prebuiltconfigs
│ │ ├── prebuiltconfigs_test.go
│ │ ├── prebuiltconfigs.go
│ │ └── tools
│ │ ├── alloydb-postgres-admin.yaml
│ │ ├── alloydb-postgres-observability.yaml
│ │ ├── alloydb-postgres.yaml
│ │ ├── bigquery.yaml
│ │ ├── clickhouse.yaml
│ │ ├── cloud-healthcare.yaml
│ │ ├── cloud-sql-mssql-admin.yaml
│ │ ├── cloud-sql-mssql-observability.yaml
│ │ ├── cloud-sql-mssql.yaml
│ │ ├── cloud-sql-mysql-admin.yaml
│ │ ├── cloud-sql-mysql-observability.yaml
│ │ ├── cloud-sql-mysql.yaml
│ │ ├── cloud-sql-postgres-admin.yaml
│ │ ├── cloud-sql-postgres-observability.yaml
│ │ ├── cloud-sql-postgres.yaml
│ │ ├── dataplex.yaml
│ │ ├── elasticsearch.yaml
│ │ ├── firestore.yaml
│ │ ├── looker-conversational-analytics.yaml
│ │ ├── looker.yaml
│ │ ├── mindsdb.yaml
│ │ ├── mssql.yaml
│ │ ├── mysql.yaml
│ │ ├── neo4j.yaml
│ │ ├── oceanbase.yaml
│ │ ├── postgres.yaml
│ │ ├── serverless-spark.yaml
│ │ ├── singlestore.yaml
│ │ ├── snowflake.yaml
│ │ ├── spanner-postgres.yaml
│ │ ├── spanner.yaml
│ │ └── sqlite.yaml
│ ├── prompts
│ │ ├── arguments_test.go
│ │ ├── arguments.go
│ │ ├── custom
│ │ │ ├── custom_test.go
│ │ │ └── custom.go
│ │ ├── messages_test.go
│ │ ├── messages.go
│ │ ├── prompts_test.go
│ │ ├── prompts.go
│ │ ├── promptsets_test.go
│ │ └── promptsets.go
│ ├── server
│ │ ├── api_test.go
│ │ ├── api.go
│ │ ├── common_test.go
│ │ ├── config.go
│ │ ├── mcp
│ │ │ ├── jsonrpc
│ │ │ │ ├── jsonrpc_test.go
│ │ │ │ └── jsonrpc.go
│ │ │ ├── mcp.go
│ │ │ ├── util
│ │ │ │ └── lifecycle.go
│ │ │ ├── v20241105
│ │ │ │ ├── method.go
│ │ │ │ └── types.go
│ │ │ ├── v20250326
│ │ │ │ ├── method.go
│ │ │ │ └── types.go
│ │ │ └── v20250618
│ │ │ ├── method.go
│ │ │ └── types.go
│ │ ├── mcp_test.go
│ │ ├── mcp.go
│ │ ├── resources
│ │ │ ├── resources_test.go
│ │ │ └── resources.go
│ │ ├── server_test.go
│ │ ├── server.go
│ │ ├── static
│ │ │ ├── assets
│ │ │ │ └── mcptoolboxlogo.png
│ │ │ ├── css
│ │ │ │ └── style.css
│ │ │ ├── index.html
│ │ │ ├── js
│ │ │ │ ├── auth.js
│ │ │ │ ├── loadTools.js
│ │ │ │ ├── mainContent.js
│ │ │ │ ├── navbar.js
│ │ │ │ ├── runTool.js
│ │ │ │ ├── toolDisplay.js
│ │ │ │ ├── tools.js
│ │ │ │ └── toolsets.js
│ │ │ ├── tools.html
│ │ │ └── toolsets.html
│ │ ├── web_test.go
│ │ └── web.go
│ ├── sources
│ │ ├── alloydbadmin
│ │ │ ├── alloydbadmin_test.go
│ │ │ └── alloydbadmin.go
│ │ ├── alloydbpg
│ │ │ ├── alloydb_pg_test.go
│ │ │ └── alloydb_pg.go
│ │ ├── bigquery
│ │ │ ├── bigquery_test.go
│ │ │ ├── bigquery.go
│ │ │ └── cache.go
│ │ ├── bigtable
│ │ │ ├── bigtable_test.go
│ │ │ └── bigtable.go
│ │ ├── cassandra
│ │ │ ├── cassandra_test.go
│ │ │ └── cassandra.go
│ │ ├── clickhouse
│ │ │ ├── clickhouse_test.go
│ │ │ └── clickhouse.go
│ │ ├── cloudgda
│ │ │ ├── cloud_gda_test.go
│ │ │ └── cloud_gda.go
│ │ ├── cloudhealthcare
│ │ │ ├── cloud_healthcare_test.go
│ │ │ └── cloud_healthcare.go
│ │ ├── cloudmonitoring
│ │ │ ├── cloud_monitoring_test.go
│ │ │ └── cloud_monitoring.go
│ │ ├── cloudsqladmin
│ │ │ ├── cloud_sql_admin_test.go
│ │ │ └── cloud_sql_admin.go
│ │ ├── cloudsqlmssql
│ │ │ ├── cloud_sql_mssql_test.go
│ │ │ └── cloud_sql_mssql.go
│ │ ├── cloudsqlmysql
│ │ │ ├── cloud_sql_mysql_test.go
│ │ │ └── cloud_sql_mysql.go
│ │ ├── cloudsqlpg
│ │ │ ├── cloud_sql_pg_test.go
│ │ │ └── cloud_sql_pg.go
│ │ ├── couchbase
│ │ │ ├── couchbase_test.go
│ │ │ └── couchbase.go
│ │ ├── dataplex
│ │ │ ├── dataplex_test.go
│ │ │ └── dataplex.go
│ │ ├── dgraph
│ │ │ ├── dgraph_test.go
│ │ │ └── dgraph.go
│ │ ├── dialect.go
│ │ ├── elasticsearch
│ │ │ ├── elasticsearch_test.go
│ │ │ └── elasticsearch.go
│ │ ├── firebird
│ │ │ ├── firebird_test.go
│ │ │ └── firebird.go
│ │ ├── firestore
│ │ │ ├── firestore_test.go
│ │ │ └── firestore.go
│ │ ├── http
│ │ │ ├── http_test.go
│ │ │ └── http.go
│ │ ├── ip_type.go
│ │ ├── looker
│ │ │ ├── looker_test.go
│ │ │ └── looker.go
│ │ ├── mindsdb
│ │ │ ├── mindsdb_test.go
│ │ │ └── mindsdb.go
│ │ ├── mongodb
│ │ │ ├── mongodb_test.go
│ │ │ └── mongodb.go
│ │ ├── mssql
│ │ │ ├── mssql_test.go
│ │ │ └── mssql.go
│ │ ├── mysql
│ │ │ ├── mysql_test.go
│ │ │ └── mysql.go
│ │ ├── neo4j
│ │ │ ├── neo4j_test.go
│ │ │ └── neo4j.go
│ │ ├── oceanbase
│ │ │ ├── oceanbase_test.go
│ │ │ └── oceanbase.go
│ │ ├── oracle
│ │ │ ├── oracle_test.go
│ │ │ └── oracle.go
│ │ ├── postgres
│ │ │ ├── postgres_test.go
│ │ │ └── postgres.go
│ │ ├── redis
│ │ │ ├── redis_test.go
│ │ │ └── redis.go
│ │ ├── serverlessspark
│ │ │ ├── serverlessspark_test.go
│ │ │ ├── serverlessspark.go
│ │ │ ├── url_test.go
│ │ │ └── url.go
│ │ ├── singlestore
│ │ │ ├── singlestore_test.go
│ │ │ └── singlestore.go
│ │ ├── snowflake
│ │ │ ├── snowflake_test.go
│ │ │ └── snowflake.go
│ │ ├── sources.go
│ │ ├── spanner
│ │ │ ├── spanner_test.go
│ │ │ └── spanner.go
│ │ ├── sqlite
│ │ │ ├── sqlite_test.go
│ │ │ └── sqlite.go
│ │ ├── tidb
│ │ │ ├── tidb_test.go
│ │ │ └── tidb.go
│ │ ├── trino
│ │ │ ├── trino_test.go
│ │ │ └── trino.go
│ │ ├── util.go
│ │ ├── valkey
│ │ │ ├── valkey_test.go
│ │ │ └── valkey.go
│ │ └── yugabytedb
│ │ ├── yugabytedb_test.go
│ │ └── yugabytedb.go
│ ├── telemetry
│ │ ├── instrumentation.go
│ │ └── telemetry.go
│ ├── testutils
│ │ └── testutils.go
│ ├── tools
│ │ ├── alloydb
│ │ │ ├── alloydbcreatecluster
│ │ │ │ ├── alloydbcreatecluster_test.go
│ │ │ │ └── alloydbcreatecluster.go
│ │ │ ├── alloydbcreateinstance
│ │ │ │ ├── alloydbcreateinstance_test.go
│ │ │ │ └── alloydbcreateinstance.go
│ │ │ ├── alloydbcreateuser
│ │ │ │ ├── alloydbcreateuser_test.go
│ │ │ │ └── alloydbcreateuser.go
│ │ │ ├── alloydbgetcluster
│ │ │ │ ├── alloydbgetcluster_test.go
│ │ │ │ └── alloydbgetcluster.go
│ │ │ ├── alloydbgetinstance
│ │ │ │ ├── alloydbgetinstance_test.go
│ │ │ │ └── alloydbgetinstance.go
│ │ │ ├── alloydbgetuser
│ │ │ │ ├── alloydbgetuser_test.go
│ │ │ │ └── alloydbgetuser.go
│ │ │ ├── alloydblistclusters
│ │ │ │ ├── alloydblistclusters_test.go
│ │ │ │ └── alloydblistclusters.go
│ │ │ ├── alloydblistinstances
│ │ │ │ ├── alloydblistinstances_test.go
│ │ │ │ └── alloydblistinstances.go
│ │ │ ├── alloydblistusers
│ │ │ │ ├── alloydblistusers_test.go
│ │ │ │ └── alloydblistusers.go
│ │ │ └── alloydbwaitforoperation
│ │ │ ├── alloydbwaitforoperation_test.go
│ │ │ └── alloydbwaitforoperation.go
│ │ ├── alloydbainl
│ │ │ ├── alloydbainl_test.go
│ │ │ └── alloydbainl.go
│ │ ├── bigquery
│ │ │ ├── bigqueryanalyzecontribution
│ │ │ │ ├── bigqueryanalyzecontribution_test.go
│ │ │ │ └── bigqueryanalyzecontribution.go
│ │ │ ├── bigquerycommon
│ │ │ │ ├── table_name_parser_test.go
│ │ │ │ ├── table_name_parser.go
│ │ │ │ └── util.go
│ │ │ ├── bigqueryconversationalanalytics
│ │ │ │ ├── bigqueryconversationalanalytics_test.go
│ │ │ │ └── bigqueryconversationalanalytics.go
│ │ │ ├── bigqueryexecutesql
│ │ │ │ ├── bigqueryexecutesql_test.go
│ │ │ │ └── bigqueryexecutesql.go
│ │ │ ├── bigqueryforecast
│ │ │ │ ├── bigqueryforecast_test.go
│ │ │ │ └── bigqueryforecast.go
│ │ │ ├── bigquerygetdatasetinfo
│ │ │ │ ├── bigquerygetdatasetinfo_test.go
│ │ │ │ └── bigquerygetdatasetinfo.go
│ │ │ ├── bigquerygettableinfo
│ │ │ │ ├── bigquerygettableinfo_test.go
│ │ │ │ └── bigquerygettableinfo.go
│ │ │ ├── bigquerylistdatasetids
│ │ │ │ ├── bigquerylistdatasetids_test.go
│ │ │ │ └── bigquerylistdatasetids.go
│ │ │ ├── bigquerylisttableids
│ │ │ │ ├── bigquerylisttableids_test.go
│ │ │ │ └── bigquerylisttableids.go
│ │ │ ├── bigquerysearchcatalog
│ │ │ │ ├── bigquerysearchcatalog_test.go
│ │ │ │ └── bigquerysearchcatalog.go
│ │ │ └── bigquerysql
│ │ │ ├── bigquerysql_test.go
│ │ │ └── bigquerysql.go
│ │ ├── bigtable
│ │ │ ├── bigtable_test.go
│ │ │ └── bigtable.go
│ │ ├── cassandra
│ │ │ └── cassandracql
│ │ │ ├── cassandracql_test.go
│ │ │ └── cassandracql.go
│ │ ├── clickhouse
│ │ │ ├── clickhouseexecutesql
│ │ │ │ ├── clickhouseexecutesql_test.go
│ │ │ │ └── clickhouseexecutesql.go
│ │ │ ├── clickhouselistdatabases
│ │ │ │ ├── clickhouselistdatabases_test.go
│ │ │ │ └── clickhouselistdatabases.go
│ │ │ ├── clickhouselisttables
│ │ │ │ ├── clickhouselisttables_test.go
│ │ │ │ └── clickhouselisttables.go
│ │ │ └── clickhousesql
│ │ │ ├── clickhousesql_test.go
│ │ │ └── clickhousesql.go
│ │ ├── cloudgda
│ │ │ ├── cloudgda_test.go
│ │ │ ├── cloudgda.go
│ │ │ └── types.go
│ │ ├── cloudhealthcare
│ │ │ ├── cloudhealthcarefhirfetchpage
│ │ │ │ ├── cloudhealthcarefhirfetchpage_test.go
│ │ │ │ └── cloudhealthcarefhirfetchpage.go
│ │ │ ├── cloudhealthcarefhirpatienteverything
│ │ │ │ ├── cloudhealthcarefhirpatienteverything_test.go
│ │ │ │ └── cloudhealthcarefhirpatienteverything.go
│ │ │ ├── cloudhealthcarefhirpatientsearch
│ │ │ │ ├── cloudhealthcarefhirpatientsearch_test.go
│ │ │ │ └── cloudhealthcarefhirpatientsearch.go
│ │ │ ├── cloudhealthcaregetdataset
│ │ │ │ ├── cloudhealthcaregetdataset_test.go
│ │ │ │ └── cloudhealthcaregetdataset.go
│ │ │ ├── cloudhealthcaregetdicomstore
│ │ │ │ ├── cloudhealthcaregetdicomstore_test.go
│ │ │ │ └── cloudhealthcaregetdicomstore.go
│ │ │ ├── cloudhealthcaregetdicomstoremetrics
│ │ │ │ ├── cloudhealthcaregetdicomstoremetrics_test.go
│ │ │ │ └── cloudhealthcaregetdicomstoremetrics.go
│ │ │ ├── cloudhealthcaregetfhirresource
│ │ │ │ ├── cloudhealthcaregetfhirresource_test.go
│ │ │ │ └── cloudhealthcaregetfhirresource.go
│ │ │ ├── cloudhealthcaregetfhirstore
│ │ │ │ ├── cloudhealthcaregetfhirstore_test.go
│ │ │ │ └── cloudhealthcaregetfhirstore.go
│ │ │ ├── cloudhealthcaregetfhirstoremetrics
│ │ │ │ ├── cloudhealthcaregetfhirstoremetrics_test.go
│ │ │ │ └── cloudhealthcaregetfhirstoremetrics.go
│ │ │ ├── cloudhealthcarelistdicomstores
│ │ │ │ ├── cloudhealthcarelistdicomstores_test.go
│ │ │ │ └── cloudhealthcarelistdicomstores.go
│ │ │ ├── cloudhealthcarelistfhirstores
│ │ │ │ ├── cloudhealthcarelistfhirstores_test.go
│ │ │ │ └── cloudhealthcarelistfhirstores.go
│ │ │ ├── cloudhealthcareretrieverendereddicominstance
│ │ │ │ ├── cloudhealthcareretrieverendereddicominstance_test.go
│ │ │ │ └── cloudhealthcareretrieverendereddicominstance.go
│ │ │ ├── cloudhealthcaresearchdicominstances
│ │ │ │ ├── cloudhealthcaresearchdicominstances_test.go
│ │ │ │ └── cloudhealthcaresearchdicominstances.go
│ │ │ ├── cloudhealthcaresearchdicomseries
│ │ │ │ ├── cloudhealthcaresearchdicomseries_test.go
│ │ │ │ └── cloudhealthcaresearchdicomseries.go
│ │ │ ├── cloudhealthcaresearchdicomstudies
│ │ │ │ ├── cloudhealthcaresearchdicomstudies_test.go
│ │ │ │ └── cloudhealthcaresearchdicomstudies.go
│ │ │ └── common
│ │ │ └── util.go
│ │ ├── cloudmonitoring
│ │ │ ├── cloudmonitoring_test.go
│ │ │ └── cloudmonitoring.go
│ │ ├── cloudsql
│ │ │ ├── cloudsqlcloneinstance
│ │ │ │ ├── cloudsqlcloneinstance_test.go
│ │ │ │ └── cloudsqlcloneinstance.go
│ │ │ ├── cloudsqlcreatedatabase
│ │ │ │ ├── cloudsqlcreatedatabase_test.go
│ │ │ │ └── cloudsqlcreatedatabase.go
│ │ │ ├── cloudsqlcreateusers
│ │ │ │ ├── cloudsqlcreateusers_test.go
│ │ │ │ └── cloudsqlcreateusers.go
│ │ │ ├── cloudsqlgetinstances
│ │ │ │ ├── cloudsqlgetinstances_test.go
│ │ │ │ └── cloudsqlgetinstances.go
│ │ │ ├── cloudsqllistdatabases
│ │ │ │ ├── cloudsqllistdatabases_test.go
│ │ │ │ └── cloudsqllistdatabases.go
│ │ │ ├── cloudsqllistinstances
│ │ │ │ ├── cloudsqllistinstances_test.go
│ │ │ │ └── cloudsqllistinstances.go
│ │ │ └── cloudsqlwaitforoperation
│ │ │ ├── cloudsqlwaitforoperation_test.go
│ │ │ └── cloudsqlwaitforoperation.go
│ │ ├── cloudsqlmssql
│ │ │ └── cloudsqlmssqlcreateinstance
│ │ │ ├── cloudsqlmssqlcreateinstance_test.go
│ │ │ └── cloudsqlmssqlcreateinstance.go
│ │ ├── cloudsqlmysql
│ │ │ └── cloudsqlmysqlcreateinstance
│ │ │ ├── cloudsqlmysqlcreateinstance_test.go
│ │ │ └── cloudsqlmysqlcreateinstance.go
│ │ ├── cloudsqlpg
│ │ │ ├── cloudsqlpgcreateinstances
│ │ │ │ ├── cloudsqlpgcreateinstances_test.go
│ │ │ │ └── cloudsqlpgcreateinstances.go
│ │ │ └── cloudsqlpgupgradeprecheck
│ │ │ ├── cloudsqlpgupgradeprecheck_test.go
│ │ │ └── cloudsqlpgupgradeprecheck.go
│ │ ├── couchbase
│ │ │ ├── couchbase_test.go
│ │ │ └── couchbase.go
│ │ ├── dataform
│ │ │ └── dataformcompilelocal
│ │ │ ├── dataformcompilelocal_test.go
│ │ │ └── dataformcompilelocal.go
│ │ ├── dataplex
│ │ │ ├── dataplexlookupentry
│ │ │ │ ├── dataplexlookupentry_test.go
│ │ │ │ └── dataplexlookupentry.go
│ │ │ ├── dataplexsearchaspecttypes
│ │ │ │ ├── dataplexsearchaspecttypes_test.go
│ │ │ │ └── dataplexsearchaspecttypes.go
│ │ │ └── dataplexsearchentries
│ │ │ ├── dataplexsearchentries_test.go
│ │ │ └── dataplexsearchentries.go
│ │ ├── dgraph
│ │ │ ├── dgraph_test.go
│ │ │ └── dgraph.go
│ │ ├── elasticsearch
│ │ │ └── elasticsearchesql
│ │ │ ├── elasticsearchesql_test.go
│ │ │ └── elasticsearchesql.go
│ │ ├── firebird
│ │ │ ├── firebirdexecutesql
│ │ │ │ ├── firebirdexecutesql_test.go
│ │ │ │ └── firebirdexecutesql.go
│ │ │ └── firebirdsql
│ │ │ ├── firebirdsql_test.go
│ │ │ └── firebirdsql.go
│ │ ├── firestore
│ │ │ ├── firestoreadddocuments
│ │ │ │ ├── firestoreadddocuments_test.go
│ │ │ │ └── firestoreadddocuments.go
│ │ │ ├── firestoredeletedocuments
│ │ │ │ ├── firestoredeletedocuments_test.go
│ │ │ │ └── firestoredeletedocuments.go
│ │ │ ├── firestoregetdocuments
│ │ │ │ ├── firestoregetdocuments_test.go
│ │ │ │ └── firestoregetdocuments.go
│ │ │ ├── firestoregetrules
│ │ │ │ ├── firestoregetrules_test.go
│ │ │ │ └── firestoregetrules.go
│ │ │ ├── firestorelistcollections
│ │ │ │ ├── firestorelistcollections_test.go
│ │ │ │ └── firestorelistcollections.go
│ │ │ ├── firestorequery
│ │ │ │ ├── firestorequery_test.go
│ │ │ │ └── firestorequery.go
│ │ │ ├── firestorequerycollection
│ │ │ │ ├── firestorequerycollection_test.go
│ │ │ │ └── firestorequerycollection.go
│ │ │ ├── firestoreupdatedocument
│ │ │ │ ├── firestoreupdatedocument_test.go
│ │ │ │ └── firestoreupdatedocument.go
│ │ │ ├── firestorevalidaterules
│ │ │ │ ├── firestorevalidaterules_test.go
│ │ │ │ └── firestorevalidaterules.go
│ │ │ └── util
│ │ │ ├── converter_test.go
│ │ │ ├── converter.go
│ │ │ ├── validator_test.go
│ │ │ └── validator.go
│ │ ├── http
│ │ │ ├── http_test.go
│ │ │ └── http.go
│ │ ├── http_method.go
│ │ ├── looker
│ │ │ ├── lookeradddashboardelement
│ │ │ │ ├── lookeradddashboardelement_test.go
│ │ │ │ └── lookeradddashboardelement.go
│ │ │ ├── lookeradddashboardfilter
│ │ │ │ ├── lookeradddashboardfilter_test.go
│ │ │ │ └── lookeradddashboardfilter.go
│ │ │ ├── lookercommon
│ │ │ │ ├── lookercommon_test.go
│ │ │ │ └── lookercommon.go
│ │ │ ├── lookerconversationalanalytics
│ │ │ │ ├── lookerconversationalanalytics_test.go
│ │ │ │ └── lookerconversationalanalytics.go
│ │ │ ├── lookercreateprojectfile
│ │ │ │ ├── lookercreateprojectfile_test.go
│ │ │ │ └── lookercreateprojectfile.go
│ │ │ ├── lookerdeleteprojectfile
│ │ │ │ ├── lookerdeleteprojectfile_test.go
│ │ │ │ └── lookerdeleteprojectfile.go
│ │ │ ├── lookerdevmode
│ │ │ │ ├── lookerdevmode_test.go
│ │ │ │ └── lookerdevmode.go
│ │ │ ├── lookergenerateembedurl
│ │ │ │ ├── lookergenerateembedurl_test.go
│ │ │ │ └── lookergenerateembedurl.go
│ │ │ ├── lookergetconnectiondatabases
│ │ │ │ ├── lookergetconnectiondatabases_test.go
│ │ │ │ └── lookergetconnectiondatabases.go
│ │ │ ├── lookergetconnections
│ │ │ │ ├── lookergetconnections_test.go
│ │ │ │ └── lookergetconnections.go
│ │ │ ├── lookergetconnectionschemas
│ │ │ │ ├── lookergetconnectionschemas_test.go
│ │ │ │ └── lookergetconnectionschemas.go
│ │ │ ├── lookergetconnectiontablecolumns
│ │ │ │ ├── lookergetconnectiontablecolumns_test.go
│ │ │ │ └── lookergetconnectiontablecolumns.go
│ │ │ ├── lookergetconnectiontables
│ │ │ │ ├── lookergetconnectiontables_test.go
│ │ │ │ └── lookergetconnectiontables.go
│ │ │ ├── lookergetdashboards
│ │ │ │ ├── lookergetdashboards_test.go
│ │ │ │ └── lookergetdashboards.go
│ │ │ ├── lookergetdimensions
│ │ │ │ ├── lookergetdimensions_test.go
│ │ │ │ └── lookergetdimensions.go
│ │ │ ├── lookergetexplores
│ │ │ │ ├── lookergetexplores_test.go
│ │ │ │ └── lookergetexplores.go
│ │ │ ├── lookergetfilters
│ │ │ │ ├── lookergetfilters_test.go
│ │ │ │ └── lookergetfilters.go
│ │ │ ├── lookergetlooks
│ │ │ │ ├── lookergetlooks_test.go
│ │ │ │ └── lookergetlooks.go
│ │ │ ├── lookergetmeasures
│ │ │ │ ├── lookergetmeasures_test.go
│ │ │ │ └── lookergetmeasures.go
│ │ │ ├── lookergetmodels
│ │ │ │ ├── lookergetmodels_test.go
│ │ │ │ └── lookergetmodels.go
│ │ │ ├── lookergetparameters
│ │ │ │ ├── lookergetparameters_test.go
│ │ │ │ └── lookergetparameters.go
│ │ │ ├── lookergetprojectfile
│ │ │ │ ├── lookergetprojectfile_test.go
│ │ │ │ └── lookergetprojectfile.go
│ │ │ ├── lookergetprojectfiles
│ │ │ │ ├── lookergetprojectfiles_test.go
│ │ │ │ └── lookergetprojectfiles.go
│ │ │ ├── lookergetprojects
│ │ │ │ ├── lookergetprojects_test.go
│ │ │ │ └── lookergetprojects.go
│ │ │ ├── lookerhealthanalyze
│ │ │ │ ├── lookerhealthanalyze_test.go
│ │ │ │ └── lookerhealthanalyze.go
│ │ │ ├── lookerhealthpulse
│ │ │ │ ├── lookerhealthpulse_test.go
│ │ │ │ └── lookerhealthpulse.go
│ │ │ ├── lookerhealthvacuum
│ │ │ │ ├── lookerhealthvacuum_test.go
│ │ │ │ └── lookerhealthvacuum.go
│ │ │ ├── lookermakedashboard
│ │ │ │ ├── lookermakedashboard_test.go
│ │ │ │ └── lookermakedashboard.go
│ │ │ ├── lookermakelook
│ │ │ │ ├── lookermakelook_test.go
│ │ │ │ └── lookermakelook.go
│ │ │ ├── lookerquery
│ │ │ │ ├── lookerquery_test.go
│ │ │ │ └── lookerquery.go
│ │ │ ├── lookerquerysql
│ │ │ │ ├── lookerquerysql_test.go
│ │ │ │ └── lookerquerysql.go
│ │ │ ├── lookerqueryurl
│ │ │ │ ├── lookerqueryurl_test.go
│ │ │ │ └── lookerqueryurl.go
│ │ │ ├── lookerrundashboard
│ │ │ │ ├── lookerrundashboard_test.go
│ │ │ │ └── lookerrundashboard.go
│ │ │ ├── lookerrunlook
│ │ │ │ ├── lookerrunlook_test.go
│ │ │ │ └── lookerrunlook.go
│ │ │ └── lookerupdateprojectfile
│ │ │ ├── lookerupdateprojectfile_test.go
│ │ │ └── lookerupdateprojectfile.go
│ │ ├── mindsdb
│ │ │ ├── mindsdbexecutesql
│ │ │ │ ├── mindsdbexecutesql_test.go
│ │ │ │ └── mindsdbexecutesql.go
│ │ │ └── mindsdbsql
│ │ │ ├── mindsdbsql_test.go
│ │ │ └── mindsdbsql.go
│ │ ├── mongodb
│ │ │ ├── mongodbaggregate
│ │ │ │ ├── mongodbaggregate_test.go
│ │ │ │ └── mongodbaggregate.go
│ │ │ ├── mongodbdeletemany
│ │ │ │ ├── mongodbdeletemany_test.go
│ │ │ │ └── mongodbdeletemany.go
│ │ │ ├── mongodbdeleteone
│ │ │ │ ├── mongodbdeleteone_test.go
│ │ │ │ └── mongodbdeleteone.go
│ │ │ ├── mongodbfind
│ │ │ │ ├── mongodbfind_test.go
│ │ │ │ └── mongodbfind.go
│ │ │ ├── mongodbfindone
│ │ │ │ ├── mongodbfindone_test.go
│ │ │ │ └── mongodbfindone.go
│ │ │ ├── mongodbinsertmany
│ │ │ │ ├── mongodbinsertmany_test.go
│ │ │ │ └── mongodbinsertmany.go
│ │ │ ├── mongodbinsertone
│ │ │ │ ├── mongodbinsertone_test.go
│ │ │ │ └── mongodbinsertone.go
│ │ │ ├── mongodbupdatemany
│ │ │ │ ├── mongodbupdatemany_test.go
│ │ │ │ └── mongodbupdatemany.go
│ │ │ └── mongodbupdateone
│ │ │ ├── mongodbupdateone_test.go
│ │ │ └── mongodbupdateone.go
│ │ ├── mssql
│ │ │ ├── mssqlexecutesql
│ │ │ │ ├── mssqlexecutesql_test.go
│ │ │ │ └── mssqlexecutesql.go
│ │ │ ├── mssqllisttables
│ │ │ │ ├── mssqllisttables_test.go
│ │ │ │ └── mssqllisttables.go
│ │ │ └── mssqlsql
│ │ │ ├── mssqlsql_test.go
│ │ │ └── mssqlsql.go
│ │ ├── mysql
│ │ │ ├── mysqlcommon
│ │ │ │ └── mysqlcommon.go
│ │ │ ├── mysqlexecutesql
│ │ │ │ ├── mysqlexecutesql_test.go
│ │ │ │ └── mysqlexecutesql.go
│ │ │ ├── mysqlgetqueryplan
│ │ │ │ ├── mysqlgetqueryplan_test.go
│ │ │ │ └── mysqlgetqueryplan.go
│ │ │ ├── mysqllistactivequeries
│ │ │ │ ├── mysqllistactivequeries_test.go
│ │ │ │ └── mysqllistactivequeries.go
│ │ │ ├── mysqllisttablefragmentation
│ │ │ │ ├── mysqllisttablefragmentation_test.go
│ │ │ │ └── mysqllisttablefragmentation.go
│ │ │ ├── mysqllisttables
│ │ │ │ ├── mysqllisttables_test.go
│ │ │ │ └── mysqllisttables.go
│ │ │ ├── mysqllisttablesmissinguniqueindexes
│ │ │ │ ├── mysqllisttablesmissinguniqueindexes_test.go
│ │ │ │ └── mysqllisttablesmissinguniqueindexes.go
│ │ │ └── mysqlsql
│ │ │ ├── mysqlsql_test.go
│ │ │ └── mysqlsql.go
│ │ ├── neo4j
│ │ │ ├── neo4jcypher
│ │ │ │ ├── neo4jcypher_test.go
│ │ │ │ └── neo4jcypher.go
│ │ │ ├── neo4jexecutecypher
│ │ │ │ ├── classifier
│ │ │ │ │ ├── classifier_test.go
│ │ │ │ │ └── classifier.go
│ │ │ │ ├── neo4jexecutecypher_test.go
│ │ │ │ └── neo4jexecutecypher.go
│ │ │ └── neo4jschema
│ │ │ ├── cache
│ │ │ │ ├── cache_test.go
│ │ │ │ └── cache.go
│ │ │ ├── helpers
│ │ │ │ ├── helpers_test.go
│ │ │ │ └── helpers.go
│ │ │ ├── neo4jschema_test.go
│ │ │ ├── neo4jschema.go
│ │ │ └── types
│ │ │ └── types.go
│ │ ├── oceanbase
│ │ │ ├── oceanbaseexecutesql
│ │ │ │ ├── oceanbaseexecutesql_test.go
│ │ │ │ └── oceanbaseexecutesql.go
│ │ │ └── oceanbasesql
│ │ │ ├── oceanbasesql_test.go
│ │ │ └── oceanbasesql.go
│ │ ├── oracle
│ │ │ ├── oracleexecutesql
│ │ │ │ ├── oracleexecutesql_test.go
│ │ │ │ └── oracleexecutesql.go
│ │ │ └── oraclesql
│ │ │ ├── oraclesql_test.go
│ │ │ └── oraclesql.go
│ │ ├── postgres
│ │ │ ├── postgresdatabaseoverview
│ │ │ │ ├── postgresdatabaseoverview_test.go
│ │ │ │ └── postgresdatabaseoverview.go
│ │ │ ├── postgresexecutesql
│ │ │ │ ├── postgresexecutesql_test.go
│ │ │ │ └── postgresexecutesql.go
│ │ │ ├── postgresgetcolumncardinality
│ │ │ │ ├── postgresgetcolumncardinality_test.go
│ │ │ │ └── postgresgetcolumncardinality.go
│ │ │ ├── postgreslistactivequeries
│ │ │ │ ├── postgreslistactivequeries_test.go
│ │ │ │ └── postgreslistactivequeries.go
│ │ │ ├── postgreslistavailableextensions
│ │ │ │ ├── postgreslistavailableextensions_test.go
│ │ │ │ └── postgreslistavailableextensions.go
│ │ │ ├── postgreslistdatabasestats
│ │ │ │ ├── postgreslistdatabasestats_test.go
│ │ │ │ └── postgreslistdatabasestats.go
│ │ │ ├── postgreslistindexes
│ │ │ │ ├── postgreslistindexes_test.go
│ │ │ │ └── postgreslistindexes.go
│ │ │ ├── postgreslistinstalledextensions
│ │ │ │ ├── postgreslistinstalledextensions_test.go
│ │ │ │ └── postgreslistinstalledextensions.go
│ │ │ ├── postgreslistlocks
│ │ │ │ ├── postgreslistlocks_test.go
│ │ │ │ └── postgreslistlocks.go
│ │ │ ├── postgreslistpgsettings
│ │ │ │ ├── postgreslistpgsettings_test.go
│ │ │ │ └── postgreslistpgsettings.go
│ │ │ ├── postgreslistpublicationtables
│ │ │ │ ├── postgreslistpublicationtables_test.go
│ │ │ │ └── postgreslistpublicationtables.go
│ │ │ ├── postgreslistquerystats
│ │ │ │ ├── postgreslistquerystats_test.go
│ │ │ │ └── postgreslistquerystats.go
│ │ │ ├── postgreslistroles
│ │ │ │ ├── postgreslistroles_test.go
│ │ │ │ └── postgreslistroles.go
│ │ │ ├── postgreslistschemas
│ │ │ │ ├── postgreslistschemas_test.go
│ │ │ │ └── postgreslistschemas.go
│ │ │ ├── postgreslistsequences
│ │ │ │ ├── postgreslistsequences_test.go
│ │ │ │ └── postgreslistsequences.go
│ │ │ ├── postgresliststoredprocedure
│ │ │ │ ├── postgresliststoredprocedure_test.go
│ │ │ │ └── postgresliststoredprocedure.go
│ │ │ ├── postgreslisttables
│ │ │ │ ├── postgreslisttables_test.go
│ │ │ │ └── postgreslisttables.go
│ │ │ ├── postgreslisttablespaces
│ │ │ │ ├── postgreslisttablespaces_test.go
│ │ │ │ └── postgreslisttablespaces.go
│ │ │ ├── postgreslisttablestats
│ │ │ │ ├── postgreslisttablestats_test.go
│ │ │ │ └── postgreslisttablestats.go
│ │ │ ├── postgreslisttriggers
│ │ │ │ ├── postgreslisttriggers_test.go
│ │ │ │ └── postgreslisttriggers.go
│ │ │ ├── postgreslistviews
│ │ │ │ ├── postgreslistviews_test.go
│ │ │ │ └── postgreslistviews.go
│ │ │ ├── postgreslongrunningtransactions
│ │ │ │ ├── postgreslongrunningtransactions_test.go
│ │ │ │ └── postgreslongrunningtransactions.go
│ │ │ ├── postgresreplicationstats
│ │ │ │ ├── postgresreplicationstats_test.go
│ │ │ │ └── postgresreplicationstats.go
│ │ │ └── postgressql
│ │ │ ├── postgressql_test.go
│ │ │ └── postgressql.go
│ │ ├── redis
│ │ │ ├── redis_test.go
│ │ │ └── redis.go
│ │ ├── serverlessspark
│ │ │ ├── createbatch
│ │ │ │ ├── config.go
│ │ │ │ └── tool.go
│ │ │ ├── serverlesssparkcancelbatch
│ │ │ │ ├── serverlesssparkcancelbatch_test.go
│ │ │ │ └── serverlesssparkcancelbatch.go
│ │ │ ├── serverlesssparkcreatepysparkbatch
│ │ │ │ ├── serverlesssparkcreatepysparkbatch_test.go
│ │ │ │ └── serverlesssparkcreatepysparkbatch.go
│ │ │ ├── serverlesssparkcreatesparkbatch
│ │ │ │ ├── serverlesssparkcreatesparkbatch_test.go
│ │ │ │ └── serverlesssparkcreatesparkbatch.go
│ │ │ ├── serverlesssparkgetbatch
│ │ │ │ ├── serverlesssparkgetbatch_test.go
│ │ │ │ └── serverlesssparkgetbatch.go
│ │ │ ├── serverlesssparklistbatches
│ │ │ │ ├── serverlesssparklistbatches_test.go
│ │ │ │ └── serverlesssparklistbatches.go
│ │ │ └── testutils
│ │ │ └── testutils.go
│ │ ├── singlestore
│ │ │ ├── singlestoreexecutesql
│ │ │ │ ├── singlestoreexecutesql_test.go
│ │ │ │ └── singlestoreexecutesql.go
│ │ │ └── singlestoresql
│ │ │ ├── singlestoresql_test.go
│ │ │ └── singlestoresql.go
│ │ ├── snowflake
│ │ │ ├── snowflakeexecutesql
│ │ │ │ ├── snowflakeexecutesql_test.go
│ │ │ │ └── snowflakeexecutesql.go
│ │ │ └── snowflakesql
│ │ │ ├── snowflakesql_test.go
│ │ │ └── snowflakesql.go
│ │ ├── spanner
│ │ │ ├── spannerexecutesql
│ │ │ │ ├── spannerexecutesql_test.go
│ │ │ │ └── spannerexecutesql.go
│ │ │ ├── spannerlistgraphs
│ │ │ │ ├── spannerlistgraphs_test.go
│ │ │ │ └── spannerlistgraphs.go
│ │ │ ├── spannerlisttables
│ │ │ │ ├── spannerlisttables_test.go
│ │ │ │ └── spannerlisttables.go
│ │ │ └── spannersql
│ │ │ ├── spanner_test.go
│ │ │ └── spannersql.go
│ │ ├── sqlite
│ │ │ ├── sqliteexecutesql
│ │ │ │ ├── sqliteexecutesql_test.go
│ │ │ │ └── sqliteexecutesql.go
│ │ │ └── sqlitesql
│ │ │ ├── sqlitesql_test.go
│ │ │ └── sqlitesql.go
│ │ ├── tidb
│ │ │ ├── tidbexecutesql
│ │ │ │ ├── tidbexecutesql_test.go
│ │ │ │ └── tidbexecutesql.go
│ │ │ └── tidbsql
│ │ │ ├── tidbsql_test.go
│ │ │ └── tidbsql.go
│ │ ├── tools_test.go
│ │ ├── tools.go
│ │ ├── toolsets.go
│ │ ├── trino
│ │ │ ├── trinoexecutesql
│ │ │ │ ├── trinoexecutesql_test.go
│ │ │ │ └── trinoexecutesql.go
│ │ │ └── trinosql
│ │ │ ├── trinosql_test.go
│ │ │ └── trinosql.go
│ │ ├── utility
│ │ │ └── wait
│ │ │ ├── wait_test.go
│ │ │ └── wait.go
│ │ ├── valkey
│ │ │ ├── valkey_test.go
│ │ │ └── valkey.go
│ │ └── yugabytedbsql
│ │ ├── yugabytedbsql_test.go
│ │ └── yugabytedbsql.go
│ └── util
│ ├── orderedmap
│ │ ├── orderedmap_test.go
│ │ └── orderedmap.go
│ ├── parameters
│ │ ├── common_test.go
│ │ ├── common.go
│ │ ├── parameters_test.go
│ │ └── parameters.go
│ └── util.go
├── LICENSE
├── logo.png
├── main.go
├── MCP-TOOLBOX-EXTENSION.md
├── README.md
├── server.json
└── tests
├── alloydb
│ ├── alloydb_integration_test.go
│ └── alloydb_wait_for_operation_test.go
├── alloydbainl
│ └── alloydb_ai_nl_integration_test.go
├── alloydbpg
│ └── alloydb_pg_integration_test.go
├── auth.go
├── bigquery
│ └── bigquery_integration_test.go
├── bigtable
│ └── bigtable_integration_test.go
├── cassandra
│ └── cassandra_integration_test.go
├── clickhouse
│ └── clickhouse_integration_test.go
├── cloudgda
│ └── cloud_gda_integration_test.go
├── cloudhealthcare
│ └── cloud_healthcare_integration_test.go
├── cloudmonitoring
│ └── cloud_monitoring_integration_test.go
├── cloudsql
│ ├── cloud_sql_clone_instance_test.go
│ ├── cloud_sql_create_database_test.go
│ ├── cloud_sql_create_users_test.go
│ ├── cloud_sql_get_instances_test.go
│ ├── cloud_sql_list_databases_test.go
│ ├── cloudsql_list_instances_test.go
│ └── cloudsql_wait_for_operation_test.go
├── cloudsqlmssql
│ ├── cloud_sql_mssql_create_instance_integration_test.go
│ └── cloud_sql_mssql_integration_test.go
├── cloudsqlmysql
│ ├── cloud_sql_mysql_create_instance_integration_test.go
│ └── cloud_sql_mysql_integration_test.go
├── cloudsqlpg
│ ├── cloud_sql_pg_create_instances_test.go
│ ├── cloud_sql_pg_integration_test.go
│ └── cloud_sql_pg_upgrade_precheck_test.go
├── common.go
├── couchbase
│ └── couchbase_integration_test.go
├── dataform
│ └── dataform_integration_test.go
├── dataplex
│ └── dataplex_integration_test.go
├── dgraph
│ └── dgraph_integration_test.go
├── elasticsearch
│ └── elasticsearch_integration_test.go
├── firebird
│ └── firebird_integration_test.go
├── firestore
│ └── firestore_integration_test.go
├── http
│ └── http_integration_test.go
├── looker
│ └── looker_integration_test.go
├── mariadb
│ └── mariadb_integration_test.go
├── mindsdb
│ └── mindsdb_integration_test.go
├── mongodb
│ └── mongodb_integration_test.go
├── mssql
│ └── mssql_integration_test.go
├── mysql
│ └── mysql_integration_test.go
├── neo4j
│ └── neo4j_integration_test.go
├── oceanbase
│ └── oceanbase_integration_test.go
├── option.go
├── oracle
│ └── oracle_integration_test.go
├── postgres
│ └── postgres_integration_test.go
├── prompts
│ └── custom
│ └── prompts_integration_test.go
├── redis
│ └── redis_test.go
├── server.go
├── serverlessspark
│ └── serverless_spark_integration_test.go
├── singlestore
│ └── singlestore_integration_test.go
├── snowflake
│ └── snowflake_integration_test.go
├── source.go
├── spanner
│ └── spanner_integration_test.go
├── sqlite
│ └── sqlite_integration_test.go
├── tidb
│ └── tidb_integration_test.go
├── tool.go
├── trino
│ └── trino_integration_test.go
├── utility
│ └── wait_integration_test.go
├── valkey
│ └── valkey_test.go
└── yugabytedb
└── yugabytedb_integration_test.go
```
# Files
--------------------------------------------------------------------------------
/internal/tools/spanner/spannerlisttables/spannerlisttables.go:
--------------------------------------------------------------------------------
```go
1 | // Copyright 2025 Google LLC
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package spannerlisttables
16 |
17 | import (
18 | "context"
19 | "fmt"
20 | "strings"
21 |
22 | "cloud.google.com/go/spanner"
23 | yaml "github.com/goccy/go-yaml"
24 | "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
25 | "github.com/googleapis/genai-toolbox/internal/sources"
26 | "github.com/googleapis/genai-toolbox/internal/tools"
27 | "github.com/googleapis/genai-toolbox/internal/util/parameters"
28 | )
29 |
30 | const kind string = "spanner-list-tables"
31 |
32 | func init() {
33 | if !tools.Register(kind, newConfig) {
34 | panic(fmt.Sprintf("tool kind %q already registered", kind))
35 | }
36 | }
37 |
38 | func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
39 | actual := Config{Name: name}
40 | if err := decoder.DecodeContext(ctx, &actual); err != nil {
41 | return nil, err
42 | }
43 | return actual, nil
44 | }
45 |
// compatibleSource is the subset of source capabilities this tool requires:
// access to the underlying Spanner client, the database dialect string used
// to pick a statement, and the ability to run SQL with bound parameters.
type compatibleSource interface {
	SpannerClient() *spanner.Client
	DatabaseDialect() string
	RunSQL(context.Context, bool, string, map[string]any) (any, error)
}
51 |
// Config is the YAML configuration for a spanner-list-tables tool instance.
type Config struct {
	Name         string   `yaml:"name" validate:"required"`   // unique tool name (map key in the config file)
	Kind         string   `yaml:"kind" validate:"required"`   // must be "spanner-list-tables"
	Source       string   `yaml:"source" validate:"required"` // name of the Spanner source to execute against
	Description  string   `yaml:"description"`                // optional override for the built-in description
	AuthRequired []string `yaml:"authRequired"`               // auth services that must be verified to invoke the tool
}

// validate interface
var _ tools.ToolConfig = Config{}
62 |
// ToolConfigKind returns the registered kind string for this tool config.
func (cfg Config) ToolConfigKind() string {
	return kind
}
66 |
67 | func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
68 | // Define parameters for the tool
69 | allParameters := parameters.Parameters{
70 | parameters.NewStringParameterWithDefault(
71 | "table_names",
72 | "",
73 | "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed.",
74 | ),
75 | parameters.NewStringParameterWithDefault(
76 | "output_format",
77 | "detailed",
78 | "Optional: Use 'simple' to return table names only or use 'detailed' to return the full information schema.",
79 | ),
80 | }
81 |
82 | description := cfg.Description
83 | if description == "" {
84 | description = "Lists detailed schema information (object type, columns, constraints, indexes) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas. The output can be 'simple' (table names only) or 'detailed' (full schema)."
85 | }
86 | mcpManifest := tools.GetMcpManifest(cfg.Name, description, cfg.AuthRequired, allParameters, nil)
87 |
88 | // finish tool setup
89 | t := Tool{
90 | Config: cfg,
91 | AllParams: allParameters,
92 | manifest: tools.Manifest{Description: description, Parameters: allParameters.Manifest(), AuthRequired: cfg.AuthRequired},
93 | mcpManifest: mcpManifest,
94 | }
95 | return t, nil
96 | }
97 |
// validate interface
var _ tools.Tool = Tool{}

// Tool lists Spanner table schema information as JSON. It embeds its Config
// and carries the manifests pre-built in Initialize.
type Tool struct {
	Config
	// AllParams holds the tool's declared input parameters.
	AllParams parameters.Parameters `yaml:"allParams"`
	manifest    tools.Manifest    // REST manifest, built once in Initialize
	mcpManifest tools.McpManifest // MCP manifest, built once in Initialize
}
107 |
108 | func getStatement(dialect string) string {
109 | switch strings.ToLower(dialect) {
110 | case "postgresql":
111 | return postgresqlStatement
112 | case "googlesql":
113 | return googleSQLStatement
114 | default:
115 | // Default to GoogleSQL
116 | return googleSQLStatement
117 | }
118 | }
119 |
120 | func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, params parameters.ParamValues, accessToken tools.AccessToken) (any, error) {
121 | source, err := tools.GetCompatibleSource[compatibleSource](resourceMgr, t.Source, t.Name, t.Kind)
122 | if err != nil {
123 | return nil, err
124 | }
125 |
126 | paramsMap := params.AsMap()
127 |
128 | // Get the appropriate SQL statement based on dialect
129 | statement := getStatement(source.DatabaseDialect())
130 |
131 | // Prepare parameters based on dialect
132 | var stmtParams map[string]interface{}
133 |
134 | tableNames, _ := paramsMap["table_names"].(string)
135 | outputFormat, _ := paramsMap["output_format"].(string)
136 | if outputFormat == "" {
137 | outputFormat = "detailed"
138 | }
139 |
140 | switch strings.ToLower(source.DatabaseDialect()) {
141 | case "postgresql":
142 | // PostgreSQL uses positional parameters ($1, $2)
143 | stmtParams = map[string]interface{}{
144 | "p1": tableNames,
145 | "p2": outputFormat,
146 | }
147 | case "googlesql":
148 | // GoogleSQL uses named parameters (@table_names, @output_format)
149 | stmtParams = map[string]interface{}{
150 | "table_names": tableNames,
151 | "output_format": outputFormat,
152 | }
153 | default:
154 | return nil, fmt.Errorf("unsupported dialect: %s", source.DatabaseDialect())
155 | }
156 |
157 | return source.RunSQL(ctx, true, statement, stmtParams)
158 | }
159 |
// ParseParams validates raw request data (and auth claims) against the
// tool's declared parameters.
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (parameters.ParamValues, error) {
	return parameters.ParseParams(t.AllParams, data, claims)
}

// EmbedParams delegates to parameters.EmbedParams with no extra embedding
// options (nil); this tool declares only plain string parameters.
func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
	return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
}

// Manifest returns the REST manifest built during Initialize.
func (t Tool) Manifest() tools.Manifest {
	return t.manifest
}

// McpManifest returns the MCP manifest built during Initialize.
func (t Tool) McpManifest() tools.McpManifest {
	return t.mcpManifest
}

// Authorized reports whether the verified auth services satisfy the tool's
// authRequired configuration.
func (t Tool) Authorized(verifiedAuthServices []string) bool {
	return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}

// RequiresClientAuthorization always reports false: invocation does not
// forward a client-supplied token to the source.
func (t Tool) RequiresClientAuthorization(resourceMgr tools.SourceProvider) (bool, error) {
	return false, nil
}

// ToConfig returns the original configuration this tool was built from.
func (t Tool) ToConfig() tools.ToolConfig {
	return t.Config
}

// GetAuthTokenHeaderName returns the HTTP header that would carry an auth
// token for this tool ("Authorization").
func (t Tool) GetAuthTokenHeaderName(resourceMgr tools.SourceProvider) (string, error) {
	return "Authorization", nil
}
191 |
// postgresqlStatement lists user tables in the 'public' schema for Spanner's
// PostgreSQL dialect. $1 is a comma-separated table-name filter (empty/blank
// means all tables); $2 selects the output: 'simple' returns names only,
// anything else returns the full JSON schema details.
193 | const postgresqlStatement = `
194 | WITH table_info_cte AS (
195 | SELECT
196 | T.TABLE_SCHEMA,
197 | T.TABLE_NAME,
198 | T.TABLE_TYPE,
199 | T.PARENT_TABLE_NAME,
200 | T.ON_DELETE_ACTION
201 | FROM INFORMATION_SCHEMA.TABLES AS T
202 | WHERE
203 | T.TABLE_SCHEMA = 'public'
204 | AND T.TABLE_TYPE = 'BASE TABLE'
205 | AND (
206 | NULLIF(TRIM($1), '') IS NULL OR
207 | T.TABLE_NAME IN (
208 | SELECT table_name
209 | FROM UNNEST(regexp_split_to_array($1, '\s*,\s*')) AS table_name)
210 | )
211 | ),
212 |
213 | columns_info_cte AS (
214 | SELECT
215 | C.TABLE_SCHEMA,
216 | C.TABLE_NAME,
217 | ARRAY_AGG(
218 | CONCAT(
219 | '{',
220 | '"column_name":"', COALESCE(REPLACE(C.COLUMN_NAME, '"', '\"'), ''), '",',
221 | '"data_type":"', COALESCE(REPLACE(C.SPANNER_TYPE, '"', '\"'), ''), '",',
222 | '"ordinal_position":', C.ORDINAL_POSITION::TEXT, ',',
223 | '"is_not_nullable":', CASE WHEN C.IS_NULLABLE = 'NO' THEN 'true' ELSE 'false' END, ',',
224 | '"column_default":', CASE WHEN C.COLUMN_DEFAULT IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(C.COLUMN_DEFAULT::text, '"', '\"'), '"') END,
225 | '}'
226 | ) ORDER BY C.ORDINAL_POSITION
227 | ) AS columns_json_array_elements
228 | FROM INFORMATION_SCHEMA.COLUMNS AS C
229 | WHERE C.TABLE_SCHEMA = 'public'
230 | AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE C.TABLE_SCHEMA = TI.TABLE_SCHEMA AND C.TABLE_NAME = TI.TABLE_NAME)
231 | GROUP BY C.TABLE_SCHEMA, C.TABLE_NAME
232 | ),
233 |
234 | constraint_columns_agg_cte AS (
235 | SELECT
236 | CONSTRAINT_CATALOG,
237 | CONSTRAINT_SCHEMA,
238 | CONSTRAINT_NAME,
239 | ARRAY_AGG(REPLACE(COLUMN_NAME, '"', '\"') ORDER BY ORDINAL_POSITION) AS column_names_json_list
240 | FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
241 | WHERE CONSTRAINT_SCHEMA = 'public'
242 | GROUP BY CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME
243 | ),
244 |
245 | constraints_info_cte AS (
246 | SELECT
247 | TC.TABLE_SCHEMA,
248 | TC.TABLE_NAME,
249 | ARRAY_AGG(
250 | CONCAT(
251 | '{',
252 | '"constraint_name":"', COALESCE(REPLACE(TC.CONSTRAINT_NAME, '"', '\"'), ''), '",',
253 | '"constraint_type":"', COALESCE(REPLACE(TC.CONSTRAINT_TYPE, '"', '\"'), ''), '",',
254 | '"constraint_definition":',
255 | CASE TC.CONSTRAINT_TYPE
256 | WHEN 'CHECK' THEN CASE WHEN CC.CHECK_CLAUSE IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(CC.CHECK_CLAUSE, '"', '\"'), '"') END
257 | WHEN 'PRIMARY KEY' THEN CONCAT('"', 'PRIMARY KEY (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
258 | WHEN 'UNIQUE' THEN CONCAT('"', 'UNIQUE (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
259 | WHEN 'FOREIGN KEY' THEN CONCAT('"', 'FOREIGN KEY (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ') REFERENCES ',
260 | COALESCE(REPLACE(RefKeyTable.TABLE_NAME, '"', '\"'), ''),
261 | ' (', array_to_string(COALESCE(RefKeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
262 | ELSE 'null'
263 | END, ',',
264 | '"constraint_columns":["', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ','), '"],',
265 | '"foreign_key_referenced_table":', CASE WHEN RefKeyTable.TABLE_NAME IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(RefKeyTable.TABLE_NAME, '"', '\"'), '"') END, ',',
266 | '"foreign_key_referenced_columns":["', array_to_string(COALESCE(RefKeyCols.column_names_json_list, ARRAY[]::text[]), ','), '"]',
267 | '}'
268 | ) ORDER BY TC.CONSTRAINT_NAME
269 | ) AS constraints_json_array_elements
270 | FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC
271 | LEFT JOIN INFORMATION_SCHEMA.CHECK_CONSTRAINTS AS CC
272 | ON TC.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
273 | LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS RC
274 | ON TC.CONSTRAINT_CATALOG = RC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = RC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = RC.CONSTRAINT_NAME
275 | LEFT JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS RefConstraint
276 | ON RC.UNIQUE_CONSTRAINT_CATALOG = RefConstraint.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefConstraint.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefConstraint.CONSTRAINT_NAME
277 | LEFT JOIN INFORMATION_SCHEMA.TABLES AS RefKeyTable
278 | ON RefConstraint.TABLE_CATALOG = RefKeyTable.TABLE_CATALOG AND RefConstraint.TABLE_SCHEMA = RefKeyTable.TABLE_SCHEMA AND RefConstraint.TABLE_NAME = RefKeyTable.TABLE_NAME
279 | LEFT JOIN constraint_columns_agg_cte AS KeyCols
280 | ON TC.CONSTRAINT_CATALOG = KeyCols.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = KeyCols.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = KeyCols.CONSTRAINT_NAME
281 | LEFT JOIN constraint_columns_agg_cte AS RefKeyCols
282 | ON RC.UNIQUE_CONSTRAINT_CATALOG = RefKeyCols.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefKeyCols.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefKeyCols.CONSTRAINT_NAME AND TC.CONSTRAINT_TYPE = 'FOREIGN KEY'
283 | WHERE TC.TABLE_SCHEMA = 'public'
284 | AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE TC.TABLE_SCHEMA = TI.TABLE_SCHEMA AND TC.TABLE_NAME = TI.TABLE_NAME)
285 | GROUP BY TC.TABLE_SCHEMA, TC.TABLE_NAME
286 | ),
287 |
288 | index_key_columns_agg_cte AS (
289 | SELECT
290 | TABLE_CATALOG,
291 | TABLE_SCHEMA,
292 | TABLE_NAME,
293 | INDEX_NAME,
294 | ARRAY_AGG(
295 | CONCAT(
296 | '{"column_name":"', COALESCE(REPLACE(COLUMN_NAME, '"', '\"'), ''), '",',
297 | '"ordering":"', COALESCE(REPLACE(COLUMN_ORDERING, '"', '\"'), ''), '"}'
298 | ) ORDER BY ORDINAL_POSITION
299 | ) AS key_column_json_details
300 | FROM INFORMATION_SCHEMA.INDEX_COLUMNS
301 | WHERE ORDINAL_POSITION IS NOT NULL
302 | AND TABLE_SCHEMA = 'public'
303 | GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
304 | ),
305 |
306 | index_storing_columns_agg_cte AS (
307 | SELECT
308 | TABLE_CATALOG,
309 | TABLE_SCHEMA,
310 | TABLE_NAME,
311 | INDEX_NAME,
312 | ARRAY_AGG(CONCAT('"', REPLACE(COLUMN_NAME, '"', '\"'), '"') ORDER BY COLUMN_NAME) AS storing_column_json_names
313 | FROM INFORMATION_SCHEMA.INDEX_COLUMNS
314 | WHERE ORDINAL_POSITION IS NULL
315 | AND TABLE_SCHEMA = 'public'
316 | GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
317 | ),
318 |
319 | indexes_info_cte AS (
320 | SELECT
321 | I.TABLE_SCHEMA,
322 | I.TABLE_NAME,
323 | ARRAY_AGG(
324 | CONCAT(
325 | '{',
326 | '"index_name":"', COALESCE(REPLACE(I.INDEX_NAME, '"', '\"'), ''), '",',
327 | '"index_type":"', COALESCE(REPLACE(I.INDEX_TYPE, '"', '\"'), ''), '",',
328 | '"is_unique":', CASE WHEN I.IS_UNIQUE = 'YES' THEN 'true' ELSE 'false' END, ',',
329 | '"is_null_filtered":', CASE WHEN I.IS_NULL_FILTERED = 'YES' THEN 'true' ELSE 'false' END, ',',
330 | '"interleaved_in_table":', CASE WHEN I.PARENT_TABLE_NAME IS NULL OR I.PARENT_TABLE_NAME = '' THEN 'null' ELSE CONCAT('"', REPLACE(I.PARENT_TABLE_NAME, '"', '\"'), '"') END, ',',
331 | '"index_key_columns":[', COALESCE(array_to_string(KeyIndexCols.key_column_json_details, ','), ''), '],',
332 | '"storing_columns":[', COALESCE(array_to_string(StoringIndexCols.storing_column_json_names, ','), ''), ']',
333 | '}'
334 | ) ORDER BY I.INDEX_NAME
335 | ) AS indexes_json_array_elements
336 | FROM INFORMATION_SCHEMA.INDEXES AS I
337 | LEFT JOIN index_key_columns_agg_cte AS KeyIndexCols
338 | ON I.TABLE_CATALOG = KeyIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = KeyIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = KeyIndexCols.TABLE_NAME AND I.INDEX_NAME = KeyIndexCols.INDEX_NAME
339 | LEFT JOIN index_storing_columns_agg_cte AS StoringIndexCols
340 | ON I.TABLE_CATALOG = StoringIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = StoringIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = StoringIndexCols.TABLE_NAME AND I.INDEX_NAME = StoringIndexCols.INDEX_NAME
341 | AND I.INDEX_TYPE IN ('LOCAL', 'GLOBAL')
342 | WHERE I.TABLE_SCHEMA = 'public'
343 | AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE I.TABLE_SCHEMA = TI.TABLE_SCHEMA AND I.TABLE_NAME = TI.TABLE_NAME)
344 | GROUP BY I.TABLE_SCHEMA, I.TABLE_NAME
345 | )
346 |
347 | SELECT
348 | TI.TABLE_SCHEMA AS schema_name,
349 | TI.TABLE_NAME AS object_name,
350 | CASE
351 | WHEN $2 = 'simple' THEN
352 | -- IF format is 'simple', return basic JSON
353 | CONCAT('{"name":"', COALESCE(REPLACE(TI.TABLE_NAME, '"', '\"'), ''), '"}')
354 | ELSE
355 | CONCAT(
356 | '{',
357 | '"schema_name":"', COALESCE(REPLACE(TI.TABLE_SCHEMA, '"', '\"'), ''), '",',
358 | '"object_name":"', COALESCE(REPLACE(TI.TABLE_NAME, '"', '\"'), ''), '",',
359 | '"object_type":"', COALESCE(REPLACE(TI.TABLE_TYPE, '"', '\"'), ''), '",',
360 | '"columns":[', COALESCE(array_to_string(CI.columns_json_array_elements, ','), ''), '],',
361 | '"constraints":[', COALESCE(array_to_string(CONSI.constraints_json_array_elements, ','), ''), '],',
362 | '"indexes":[', COALESCE(array_to_string(II.indexes_json_array_elements, ','), ''), ']',
363 | '}'
364 | )
365 | END AS object_details
366 | FROM table_info_cte AS TI
367 | LEFT JOIN columns_info_cte AS CI
368 | ON TI.TABLE_SCHEMA = CI.TABLE_SCHEMA AND TI.TABLE_NAME = CI.TABLE_NAME
369 | LEFT JOIN constraints_info_cte AS CONSI
370 | ON TI.TABLE_SCHEMA = CONSI.TABLE_SCHEMA AND TI.TABLE_NAME = CONSI.TABLE_NAME
371 | LEFT JOIN indexes_info_cte AS II
372 | ON TI.TABLE_SCHEMA = II.TABLE_SCHEMA AND TI.TABLE_NAME = II.TABLE_NAME
373 | ORDER BY TI.TABLE_SCHEMA, TI.TABLE_NAME`
374 |
// googleSQLStatement lists user tables in the default schema (TABLE_SCHEMA =
// '') for the GoogleSQL dialect. @table_names is a comma-separated filter
// (empty or NULL means all tables); @output_format selects 'simple' (names
// only) versus detailed JSON output.
376 | const googleSQLStatement = `
377 | WITH FilterTableNames AS (
378 | SELECT DISTINCT TRIM(name) AS TABLE_NAME
379 | FROM UNNEST(IF(@table_names = '' OR @table_names IS NULL, ['%'], SPLIT(@table_names, ','))) AS name
380 | ),
381 |
382 | -- 1. Table Information
383 | table_info_cte AS (
384 | SELECT
385 | T.TABLE_SCHEMA,
386 | T.TABLE_NAME,
387 | T.TABLE_TYPE,
388 | T.PARENT_TABLE_NAME, -- For interleaved tables
389 | T.ON_DELETE_ACTION -- For interleaved tables
390 | FROM INFORMATION_SCHEMA.TABLES AS T
391 | WHERE
392 | T.TABLE_SCHEMA = ''
393 | AND T.TABLE_TYPE = 'BASE TABLE'
394 | AND (EXISTS (SELECT 1 FROM FilterTableNames WHERE FilterTableNames.TABLE_NAME = '%') OR T.TABLE_NAME IN (SELECT TABLE_NAME FROM FilterTableNames))
395 | ),
396 |
397 | -- 2. Column Information (with JSON string for each column)
398 | columns_info_cte AS (
399 | SELECT
400 | C.TABLE_SCHEMA,
401 | C.TABLE_NAME,
402 | ARRAY_AGG(
403 | CONCAT(
404 | '{',
405 | '"column_name":"', IFNULL(C.COLUMN_NAME, ''), '",',
406 | '"data_type":"', IFNULL(C.SPANNER_TYPE, ''), '",',
407 | '"ordinal_position":', CAST(C.ORDINAL_POSITION AS STRING), ',',
408 | '"is_not_nullable":', IF(C.IS_NULLABLE = 'NO', 'true', 'false'), ',',
409 | '"column_default":', IF(C.COLUMN_DEFAULT IS NULL, 'null', CONCAT('"', C.COLUMN_DEFAULT, '"')),
410 | '}'
411 | ) ORDER BY C.ORDINAL_POSITION
412 | ) AS columns_json_array_elements
413 | FROM INFORMATION_SCHEMA.COLUMNS AS C
414 | WHERE EXISTS (SELECT 1 FROM table_info_cte TI WHERE C.TABLE_SCHEMA = TI.TABLE_SCHEMA AND C.TABLE_NAME = TI.TABLE_NAME)
415 | GROUP BY C.TABLE_SCHEMA, C.TABLE_NAME
416 | ),
417 |
418 | -- Helper CTE for aggregating constraint columns
419 | constraint_columns_agg_cte AS (
420 | SELECT
421 | CONSTRAINT_CATALOG,
422 | CONSTRAINT_SCHEMA,
423 | CONSTRAINT_NAME,
424 | ARRAY_AGG(REPLACE(COLUMN_NAME, '"', '\"') ORDER BY ORDINAL_POSITION) AS column_names_json_list
425 | FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
426 | GROUP BY CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME
427 | ),
428 |
429 | -- 3. Constraint Information (with JSON string for each constraint)
430 | constraints_info_cte AS (
431 | SELECT
432 | TC.TABLE_SCHEMA,
433 | TC.TABLE_NAME,
434 | ARRAY_AGG(
435 | CONCAT(
436 | '{',
437 | '"constraint_name":"', IFNULL(TC.CONSTRAINT_NAME, ''), '",',
438 | '"constraint_type":"', IFNULL(TC.CONSTRAINT_TYPE, ''), '",',
439 | '"constraint_definition":',
440 | CASE TC.CONSTRAINT_TYPE
441 | WHEN 'CHECK' THEN IF(CC.CHECK_CLAUSE IS NULL, 'null', CONCAT('"', CC.CHECK_CLAUSE, '"'))
442 | WHEN 'PRIMARY KEY' THEN CONCAT('"', 'PRIMARY KEY (', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ', '), ')', '"')
443 | WHEN 'UNIQUE' THEN CONCAT('"', 'UNIQUE (', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ', '), ')', '"')
444 | WHEN 'FOREIGN KEY' THEN CONCAT('"', 'FOREIGN KEY (', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ', '), ') REFERENCES ',
445 | IFNULL(RefKeyTable.TABLE_NAME, ''),
446 | ' (', ARRAY_TO_STRING(COALESCE(RefKeyCols.column_names_json_list, []), ', '), ')', '"')
447 | ELSE 'null'
448 | END, ',',
449 | '"constraint_columns":["', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ','), '"],',
450 | '"foreign_key_referenced_table":', IF(RefKeyTable.TABLE_NAME IS NULL, 'null', CONCAT('"', RefKeyTable.TABLE_NAME, '"')), ',',
451 | '"foreign_key_referenced_columns":["', ARRAY_TO_STRING(COALESCE(RefKeyCols.column_names_json_list, []), ','), '"]',
452 | '}'
453 | ) ORDER BY TC.CONSTRAINT_NAME
454 | ) AS constraints_json_array_elements
455 | FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC
456 | LEFT JOIN INFORMATION_SCHEMA.CHECK_CONSTRAINTS AS CC
457 | ON TC.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
458 | LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS RC
459 | ON TC.CONSTRAINT_CATALOG = RC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = RC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = RC.CONSTRAINT_NAME
460 | LEFT JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS RefConstraint
461 | ON RC.UNIQUE_CONSTRAINT_CATALOG = RefConstraint.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefConstraint.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefConstraint.CONSTRAINT_NAME
462 | LEFT JOIN INFORMATION_SCHEMA.TABLES AS RefKeyTable
463 | ON RefConstraint.TABLE_CATALOG = RefKeyTable.TABLE_CATALOG AND RefConstraint.TABLE_SCHEMA = RefKeyTable.TABLE_SCHEMA AND RefConstraint.TABLE_NAME = RefKeyTable.TABLE_NAME
464 | LEFT JOIN constraint_columns_agg_cte AS KeyCols
465 | ON TC.CONSTRAINT_CATALOG = KeyCols.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = KeyCols.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = KeyCols.CONSTRAINT_NAME
466 | LEFT JOIN constraint_columns_agg_cte AS RefKeyCols
467 | ON RC.UNIQUE_CONSTRAINT_CATALOG = RefKeyCols.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefKeyCols.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefKeyCols.CONSTRAINT_NAME AND TC.CONSTRAINT_TYPE = 'FOREIGN KEY'
468 | WHERE EXISTS (SELECT 1 FROM table_info_cte TI WHERE TC.TABLE_SCHEMA = TI.TABLE_SCHEMA AND TC.TABLE_NAME = TI.TABLE_NAME)
469 | GROUP BY TC.TABLE_SCHEMA, TC.TABLE_NAME
470 | ),
471 |
472 | -- Helper CTE for aggregating index key columns (as JSON strings)
473 | index_key_columns_agg_cte AS (
474 | SELECT
475 | TABLE_CATALOG,
476 | TABLE_SCHEMA,
477 | TABLE_NAME,
478 | INDEX_NAME,
479 | ARRAY_AGG(
480 | CONCAT(
481 | '{"column_name":"', IFNULL(COLUMN_NAME, ''), '",',
482 | '"ordering":"', IFNULL(COLUMN_ORDERING, ''), '"}'
483 | ) ORDER BY ORDINAL_POSITION
484 | ) AS key_column_json_details
485 | FROM INFORMATION_SCHEMA.INDEX_COLUMNS
486 | WHERE ORDINAL_POSITION IS NOT NULL -- Key columns
487 | GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
488 | ),
489 |
490 | -- Helper CTE for aggregating index storing columns (as JSON strings)
491 | index_storing_columns_agg_cte AS (
492 | SELECT
493 | TABLE_CATALOG,
494 | TABLE_SCHEMA,
495 | TABLE_NAME,
496 | INDEX_NAME,
497 | ARRAY_AGG(CONCAT('"', COLUMN_NAME, '"') ORDER BY COLUMN_NAME) AS storing_column_json_names
498 | FROM INFORMATION_SCHEMA.INDEX_COLUMNS
499 | WHERE ORDINAL_POSITION IS NULL -- Storing columns
500 | GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
501 | ),
502 |
503 | -- 4. Index Information (with JSON string for each index)
504 | indexes_info_cte AS (
505 | SELECT
506 | I.TABLE_SCHEMA,
507 | I.TABLE_NAME,
508 | ARRAY_AGG(
509 | CONCAT(
510 | '{',
511 | '"index_name":"', IFNULL(I.INDEX_NAME, ''), '",',
512 | '"index_type":"', IFNULL(I.INDEX_TYPE, ''), '",',
513 | '"is_unique":', IF(I.IS_UNIQUE, 'true', 'false'), ',',
514 | '"is_null_filtered":', IF(I.IS_NULL_FILTERED, 'true', 'false'), ',',
515 | '"interleaved_in_table":', IF(I.PARENT_TABLE_NAME IS NULL, 'null', CONCAT('"', I.PARENT_TABLE_NAME, '"')), ',',
516 | '"index_key_columns":[', ARRAY_TO_STRING(COALESCE(KeyIndexCols.key_column_json_details, []), ','), '],',
517 | '"storing_columns":[', ARRAY_TO_STRING(COALESCE(StoringIndexCols.storing_column_json_names, []), ','), ']',
518 | '}'
519 | ) ORDER BY I.INDEX_NAME
520 | ) AS indexes_json_array_elements
521 | FROM INFORMATION_SCHEMA.INDEXES AS I
522 | LEFT JOIN index_key_columns_agg_cte AS KeyIndexCols
523 | ON I.TABLE_CATALOG = KeyIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = KeyIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = KeyIndexCols.TABLE_NAME AND I.INDEX_NAME = KeyIndexCols.INDEX_NAME
524 | LEFT JOIN index_storing_columns_agg_cte AS StoringIndexCols
525 | ON I.TABLE_CATALOG = StoringIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = StoringIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = StoringIndexCols.TABLE_NAME AND I.INDEX_NAME = StoringIndexCols.INDEX_NAME AND I.INDEX_TYPE = 'INDEX'
526 | WHERE EXISTS (SELECT 1 FROM table_info_cte TI WHERE I.TABLE_SCHEMA = TI.TABLE_SCHEMA AND I.TABLE_NAME = TI.TABLE_NAME)
527 | GROUP BY I.TABLE_SCHEMA, I.TABLE_NAME
528 | )
529 |
530 | -- Final SELECT to build the JSON output
531 | SELECT
532 | TI.TABLE_SCHEMA AS schema_name,
533 | TI.TABLE_NAME AS object_name,
534 | CASE
535 | WHEN @output_format = 'simple' THEN
536 | -- IF format is 'simple', return basic JSON
537 | CONCAT('{"name":"', IFNULL(REPLACE(TI.TABLE_NAME, '"', '\"'), ''), '"}')
538 | ELSE
539 | CONCAT(
540 | '{',
541 | '"schema_name":"', IFNULL(TI.TABLE_SCHEMA, ''), '",',
542 | '"object_name":"', IFNULL(TI.TABLE_NAME, ''), '",',
543 | '"object_type":"', IFNULL(TI.TABLE_TYPE, ''), '",',
544 | '"columns":[', ARRAY_TO_STRING(COALESCE(CI.columns_json_array_elements, []), ','), '],',
545 | '"constraints":[', ARRAY_TO_STRING(COALESCE(CONSI.constraints_json_array_elements, []), ','), '],',
546 | '"indexes":[', ARRAY_TO_STRING(COALESCE(II.indexes_json_array_elements, []), ','), ']',
547 | '}'
548 | )
549 | END AS object_details
550 | FROM table_info_cte AS TI
551 | LEFT JOIN columns_info_cte AS CI
552 | ON TI.TABLE_SCHEMA = CI.TABLE_SCHEMA AND TI.TABLE_NAME = CI.TABLE_NAME
553 | LEFT JOIN constraints_info_cte AS CONSI
554 | ON TI.TABLE_SCHEMA = CONSI.TABLE_SCHEMA AND TI.TABLE_NAME = CONSI.TABLE_NAME
555 | LEFT JOIN indexes_info_cte AS II
556 | ON TI.TABLE_SCHEMA = II.TABLE_SCHEMA AND TI.TABLE_NAME = II.TABLE_NAME
557 | ORDER BY TI.TABLE_SCHEMA, TI.TABLE_NAME`
558 |
```
--------------------------------------------------------------------------------
/tests/mongodb/mongodb_integration_test.go:
--------------------------------------------------------------------------------
```go
1 | // Copyright 2025 Google LLC
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package mongodb
16 |
17 | import (
18 | "bytes"
19 | "context"
20 | "encoding/json"
21 | "fmt"
22 | "io"
23 | "net/http"
24 | "os"
25 | "regexp"
26 | "testing"
27 | "time"
28 |
29 | "github.com/googleapis/genai-toolbox/internal/testutils"
30 | "github.com/googleapis/genai-toolbox/tests"
31 | "go.mongodb.org/mongo-driver/mongo"
32 | "go.mongodb.org/mongo-driver/mongo/options"
33 | )
34 |
35 | var (
36 | MongoDbSourceKind = "mongodb" // source "kind" value written into the tools config
37 | MongoDbToolKind = "mongodb-find" // default tool kind exercised by the generic invoke tests
38 | MongoDbUri = os.Getenv("MONGODB_URI") // MongoDB connection string; required
39 | MongoDbDatabase = os.Getenv("MONGODB_DATABASE") // database name used by all test tools; required
40 | ServiceAccountEmail = os.Getenv("SERVICE_ACCOUNT_EMAIL") // email matched by the auth-parameter fixtures
41 | )
42 | // getMongoDBVars fails the test when MONGODB_URI or MONGODB_DATABASE is unset and returns the source config map used in the tools file.
43 | func getMongoDBVars(t *testing.T) map[string]any {
44 | switch "" { // a case matches when the corresponding env-derived value is empty
45 | case MongoDbUri:
46 | t.Fatal("'MongoDbUri' not set")
47 | case MongoDbDatabase:
48 | t.Fatal("'MongoDbDatabase' not set")
49 | }
50 | return map[string]any{
51 | "kind": MongoDbSourceKind,
52 | "uri": MongoDbUri,
53 | }
54 | }
55 | // initMongoDbDatabase connects to MongoDB at uri, verifies the connection with a ping, and returns a handle to the named database.
56 | func initMongoDbDatabase(ctx context.Context, uri, database string) (*mongo.Database, error) {
57 | // Create a new mongodb Database
58 | client, err := mongo.Connect(ctx, options.Client().ApplyURI(uri))
59 | if err != nil {
60 | return nil, fmt.Errorf("unable to connect to mongodb: %s", err)
61 | }
62 | err = client.Ping(ctx, nil) // Connect alone is lazy; Ping confirms the server is reachable
63 | if err != nil {
64 | return nil, fmt.Errorf("unable to connect to mongodb: %s", err)
65 | }
66 | return client.Database(database), nil
67 | }
68 | // TestMongoDBToolEndpoints seeds a test collection, starts the toolbox server with a MongoDB tools config, and exercises the HTTP and MCP tool endpoints end to end.
69 | func TestMongoDBToolEndpoints(t *testing.T) {
70 | sourceConfig := getMongoDBVars(t)
71 | ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
72 | defer cancel()
73 | 
74 | var args []string
75 | 
76 | database, err := initMongoDbDatabase(ctx, MongoDbUri, MongoDbDatabase)
77 | if err != nil {
78 | t.Fatalf("unable to create MongoDB connection: %s", err)
79 | }
80 | 
81 | // set up data for param tool
82 | teardownDB := setupMongoDB(t, ctx, database)
83 | defer teardownDB(t)
84 | 
85 | // Write config into a file and pass it to command
86 | toolsFile := getMongoDBToolsConfig(sourceConfig, MongoDbToolKind)
87 | 
88 | cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
89 | if err != nil {
90 | t.Fatalf("command initialization returned an error: %s", err)
91 | }
92 | defer cleanup()
93 | 
94 | waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
95 | defer cancel()
96 | out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
97 | if err != nil {
98 | t.Logf("toolbox command logs: \n%s", out)
99 | t.Fatalf("toolbox didn't start successfully: %s", err)
100 | }
101 | 
102 | // Get configs for tests; expected values correspond to the documents seeded in setupMongoDB
103 | select1Want := `[{"_id":3,"id":3,"name":"Sid"}]`
104 | myToolId3NameAliceWant := `[{"_id":5,"id":3,"name":"Alice"}]`
105 | myToolById4Want := `null`
106 | mcpMyFailToolWant := `invalid JSON input: missing colon after key ` // from my-fail-tool's deliberately malformed filterPayload
107 | mcpMyToolId3NameAliceWant := `{"jsonrpc":"2.0","id":"my-tool","result":{"content":[{"type":"text","text":"{\"_id\":5,\"id\":3,\"name\":\"Alice\"}"}]}}`
108 | mcpAuthRequiredWant := `{"jsonrpc":"2.0","id":"invoke my-auth-required-tool","result":{"content":[{"type":"text","text":"{\"_id\":3,\"id\":3,\"name\":\"Sid\"}"}]}}`
109 | 
110 | // Run tests
111 | tests.RunToolGetTest(t)
112 | tests.RunToolInvokeTest(t, select1Want,
113 | tests.WithMyToolId3NameAliceWant(myToolId3NameAliceWant),
114 | tests.WithMyArrayToolWant(myToolId3NameAliceWant),
115 | tests.WithMyToolById4Want(myToolById4Want),
116 | )
117 | tests.RunMCPToolCallMethod(t, mcpMyFailToolWant, select1Want,
118 | tests.WithMcpMyToolId3NameAliceWant(mcpMyToolId3NameAliceWant),
119 | tests.WithMcpSelect1Want(mcpAuthRequiredWant),
120 | )
121 | 
122 | delete1Want := "1"
123 | deleteManyWant := "2"
124 | runToolDeleteInvokeTest(t, delete1Want, deleteManyWant)
125 | 
126 | insert1Want := `"68666e1035bb36bf1b4d47fb"` // fixed $oid supplied by the insert test bodies below
127 | insertManyWant := `["68667a6436ec7d0363668db7","68667a6436ec7d0363668db8","68667a6436ec7d0363668db9"]`
128 | runToolInsertInvokeTest(t, insert1Want, insertManyWant)
129 | 
130 | update1Want := "1"
131 | updateManyWant := "[2,0,2]"
132 | runToolUpdateInvokeTest(t, update1Want, updateManyWant)
133 | 
134 | aggregate1Want := `[{"id":2}]`
135 | aggregateManyWant := `[{"id":500},{"id":501}]`
136 | runToolAggregateInvokeTest(t, aggregate1Want, aggregateManyWant)
137 | }
138 | // runToolDeleteInvokeTest invokes the delete-one and delete-many tools over HTTP and asserts the reported deletion counts.
139 | func runToolDeleteInvokeTest(t *testing.T, delete1Want, deleteManyWant string) {
140 | // Test tool invoke endpoint
141 | invokeTcs := []struct {
142 | name string
143 | api string
144 | requestHeader map[string]string
145 | requestBody io.Reader
146 | want string
147 | isErr bool
148 | }{
149 | {
150 | name: "invoke my-delete-one-tool",
151 | api: "http://127.0.0.1:5000/api/tool/my-delete-one-tool/invoke",
152 | requestHeader: map[string]string{},
153 | requestBody: bytes.NewBuffer([]byte(`{ "id" : 100 }`)), // setupMongoDB seeds exactly one doc with id 100
154 | want: delete1Want,
155 | isErr: false,
156 | },
157 | {
158 | name: "invoke my-delete-many-tool",
159 | api: "http://127.0.0.1:5000/api/tool/my-delete-many-tool/invoke",
160 | requestHeader: map[string]string{},
161 | requestBody: bytes.NewBuffer([]byte(`{ "id" : 101 }`)), // setupMongoDB seeds two docs with id 101
162 | want: deleteManyWant,
163 | isErr: false,
164 | },
165 | }
166 | 
167 | for _, tc := range invokeTcs {
168 | 
169 | t.Run(tc.name, func(t *testing.T) {
170 | // Send Tool invocation request
171 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
172 | if err != nil {
173 | t.Fatalf("unable to create request: %s", err)
174 | }
175 | req.Header.Add("Content-type", "application/json")
176 | for k, v := range tc.requestHeader {
177 | req.Header.Add(k, v)
178 | }
179 | resp, err := http.DefaultClient.Do(req)
180 | if err != nil {
181 | t.Fatalf("unable to send request: %s", err)
182 | }
183 | defer resp.Body.Close()
184 | 
185 | if resp.StatusCode != http.StatusOK {
186 | if tc.isErr { // a non-200 response is the expected outcome for error cases
187 | return
188 | }
189 | bodyBytes, _ := io.ReadAll(resp.Body)
190 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
191 | }
192 | 
193 | // Check response body
194 | var body map[string]interface{}
195 | err = json.NewDecoder(resp.Body).Decode(&body)
196 | if err != nil {
197 | t.Fatalf("error parsing response body")
198 | }
199 | 
200 | got, ok := body["result"].(string)
201 | if !ok {
202 | t.Fatalf("unable to find result in response body")
203 | }
204 | 
205 | if got != tc.want {
206 | t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
207 | }
208 | })
209 | }
210 | }
211 | // runToolInsertInvokeTest invokes the insert-one and insert-many tools over HTTP and asserts the returned inserted ObjectID(s).
212 | func runToolInsertInvokeTest(t *testing.T, insert1Want, insertManyWant string) {
213 | // Test tool invoke endpoint
214 | invokeTcs := []struct {
215 | name string
216 | api string
217 | requestHeader map[string]string
218 | requestBody io.Reader
219 | want string
220 | isErr bool
221 | }{
222 | {
223 | name: "invoke my-insert-one-tool",
224 | api: "http://127.0.0.1:5000/api/tool/my-insert-one-tool/invoke",
225 | requestHeader: map[string]string{},
226 | requestBody: bytes.NewBuffer([]byte(`{ "data" : "{ \"_id\": { \"$oid\": \"68666e1035bb36bf1b4d47fb\" }, \"id\" : 200 }" }"`)), // NOTE(review): the trailing " after the closing } looks accidental; json.Decoder stops after the first value, so it is tolerated — confirm and clean up in the original source
227 | want: insert1Want,
228 | isErr: false,
229 | },
230 | {
231 | name: "invoke my-insert-many-tool",
232 | api: "http://127.0.0.1:5000/api/tool/my-insert-many-tool/invoke",
233 | requestHeader: map[string]string{},
234 | requestBody: bytes.NewBuffer([]byte(`{ "data" : "[{ \"_id\": { \"$oid\": \"68667a6436ec7d0363668db7\"} , \"id\" : 201 }, { \"_id\" : { \"$oid\": \"68667a6436ec7d0363668db8\"}, \"id\" : 202 }, { \"_id\": { \"$oid\": \"68667a6436ec7d0363668db9\"}, \"id\": 203 }]" }`)),
235 | want: insertManyWant,
236 | isErr: false,
237 | },
238 | }
239 | 
240 | for _, tc := range invokeTcs {
241 | 
242 | t.Run(tc.name, func(t *testing.T) {
243 | // Send Tool invocation request
244 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
245 | if err != nil {
246 | t.Fatalf("unable to create request: %s", err)
247 | }
248 | req.Header.Add("Content-type", "application/json")
249 | for k, v := range tc.requestHeader {
250 | req.Header.Add(k, v)
251 | }
252 | resp, err := http.DefaultClient.Do(req)
253 | if err != nil {
254 | t.Fatalf("unable to send request: %s", err)
255 | }
256 | defer resp.Body.Close()
257 | 
258 | if resp.StatusCode != http.StatusOK {
259 | if tc.isErr { // a non-200 response is the expected outcome for error cases
260 | return
261 | }
262 | bodyBytes, _ := io.ReadAll(resp.Body)
263 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
264 | }
265 | 
266 | // Check response body
267 | var body map[string]interface{}
268 | err = json.NewDecoder(resp.Body).Decode(&body)
269 | if err != nil {
270 | t.Fatalf("error parsing response body")
271 | }
272 | 
273 | got, ok := body["result"].(string)
274 | if !ok {
275 | t.Fatalf("unable to find result in response body")
276 | }
277 | 
278 | if got != tc.want {
279 | t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
280 | }
281 | })
282 | }
283 | }
284 | // runToolUpdateInvokeTest invokes the update-one and update-many tools over HTTP and asserts the reported update counts.
285 | func runToolUpdateInvokeTest(t *testing.T, update1Want, updateManyWant string) {
286 | // Test tool invoke endpoint
287 | invokeTcs := []struct {
288 | name string
289 | api string
290 | requestHeader map[string]string
291 | requestBody io.Reader
292 | want string
293 | isErr bool
294 | }{
295 | {
296 | name: "invoke my-update-one-tool",
297 | api: "http://127.0.0.1:5000/api/tool/my-update-one-tool/invoke",
298 | requestHeader: map[string]string{},
299 | requestBody: bytes.NewBuffer([]byte(`{ "id": 300, "name": "Bob" }`)), // one seeded doc has id 300
300 | want: update1Want,
301 | isErr: false,
302 | },
303 | {
304 | name: "invoke my-update-many-tool",
305 | api: "http://127.0.0.1:5000/api/tool/my-update-many-tool/invoke",
306 | requestHeader: map[string]string{},
307 | requestBody: bytes.NewBuffer([]byte(`{ "id": 400, "name" : "Alice" }`)), // two seeded docs share id 400
308 | want: updateManyWant,
309 | isErr: false,
310 | },
311 | }
312 | 
313 | for _, tc := range invokeTcs {
314 | 
315 | t.Run(tc.name, func(t *testing.T) {
316 | // Send Tool invocation request
317 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
318 | if err != nil {
319 | t.Fatalf("unable to create request: %s", err)
320 | }
321 | req.Header.Add("Content-type", "application/json")
322 | for k, v := range tc.requestHeader {
323 | req.Header.Add(k, v)
324 | }
325 | resp, err := http.DefaultClient.Do(req)
326 | if err != nil {
327 | t.Fatalf("unable to send request: %s", err)
328 | }
329 | defer resp.Body.Close()
330 | 
331 | if resp.StatusCode != http.StatusOK {
332 | if tc.isErr { // a non-200 response is the expected outcome for error cases
333 | return
334 | }
335 | bodyBytes, _ := io.ReadAll(resp.Body)
336 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
337 | }
338 | 
339 | // Check response body
340 | var body map[string]interface{}
341 | err = json.NewDecoder(resp.Body).Decode(&body)
342 | if err != nil {
343 | t.Fatalf("error parsing response body")
344 | }
345 | 
346 | got, ok := body["result"].(string)
347 | if !ok {
348 | t.Fatalf("unable to find result in response body")
349 | }
350 | 
351 | if got != tc.want {
352 | t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
353 | }
354 | })
355 | }
356 | }
357 | func runToolAggregateInvokeTest(t *testing.T, aggregate1Want string, aggregateManyWant string) { // invokes the aggregate tools over HTTP, including the readOnly-enforcement variants
358 | // Test tool invoke endpoint
359 | invokeTcs := []struct {
360 | name string
361 | api string
362 | requestHeader map[string]string
363 | requestBody io.Reader
364 | want string
365 | isErr bool
366 | }{
367 | {
368 | name: "invoke my-aggregate-tool",
369 | api: "http://127.0.0.1:5000/api/tool/my-aggregate-tool/invoke",
370 | requestHeader: map[string]string{},
371 | requestBody: bytes.NewBuffer([]byte(`{ "name": "Jane" }`)), // single seeded match (id 2)
372 | want: aggregate1Want,
373 | isErr: false,
374 | },
375 | {
376 | name: "invoke my-aggregate-tool",
377 | api: "http://127.0.0.1:5000/api/tool/my-aggregate-tool/invoke",
378 | requestHeader: map[string]string{},
379 | requestBody: bytes.NewBuffer([]byte(`{ "name" : "ToBeAggregated" }`)), // two seeded matches (ids 500, 501)
380 | want: aggregateManyWant,
381 | isErr: false,
382 | },
383 | {
384 | name: "invoke my-read-only-aggregate-tool",
385 | api: "http://127.0.0.1:5000/api/tool/my-read-only-aggregate-tool/invoke",
386 | requestHeader: map[string]string{},
387 | requestBody: bytes.NewBuffer([]byte(`{ "name" : "ToBeAggregated" }`)),
388 | want: "", // $out in a readOnly pipeline is expected to be rejected
389 | isErr: true,
390 | },
391 | {
392 | name: "invoke my-read-write-aggregate-tool",
393 | api: "http://127.0.0.1:5000/api/tool/my-read-write-aggregate-tool/invoke",
394 | requestHeader: map[string]string{},
395 | requestBody: bytes.NewBuffer([]byte(`{ "name" : "ToBeAggregated" }`)),
396 | want: "[]", // $out consumes the pipeline output, so the result set is empty
397 | isErr: false,
398 | },
399 | }
400 | 
401 | for _, tc := range invokeTcs {
402 | 
403 | t.Run(tc.name, func(t *testing.T) {
404 | // Send Tool invocation request
405 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
406 | if err != nil {
407 | t.Fatalf("unable to create request: %s", err)
408 | }
409 | req.Header.Add("Content-type", "application/json")
410 | for k, v := range tc.requestHeader {
411 | req.Header.Add(k, v)
412 | }
413 | resp, err := http.DefaultClient.Do(req)
414 | if err != nil {
415 | t.Fatalf("unable to send request: %s", err)
416 | }
417 | defer resp.Body.Close()
418 | 
419 | if resp.StatusCode != http.StatusOK {
420 | if tc.isErr { // a non-200 response is the expected outcome for error cases
421 | return
422 | }
423 | bodyBytes, _ := io.ReadAll(resp.Body)
424 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
425 | }
426 | 
427 | // Check response body
428 | var body map[string]interface{}
429 | err = json.NewDecoder(resp.Body).Decode(&body)
430 | if err != nil {
431 | t.Fatalf("error parsing response body")
432 | }
433 | 
434 | got, ok := body["result"].(string)
435 | if !ok {
436 | t.Fatalf("unable to find result in response body")
437 | }
438 | 
439 | if got != tc.want {
440 | t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
441 | }
442 | })
443 | }
444 | }
445 | // setupMongoDB drops and reseeds test_collection with the fixture documents the invoke tests depend on, and returns a teardown func that drops the collection again.
446 | func setupMongoDB(t *testing.T, ctx context.Context, database *mongo.Database) func(*testing.T) {
447 | collectionName := "test_collection"
448 | 
449 | if err := database.Collection(collectionName).Drop(ctx); err != nil {
450 | t.Logf("Warning: failed to drop collection before setup: %v", err) // best-effort cleanup of a previous run; not fatal
451 | }
452 | 
453 | documents := []map[string]any{
454 | {"_id": 1, "id": 1, "name": "Alice", "email": ServiceAccountEmail},
455 | {"_id": 14, "id": 2, "name": "FakeAlice", "email": "[email protected]"},
456 | {"_id": 2, "id": 2, "name": "Jane"},
457 | {"_id": 3, "id": 3, "name": "Sid"},
458 | {"_id": 5, "id": 3, "name": "Alice", "email": "[email protected]"},
459 | {"_id": 6, "id": 100, "name": "ToBeDeleted", "email": "[email protected]"},
460 | {"_id": 7, "id": 101, "name": "ToBeDeleted", "email": "[email protected]"},
461 | {"_id": 8, "id": 101, "name": "ToBeDeleted", "email": "[email protected]"},
462 | {"_id": 9, "id": 300, "name": "ToBeUpdatedToBob", "email": "[email protected]"},
463 | {"_id": 10, "id": 400, "name": "ToBeUpdatedToAlice", "email": "[email protected]"},
464 | {"_id": 11, "id": 400, "name": "ToBeUpdatedToAlice", "email": "[email protected]"},
465 | {"_id": 12, "id": 500, "name": "ToBeAggregated", "email": "[email protected]"},
466 | {"_id": 13, "id": 501, "name": "ToBeAggregated", "email": "[email protected]"},
467 | }
468 | for _, doc := range documents {
469 | _, err := database.Collection(collectionName).InsertOne(ctx, doc)
470 | if err != nil {
471 | t.Fatalf("unable to insert test data: %s", err)
472 | }
473 | }
474 | 
475 | return func(t *testing.T) {
476 | // tear down test
477 | err := database.Collection(collectionName).Drop(ctx)
478 | if err != nil {
479 | t.Errorf("Teardown failed: %s", err)
480 | }
481 | }
482 | 
483 | }
484 | // getMongoDBToolsConfig builds the in-memory tools-file config (sources, authServices, tools) used to start the toolbox server; toolKind selects the find-style tool kind under test.
485 | func getMongoDBToolsConfig(sourceConfig map[string]any, toolKind string) map[string]any {
486 | toolsFile := map[string]any{
487 | "sources": map[string]any{
488 | "my-instance": sourceConfig,
489 | },
490 | "authServices": map[string]any{
491 | "my-google-auth": map[string]any{
492 | "kind": "google",
493 | "clientId": tests.ClientId,
494 | },
495 | },
496 | "tools": map[string]any{
497 | "my-simple-tool": map[string]any{
498 | "kind": "mongodb-find-one",
499 | "source": "my-instance",
500 | "description": "Simple tool to test end to end functionality.",
501 | "collection": "test_collection",
502 | "filterPayload": `{ "_id" : 3 }`,
503 | "filterParams": []any{},
504 | "projectPayload": `{ "_id": 1, "id": 1, "name" : 1 }`,
505 | "database": MongoDbDatabase,
506 | },
507 | "my-tool": map[string]any{
508 | "kind": toolKind,
509 | "source": "my-instance",
510 | "description": "Tool to test invocation with params.",
511 | "authRequired": []string{},
512 | "collection": "test_collection",
513 | "filterPayload": `{ "id" : {{ .id }}, "name" : {{json .name }} }`,
514 | "filterParams": []map[string]any{
515 | {
516 | "name": "id",
517 | "type": "integer",
518 | "description": "user id",
519 | },
520 | {
521 | "name": "name",
522 | "type": "string",
523 | "description": "user name",
524 | },
525 | },
526 | "projectPayload": `{ "_id": 1, "id": 1, "name" : 1 }`,
527 | "database": MongoDbDatabase,
528 | "limit": 10,
529 | },
530 | "my-tool-by-id": map[string]any{
531 | "kind": toolKind,
532 | "source": "my-instance",
533 | "description": "Tool to test invocation with params.",
534 | "authRequired": []string{},
535 | "collection": "test_collection",
536 | "filterPayload": `{ "id" : {{ .id }} }`,
537 | "filterParams": []map[string]any{
538 | {
539 | "name": "id",
540 | "type": "integer",
541 | "description": "user id",
542 | },
543 | },
544 | "projectPayload": `{ "_id": 1, "id": 1, "name" : 1 }`,
545 | "database": MongoDbDatabase,
546 | "limit": 10,
547 | },
548 | "my-tool-by-name": map[string]any{
549 | "kind": toolKind,
550 | "source": "my-instance",
551 | "description": "Tool to test invocation with params.",
552 | "authRequired": []string{},
553 | "collection": "test_collection",
554 | "filterPayload": `{ "name" : {{json .name }} }`,
555 | "filterParams": []map[string]any{
556 | {
557 | "name": "name",
558 | "type": "string",
559 | "description": "user name",
560 | "required": false,
561 | },
562 | },
563 | "projectPayload": `{ "_id": 1, "id": 1, "name" : 1 }`,
564 | "database": MongoDbDatabase,
565 | "limit": 10,
566 | },
567 | "my-array-tool": map[string]any{
568 | "kind": toolKind,
569 | "source": "my-instance",
570 | "description": "Tool to test invocation with array.",
571 | "authRequired": []string{},
572 | "collection": "test_collection",
573 | "filterPayload": `{ "name": { "$in": {{json .nameArray}} }, "_id": 5 }`,
574 | "filterParams": []map[string]any{
575 | {
576 | "name": "nameArray",
577 | "type": "array",
578 | "description": "user names",
579 | "items": map[string]any{
580 | "name": "username",
581 | "type": "string",
582 | "description": "string item"},
583 | },
584 | },
585 | "projectPayload": `{ "_id": 1, "id": 1, "name" : 1 }`,
586 | "database": MongoDbDatabase,
587 | "limit": 10,
588 | },
589 | "my-auth-tool": map[string]any{
590 | "kind": toolKind,
591 | "source": "my-instance",
592 | "description": "Tool to test authenticated parameters.",
593 | "authRequired": []string{},
594 | "collection": "test_collection",
595 | "filterPayload": `{ "email" : {{json .email }} }`,
596 | "filterParams": []map[string]any{
597 | {
598 | "name": "email",
599 | "type": "string",
600 | "description": "user email",
601 | "authServices": []map[string]string{
602 | {
603 | "name": "my-google-auth",
604 | "field": "email",
605 | },
606 | },
607 | },
608 | },
609 | "projectPayload": `{ "_id": 0, "name" : 1 }`,
610 | "database": MongoDbDatabase,
611 | "limit": 10,
612 | },
613 | "my-auth-required-tool": map[string]any{
614 | "kind": toolKind,
615 | "source": "my-instance",
616 | "description": "Tool to test auth required invocation.",
617 | "authRequired": []string{
618 | "my-google-auth",
619 | },
620 | "collection": "test_collection",
621 | "filterPayload": `{ "_id": 3, "id": 3 }`,
622 | "filterParams": []any{},
623 | "database": MongoDbDatabase,
624 | "limit": 10,
625 | },
626 | "my-fail-tool": map[string]any{
627 | "kind": toolKind,
628 | "source": "my-instance",
629 | "description": "Tool to test statement with incorrect syntax.",
630 | "authRequired": []string{},
631 | "collection": "test_collection",
632 | "filterPayload": `{ "id" ; 1 }"}`, // deliberately malformed: produces the "invalid JSON input" error asserted by the MCP tests
633 | "filterParams": []any{},
634 | "database": MongoDbDatabase,
635 | "limit": 10,
636 | },
637 | "my-delete-one-tool": map[string]any{
638 | "kind": "mongodb-delete-one",
639 | "source": "my-instance",
640 | "description": "Tool to test deleting an entry.",
641 | "authRequired": []string{},
642 | "collection": "test_collection",
643 | "filterPayload": `{ "id" : 100 }"}`, // NOTE(review): the trailing "} after the closing brace looks accidental — confirm the relaxed-JSON filter parser tolerates it
644 | "filterParams": []any{},
645 | "database": MongoDbDatabase,
646 | },
647 | "my-delete-many-tool": map[string]any{
648 | "kind": "mongodb-delete-many",
649 | "source": "my-instance",
650 | "description": "Tool to test deleting multiple entries.",
651 | "authRequired": []string{},
652 | "collection": "test_collection",
653 | "filterPayload": `{ "id" : 101 }"}`, // NOTE(review): same stray "} suffix as my-delete-one-tool — confirm intentional
654 | "filterParams": []any{},
655 | "database": MongoDbDatabase,
656 | },
657 | "my-insert-one-tool": map[string]any{
658 | "kind": "mongodb-insert-one",
659 | "source": "my-instance",
660 | "description": "Tool to test inserting an entry.",
661 | "authRequired": []string{},
662 | "collection": "test_collection",
663 | "canonical": true,
664 | "database": MongoDbDatabase,
665 | },
666 | "my-insert-many-tool": map[string]any{
667 | "kind": "mongodb-insert-many",
668 | "source": "my-instance",
669 | "description": "Tool to test inserting multiple entries.",
670 | "authRequired": []string{},
671 | "collection": "test_collection",
672 | "canonical": true,
673 | "database": MongoDbDatabase,
674 | },
675 | "my-update-one-tool": map[string]any{
676 | "kind": "mongodb-update-one",
677 | "source": "my-instance",
678 | "description": "Tool to test updating an entry.",
679 | "authRequired": []string{},
680 | "collection": "test_collection",
681 | "canonical": true,
682 | "filterPayload": `{ "id" : {{ .id }} }`,
683 | "filterParams": []map[string]any{
684 | {
685 | "name": "id",
686 | "type": "integer",
687 | "description": "id",
688 | },
689 | },
690 | "updatePayload": `{ "$set" : { "name": {{json .name}} } }`,
691 | "updateParams": []map[string]any{
692 | {
693 | "name": "name",
694 | "type": "string",
695 | "description": "user name",
696 | },
697 | },
698 | "database": MongoDbDatabase,
699 | },
700 | "my-update-many-tool": map[string]any{
701 | "kind": "mongodb-update-many",
702 | "source": "my-instance",
703 | "description": "Tool to test updating multiple entries.",
704 | "authRequired": []string{},
705 | "collection": "test_collection",
706 | "canonical": true,
707 | "filterPayload": `{ "id" : {{ .id }} }`,
708 | "filterParams": []map[string]any{
709 | {
710 | "name": "id",
711 | "type": "integer",
712 | "description": "id",
713 | },
714 | },
715 | "updatePayload": `{ "$set" : { "name": {{json .name}} } }`,
716 | "updateParams": []map[string]any{
717 | {
718 | "name": "name",
719 | "type": "string",
720 | "description": "user name",
721 | },
722 | },
723 | "database": MongoDbDatabase,
724 | },
725 | "my-aggregate-tool": map[string]any{
726 | "kind": "mongodb-aggregate",
727 | "source": "my-instance",
728 | "description": "Tool to test an aggregation.",
729 | "authRequired": []string{},
730 | "collection": "test_collection",
731 | "canonical": true,
732 | "pipelinePayload": `[{ "$match" : { "name": {{json .name}} } }, { "$project" : { "id" : 1, "_id" : 0 }}]`,
733 | "pipelineParams": []map[string]any{
734 | {
735 | "name": "name",
736 | "type": "string",
737 | "description": "user name",
738 | },
739 | },
740 | "database": MongoDbDatabase,
741 | },
742 | "my-read-only-aggregate-tool": map[string]any{
743 | "kind": "mongodb-aggregate",
744 | "source": "my-instance",
745 | "description": "Tool to test an aggregation.",
746 | "authRequired": []string{},
747 | "collection": "test_collection",
748 | "canonical": true,
749 | "readOnly": true, // pipeline contains $out, so invocation is expected to be rejected
750 | "pipelinePayload": `[{ "$match" : { "name": {{json .name}} } }, { "$out" : "target_collection" }]`,
751 | "pipelineParams": []map[string]any{
752 | {
753 | "name": "name",
754 | "type": "string",
755 | "description": "user name",
756 | },
757 | },
758 | "database": MongoDbDatabase,
759 | },
760 | "my-read-write-aggregate-tool": map[string]any{
761 | "kind": "mongodb-aggregate",
762 | "source": "my-instance",
763 | "description": "Tool to test an aggregation.",
764 | "authRequired": []string{},
765 | "collection": "test_collection",
766 | "canonical": true,
767 | "readOnly": false,
768 | "pipelinePayload": `[{ "$match" : { "name": {{json .name}} } }, { "$out" : "target_collection" }]`,
769 | "pipelineParams": []map[string]any{
770 | {
771 | "name": "name",
772 | "type": "string",
773 | "description": "user name",
774 | },
775 | },
776 | "database": MongoDbDatabase,
777 | },
778 | },
779 | }
780 | 
781 | return toolsFile
782 | 
783 | }
783 | }
784 |
```
--------------------------------------------------------------------------------
/docs/en/samples/bigquery/local_quickstart.md:
--------------------------------------------------------------------------------
```markdown
1 | ---
2 | title: "Quickstart (Local with BigQuery)"
3 | type: docs
4 | weight: 1
5 | description: >
6 | How to get started running Toolbox locally with Python, BigQuery, and
7 | LangGraph, LlamaIndex, or ADK.
8 | ---
9 |
 10 | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/googleapis/genai-toolbox/blob/main/docs/en/samples/bigquery/colab_quickstart_bigquery.ipynb)
12 |
13 | ## Before you begin
14 |
15 | This guide assumes you have already done the following:
16 |
17 | 1. Installed [Python 3.10+][install-python] (including [pip][install-pip] and
18 | your preferred virtual environment tool for managing dependencies e.g.
19 | [venv][install-venv]).
20 | 1. Installed and configured the [Google Cloud SDK (gcloud CLI)][install-gcloud].
21 | 1. Authenticated with Google Cloud for Application Default Credentials (ADC):
22 |
23 | ```bash
24 | gcloud auth login --update-adc
25 | ```
26 |
27 | 1. Set your default Google Cloud project (replace `YOUR_PROJECT_ID` with your
28 | actual project ID):
29 |
30 | ```bash
31 | gcloud config set project YOUR_PROJECT_ID
32 | export GOOGLE_CLOUD_PROJECT=YOUR_PROJECT_ID
33 | ```
34 |
35 | Toolbox and the client libraries will use this project for BigQuery, unless
36 | overridden in configurations.
37 | 1. [Enabled the BigQuery API][enable-bq-api] in your Google Cloud project.
38 | 1. Installed the BigQuery client library for Python:
39 |
40 | ```bash
41 | pip install google-cloud-bigquery
42 | ```
43 |
44 | 1. Completed setup for usage with an LLM model such as
45 | {{< tabpane text=true persist=header >}}
46 | {{% tab header="Core" lang="en" %}}
47 |
48 | - [langchain-vertexai](https://python.langchain.com/docs/integrations/llms/google_vertex_ai_palm/#setup)
49 | package.
50 |
51 | - [langchain-google-genai](https://python.langchain.com/docs/integrations/chat/google_generative_ai/#setup)
52 | package.
53 |
54 | - [langchain-anthropic](https://python.langchain.com/docs/integrations/chat/anthropic/#setup)
55 | package.
56 | {{% /tab %}}
57 | {{% tab header="LangChain" lang="en" %}}
58 | - [langchain-vertexai](https://python.langchain.com/docs/integrations/llms/google_vertex_ai_palm/#setup)
59 | package.
60 |
61 | - [langchain-google-genai](https://python.langchain.com/docs/integrations/chat/google_generative_ai/#setup)
62 | package.
63 |
64 | - [langchain-anthropic](https://python.langchain.com/docs/integrations/chat/anthropic/#setup)
65 | package.
66 | {{% /tab %}}
67 | {{% tab header="LlamaIndex" lang="en" %}}
68 | - [llama-index-llms-google-genai](https://pypi.org/project/llama-index-llms-google-genai/)
69 | package.
70 |
71 | - [llama-index-llms-anthropic](https://docs.llamaindex.ai/en/stable/examples/llm/anthropic)
72 | package.
73 | {{% /tab %}}
74 | {{% tab header="ADK" lang="en" %}}
75 | - [google-adk](https://pypi.org/project/google-adk/) package.
76 | {{% /tab %}}
77 | {{< /tabpane >}}
78 |
79 | [install-python]: https://wiki.python.org/moin/BeginnersGuide/Download
80 | [install-pip]: https://pip.pypa.io/en/stable/installation/
81 | [install-venv]:
82 | https://packaging.python.org/en/latest/tutorials/installing-packages/#creating-virtual-environments
83 | [install-gcloud]: https://cloud.google.com/sdk/docs/install
84 | [enable-bq-api]:
85 | https://cloud.google.com/bigquery/docs/quickstarts/query-public-dataset-console#before-you-begin
86 |
87 | ## Step 1: Set up your BigQuery Dataset and Table
88 |
89 | In this section, we will create a BigQuery dataset and a table, then insert some
90 | data that needs to be accessed by our agent. BigQuery operations are performed
91 | against your configured Google Cloud project.
92 |
93 | 1. Create a new BigQuery dataset (replace `YOUR_DATASET_NAME` with your desired
94 | dataset name, e.g., `toolbox_ds`, and optionally specify a location like `US`
95 | or `EU`):
96 |
97 | ```bash
98 | export BQ_DATASET_NAME="YOUR_DATASET_NAME" # e.g., toolbox_ds
99 | export BQ_LOCATION="US" # e.g., US, EU, asia-northeast1
100 |
101 | bq --location=$BQ_LOCATION mk $BQ_DATASET_NAME
102 | ```
103 |
104 | You can also do this through the [Google Cloud
105 | Console](https://console.cloud.google.com/bigquery).
106 |
107 | {{< notice tip >}}
108 | For a real application, ensure that the service account or user running Toolbox
109 | has the necessary IAM permissions (e.g., BigQuery Data Editor, BigQuery User)
110 | on the dataset or project. For this local quickstart with user credentials,
111 | your own permissions will apply.
112 | {{< /notice >}}
113 |
114 | 1. The hotels table needs to be defined in your new dataset for use with the bq
115 | query command. First, create a file named `create_hotels_table.sql` with the
116 | following content:
117 |
118 | ```sql
119 | CREATE TABLE IF NOT EXISTS `YOUR_PROJECT_ID.YOUR_DATASET_NAME.hotels` (
120 | id INT64 NOT NULL,
121 | name STRING NOT NULL,
122 | location STRING NOT NULL,
123 | price_tier STRING NOT NULL,
124 | checkin_date DATE NOT NULL,
125 | checkout_date DATE NOT NULL,
126 | booked BOOLEAN NOT NULL
127 | );
128 | ```
129 |
130 | > **Note:** Replace `YOUR_PROJECT_ID` and `YOUR_DATASET_NAME` in the SQL
131 | > with your actual project ID and dataset name.
132 |
133 |    Then run the command below to execute the SQL query:
134 |
135 | ```bash
136 | bq query --project_id=$GOOGLE_CLOUD_PROJECT --dataset_id=$BQ_DATASET_NAME --use_legacy_sql=false < create_hotels_table.sql
137 | ```
138 |
139 | 1. Next, populate the hotels table with some initial data. To do this, create a
140 | file named `insert_hotels_data.sql` and add the following SQL INSERT
141 | statement to it.
142 |
143 | ```sql
144 | INSERT INTO `YOUR_PROJECT_ID.YOUR_DATASET_NAME.hotels` (id, name, location, price_tier, checkin_date, checkout_date, booked)
145 | VALUES
146 | (1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-20', '2024-04-22', FALSE),
147 | (2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', FALSE),
148 | (3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', FALSE),
149 | (4, 'Radisson Blu Lucerne', 'Lucerne', 'Midscale', '2024-04-05', '2024-04-24', FALSE),
150 | (5, 'Best Western Bern', 'Bern', 'Upper Midscale', '2024-04-01', '2024-04-23', FALSE),
151 | (6, 'InterContinental Geneva', 'Geneva', 'Luxury', '2024-04-23', '2024-04-28', FALSE),
152 | (7, 'Sheraton Zurich', 'Zurich', 'Upper Upscale', '2024-04-02', '2024-04-27', FALSE),
153 | (8, 'Holiday Inn Basel', 'Basel', 'Upper Midscale', '2024-04-09', '2024-04-24', FALSE),
154 | (9, 'Courtyard Zurich', 'Zurich', 'Upscale', '2024-04-03', '2024-04-13', FALSE),
155 | (10, 'Comfort Inn Bern', 'Bern', 'Midscale', '2024-04-04', '2024-04-16', FALSE);
156 | ```
157 |
158 | > **Note:** Replace `YOUR_PROJECT_ID` and `YOUR_DATASET_NAME` in the SQL
159 | > with your actual project ID and dataset name.
160 |
161 |    Then run the command below to execute the SQL query:
162 |
163 | ```bash
164 | bq query --project_id=$GOOGLE_CLOUD_PROJECT --dataset_id=$BQ_DATASET_NAME --use_legacy_sql=false < insert_hotels_data.sql
165 | ```
166 |
167 | ## Step 2: Install and configure Toolbox
168 |
169 | In this section, we will download Toolbox, configure our tools in a `tools.yaml`
170 | to use BigQuery, and then run the Toolbox server.
171 |
172 | 1. Download the latest version of Toolbox as a binary:
173 |
174 | {{< notice tip >}}
175 | Select the
176 | [correct binary](https://github.com/googleapis/genai-toolbox/releases)
177 | corresponding to your OS and CPU architecture.
178 | {{< /notice >}}
179 | <!-- {x-release-please-start-version} -->
180 | ```bash
181 | export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
182 | curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
183 | ```
184 | <!-- {x-release-please-end} -->
185 |
186 | 1. Make the binary executable:
187 |
188 | ```bash
189 | chmod +x toolbox
190 | ```
191 |
192 | 1. Write the following into a `tools.yaml` file. You must replace the
193 | `YOUR_PROJECT_ID` and `YOUR_DATASET_NAME` placeholder in the config with your
194 | actual BigQuery project and dataset name. The `location` field is optional;
195 | if not specified, it defaults to 'us'. The table name `hotels` is used
196 | directly in the statements.
197 |
198 | {{< notice tip >}}
199 | Authentication with BigQuery is handled via Application Default Credentials
200 | (ADC). Ensure you have run `gcloud auth application-default login`.
201 | {{< /notice >}}
202 |
203 | ```yaml
204 | sources:
205 | my-bigquery-source:
206 | kind: bigquery
207 | project: YOUR_PROJECT_ID
208 | location: us
209 | tools:
210 | search-hotels-by-name:
211 | kind: bigquery-sql
212 | source: my-bigquery-source
213 | description: Search for hotels based on name.
214 | parameters:
215 | - name: name
216 | type: string
217 | description: The name of the hotel.
218 | statement: SELECT * FROM `YOUR_DATASET_NAME.hotels` WHERE LOWER(name) LIKE LOWER(CONCAT('%', @name, '%'));
219 | search-hotels-by-location:
220 | kind: bigquery-sql
221 | source: my-bigquery-source
222 | description: Search for hotels based on location.
223 | parameters:
224 | - name: location
225 | type: string
226 | description: The location of the hotel.
227 | statement: SELECT * FROM `YOUR_DATASET_NAME.hotels` WHERE LOWER(location) LIKE LOWER(CONCAT('%', @location, '%'));
228 | book-hotel:
229 | kind: bigquery-sql
230 | source: my-bigquery-source
231 | description: >-
232 | Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.
233 | parameters:
234 | - name: hotel_id
235 | type: integer
236 | description: The ID of the hotel to book.
237 | statement: UPDATE `YOUR_DATASET_NAME.hotels` SET booked = TRUE WHERE id = @hotel_id;
238 | update-hotel:
239 | kind: bigquery-sql
240 | source: my-bigquery-source
241 | description: >-
242 | Update a hotel's check-in and check-out dates by its ID. Returns a message indicating whether the hotel was successfully updated or not.
243 | parameters:
244 | - name: checkin_date
245 | type: string
246 | description: The new check-in date of the hotel.
247 | - name: checkout_date
248 | type: string
249 | description: The new check-out date of the hotel.
250 | - name: hotel_id
251 | type: integer
252 | description: The ID of the hotel to update.
253 | statement: >-
254 | UPDATE `YOUR_DATASET_NAME.hotels` SET checkin_date = PARSE_DATE('%Y-%m-%d', @checkin_date), checkout_date = PARSE_DATE('%Y-%m-%d', @checkout_date) WHERE id = @hotel_id;
255 | cancel-hotel:
256 | kind: bigquery-sql
257 | source: my-bigquery-source
258 | description: Cancel a hotel by its ID.
259 | parameters:
260 | - name: hotel_id
261 | type: integer
262 | description: The ID of the hotel to cancel.
263 | statement: UPDATE `YOUR_DATASET_NAME.hotels` SET booked = FALSE WHERE id = @hotel_id;
264 | ```
265 |
266 | **Important Note on `toolsets`**: The `tools.yaml` content above does not
267 | include a `toolsets` section. The Python agent examples in Step 3 (e.g.,
268 | `await toolbox_client.load_toolset("my-toolset")`) rely on a toolset named
269 | `my-toolset`. To make those examples work, you will need to add a `toolsets`
270 | section to your `tools.yaml` file, for example:
271 |
272 | ```yaml
273 | # Add this to your tools.yaml if using load_toolset("my-toolset")
274 | # Ensure it's at the same indentation level as 'sources:' and 'tools:'
275 | toolsets:
276 | my-toolset:
277 | - search-hotels-by-name
278 | - search-hotels-by-location
279 | - book-hotel
280 | - update-hotel
281 | - cancel-hotel
282 | ```
283 |
284 | Alternatively, you can modify the agent code to load tools individually
285 | (e.g., using `await toolbox_client.load_tool("search-hotels-by-name")`).
286 |
287 | For more info on tools, check out the [Resources](../../resources/) section
288 | of the docs.
289 |
290 | 1. Run the Toolbox server, pointing to the `tools.yaml` file created earlier:
291 |
292 | ```bash
293 | ./toolbox --tools-file "tools.yaml"
294 | ```
295 |
296 | {{< notice note >}}
297 | Toolbox enables dynamic reloading by default. To disable, use the
298 | `--disable-reload` flag.
299 | {{< /notice >}}
300 |
301 | ## Step 3: Connect your agent to Toolbox
302 |
303 | In this section, we will write and run an agent that will load the Tools
304 | from Toolbox.
305 |
306 | {{< notice tip>}} If you prefer to experiment within a Google Colab environment,
307 | you can connect to a
308 | [local runtime](https://research.google.com/colaboratory/local-runtimes.html).
309 | {{< /notice >}}
310 |
311 | 1. In a new terminal, install the SDK package.
312 |
313 | {{< tabpane persist=header >}}
314 | {{< tab header="Core" lang="bash" >}}
315 |
316 | pip install toolbox-core
317 | {{< /tab >}}
318 | {{< tab header="Langchain" lang="bash" >}}
319 |
320 | pip install toolbox-langchain
321 | {{< /tab >}}
322 | {{< tab header="LlamaIndex" lang="bash" >}}
323 |
324 | pip install toolbox-llamaindex
325 | {{< /tab >}}
326 | {{< tab header="ADK" lang="bash" >}}
327 |
328 | pip install google-adk
329 | {{< /tab >}}
330 |
331 | {{< /tabpane >}}
332 |
333 | 1. Install other required dependencies:
334 |
335 | {{< tabpane persist=header >}}
336 | {{< tab header="Core" lang="bash" >}}
337 |
338 | # TODO(developer): replace with correct package if needed
339 |
340 | pip install langgraph langchain-google-vertexai
341 |
342 | # pip install langchain-google-genai
343 |
344 | # pip install langchain-anthropic
345 |
346 | {{< /tab >}}
347 | {{< tab header="Langchain" lang="bash" >}}
348 |
349 | # TODO(developer): replace with correct package if needed
350 |
351 | pip install langgraph langchain-google-vertexai
352 |
353 | # pip install langchain-google-genai
354 |
355 | # pip install langchain-anthropic
356 |
357 | {{< /tab >}}
358 | {{< tab header="LlamaIndex" lang="bash" >}}
359 |
360 | # TODO(developer): replace with correct package if needed
361 |
362 | pip install llama-index-llms-google-genai
363 |
364 | # pip install llama-index-llms-anthropic
365 |
366 | {{< /tab >}}
367 | {{< tab header="ADK" lang="bash" >}}
368 | pip install toolbox-core
369 | {{< /tab >}}
370 | {{< /tabpane >}}
371 |
372 | 1. Create a new file named `hotel_agent.py` and copy the following
373 | code to create an agent:
374 | {{< tabpane persist=header >}}
375 | {{< tab header="Core" lang="python" >}}
376 |
377 | import asyncio
378 |
379 | from google import genai
380 | from google.genai.types import (
381 | Content,
382 | FunctionDeclaration,
383 | GenerateContentConfig,
384 | Part,
385 | Tool,
386 | )
387 |
388 | from toolbox_core import ToolboxClient
389 |
390 | prompt = """
391 | You're a helpful hotel assistant. You handle hotel searching, booking and
392 | cancellations. When the user searches for a hotel, mention its name, id,
393 | location and price tier. Always mention hotel id while performing any
394 | searches. This is very important for any operations. For any bookings or
395 | cancellations, please provide the appropriate confirmation. Be sure to
396 | update checkin or checkout dates if mentioned by the user.
397 | Don't ask for confirmations from the user.
398 | """
399 |
400 | queries = [
401 |     "Find hotels in Basel with Basel in its name.",
402 | "Please book the hotel Hilton Basel for me.",
403 | "This is too expensive. Please cancel it.",
404 | "Please book Hyatt Regency for me",
405 | "My check in dates for my booking would be from April 10, 2024 to April 19, 2024.",
406 | ]
407 |
408 | async def run_application():
409 |     async with ToolboxClient("http://127.0.0.1:5000") as toolbox_client:
410 |
411 | # The toolbox_tools list contains Python callables (functions/methods) designed for LLM tool-use
412 | # integration. While this example uses Google's genai client, these callables can be adapted for
413 | # various function-calling or agent frameworks. For easier integration with supported frameworks
414 | # (https://github.com/googleapis/mcp-toolbox-python-sdk/tree/main/packages), use the
415 | # provided wrapper packages, which handle framework-specific boilerplate.
416 | toolbox_tools = await toolbox_client.load_toolset("my-toolset")
417 | genai_client = genai.Client(
418 | vertexai=True, project="project-id", location="us-central1"
419 | )
420 |
421 | genai_tools = [
422 | Tool(
423 | function_declarations=[
424 | FunctionDeclaration.from_callable_with_api_option(callable=tool)
425 | ]
426 | )
427 | for tool in toolbox_tools
428 | ]
429 | history = []
430 | for query in queries:
431 | user_prompt_content = Content(
432 | role="user",
433 | parts=[Part.from_text(text=query)],
434 | )
435 | history.append(user_prompt_content)
436 |
437 | response = genai_client.models.generate_content(
438 | model="gemini-2.0-flash-001",
439 | contents=history,
440 | config=GenerateContentConfig(
441 | system_instruction=prompt,
442 | tools=genai_tools,
443 | ),
444 | )
445 | history.append(response.candidates[0].content)
446 | function_response_parts = []
447 | for function_call in response.function_calls:
448 | fn_name = function_call.name
449 | # The tools are sorted alphabetically
450 | if fn_name == "search-hotels-by-name":
451 | function_result = await toolbox_tools[3](**function_call.args)
452 | elif fn_name == "search-hotels-by-location":
453 | function_result = await toolbox_tools[2](**function_call.args)
454 | elif fn_name == "book-hotel":
455 | function_result = await toolbox_tools[0](**function_call.args)
456 | elif fn_name == "update-hotel":
457 | function_result = await toolbox_tools[4](**function_call.args)
458 | elif fn_name == "cancel-hotel":
459 | function_result = await toolbox_tools[1](**function_call.args)
460 | else:
461 | raise ValueError("Function name not present.")
462 | function_response = {"result": function_result}
463 | function_response_part = Part.from_function_response(
464 | name=function_call.name,
465 | response=function_response,
466 | )
467 | function_response_parts.append(function_response_part)
468 |
469 | if function_response_parts:
470 | tool_response_content = Content(role="tool", parts=function_response_parts)
471 | history.append(tool_response_content)
472 |
473 | response2 = genai_client.models.generate_content(
474 | model="gemini-2.0-flash-001",
475 | contents=history,
476 | config=GenerateContentConfig(
477 | tools=genai_tools,
478 | ),
479 | )
480 | final_model_response_content = response2.candidates[0].content
481 | history.append(final_model_response_content)
482 | print(response2.text)
483 |
484 | asyncio.run(run_application())
485 | {{< /tab >}}
486 | {{< tab header="LangChain" lang="python" >}}
487 |
488 | import asyncio
489 | from langgraph.prebuilt import create_react_agent
490 |
491 | # TODO(developer): replace this with another import if needed
492 |
493 | from langchain_google_vertexai import ChatVertexAI
494 |
495 | # from langchain_google_genai import ChatGoogleGenerativeAI
496 |
497 | # from langchain_anthropic import ChatAnthropic
498 |
499 | from langgraph.checkpoint.memory import MemorySaver
500 |
501 | from toolbox_langchain import ToolboxClient
502 |
503 | prompt = """
504 | You're a helpful hotel assistant. You handle hotel searching, booking and
505 | cancellations. When the user searches for a hotel, mention its name, id,
506 | location and price tier. Always mention hotel ids while performing any
507 | searches. This is very important for any operations. For any bookings or
508 | cancellations, please provide the appropriate confirmation. Be sure to
509 | update checkin or checkout dates if mentioned by the user.
510 | Don't ask for confirmations from the user.
511 | """
512 |
513 | queries = [
514 | "Find hotels in Basel with Basel in its name.",
515 | "Can you book the Hilton Basel for me?",
516 | "Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
517 | "My check in dates would be from April 10, 2024 to April 19, 2024.",
518 | ]
519 |
520 | async def main():
521 | # TODO(developer): replace this with another model if needed
522 | model = ChatVertexAI(model_name="gemini-2.0-flash-001")
523 | # model = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001")
524 | # model = ChatAnthropic(model="claude-3-5-sonnet-20240620")
525 |
526 | # Load the tools from the Toolbox server
527 | client = ToolboxClient("http://127.0.0.1:5000")
528 | tools = await client.aload_toolset()
529 |
530 | agent = create_react_agent(model, tools, checkpointer=MemorySaver())
531 |
532 | config = {"configurable": {"thread_id": "thread-1"}}
533 | for query in queries:
534 | inputs = {"messages": [("user", prompt + query)]}
535 | response = await agent.ainvoke(inputs, stream_mode="values", config=config)
536 | print(response["messages"][-1].content)
537 |
538 | asyncio.run(main())
539 | {{< /tab >}}
540 | {{< tab header="LlamaIndex" lang="python" >}}
541 | import asyncio
542 | import os
543 |
544 | from llama_index.core.agent.workflow import AgentWorkflow
545 |
546 | from llama_index.core.workflow import Context
547 |
548 | # TODO(developer): replace this with another import if needed
549 |
550 | from llama_index.llms.google_genai import GoogleGenAI
551 |
552 | # from llama_index.llms.anthropic import Anthropic
553 |
554 | from toolbox_llamaindex import ToolboxClient
555 |
556 | prompt = """
557 | You're a helpful hotel assistant. You handle hotel searching, booking and
558 | cancellations. When the user searches for a hotel, mention its name, id,
559 | location and price tier. Always mention hotel ids while performing any
560 | searches. This is very important for any operations. For any bookings or
561 | cancellations, please provide the appropriate confirmation. Be sure to
562 | update checkin or checkout dates if mentioned by the user.
563 | Don't ask for confirmations from the user.
564 | """
565 |
566 | queries = [
567 |     "Find hotels in Basel with Basel in its name.",
568 | "Can you book the Hilton Basel for me?",
569 | "Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
570 | "My check in dates would be from April 10, 2024 to April 19, 2024.",
571 | ]
572 |
573 | async def main():
574 | # TODO(developer): replace this with another model if needed
575 | llm = GoogleGenAI(
576 | model="gemini-2.0-flash-001",
577 | vertexai_config={"location": "us-central1"},
578 | )
579 | # llm = GoogleGenAI(
580 | # api_key=os.getenv("GOOGLE_API_KEY"),
581 | # model="gemini-2.0-flash-001",
582 | # )
583 | # llm = Anthropic(
584 | # model="claude-3-7-sonnet-latest",
585 | # api_key=os.getenv("ANTHROPIC_API_KEY")
586 | # )
587 |
588 | # Load the tools from the Toolbox server
589 | client = ToolboxClient("http://127.0.0.1:5000")
590 | tools = await client.aload_toolset()
591 |
592 | agent = AgentWorkflow.from_tools_or_functions(
593 | tools,
594 | llm=llm,
595 | system_prompt=prompt,
596 | )
597 | ctx = Context(agent)
598 | for query in queries:
599 | response = await agent.arun(user_msg=query, ctx=ctx)
600 | print(f"---- {query} ----")
601 | print(str(response))
602 |
603 | asyncio.run(main())
604 | {{< /tab >}}
605 | {{< tab header="ADK" lang="python" >}}
606 | from google.adk.agents import Agent
607 | from google.adk.runners import Runner
608 | from google.adk.sessions import InMemorySessionService
609 | from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService
610 | from google.genai import types # For constructing message content
611 | from toolbox_core import ToolboxSyncClient
612 |
613 | import os
614 | os.environ['GOOGLE_GENAI_USE_VERTEXAI'] = 'True'
615 |
616 | # TODO(developer): Replace 'YOUR_PROJECT_ID' with your Google Cloud Project ID
617 |
618 | os.environ['GOOGLE_CLOUD_PROJECT'] = 'YOUR_PROJECT_ID'
619 |
620 | # TODO(developer): Replace 'us-central1' with your Google Cloud Location (region)
621 |
622 | os.environ['GOOGLE_CLOUD_LOCATION'] = 'us-central1'
623 |
624 | # --- Load Tools from Toolbox ---
625 |
626 | # TODO(developer): Ensure the Toolbox server is running at http://127.0.0.1:5000
627 |
628 | with ToolboxSyncClient("http://127.0.0.1:5000") as toolbox_client:
629 | # TODO(developer): Replace "my-toolset" with the actual ID of your toolset as configured in your MCP Toolbox server.
630 | agent_toolset = toolbox_client.load_toolset("my-toolset")
631 |
632 | # --- Define the Agent's Prompt ---
633 | prompt = """
634 | You're a helpful hotel assistant. You handle hotel searching, booking and
635 | cancellations. When the user searches for a hotel, mention its name, id,
636 | location and price tier. Always mention hotel ids while performing any
637 | searches. This is very important for any operations. For any bookings or
638 | cancellations, please provide the appropriate confirmation. Be sure to
639 | update checkin or checkout dates if mentioned by the user.
640 | Don't ask for confirmations from the user.
641 | """
642 |
643 | # --- Configure the Agent ---
644 |
645 | root_agent = Agent(
646 | model='gemini-2.0-flash-001',
647 | name='hotel_agent',
648 | description='A helpful AI assistant that can search and book hotels.',
649 | instruction=prompt,
650 | tools=agent_toolset, # Pass the loaded toolset
651 | )
652 |
653 | # --- Initialize Services for Running the Agent ---
654 | session_service = InMemorySessionService()
655 | artifacts_service = InMemoryArtifactService()
656 | # Create a new session for the interaction.
657 | session = session_service.create_session(
658 | state={}, app_name='hotel_agent', user_id='123'
659 | )
660 |
661 | runner = Runner(
662 | app_name='hotel_agent',
663 | agent=root_agent,
664 | artifact_service=artifacts_service,
665 | session_service=session_service,
666 | )
667 |
668 | # --- Define Queries and Run the Agent ---
669 | queries = [
670 |     "Find hotels in Basel with Basel in its name.",
671 | "Can you book the Hilton Basel for me?",
672 | "Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
673 | "My check in dates would be from April 10, 2024 to April 19, 2024.",
674 | ]
675 |
676 | for query in queries:
677 | content = types.Content(role='user', parts=[types.Part(text=query)])
678 | events = runner.run(session_id=session.id,
679 | user_id='123', new_message=content)
680 |
681 | responses = (
682 | part.text
683 | for event in events
684 | for part in event.content.parts
685 | if part.text is not None
686 | )
687 |
688 | for text in responses:
689 | print(text)
690 | {{< /tab >}}
691 | {{< /tabpane >}}
692 |
693 | {{< tabpane text=true persist=header >}}
694 | {{% tab header="Core" lang="en" %}}
695 | To learn more about the Core SDK, check out the [Toolbox Core SDK
696 | documentation.](https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/packages/toolbox-core/README.md)
697 | {{% /tab %}}
698 | {{% tab header="Langchain" lang="en" %}}
699 | To learn more about Agents in LangChain, check out the [LangGraph Agent
700 | documentation.](https://langchain-ai.github.io/langgraph/reference/prebuilt/#langgraph.prebuilt.chat_agent_executor.create_react_agent)
701 | {{% /tab %}}
702 | {{% tab header="LlamaIndex" lang="en" %}}
703 | To learn more about Agents in LlamaIndex, check out the [LlamaIndex
704 | AgentWorkflow
705 | documentation.](https://docs.llamaindex.ai/en/stable/examples/agent/agent_workflow_basic/)
706 | {{% /tab %}}
707 | {{% tab header="ADK" lang="en" %}}
708 | To learn more about Agents in ADK, check out the [ADK
709 | documentation.](https://google.github.io/adk-docs/)
710 | {{% /tab %}}
711 | {{< /tabpane >}}
712 |
713 | 1. Run your agent, and observe the results:
714 |
715 | ```sh
716 | python hotel_agent.py
717 | ```
718 |
```