This is page 27 of 44. Use http://codebase.md/googleapis/genai-toolbox?page={x} to view the full context.
# Directory Structure
```
├── .ci
│ ├── continuous.release.cloudbuild.yaml
│ ├── generate_release_table.sh
│ ├── integration.cloudbuild.yaml
│ ├── quickstart_test
│ │ ├── go.integration.cloudbuild.yaml
│ │ ├── js.integration.cloudbuild.yaml
│ │ ├── py.integration.cloudbuild.yaml
│ │ ├── run_go_tests.sh
│ │ ├── run_js_tests.sh
│ │ ├── run_py_tests.sh
│ │ └── setup_hotels_sample.sql
│ ├── test_with_coverage.sh
│ └── versioned.release.cloudbuild.yaml
├── .github
│ ├── auto-label.yaml
│ ├── blunderbuss.yml
│ ├── CODEOWNERS
│ ├── header-checker-lint.yml
│ ├── ISSUE_TEMPLATE
│ │ ├── bug_report.yml
│ │ ├── config.yml
│ │ ├── feature_request.yml
│ │ └── question.yml
│ ├── label-sync.yml
│ ├── labels.yaml
│ ├── PULL_REQUEST_TEMPLATE.md
│ ├── release-please.yml
│ ├── renovate.json5
│ ├── sync-repo-settings.yaml
│ └── workflows
│ ├── cloud_build_failure_reporter.yml
│ ├── deploy_dev_docs.yaml
│ ├── deploy_previous_version_docs.yaml
│ ├── deploy_versioned_docs.yaml
│ ├── docs_deploy.yaml
│ ├── docs_preview_clean.yaml
│ ├── docs_preview_deploy.yaml
│ ├── lint.yaml
│ ├── schedule_reporter.yml
│ ├── sync-labels.yaml
│ └── tests.yaml
├── .gitignore
├── .gitmodules
├── .golangci.yaml
├── .hugo
│ ├── archetypes
│ │ └── default.md
│ ├── assets
│ │ ├── icons
│ │ │ └── logo.svg
│ │ └── scss
│ │ ├── _styles_project.scss
│ │ └── _variables_project.scss
│ ├── go.mod
│ ├── go.sum
│ ├── hugo.toml
│ ├── layouts
│ │ ├── _default
│ │ │ └── home.releases.releases
│ │ ├── index.llms-full.txt
│ │ ├── index.llms.txt
│ │ ├── partials
│ │ │ ├── hooks
│ │ │ │ └── head-end.html
│ │ │ ├── navbar-version-selector.html
│ │ │ ├── page-meta-links.html
│ │ │ └── td
│ │ │ └── render-heading.html
│ │ ├── robot.txt
│ │ └── shortcodes
│ │ ├── include.html
│ │ ├── ipynb.html
│ │ └── regionInclude.html
│ ├── package-lock.json
│ ├── package.json
│ └── static
│ ├── favicons
│ │ ├── android-chrome-192x192.png
│ │ ├── android-chrome-512x512.png
│ │ ├── apple-touch-icon.png
│ │ ├── favicon-16x16.png
│ │ ├── favicon-32x32.png
│ │ └── favicon.ico
│ └── js
│ └── w3.js
├── CHANGELOG.md
├── cmd
│ ├── options_test.go
│ ├── options.go
│ ├── root_test.go
│ ├── root.go
│ └── version.txt
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── DEVELOPER.md
├── Dockerfile
├── docs
│ └── en
│ ├── _index.md
│ ├── about
│ │ ├── _index.md
│ │ └── faq.md
│ ├── concepts
│ │ ├── _index.md
│ │ └── telemetry
│ │ ├── index.md
│ │ ├── telemetry_flow.png
│ │ └── telemetry_traces.png
│ ├── getting-started
│ │ ├── _index.md
│ │ ├── colab_quickstart.ipynb
│ │ ├── configure.md
│ │ ├── introduction
│ │ │ ├── _index.md
│ │ │ └── architecture.png
│ │ ├── local_quickstart_go.md
│ │ ├── local_quickstart_js.md
│ │ ├── local_quickstart.md
│ │ ├── mcp_quickstart
│ │ │ ├── _index.md
│ │ │ ├── inspector_tools.png
│ │ │ └── inspector.png
│ │ └── quickstart
│ │ ├── go
│ │ │ ├── adkgo
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── genAI
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── genkit
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── langchain
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ ├── openAI
│ │ │ │ ├── go.mod
│ │ │ │ ├── go.sum
│ │ │ │ └── quickstart.go
│ │ │ └── quickstart_test.go
│ │ ├── golden.txt
│ │ ├── js
│ │ │ ├── genAI
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── genkit
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── langchain
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ ├── llamaindex
│ │ │ │ ├── package-lock.json
│ │ │ │ ├── package.json
│ │ │ │ └── quickstart.js
│ │ │ └── quickstart.test.js
│ │ ├── python
│ │ │ ├── __init__.py
│ │ │ ├── adk
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── core
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── langchain
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ ├── llamaindex
│ │ │ │ ├── quickstart.py
│ │ │ │ └── requirements.txt
│ │ │ └── quickstart_test.py
│ │ └── shared
│ │ ├── cloud_setup.md
│ │ ├── configure_toolbox.md
│ │ └── database_setup.md
│ ├── how-to
│ │ ├── _index.md
│ │ ├── connect_via_geminicli.md
│ │ ├── connect_via_mcp.md
│ │ ├── connect-ide
│ │ │ ├── _index.md
│ │ │ ├── alloydb_pg_admin_mcp.md
│ │ │ ├── alloydb_pg_mcp.md
│ │ │ ├── bigquery_mcp.md
│ │ │ ├── cloud_sql_mssql_admin_mcp.md
│ │ │ ├── cloud_sql_mssql_mcp.md
│ │ │ ├── cloud_sql_mysql_admin_mcp.md
│ │ │ ├── cloud_sql_mysql_mcp.md
│ │ │ ├── cloud_sql_pg_admin_mcp.md
│ │ │ ├── cloud_sql_pg_mcp.md
│ │ │ ├── firestore_mcp.md
│ │ │ ├── looker_mcp.md
│ │ │ ├── mssql_mcp.md
│ │ │ ├── mysql_mcp.md
│ │ │ ├── neo4j_mcp.md
│ │ │ ├── postgres_mcp.md
│ │ │ ├── spanner_mcp.md
│ │ │ └── sqlite_mcp.md
│ │ ├── deploy_docker.md
│ │ ├── deploy_gke.md
│ │ ├── deploy_toolbox.md
│ │ ├── export_telemetry.md
│ │ └── toolbox-ui
│ │ ├── edit-headers.gif
│ │ ├── edit-headers.png
│ │ ├── index.md
│ │ ├── optional-param-checked.png
│ │ ├── optional-param-unchecked.png
│ │ ├── run-tool.gif
│ │ ├── tools.png
│ │ └── toolsets.png
│ ├── reference
│ │ ├── _index.md
│ │ ├── cli.md
│ │ └── prebuilt-tools.md
│ ├── resources
│ │ ├── _index.md
│ │ ├── authServices
│ │ │ ├── _index.md
│ │ │ └── google.md
│ │ ├── sources
│ │ │ ├── _index.md
│ │ │ ├── alloydb-admin.md
│ │ │ ├── alloydb-pg.md
│ │ │ ├── bigquery.md
│ │ │ ├── bigtable.md
│ │ │ ├── cassandra.md
│ │ │ ├── clickhouse.md
│ │ │ ├── cloud-healthcare.md
│ │ │ ├── cloud-monitoring.md
│ │ │ ├── cloud-sql-admin.md
│ │ │ ├── cloud-sql-mssql.md
│ │ │ ├── cloud-sql-mysql.md
│ │ │ ├── cloud-sql-pg.md
│ │ │ ├── couchbase.md
│ │ │ ├── dataplex.md
│ │ │ ├── dgraph.md
│ │ │ ├── elasticsearch.md
│ │ │ ├── firebird.md
│ │ │ ├── firestore.md
│ │ │ ├── http.md
│ │ │ ├── looker.md
│ │ │ ├── mindsdb.md
│ │ │ ├── mongodb.md
│ │ │ ├── mssql.md
│ │ │ ├── mysql.md
│ │ │ ├── neo4j.md
│ │ │ ├── oceanbase.md
│ │ │ ├── oracle.md
│ │ │ ├── postgres.md
│ │ │ ├── redis.md
│ │ │ ├── serverless-spark.md
│ │ │ ├── singlestore.md
│ │ │ ├── spanner.md
│ │ │ ├── sqlite.md
│ │ │ ├── tidb.md
│ │ │ ├── trino.md
│ │ │ ├── valkey.md
│ │ │ └── yugabytedb.md
│ │ └── tools
│ │ ├── _index.md
│ │ ├── alloydb
│ │ │ ├── _index.md
│ │ │ ├── alloydb-create-cluster.md
│ │ │ ├── alloydb-create-instance.md
│ │ │ ├── alloydb-create-user.md
│ │ │ ├── alloydb-get-cluster.md
│ │ │ ├── alloydb-get-instance.md
│ │ │ ├── alloydb-get-user.md
│ │ │ ├── alloydb-list-clusters.md
│ │ │ ├── alloydb-list-instances.md
│ │ │ ├── alloydb-list-users.md
│ │ │ └── alloydb-wait-for-operation.md
│ │ ├── alloydbainl
│ │ │ ├── _index.md
│ │ │ └── alloydb-ai-nl.md
│ │ ├── bigquery
│ │ │ ├── _index.md
│ │ │ ├── bigquery-analyze-contribution.md
│ │ │ ├── bigquery-conversational-analytics.md
│ │ │ ├── bigquery-execute-sql.md
│ │ │ ├── bigquery-forecast.md
│ │ │ ├── bigquery-get-dataset-info.md
│ │ │ ├── bigquery-get-table-info.md
│ │ │ ├── bigquery-list-dataset-ids.md
│ │ │ ├── bigquery-list-table-ids.md
│ │ │ ├── bigquery-search-catalog.md
│ │ │ └── bigquery-sql.md
│ │ ├── bigtable
│ │ │ ├── _index.md
│ │ │ └── bigtable-sql.md
│ │ ├── cassandra
│ │ │ ├── _index.md
│ │ │ └── cassandra-cql.md
│ │ ├── clickhouse
│ │ │ ├── _index.md
│ │ │ ├── clickhouse-execute-sql.md
│ │ │ ├── clickhouse-list-databases.md
│ │ │ ├── clickhouse-list-tables.md
│ │ │ └── clickhouse-sql.md
│ │ ├── cloudhealthcare
│ │ │ ├── _index.md
│ │ │ ├── cloud-healthcare-fhir-fetch-page.md
│ │ │ ├── cloud-healthcare-fhir-patient-everything.md
│ │ │ ├── cloud-healthcare-fhir-patient-search.md
│ │ │ ├── cloud-healthcare-get-dataset.md
│ │ │ ├── cloud-healthcare-get-dicom-store-metrics.md
│ │ │ ├── cloud-healthcare-get-dicom-store.md
│ │ │ ├── cloud-healthcare-get-fhir-resource.md
│ │ │ ├── cloud-healthcare-get-fhir-store-metrics.md
│ │ │ ├── cloud-healthcare-get-fhir-store.md
│ │ │ ├── cloud-healthcare-list-dicom-stores.md
│ │ │ ├── cloud-healthcare-list-fhir-stores.md
│ │ │ ├── cloud-healthcare-retrieve-rendered-dicom-instance.md
│ │ │ ├── cloud-healthcare-search-dicom-instances.md
│ │ │ ├── cloud-healthcare-search-dicom-series.md
│ │ │ └── cloud-healthcare-search-dicom-studies.md
│ │ ├── cloudmonitoring
│ │ │ ├── _index.md
│ │ │ └── cloud-monitoring-query-prometheus.md
│ │ ├── cloudsql
│ │ │ ├── _index.md
│ │ │ ├── cloudsqlcreatedatabase.md
│ │ │ ├── cloudsqlcreateusers.md
│ │ │ ├── cloudsqlgetinstances.md
│ │ │ ├── cloudsqllistdatabases.md
│ │ │ ├── cloudsqllistinstances.md
│ │ │ ├── cloudsqlmssqlcreateinstance.md
│ │ │ ├── cloudsqlmysqlcreateinstance.md
│ │ │ ├── cloudsqlpgcreateinstances.md
│ │ │ └── cloudsqlwaitforoperation.md
│ │ ├── couchbase
│ │ │ ├── _index.md
│ │ │ └── couchbase-sql.md
│ │ ├── dataform
│ │ │ ├── _index.md
│ │ │ └── dataform-compile-local.md
│ │ ├── dataplex
│ │ │ ├── _index.md
│ │ │ ├── dataplex-lookup-entry.md
│ │ │ ├── dataplex-search-aspect-types.md
│ │ │ └── dataplex-search-entries.md
│ │ ├── dgraph
│ │ │ ├── _index.md
│ │ │ └── dgraph-dql.md
│ │ ├── elasticsearch
│ │ │ ├── _index.md
│ │ │ └── elasticsearch-esql.md
│ │ ├── firebird
│ │ │ ├── _index.md
│ │ │ ├── firebird-execute-sql.md
│ │ │ └── firebird-sql.md
│ │ ├── firestore
│ │ │ ├── _index.md
│ │ │ ├── firestore-add-documents.md
│ │ │ ├── firestore-delete-documents.md
│ │ │ ├── firestore-get-documents.md
│ │ │ ├── firestore-get-rules.md
│ │ │ ├── firestore-list-collections.md
│ │ │ ├── firestore-query-collection.md
│ │ │ ├── firestore-query.md
│ │ │ ├── firestore-update-document.md
│ │ │ └── firestore-validate-rules.md
│ │ ├── http
│ │ │ ├── _index.md
│ │ │ └── http.md
│ │ ├── looker
│ │ │ ├── _index.md
│ │ │ ├── looker-add-dashboard-element.md
│ │ │ ├── looker-conversational-analytics.md
│ │ │ ├── looker-create-project-file.md
│ │ │ ├── looker-delete-project-file.md
│ │ │ ├── looker-dev-mode.md
│ │ │ ├── looker-get-connection-databases.md
│ │ │ ├── looker-get-connection-schemas.md
│ │ │ ├── looker-get-connection-table-columns.md
│ │ │ ├── looker-get-connection-tables.md
│ │ │ ├── looker-get-connections.md
│ │ │ ├── looker-get-dashboards.md
│ │ │ ├── looker-get-dimensions.md
│ │ │ ├── looker-get-explores.md
│ │ │ ├── looker-get-filters.md
│ │ │ ├── looker-get-looks.md
│ │ │ ├── looker-get-measures.md
│ │ │ ├── looker-get-models.md
│ │ │ ├── looker-get-parameters.md
│ │ │ ├── looker-get-project-file.md
│ │ │ ├── looker-get-project-files.md
│ │ │ ├── looker-get-projects.md
│ │ │ ├── looker-health-analyze.md
│ │ │ ├── looker-health-pulse.md
│ │ │ ├── looker-health-vacuum.md
│ │ │ ├── looker-make-dashboard.md
│ │ │ ├── looker-make-look.md
│ │ │ ├── looker-query-sql.md
│ │ │ ├── looker-query-url.md
│ │ │ ├── looker-query.md
│ │ │ ├── looker-run-dashboard.md
│ │ │ ├── looker-run-look.md
│ │ │ └── looker-update-project-file.md
│ │ ├── mindsdb
│ │ │ ├── _index.md
│ │ │ ├── mindsdb-execute-sql.md
│ │ │ └── mindsdb-sql.md
│ │ ├── mongodb
│ │ │ ├── _index.md
│ │ │ ├── mongodb-aggregate.md
│ │ │ ├── mongodb-delete-many.md
│ │ │ ├── mongodb-delete-one.md
│ │ │ ├── mongodb-find-one.md
│ │ │ ├── mongodb-find.md
│ │ │ ├── mongodb-insert-many.md
│ │ │ ├── mongodb-insert-one.md
│ │ │ ├── mongodb-update-many.md
│ │ │ └── mongodb-update-one.md
│ │ ├── mssql
│ │ │ ├── _index.md
│ │ │ ├── mssql-execute-sql.md
│ │ │ ├── mssql-list-tables.md
│ │ │ └── mssql-sql.md
│ │ ├── mysql
│ │ │ ├── _index.md
│ │ │ ├── mysql-execute-sql.md
│ │ │ ├── mysql-list-active-queries.md
│ │ │ ├── mysql-list-table-fragmentation.md
│ │ │ ├── mysql-list-tables-missing-unique-indexes.md
│ │ │ ├── mysql-list-tables.md
│ │ │ └── mysql-sql.md
│ │ ├── neo4j
│ │ │ ├── _index.md
│ │ │ ├── neo4j-cypher.md
│ │ │ ├── neo4j-execute-cypher.md
│ │ │ └── neo4j-schema.md
│ │ ├── oceanbase
│ │ │ ├── _index.md
│ │ │ ├── oceanbase-execute-sql.md
│ │ │ └── oceanbase-sql.md
│ │ ├── oracle
│ │ │ ├── _index.md
│ │ │ ├── oracle-execute-sql.md
│ │ │ └── oracle-sql.md
│ │ ├── postgres
│ │ │ ├── _index.md
│ │ │ ├── postgres-execute-sql.md
│ │ │ ├── postgres-list-active-queries.md
│ │ │ ├── postgres-list-available-extensions.md
│ │ │ ├── postgres-list-installed-extensions.md
│ │ │ ├── postgres-list-schemas.md
│ │ │ ├── postgres-list-tables.md
│ │ │ ├── postgres-list-views.md
│ │ │ └── postgres-sql.md
│ │ ├── redis
│ │ │ ├── _index.md
│ │ │ └── redis.md
│ │ ├── serverless-spark
│ │ │ ├── _index.md
│ │ │ ├── serverless-spark-cancel-batch.md
│ │ │ ├── serverless-spark-get-batch.md
│ │ │ └── serverless-spark-list-batches.md
│ │ ├── singlestore
│ │ │ ├── _index.md
│ │ │ ├── singlestore-execute-sql.md
│ │ │ └── singlestore-sql.md
│ │ ├── spanner
│ │ │ ├── _index.md
│ │ │ ├── spanner-execute-sql.md
│ │ │ ├── spanner-list-tables.md
│ │ │ └── spanner-sql.md
│ │ ├── sqlite
│ │ │ ├── _index.md
│ │ │ ├── sqlite-execute-sql.md
│ │ │ └── sqlite-sql.md
│ │ ├── tidb
│ │ │ ├── _index.md
│ │ │ ├── tidb-execute-sql.md
│ │ │ └── tidb-sql.md
│ │ ├── trino
│ │ │ ├── _index.md
│ │ │ ├── trino-execute-sql.md
│ │ │ └── trino-sql.md
│ │ ├── utility
│ │ │ ├── _index.md
│ │ │ └── wait.md
│ │ ├── valkey
│ │ │ ├── _index.md
│ │ │ └── valkey.md
│ │ └── yuagbytedb
│ │ ├── _index.md
│ │ └── yugabytedb-sql.md
│ ├── samples
│ │ ├── _index.md
│ │ ├── alloydb
│ │ │ ├── _index.md
│ │ │ ├── ai-nl
│ │ │ │ ├── alloydb_ai_nl.ipynb
│ │ │ │ └── index.md
│ │ │ └── mcp_quickstart.md
│ │ ├── bigquery
│ │ │ ├── _index.md
│ │ │ ├── colab_quickstart_bigquery.ipynb
│ │ │ ├── local_quickstart.md
│ │ │ └── mcp_quickstart
│ │ │ ├── _index.md
│ │ │ ├── inspector_tools.png
│ │ │ └── inspector.png
│ │ └── looker
│ │ ├── _index.md
│ │ ├── looker_gemini_oauth
│ │ │ ├── _index.md
│ │ │ ├── authenticated.png
│ │ │ ├── authorize.png
│ │ │ └── registration.png
│ │ ├── looker_gemini.md
│ │ └── looker_mcp_inspector
│ │ ├── _index.md
│ │ ├── inspector_tools.png
│ │ └── inspector.png
│ └── sdks
│ ├── _index.md
│ ├── go-sdk.md
│ ├── js-sdk.md
│ └── python-sdk.md
├── gemini-extension.json
├── go.mod
├── go.sum
├── internal
│ ├── auth
│ │ ├── auth.go
│ │ └── google
│ │ └── google.go
│ ├── log
│ │ ├── handler.go
│ │ ├── log_test.go
│ │ ├── log.go
│ │ └── logger.go
│ ├── prebuiltconfigs
│ │ ├── prebuiltconfigs_test.go
│ │ ├── prebuiltconfigs.go
│ │ └── tools
│ │ ├── alloydb-postgres-admin.yaml
│ │ ├── alloydb-postgres-observability.yaml
│ │ ├── alloydb-postgres.yaml
│ │ ├── bigquery.yaml
│ │ ├── clickhouse.yaml
│ │ ├── cloud-healthcare.yaml
│ │ ├── cloud-sql-mssql-admin.yaml
│ │ ├── cloud-sql-mssql-observability.yaml
│ │ ├── cloud-sql-mssql.yaml
│ │ ├── cloud-sql-mysql-admin.yaml
│ │ ├── cloud-sql-mysql-observability.yaml
│ │ ├── cloud-sql-mysql.yaml
│ │ ├── cloud-sql-postgres-admin.yaml
│ │ ├── cloud-sql-postgres-observability.yaml
│ │ ├── cloud-sql-postgres.yaml
│ │ ├── dataplex.yaml
│ │ ├── elasticsearch.yaml
│ │ ├── firestore.yaml
│ │ ├── looker-conversational-analytics.yaml
│ │ ├── looker.yaml
│ │ ├── mindsdb.yaml
│ │ ├── mssql.yaml
│ │ ├── mysql.yaml
│ │ ├── neo4j.yaml
│ │ ├── oceanbase.yaml
│ │ ├── postgres.yaml
│ │ ├── serverless-spark.yaml
│ │ ├── singlestore.yaml
│ │ ├── spanner-postgres.yaml
│ │ ├── spanner.yaml
│ │ └── sqlite.yaml
│ ├── server
│ │ ├── api_test.go
│ │ ├── api.go
│ │ ├── common_test.go
│ │ ├── config.go
│ │ ├── mcp
│ │ │ ├── jsonrpc
│ │ │ │ ├── jsonrpc_test.go
│ │ │ │ └── jsonrpc.go
│ │ │ ├── mcp.go
│ │ │ ├── util
│ │ │ │ └── lifecycle.go
│ │ │ ├── v20241105
│ │ │ │ ├── method.go
│ │ │ │ └── types.go
│ │ │ ├── v20250326
│ │ │ │ ├── method.go
│ │ │ │ └── types.go
│ │ │ └── v20250618
│ │ │ ├── method.go
│ │ │ └── types.go
│ │ ├── mcp_test.go
│ │ ├── mcp.go
│ │ ├── server_test.go
│ │ ├── server.go
│ │ ├── static
│ │ │ ├── assets
│ │ │ │ └── mcptoolboxlogo.png
│ │ │ ├── css
│ │ │ │ └── style.css
│ │ │ ├── index.html
│ │ │ ├── js
│ │ │ │ ├── auth.js
│ │ │ │ ├── loadTools.js
│ │ │ │ ├── mainContent.js
│ │ │ │ ├── navbar.js
│ │ │ │ ├── runTool.js
│ │ │ │ ├── toolDisplay.js
│ │ │ │ ├── tools.js
│ │ │ │ └── toolsets.js
│ │ │ ├── tools.html
│ │ │ └── toolsets.html
│ │ ├── web_test.go
│ │ └── web.go
│ ├── sources
│ │ ├── alloydbadmin
│ │ │ ├── alloydbadmin_test.go
│ │ │ └── alloydbadmin.go
│ │ ├── alloydbpg
│ │ │ ├── alloydb_pg_test.go
│ │ │ └── alloydb_pg.go
│ │ ├── bigquery
│ │ │ ├── bigquery_test.go
│ │ │ ├── bigquery.go
│ │ │ └── cache.go
│ │ ├── bigtable
│ │ │ ├── bigtable_test.go
│ │ │ └── bigtable.go
│ │ ├── cassandra
│ │ │ ├── cassandra_test.go
│ │ │ └── cassandra.go
│ │ ├── clickhouse
│ │ │ ├── clickhouse_test.go
│ │ │ └── clickhouse.go
│ │ ├── cloudhealthcare
│ │ │ ├── cloud_healthcare_test.go
│ │ │ └── cloud_healthcare.go
│ │ ├── cloudmonitoring
│ │ │ ├── cloud_monitoring_test.go
│ │ │ └── cloud_monitoring.go
│ │ ├── cloudsqladmin
│ │ │ ├── cloud_sql_admin_test.go
│ │ │ └── cloud_sql_admin.go
│ │ ├── cloudsqlmssql
│ │ │ ├── cloud_sql_mssql_test.go
│ │ │ └── cloud_sql_mssql.go
│ │ ├── cloudsqlmysql
│ │ │ ├── cloud_sql_mysql_test.go
│ │ │ └── cloud_sql_mysql.go
│ │ ├── cloudsqlpg
│ │ │ ├── cloud_sql_pg_test.go
│ │ │ └── cloud_sql_pg.go
│ │ ├── couchbase
│ │ │ ├── couchbase_test.go
│ │ │ └── couchbase.go
│ │ ├── dataplex
│ │ │ ├── dataplex_test.go
│ │ │ └── dataplex.go
│ │ ├── dgraph
│ │ │ ├── dgraph_test.go
│ │ │ └── dgraph.go
│ │ ├── dialect.go
│ │ ├── elasticsearch
│ │ │ ├── elasticsearch_test.go
│ │ │ └── elasticsearch.go
│ │ ├── firebird
│ │ │ ├── firebird_test.go
│ │ │ └── firebird.go
│ │ ├── firestore
│ │ │ ├── firestore_test.go
│ │ │ └── firestore.go
│ │ ├── http
│ │ │ ├── http_test.go
│ │ │ └── http.go
│ │ ├── ip_type.go
│ │ ├── looker
│ │ │ ├── looker_test.go
│ │ │ └── looker.go
│ │ ├── mindsdb
│ │ │ ├── mindsdb_test.go
│ │ │ └── mindsdb.go
│ │ ├── mongodb
│ │ │ ├── mongodb_test.go
│ │ │ └── mongodb.go
│ │ ├── mssql
│ │ │ ├── mssql_test.go
│ │ │ └── mssql.go
│ │ ├── mysql
│ │ │ ├── mysql_test.go
│ │ │ └── mysql.go
│ │ ├── neo4j
│ │ │ ├── neo4j_test.go
│ │ │ └── neo4j.go
│ │ ├── oceanbase
│ │ │ ├── oceanbase_test.go
│ │ │ └── oceanbase.go
│ │ ├── oracle
│ │ │ └── oracle.go
│ │ ├── postgres
│ │ │ ├── postgres_test.go
│ │ │ └── postgres.go
│ │ ├── redis
│ │ │ ├── redis_test.go
│ │ │ └── redis.go
│ │ ├── serverlessspark
│ │ │ ├── serverlessspark_test.go
│ │ │ └── serverlessspark.go
│ │ ├── singlestore
│ │ │ ├── singlestore_test.go
│ │ │ └── singlestore.go
│ │ ├── sources.go
│ │ ├── spanner
│ │ │ ├── spanner_test.go
│ │ │ └── spanner.go
│ │ ├── sqlite
│ │ │ ├── sqlite_test.go
│ │ │ └── sqlite.go
│ │ ├── tidb
│ │ │ ├── tidb_test.go
│ │ │ └── tidb.go
│ │ ├── trino
│ │ │ ├── trino_test.go
│ │ │ └── trino.go
│ │ ├── util.go
│ │ ├── valkey
│ │ │ ├── valkey_test.go
│ │ │ └── valkey.go
│ │ └── yugabytedb
│ │ ├── yugabytedb_test.go
│ │ └── yugabytedb.go
│ ├── telemetry
│ │ ├── instrumentation.go
│ │ └── telemetry.go
│ ├── testutils
│ │ └── testutils.go
│ ├── tools
│ │ ├── alloydb
│ │ │ ├── alloydbcreatecluster
│ │ │ │ ├── alloydbcreatecluster_test.go
│ │ │ │ └── alloydbcreatecluster.go
│ │ │ ├── alloydbcreateinstance
│ │ │ │ ├── alloydbcreateinstance_test.go
│ │ │ │ └── alloydbcreateinstance.go
│ │ │ ├── alloydbcreateuser
│ │ │ │ ├── alloydbcreateuser_test.go
│ │ │ │ └── alloydbcreateuser.go
│ │ │ ├── alloydbgetcluster
│ │ │ │ ├── alloydbgetcluster_test.go
│ │ │ │ └── alloydbgetcluster.go
│ │ │ ├── alloydbgetinstance
│ │ │ │ ├── alloydbgetinstance_test.go
│ │ │ │ └── alloydbgetinstance.go
│ │ │ ├── alloydbgetuser
│ │ │ │ ├── alloydbgetuser_test.go
│ │ │ │ └── alloydbgetuser.go
│ │ │ ├── alloydblistclusters
│ │ │ │ ├── alloydblistclusters_test.go
│ │ │ │ └── alloydblistclusters.go
│ │ │ ├── alloydblistinstances
│ │ │ │ ├── alloydblistinstances_test.go
│ │ │ │ └── alloydblistinstances.go
│ │ │ ├── alloydblistusers
│ │ │ │ ├── alloydblistusers_test.go
│ │ │ │ └── alloydblistusers.go
│ │ │ └── alloydbwaitforoperation
│ │ │ ├── alloydbwaitforoperation_test.go
│ │ │ └── alloydbwaitforoperation.go
│ │ ├── alloydbainl
│ │ │ ├── alloydbainl_test.go
│ │ │ └── alloydbainl.go
│ │ ├── bigquery
│ │ │ ├── bigqueryanalyzecontribution
│ │ │ │ ├── bigqueryanalyzecontribution_test.go
│ │ │ │ └── bigqueryanalyzecontribution.go
│ │ │ ├── bigquerycommon
│ │ │ │ ├── table_name_parser_test.go
│ │ │ │ ├── table_name_parser.go
│ │ │ │ └── util.go
│ │ │ ├── bigqueryconversationalanalytics
│ │ │ │ ├── bigqueryconversationalanalytics_test.go
│ │ │ │ └── bigqueryconversationalanalytics.go
│ │ │ ├── bigqueryexecutesql
│ │ │ │ ├── bigqueryexecutesql_test.go
│ │ │ │ └── bigqueryexecutesql.go
│ │ │ ├── bigqueryforecast
│ │ │ │ ├── bigqueryforecast_test.go
│ │ │ │ └── bigqueryforecast.go
│ │ │ ├── bigquerygetdatasetinfo
│ │ │ │ ├── bigquerygetdatasetinfo_test.go
│ │ │ │ └── bigquerygetdatasetinfo.go
│ │ │ ├── bigquerygettableinfo
│ │ │ │ ├── bigquerygettableinfo_test.go
│ │ │ │ └── bigquerygettableinfo.go
│ │ │ ├── bigquerylistdatasetids
│ │ │ │ ├── bigquerylistdatasetids_test.go
│ │ │ │ └── bigquerylistdatasetids.go
│ │ │ ├── bigquerylisttableids
│ │ │ │ ├── bigquerylisttableids_test.go
│ │ │ │ └── bigquerylisttableids.go
│ │ │ ├── bigquerysearchcatalog
│ │ │ │ ├── bigquerysearchcatalog_test.go
│ │ │ │ └── bigquerysearchcatalog.go
│ │ │ └── bigquerysql
│ │ │ ├── bigquerysql_test.go
│ │ │ └── bigquerysql.go
│ │ ├── bigtable
│ │ │ ├── bigtable_test.go
│ │ │ └── bigtable.go
│ │ ├── cassandra
│ │ │ └── cassandracql
│ │ │ ├── cassandracql_test.go
│ │ │ └── cassandracql.go
│ │ ├── clickhouse
│ │ │ ├── clickhouseexecutesql
│ │ │ │ ├── clickhouseexecutesql_test.go
│ │ │ │ └── clickhouseexecutesql.go
│ │ │ ├── clickhouselistdatabases
│ │ │ │ ├── clickhouselistdatabases_test.go
│ │ │ │ └── clickhouselistdatabases.go
│ │ │ ├── clickhouselisttables
│ │ │ │ ├── clickhouselisttables_test.go
│ │ │ │ └── clickhouselisttables.go
│ │ │ └── clickhousesql
│ │ │ ├── clickhousesql_test.go
│ │ │ └── clickhousesql.go
│ │ ├── cloudhealthcare
│ │ │ ├── cloudhealthcarefhirfetchpage
│ │ │ │ ├── cloudhealthcarefhirfetchpage_test.go
│ │ │ │ └── cloudhealthcarefhirfetchpage.go
│ │ │ ├── cloudhealthcarefhirpatienteverything
│ │ │ │ ├── cloudhealthcarefhirpatienteverything_test.go
│ │ │ │ └── cloudhealthcarefhirpatienteverything.go
│ │ │ ├── cloudhealthcarefhirpatientsearch
│ │ │ │ ├── cloudhealthcarefhirpatientsearch_test.go
│ │ │ │ └── cloudhealthcarefhirpatientsearch.go
│ │ │ ├── cloudhealthcaregetdataset
│ │ │ │ ├── cloudhealthcaregetdataset_test.go
│ │ │ │ └── cloudhealthcaregetdataset.go
│ │ │ ├── cloudhealthcaregetdicomstore
│ │ │ │ ├── cloudhealthcaregetdicomstore_test.go
│ │ │ │ └── cloudhealthcaregetdicomstore.go
│ │ │ ├── cloudhealthcaregetdicomstoremetrics
│ │ │ │ ├── cloudhealthcaregetdicomstoremetrics_test.go
│ │ │ │ └── cloudhealthcaregetdicomstoremetrics.go
│ │ │ ├── cloudhealthcaregetfhirresource
│ │ │ │ ├── cloudhealthcaregetfhirresource_test.go
│ │ │ │ └── cloudhealthcaregetfhirresource.go
│ │ │ ├── cloudhealthcaregetfhirstore
│ │ │ │ ├── cloudhealthcaregetfhirstore_test.go
│ │ │ │ └── cloudhealthcaregetfhirstore.go
│ │ │ ├── cloudhealthcaregetfhirstoremetrics
│ │ │ │ ├── cloudhealthcaregetfhirstoremetrics_test.go
│ │ │ │ └── cloudhealthcaregetfhirstoremetrics.go
│ │ │ ├── cloudhealthcarelistdicomstores
│ │ │ │ ├── cloudhealthcarelistdicomstores_test.go
│ │ │ │ └── cloudhealthcarelistdicomstores.go
│ │ │ ├── cloudhealthcarelistfhirstores
│ │ │ │ ├── cloudhealthcarelistfhirstores_test.go
│ │ │ │ └── cloudhealthcarelistfhirstores.go
│ │ │ ├── cloudhealthcareretrieverendereddicominstance
│ │ │ │ ├── cloudhealthcareretrieverendereddicominstance_test.go
│ │ │ │ └── cloudhealthcareretrieverendereddicominstance.go
│ │ │ ├── cloudhealthcaresearchdicominstances
│ │ │ │ ├── cloudhealthcaresearchdicominstances_test.go
│ │ │ │ └── cloudhealthcaresearchdicominstances.go
│ │ │ ├── cloudhealthcaresearchdicomseries
│ │ │ │ ├── cloudhealthcaresearchdicomseries_test.go
│ │ │ │ └── cloudhealthcaresearchdicomseries.go
│ │ │ ├── cloudhealthcaresearchdicomstudies
│ │ │ │ ├── cloudhealthcaresearchdicomstudies_test.go
│ │ │ │ └── cloudhealthcaresearchdicomstudies.go
│ │ │ └── common
│ │ │ └── util.go
│ │ ├── cloudmonitoring
│ │ │ ├── cloudmonitoring_test.go
│ │ │ └── cloudmonitoring.go
│ │ ├── cloudsql
│ │ │ ├── cloudsqlcreatedatabase
│ │ │ │ ├── cloudsqlcreatedatabase_test.go
│ │ │ │ └── cloudsqlcreatedatabase.go
│ │ │ ├── cloudsqlcreateusers
│ │ │ │ ├── cloudsqlcreateusers_test.go
│ │ │ │ └── cloudsqlcreateusers.go
│ │ │ ├── cloudsqlgetinstances
│ │ │ │ ├── cloudsqlgetinstances_test.go
│ │ │ │ └── cloudsqlgetinstances.go
│ │ │ ├── cloudsqllistdatabases
│ │ │ │ ├── cloudsqllistdatabases_test.go
│ │ │ │ └── cloudsqllistdatabases.go
│ │ │ ├── cloudsqllistinstances
│ │ │ │ ├── cloudsqllistinstances_test.go
│ │ │ │ └── cloudsqllistinstances.go
│ │ │ └── cloudsqlwaitforoperation
│ │ │ ├── cloudsqlwaitforoperation_test.go
│ │ │ └── cloudsqlwaitforoperation.go
│ │ ├── cloudsqlmssql
│ │ │ └── cloudsqlmssqlcreateinstance
│ │ │ ├── cloudsqlmssqlcreateinstance_test.go
│ │ │ └── cloudsqlmssqlcreateinstance.go
│ │ ├── cloudsqlmysql
│ │ │ └── cloudsqlmysqlcreateinstance
│ │ │ ├── cloudsqlmysqlcreateinstance_test.go
│ │ │ └── cloudsqlmysqlcreateinstance.go
│ │ ├── cloudsqlpg
│ │ │ └── cloudsqlpgcreateinstances
│ │ │ ├── cloudsqlpgcreateinstances_test.go
│ │ │ └── cloudsqlpgcreateinstances.go
│ │ ├── common_test.go
│ │ ├── common.go
│ │ ├── couchbase
│ │ │ ├── couchbase_test.go
│ │ │ └── couchbase.go
│ │ ├── dataform
│ │ │ └── dataformcompilelocal
│ │ │ ├── dataformcompilelocal_test.go
│ │ │ └── dataformcompilelocal.go
│ │ ├── dataplex
│ │ │ ├── dataplexlookupentry
│ │ │ │ ├── dataplexlookupentry_test.go
│ │ │ │ └── dataplexlookupentry.go
│ │ │ ├── dataplexsearchaspecttypes
│ │ │ │ ├── dataplexsearchaspecttypes_test.go
│ │ │ │ └── dataplexsearchaspecttypes.go
│ │ │ └── dataplexsearchentries
│ │ │ ├── dataplexsearchentries_test.go
│ │ │ └── dataplexsearchentries.go
│ │ ├── dgraph
│ │ │ ├── dgraph_test.go
│ │ │ └── dgraph.go
│ │ ├── elasticsearch
│ │ │ └── elasticsearchesql
│ │ │ ├── elasticsearchesql_test.go
│ │ │ └── elasticsearchesql.go
│ │ ├── firebird
│ │ │ ├── firebirdexecutesql
│ │ │ │ ├── firebirdexecutesql_test.go
│ │ │ │ └── firebirdexecutesql.go
│ │ │ └── firebirdsql
│ │ │ ├── firebirdsql_test.go
│ │ │ └── firebirdsql.go
│ │ ├── firestore
│ │ │ ├── firestoreadddocuments
│ │ │ │ ├── firestoreadddocuments_test.go
│ │ │ │ └── firestoreadddocuments.go
│ │ │ ├── firestoredeletedocuments
│ │ │ │ ├── firestoredeletedocuments_test.go
│ │ │ │ └── firestoredeletedocuments.go
│ │ │ ├── firestoregetdocuments
│ │ │ │ ├── firestoregetdocuments_test.go
│ │ │ │ └── firestoregetdocuments.go
│ │ │ ├── firestoregetrules
│ │ │ │ ├── firestoregetrules_test.go
│ │ │ │ └── firestoregetrules.go
│ │ │ ├── firestorelistcollections
│ │ │ │ ├── firestorelistcollections_test.go
│ │ │ │ └── firestorelistcollections.go
│ │ │ ├── firestorequery
│ │ │ │ ├── firestorequery_test.go
│ │ │ │ └── firestorequery.go
│ │ │ ├── firestorequerycollection
│ │ │ │ ├── firestorequerycollection_test.go
│ │ │ │ └── firestorequerycollection.go
│ │ │ ├── firestoreupdatedocument
│ │ │ │ ├── firestoreupdatedocument_test.go
│ │ │ │ └── firestoreupdatedocument.go
│ │ │ ├── firestorevalidaterules
│ │ │ │ ├── firestorevalidaterules_test.go
│ │ │ │ └── firestorevalidaterules.go
│ │ │ └── util
│ │ │ ├── converter_test.go
│ │ │ ├── converter.go
│ │ │ ├── validator_test.go
│ │ │ └── validator.go
│ │ ├── http
│ │ │ ├── http_test.go
│ │ │ └── http.go
│ │ ├── http_method.go
│ │ ├── looker
│ │ │ ├── lookeradddashboardelement
│ │ │ │ ├── lookeradddashboardelement_test.go
│ │ │ │ └── lookeradddashboardelement.go
│ │ │ ├── lookercommon
│ │ │ │ ├── lookercommon_test.go
│ │ │ │ └── lookercommon.go
│ │ │ ├── lookerconversationalanalytics
│ │ │ │ ├── lookerconversationalanalytics_test.go
│ │ │ │ └── lookerconversationalanalytics.go
│ │ │ ├── lookercreateprojectfile
│ │ │ │ ├── lookercreateprojectfile_test.go
│ │ │ │ └── lookercreateprojectfile.go
│ │ │ ├── lookerdeleteprojectfile
│ │ │ │ ├── lookerdeleteprojectfile_test.go
│ │ │ │ └── lookerdeleteprojectfile.go
│ │ │ ├── lookerdevmode
│ │ │ │ ├── lookerdevmode_test.go
│ │ │ │ └── lookerdevmode.go
│ │ │ ├── lookergetconnectiondatabases
│ │ │ │ ├── lookergetconnectiondatabases_test.go
│ │ │ │ └── lookergetconnectiondatabases.go
│ │ │ ├── lookergetconnections
│ │ │ │ ├── lookergetconnections_test.go
│ │ │ │ └── lookergetconnections.go
│ │ │ ├── lookergetconnectionschemas
│ │ │ │ ├── lookergetconnectionschemas_test.go
│ │ │ │ └── lookergetconnectionschemas.go
│ │ │ ├── lookergetconnectiontablecolumns
│ │ │ │ ├── lookergetconnectiontablecolumns_test.go
│ │ │ │ └── lookergetconnectiontablecolumns.go
│ │ │ ├── lookergetconnectiontables
│ │ │ │ ├── lookergetconnectiontables_test.go
│ │ │ │ └── lookergetconnectiontables.go
│ │ │ ├── lookergetdashboards
│ │ │ │ ├── lookergetdashboards_test.go
│ │ │ │ └── lookergetdashboards.go
│ │ │ ├── lookergetdimensions
│ │ │ │ ├── lookergetdimensions_test.go
│ │ │ │ └── lookergetdimensions.go
│ │ │ ├── lookergetexplores
│ │ │ │ ├── lookergetexplores_test.go
│ │ │ │ └── lookergetexplores.go
│ │ │ ├── lookergetfilters
│ │ │ │ ├── lookergetfilters_test.go
│ │ │ │ └── lookergetfilters.go
│ │ │ ├── lookergetlooks
│ │ │ │ ├── lookergetlooks_test.go
│ │ │ │ └── lookergetlooks.go
│ │ │ ├── lookergetmeasures
│ │ │ │ ├── lookergetmeasures_test.go
│ │ │ │ └── lookergetmeasures.go
│ │ │ ├── lookergetmodels
│ │ │ │ ├── lookergetmodels_test.go
│ │ │ │ └── lookergetmodels.go
│ │ │ ├── lookergetparameters
│ │ │ │ ├── lookergetparameters_test.go
│ │ │ │ └── lookergetparameters.go
│ │ │ ├── lookergetprojectfile
│ │ │ │ ├── lookergetprojectfile_test.go
│ │ │ │ └── lookergetprojectfile.go
│ │ │ ├── lookergetprojectfiles
│ │ │ │ ├── lookergetprojectfiles_test.go
│ │ │ │ └── lookergetprojectfiles.go
│ │ │ ├── lookergetprojects
│ │ │ │ ├── lookergetprojects_test.go
│ │ │ │ └── lookergetprojects.go
│ │ │ ├── lookerhealthanalyze
│ │ │ │ ├── lookerhealthanalyze_test.go
│ │ │ │ └── lookerhealthanalyze.go
│ │ │ ├── lookerhealthpulse
│ │ │ │ ├── lookerhealthpulse_test.go
│ │ │ │ └── lookerhealthpulse.go
│ │ │ ├── lookerhealthvacuum
│ │ │ │ ├── lookerhealthvacuum_test.go
│ │ │ │ └── lookerhealthvacuum.go
│ │ │ ├── lookermakedashboard
│ │ │ │ ├── lookermakedashboard_test.go
│ │ │ │ └── lookermakedashboard.go
│ │ │ ├── lookermakelook
│ │ │ │ ├── lookermakelook_test.go
│ │ │ │ └── lookermakelook.go
│ │ │ ├── lookerquery
│ │ │ │ ├── lookerquery_test.go
│ │ │ │ └── lookerquery.go
│ │ │ ├── lookerquerysql
│ │ │ │ ├── lookerquerysql_test.go
│ │ │ │ └── lookerquerysql.go
│ │ │ ├── lookerqueryurl
│ │ │ │ ├── lookerqueryurl_test.go
│ │ │ │ └── lookerqueryurl.go
│ │ │ ├── lookerrundashboard
│ │ │ │ ├── lookerrundashboard_test.go
│ │ │ │ └── lookerrundashboard.go
│ │ │ ├── lookerrunlook
│ │ │ │ ├── lookerrunlook_test.go
│ │ │ │ └── lookerrunlook.go
│ │ │ └── lookerupdateprojectfile
│ │ │ ├── lookerupdateprojectfile_test.go
│ │ │ └── lookerupdateprojectfile.go
│ │ ├── mindsdb
│ │ │ ├── mindsdbexecutesql
│ │ │ │ ├── mindsdbexecutesql_test.go
│ │ │ │ └── mindsdbexecutesql.go
│ │ │ └── mindsdbsql
│ │ │ ├── mindsdbsql_test.go
│ │ │ └── mindsdbsql.go
│ │ ├── mongodb
│ │ │ ├── mongodbaggregate
│ │ │ │ ├── mongodbaggregate_test.go
│ │ │ │ └── mongodbaggregate.go
│ │ │ ├── mongodbdeletemany
│ │ │ │ ├── mongodbdeletemany_test.go
│ │ │ │ └── mongodbdeletemany.go
│ │ │ ├── mongodbdeleteone
│ │ │ │ ├── mongodbdeleteone_test.go
│ │ │ │ └── mongodbdeleteone.go
│ │ │ ├── mongodbfind
│ │ │ │ ├── mongodbfind_test.go
│ │ │ │ └── mongodbfind.go
│ │ │ ├── mongodbfindone
│ │ │ │ ├── mongodbfindone_test.go
│ │ │ │ └── mongodbfindone.go
│ │ │ ├── mongodbinsertmany
│ │ │ │ ├── mongodbinsertmany_test.go
│ │ │ │ └── mongodbinsertmany.go
│ │ │ ├── mongodbinsertone
│ │ │ │ ├── mongodbinsertone_test.go
│ │ │ │ └── mongodbinsertone.go
│ │ │ ├── mongodbupdatemany
│ │ │ │ ├── mongodbupdatemany_test.go
│ │ │ │ └── mongodbupdatemany.go
│ │ │ └── mongodbupdateone
│ │ │ ├── mongodbupdateone_test.go
│ │ │ └── mongodbupdateone.go
│ │ ├── mssql
│ │ │ ├── mssqlexecutesql
│ │ │ │ ├── mssqlexecutesql_test.go
│ │ │ │ └── mssqlexecutesql.go
│ │ │ ├── mssqllisttables
│ │ │ │ ├── mssqllisttables_test.go
│ │ │ │ └── mssqllisttables.go
│ │ │ └── mssqlsql
│ │ │ ├── mssqlsql_test.go
│ │ │ └── mssqlsql.go
│ │ ├── mysql
│ │ │ ├── mysqlcommon
│ │ │ │ └── mysqlcommon.go
│ │ │ ├── mysqlexecutesql
│ │ │ │ ├── mysqlexecutesql_test.go
│ │ │ │ └── mysqlexecutesql.go
│ │ │ ├── mysqllistactivequeries
│ │ │ │ ├── mysqllistactivequeries_test.go
│ │ │ │ └── mysqllistactivequeries.go
│ │ │ ├── mysqllisttablefragmentation
│ │ │ │ ├── mysqllisttablefragmentation_test.go
│ │ │ │ └── mysqllisttablefragmentation.go
│ │ │ ├── mysqllisttables
│ │ │ │ ├── mysqllisttables_test.go
│ │ │ │ └── mysqllisttables.go
│ │ │ ├── mysqllisttablesmissinguniqueindexes
│ │ │ │ ├── mysqllisttablesmissinguniqueindexes_test.go
│ │ │ │ └── mysqllisttablesmissinguniqueindexes.go
│ │ │ └── mysqlsql
│ │ │ ├── mysqlsql_test.go
│ │ │ └── mysqlsql.go
│ │ ├── neo4j
│ │ │ ├── neo4jcypher
│ │ │ │ ├── neo4jcypher_test.go
│ │ │ │ └── neo4jcypher.go
│ │ │ ├── neo4jexecutecypher
│ │ │ │ ├── classifier
│ │ │ │ │ ├── classifier_test.go
│ │ │ │ │ └── classifier.go
│ │ │ │ ├── neo4jexecutecypher_test.go
│ │ │ │ └── neo4jexecutecypher.go
│ │ │ └── neo4jschema
│ │ │ ├── cache
│ │ │ │ ├── cache_test.go
│ │ │ │ └── cache.go
│ │ │ ├── helpers
│ │ │ │ ├── helpers_test.go
│ │ │ │ └── helpers.go
│ │ │ ├── neo4jschema_test.go
│ │ │ ├── neo4jschema.go
│ │ │ └── types
│ │ │ └── types.go
│ │ ├── oceanbase
│ │ │ ├── oceanbaseexecutesql
│ │ │ │ ├── oceanbaseexecutesql_test.go
│ │ │ │ └── oceanbaseexecutesql.go
│ │ │ └── oceanbasesql
│ │ │ ├── oceanbasesql_test.go
│ │ │ └── oceanbasesql.go
│ │ ├── oracle
│ │ │ ├── oracleexecutesql
│ │ │ │ └── oracleexecutesql.go
│ │ │ └── oraclesql
│ │ │ └── oraclesql.go
│ │ ├── parameters_test.go
│ │ ├── parameters.go
│ │ ├── postgres
│ │ │ ├── postgresexecutesql
│ │ │ │ ├── postgresexecutesql_test.go
│ │ │ │ └── postgresexecutesql.go
│ │ │ ├── postgreslistactivequeries
│ │ │ │ ├── postgreslistactivequeries_test.go
│ │ │ │ └── postgreslistactivequeries.go
│ │ │ ├── postgreslistavailableextensions
│ │ │ │ ├── postgreslistavailableextensions_test.go
│ │ │ │ └── postgreslistavailableextensions.go
│ │ │ ├── postgreslistinstalledextensions
│ │ │ │ ├── postgreslistinstalledextensions_test.go
│ │ │ │ └── postgreslistinstalledextensions.go
│ │ │ ├── postgreslistschemas
│ │ │ │ ├── postgreslistschemas_test.go
│ │ │ │ └── postgreslistschemas.go
│ │ │ ├── postgreslisttables
│ │ │ │ ├── postgreslisttables_test.go
│ │ │ │ └── postgreslisttables.go
│ │ │ ├── postgreslistviews
│ │ │ │ ├── postgreslistviews_test.go
│ │ │ │ └── postgreslistviews.go
│ │ │ └── postgressql
│ │ │ ├── postgressql_test.go
│ │ │ └── postgressql.go
│ │ ├── redis
│ │ │ ├── redis_test.go
│ │ │ └── redis.go
│ │ ├── serverlessspark
│ │ │ ├── serverlesssparkcancelbatch
│ │ │ │ ├── serverlesssparkcancelbatch_test.go
│ │ │ │ └── serverlesssparkcancelbatch.go
│ │ │ ├── serverlesssparkgetbatch
│ │ │ │ ├── serverlesssparkgetbatch_test.go
│ │ │ │ └── serverlesssparkgetbatch.go
│ │ │ └── serverlesssparklistbatches
│ │ │ ├── serverlesssparklistbatches_test.go
│ │ │ └── serverlesssparklistbatches.go
│ │ ├── singlestore
│ │ │ ├── singlestoreexecutesql
│ │ │ │ ├── singlestoreexecutesql_test.go
│ │ │ │ └── singlestoreexecutesql.go
│ │ │ └── singlestoresql
│ │ │ ├── singlestoresql_test.go
│ │ │ └── singlestoresql.go
│ │ ├── spanner
│ │ │ ├── spannerexecutesql
│ │ │ │ ├── spannerexecutesql_test.go
│ │ │ │ └── spannerexecutesql.go
│ │ │ ├── spannerlisttables
│ │ │ │ ├── spannerlisttables_test.go
│ │ │ │ └── spannerlisttables.go
│ │ │ └── spannersql
│ │ │ ├── spanner_test.go
│ │ │ └── spannersql.go
│ │ ├── sqlite
│ │ │ ├── sqliteexecutesql
│ │ │ │ ├── sqliteexecutesql_test.go
│ │ │ │ └── sqliteexecutesql.go
│ │ │ └── sqlitesql
│ │ │ ├── sqlitesql_test.go
│ │ │ └── sqlitesql.go
│ │ ├── tidb
│ │ │ ├── tidbexecutesql
│ │ │ │ ├── tidbexecutesql_test.go
│ │ │ │ └── tidbexecutesql.go
│ │ │ └── tidbsql
│ │ │ ├── tidbsql_test.go
│ │ │ └── tidbsql.go
│ │ ├── tools_test.go
│ │ ├── tools.go
│ │ ├── toolsets.go
│ │ ├── trino
│ │ │ ├── trinoexecutesql
│ │ │ │ ├── trinoexecutesql_test.go
│ │ │ │ └── trinoexecutesql.go
│ │ │ └── trinosql
│ │ │ ├── trinosql_test.go
│ │ │ └── trinosql.go
│ │ ├── utility
│ │ │ └── wait
│ │ │ ├── wait_test.go
│ │ │ └── wait.go
│ │ ├── valkey
│ │ │ ├── valkey_test.go
│ │ │ └── valkey.go
│ │ └── yugabytedbsql
│ │ ├── yugabytedbsql_test.go
│ │ └── yugabytedbsql.go
│ └── util
│ ├── orderedmap
│ │ ├── orderedmap_test.go
│ │ └── orderedmap.go
│ └── util.go
├── LICENSE
├── logo.png
├── main.go
├── MCP-TOOLBOX-EXTENSION.md
├── README.md
└── tests
├── alloydb
│ ├── alloydb_integration_test.go
│ └── alloydb_wait_for_operation_test.go
├── alloydbainl
│ └── alloydb_ai_nl_integration_test.go
├── alloydbpg
│ └── alloydb_pg_integration_test.go
├── auth.go
├── bigquery
│ └── bigquery_integration_test.go
├── bigtable
│ └── bigtable_integration_test.go
├── cassandra
│ └── cassandra_integration_test.go
├── clickhouse
│ └── clickhouse_integration_test.go
├── cloudhealthcare
│ └── cloud_healthcare_integration_test.go
├── cloudmonitoring
│ └── cloud_monitoring_integration_test.go
├── cloudsql
│ ├── cloud_sql_create_database_test.go
│ ├── cloud_sql_create_users_test.go
│ ├── cloud_sql_get_instances_test.go
│ ├── cloud_sql_list_databases_test.go
│ ├── cloudsql_list_instances_test.go
│ └── cloudsql_wait_for_operation_test.go
├── cloudsqlmssql
│ ├── cloud_sql_mssql_create_instance_integration_test.go
│ └── cloud_sql_mssql_integration_test.go
├── cloudsqlmysql
│ ├── cloud_sql_mysql_create_instance_integration_test.go
│ └── cloud_sql_mysql_integration_test.go
├── cloudsqlpg
│ ├── cloud_sql_pg_create_instances_test.go
│ └── cloud_sql_pg_integration_test.go
├── common.go
├── couchbase
│ └── couchbase_integration_test.go
├── dataform
│ └── dataform_integration_test.go
├── dataplex
│ └── dataplex_integration_test.go
├── dgraph
│ └── dgraph_integration_test.go
├── elasticsearch
│ └── elasticsearch_integration_test.go
├── firebird
│ └── firebird_integration_test.go
├── firestore
│ └── firestore_integration_test.go
├── http
│ └── http_integration_test.go
├── looker
│ └── looker_integration_test.go
├── mindsdb
│ └── mindsdb_integration_test.go
├── mongodb
│ └── mongodb_integration_test.go
├── mssql
│ └── mssql_integration_test.go
├── mysql
│ └── mysql_integration_test.go
├── neo4j
│ └── neo4j_integration_test.go
├── oceanbase
│ └── oceanbase_integration_test.go
├── option.go
├── oracle
│ └── oracle_integration_test.go
├── postgres
│ └── postgres_integration_test.go
├── redis
│ └── redis_test.go
├── server.go
├── serverlessspark
│ └── serverless_spark_integration_test.go
├── singlestore
│ └── singlestore_integration_test.go
├── source.go
├── spanner
│ └── spanner_integration_test.go
├── sqlite
│ └── sqlite_integration_test.go
├── tidb
│ └── tidb_integration_test.go
├── tool.go
├── trino
│ └── trino_integration_test.go
├── utility
│ └── wait_integration_test.go
├── valkey
│ └── valkey_test.go
└── yugabytedb
└── yugabytedb_integration_test.go
```
# Files
--------------------------------------------------------------------------------
/docs/en/resources/tools/firestore/firestore-update-document.md:
--------------------------------------------------------------------------------
```markdown
---
title: "firestore-update-document"
type: docs
weight: 1
description: >
A "firestore-update-document" tool updates an existing document in Firestore.
aliases:
- /resources/tools/firestore-update-document
---
## Description
The `firestore-update-document` tool allows you to update existing documents in
Firestore. It supports all Firestore data types using Firestore's native JSON
format. The tool can perform either a full document update (merging all fields
in the document data) or a selective field update using an update mask. When
using an update mask, fields referenced in the mask but not present in the
document data will be deleted from the document, following Firestore's native
behavior.
## Parameters
| Parameter | Type | Required | Description |
|----------------|---------|----------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `documentPath` | string | Yes | The path of the document which needs to be updated |
| `documentData` | map | Yes | The data to update in the document. Must use [Firestore's native JSON format](https://cloud.google.com/firestore/docs/reference/rest/Shared.Types/ArrayValue#Value) with typed values |
| `updateMask` | array | No | The selective fields to update. If not provided, all fields in documentData will be updated. When provided, only the specified fields will be updated. Fields referenced in the mask but not present in documentData will be deleted from the document |
| `returnData` | boolean | No | If set to true, the output will include the data of the updated document. Defaults to false to help avoid overloading the context |
## Output
The tool returns a map containing:
| Field | Type | Description |
|----------------|--------|---------------------------------------------------------------------------------------------|
| `documentPath` | string | The full path of the updated document |
| `updateTime` | string | The timestamp when the document was updated |
| `documentData` | map | The current data of the document after the update (only included when `returnData` is true) |
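For illustration only, a successful call with `returnData` set to true might produce a map along these lines (the values, and the exact formatting of the returned `documentData`, are hypothetical):
```json
{
  "documentPath": "users/user123",
  "updateTime": "2025-01-15T10:30:05Z",
  "documentData": {
    "name": {
      "stringValue": "Jane Doe"
    },
    "status": {
      "stringValue": "active"
    }
  }
}
```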
## Data Type Format
The tool requires Firestore's native JSON format for document data. Each field
must be wrapped with its type indicator:
### Basic Types
- **String**: `{"stringValue": "your string"}`
- **Integer**: `{"integerValue": "123"}` or `{"integerValue": 123}`
- **Double**: `{"doubleValue": 123.45}`
- **Boolean**: `{"booleanValue": true}`
- **Null**: `{"nullValue": null}`
- **Bytes**: `{"bytesValue": "base64EncodedString"}`
- **Timestamp**: `{"timestampValue": "2025-01-07T10:00:00Z"}` (RFC3339 format)
### Complex Types
- **GeoPoint**: `{"geoPointValue": {"latitude": 34.052235, "longitude": -118.243683}}`
- **Array**: `{"arrayValue": {"values": [{"stringValue": "item1"}, {"integerValue": "2"}]}}`
- **Map**: `{"mapValue": {"fields": {"key1": {"stringValue": "value1"}, "key2": {"booleanValue": true}}}}`
- **Reference**: `{"referenceValue": "collection/document"}`
## Update Modes
### Full Document Update (Merge All)
When `updateMask` is not provided, the tool performs a merge operation that
updates all fields specified in `documentData` while preserving other existing
fields in the document.
### Selective Field Update
When `updateMask` is provided, only the fields listed in the mask are updated.
This allows for precise control over which fields are modified, added, or
deleted. To delete a field, include it in the `updateMask` but omit it from
`documentData`.
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:--------------:|:------------:|----------------------------------------------------------|
| kind | string | true | Must be "firestore-update-document". |
| source | string | true | Name of the Firestore source to update documents in. |
| description | string | true | Description of the tool that is passed to the LLM. |
## Examples
### Basic Document Update (Full Merge)
```yaml
tools:
update-user-doc:
kind: firestore-update-document
source: my-firestore
description: Update a user document
```
Usage:
```json
{
"documentPath": "users/user123",
"documentData": {
"name": {
"stringValue": "Jane Doe"
},
"lastUpdated": {
"timestampValue": "2025-01-15T10:30:00Z"
},
"status": {
"stringValue": "active"
},
"score": {
"integerValue": "150"
}
}
}
```
### Selective Field Update with Update Mask
```json
{
"documentPath": "users/user123",
"documentData": {
"email": {
"stringValue": "[email protected]"
},
"profile": {
"mapValue": {
"fields": {
"bio": {
"stringValue": "Updated bio text"
},
"avatar": {
"stringValue": "https://example.com/new-avatar.jpg"
}
}
}
}
},
"updateMask": ["email", "profile.bio", "profile.avatar"]
}
```
### Update with Field Deletion
To delete fields, include them in the `updateMask` but omit them from `documentData`:
```json
{
"documentPath": "users/user123",
"documentData": {
"name": {
"stringValue": "John Smith"
}
},
"updateMask": ["name", "temporaryField", "obsoleteData"],
"returnData": true
}
```
In this example:
- `name` will be updated to "John Smith"
- `temporaryField` and `obsoleteData` will be deleted from the document (they are in the mask but not in the data)
### Complex Update with Nested Data
```json
{
"documentPath": "companies/company456",
"documentData": {
"metadata": {
"mapValue": {
"fields": {
"lastModified": {
"timestampValue": "2025-01-15T14:30:00Z"
},
"modifiedBy": {
"stringValue": "[email protected]"
}
}
}
},
"locations": {
"arrayValue": {
"values": [
{
"mapValue": {
"fields": {
"city": {
"stringValue": "San Francisco"
},
"coordinates": {
"geoPointValue": {
"latitude": 37.7749,
"longitude": -122.4194
}
}
}
}
},
{
"mapValue": {
"fields": {
"city": {
"stringValue": "New York"
},
"coordinates": {
"geoPointValue": {
"latitude": 40.7128,
"longitude": -74.0060
}
}
}
}
}
]
}
},
"revenue": {
"doubleValue": 5678901.23
}
},
"updateMask": ["metadata", "locations", "revenue"]
}
```
### Update with All Data Types
```json
{
"documentPath": "test-documents/doc789",
"documentData": {
"stringField": {
"stringValue": "Updated string"
},
"integerField": {
"integerValue": "999"
},
"doubleField": {
"doubleValue": 2.71828
},
"booleanField": {
"booleanValue": false
},
"nullField": {
"nullValue": null
},
"timestampField": {
"timestampValue": "2025-01-15T16:45:00Z"
},
"geoPointField": {
"geoPointValue": {
"latitude": 51.5074,
"longitude": -0.1278
}
},
"bytesField": {
"bytesValue": "VXBkYXRlZCBkYXRh"
},
"arrayField": {
"arrayValue": {
"values": [
{
"stringValue": "updated1"
},
{
"integerValue": "200"
},
{
"booleanValue": true
}
]
}
},
"mapField": {
"mapValue": {
"fields": {
"nestedString": {
"stringValue": "updated nested value"
},
"nestedNumber": {
"doubleValue": 88.88
}
}
}
},
"referenceField": {
"referenceValue": "users/updatedUser"
}
},
"returnData": true
}
```
## Authentication
The tool can be configured to require authentication:
```yaml
tools:
secure-update-doc:
kind: firestore-update-document
source: prod-firestore
description: Update documents with authentication required
authRequired:
- google-oauth
- api-key
```
## Error Handling
Common errors include:
- Document not found (when updating a document that does not exist)
- Invalid document path
- Missing or invalid document data
- Permission denied (if Firestore security rules block the operation)
- Invalid data type conversions
## Best Practices
1. **Use update masks for precision**: When you only need to update specific fields, use the `updateMask` parameter to avoid unintended changes
2. **Always use typed values**: Every field must be wrapped with its appropriate type indicator (e.g., `{"stringValue": "text"}`)
3. **Integer values can be strings**: The tool accepts integer values as strings (e.g., `{"integerValue": "1500"}`)
4. **Use returnData sparingly**: Only set to true when you need to verify the exact data after the update
5. **Validate data before sending**: Ensure your data matches Firestore's native JSON format
6. **Handle timestamps properly**: Use RFC3339 format for timestamp strings
7. **Base64 encode binary data**: Binary data must be base64 encoded in the `bytesValue` field
8. **Consider security rules**: Ensure your Firestore security rules allow document updates
9. **Delete fields using update mask**: To delete fields, include them in the `updateMask` but omit them from `documentData`
10. **Test with non-production data first**: Always test your updates on non-critical documents first
## Differences from Add Documents
- **Purpose**: Updates existing documents vs. creating new ones
- **Document must exist**: Standard updates require an existing document (although an update without an `updateMask` will create the document at the given path if it is missing)
- **Update mask support**: Allows selective field updates
- **Field deletion**: Supports removing specific fields by including them in the mask but not in the data
- **Returns updateTime**: Instead of createTime
## Related Tools
- [`firestore-add-documents`](firestore-add-documents.md) - Add new documents to Firestore
- [`firestore-get-documents`](firestore-get-documents.md) - Retrieve documents by their paths
- [`firestore-query-collection`](firestore-query-collection.md) - Query documents in a collection
- [`firestore-delete-documents`](firestore-delete-documents.md) - Delete documents from Firestore
```
--------------------------------------------------------------------------------
/internal/tools/bigquery/bigquerycommon/table_name_parser.go:
--------------------------------------------------------------------------------
```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package bigquerycommon
import (
"fmt"
"strings"
"unicode"
)
// parserState defines the state of the SQL parser's state machine.
type parserState int
const (
stateNormal parserState = iota
// String states
stateInSingleQuoteString
stateInDoubleQuoteString
stateInTripleSingleQuoteString
stateInTripleDoubleQuoteString
stateInRawSingleQuoteString
stateInRawDoubleQuoteString
stateInRawTripleSingleQuoteString
stateInRawTripleDoubleQuoteString
// Comment states
stateInSingleLineCommentDash
stateInSingleLineCommentHash
stateInMultiLineComment
)
// SQL statement verbs
const (
verbCreate = "create"
verbAlter = "alter"
verbDrop = "drop"
verbSelect = "select"
verbInsert = "insert"
verbUpdate = "update"
verbDelete = "delete"
verbMerge = "merge"
)
var tableFollowsKeywords = map[string]bool{
"from": true,
"join": true,
"update": true,
"into": true, // INSERT INTO, MERGE INTO
"table": true, // CREATE TABLE, ALTER TABLE
"using": true, // MERGE ... USING
"insert": true, // INSERT my_table
"merge": true, // MERGE my_table
}
var tableContextExitKeywords = map[string]bool{
"where": true,
"group": true, // GROUP BY
"having": true,
"order": true, // ORDER BY
"limit": true,
"window": true,
"on": true, // JOIN ... ON
"set": true, // UPDATE ... SET
"when": true, // MERGE ... WHEN
}
// TableParser is the main entry point for parsing a SQL string to find all referenced table IDs.
// It handles multi-statement SQL, comments, and recursive parsing of EXECUTE IMMEDIATE statements.
func TableParser(sql, defaultProjectID string) ([]string, error) {
tableIDSet := make(map[string]struct{})
visitedSQLs := make(map[string]struct{})
if _, err := parseSQL(sql, defaultProjectID, tableIDSet, visitedSQLs, false); err != nil {
return nil, err
}
tableIDs := make([]string, 0, len(tableIDSet))
for id := range tableIDSet {
tableIDs = append(tableIDs, id)
}
return tableIDs, nil
}
// parseSQL is the core recursive function that processes SQL strings.
// It uses a state machine to find table names and recursively parse EXECUTE IMMEDIATE.
func parseSQL(sql, defaultProjectID string, tableIDSet map[string]struct{}, visitedSQLs map[string]struct{}, inSubquery bool) (int, error) {
// Prevent infinite recursion.
if _, ok := visitedSQLs[sql]; ok {
return len(sql), nil
}
visitedSQLs[sql] = struct{}{}
state := stateNormal
expectingTable := false
var lastTableKeyword, lastToken, statementVerb string
runes := []rune(sql)
for i := 0; i < len(runes); {
char := runes[i]
remaining := sql[i:]
switch state {
case stateNormal:
if strings.HasPrefix(remaining, "--") {
state = stateInSingleLineCommentDash
i += 2
continue
}
if strings.HasPrefix(remaining, "#") {
state = stateInSingleLineCommentHash
i++
continue
}
if strings.HasPrefix(remaining, "/*") {
state = stateInMultiLineComment
i += 2
continue
}
if char == '(' {
if expectingTable {
// The subquery starts after '('.
consumed, err := parseSQL(remaining[1:], defaultProjectID, tableIDSet, visitedSQLs, true)
if err != nil {
return 0, err
}
// Advance i by the length of the subquery + the opening parenthesis.
// The recursive call returns what it consumed, including the closing parenthesis.
i += consumed + 1
// For most keywords, we expect only one table. `from` can have multiple "tables" (subqueries).
if lastTableKeyword != "from" {
expectingTable = false
}
continue
}
}
if char == ')' {
if inSubquery {
return i + 1, nil
}
}
if char == ';' {
statementVerb = ""
lastToken = ""
i++
continue
}
// Raw strings must be checked before regular strings.
if strings.HasPrefix(remaining, "r'''") || strings.HasPrefix(remaining, "R'''") {
state = stateInRawTripleSingleQuoteString
i += 4
continue
}
if strings.HasPrefix(remaining, `r"""`) || strings.HasPrefix(remaining, `R"""`) {
state = stateInRawTripleDoubleQuoteString
i += 4
continue
}
if strings.HasPrefix(remaining, "r'") || strings.HasPrefix(remaining, "R'") {
state = stateInRawSingleQuoteString
i += 2
continue
}
if strings.HasPrefix(remaining, `r"`) || strings.HasPrefix(remaining, `R"`) {
state = stateInRawDoubleQuoteString
i += 2
continue
}
if strings.HasPrefix(remaining, "'''") {
state = stateInTripleSingleQuoteString
i += 3
continue
}
if strings.HasPrefix(remaining, `"""`) {
state = stateInTripleDoubleQuoteString
i += 3
continue
}
if char == '\'' {
state = stateInSingleQuoteString
i++
continue
}
if char == '"' {
state = stateInDoubleQuoteString
i++
continue
}
if unicode.IsLetter(char) || char == '`' {
parts, consumed, err := parseIdentifierSequence(remaining)
if err != nil {
return 0, err
}
if consumed == 0 {
i++
continue
}
if len(parts) == 1 {
keyword := strings.ToLower(parts[0])
switch keyword {
case "call":
return 0, fmt.Errorf("CALL is not allowed when dataset restrictions are in place, as the called procedure's contents cannot be safely analyzed")
case "immediate":
if lastToken == "execute" {
return 0, fmt.Errorf("EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place, as its contents cannot be safely analyzed")
}
case "procedure", "function":
if lastToken == "create" || lastToken == "create or replace" {
return 0, fmt.Errorf("unanalyzable statements like '%s %s' are not allowed", strings.ToUpper(lastToken), strings.ToUpper(keyword))
}
case verbCreate, verbAlter, verbDrop, verbSelect, verbInsert, verbUpdate, verbDelete, verbMerge:
if statementVerb == "" {
statementVerb = keyword
}
}
if statementVerb == verbCreate || statementVerb == verbAlter || statementVerb == verbDrop {
if keyword == "schema" || keyword == "dataset" {
return 0, fmt.Errorf("dataset-level operations like '%s %s' are not allowed when dataset restrictions are in place", strings.ToUpper(statementVerb), strings.ToUpper(keyword))
}
}
if _, ok := tableFollowsKeywords[keyword]; ok {
expectingTable = true
lastTableKeyword = keyword
} else if _, ok := tableContextExitKeywords[keyword]; ok {
expectingTable = false
lastTableKeyword = ""
}
if lastToken == "create" && keyword == "or" {
lastToken = "create or"
} else if lastToken == "create or" && keyword == "replace" {
lastToken = "create or replace"
} else {
lastToken = keyword
}
} else if len(parts) >= 2 {
// This is a multi-part identifier. If we were expecting a table, this is it.
if expectingTable {
tableID, err := formatTableID(parts, defaultProjectID)
if err != nil {
return 0, err
}
if tableID != "" {
tableIDSet[tableID] = struct{}{}
}
// For most keywords, we expect only one table.
if lastTableKeyword != "from" {
expectingTable = false
}
}
lastToken = ""
}
i += consumed
continue
}
i++
case stateInSingleQuoteString:
if char == '\\' {
i += 2 // Skip backslash and the escaped character.
continue
}
if char == '\'' {
state = stateNormal
}
i++
case stateInDoubleQuoteString:
if char == '\\' {
i += 2 // Skip backslash and the escaped character.
continue
}
if char == '"' {
state = stateNormal
}
i++
case stateInTripleSingleQuoteString:
if strings.HasPrefix(remaining, "'''") {
state = stateNormal
i += 3
} else {
i++
}
case stateInTripleDoubleQuoteString:
if strings.HasPrefix(remaining, `"""`) {
state = stateNormal
i += 3
} else {
i++
}
case stateInSingleLineCommentDash, stateInSingleLineCommentHash:
if char == '\n' {
state = stateNormal
}
i++
case stateInMultiLineComment:
if strings.HasPrefix(remaining, "*/") {
state = stateNormal
i += 2
} else {
i++
}
case stateInRawSingleQuoteString:
if char == '\'' {
state = stateNormal
}
i++
case stateInRawDoubleQuoteString:
if char == '"' {
state = stateNormal
}
i++
case stateInRawTripleSingleQuoteString:
if strings.HasPrefix(remaining, "'''") {
state = stateNormal
i += 3
} else {
i++
}
case stateInRawTripleDoubleQuoteString:
if strings.HasPrefix(remaining, `"""`) {
state = stateNormal
i += 3
} else {
i++
}
}
}
if inSubquery {
return 0, fmt.Errorf("unclosed subquery parenthesis")
}
return len(sql), nil
}
// parseIdentifierSequence parses a sequence of dot-separated identifiers.
// It returns the parts of the identifier, the number of characters consumed, and an error.
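// For example (illustrative only), parsing the input
//
//	`my-project`.dataset.tbl WHERE x
//
// yields parts ["my-project", "dataset", "tbl"] and consumes everything up
// to the space before WHERE.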
func parseIdentifierSequence(s string) ([]string, int, error) {
var parts []string
var totalConsumed int
for {
remaining := s[totalConsumed:]
trimmed := strings.TrimLeftFunc(remaining, unicode.IsSpace)
totalConsumed += len(remaining) - len(trimmed)
current := s[totalConsumed:]
if len(current) == 0 {
break
}
var part string
var consumed int
if current[0] == '`' {
end := strings.Index(current[1:], "`")
if end == -1 {
return nil, 0, fmt.Errorf("unclosed backtick identifier")
}
part = current[1 : end+1]
consumed = end + 2
} else if len(current) > 0 && unicode.IsLetter(rune(current[0])) {
end := strings.IndexFunc(current, func(r rune) bool {
return !unicode.IsLetter(r) && !unicode.IsNumber(r) && r != '_' && r != '-'
})
if end == -1 {
part = current
consumed = len(current)
} else {
part = current[:end]
consumed = end
}
} else {
break
}
if current[0] == '`' && strings.Contains(part, ".") {
			// A backtick-quoted identifier may itself contain dots (e.g. `project.dataset.table`).
			// If the quoted identifier is not followed by a dot, treat it as the complete
			// multi-part name; otherwise fall through and keep consuming `.part` segments.
if len(current) <= consumed || current[consumed] != '.' {
parts = append(parts, strings.Split(part, ".")...)
totalConsumed += consumed
break
}
}
parts = append(parts, strings.Split(part, ".")...)
totalConsumed += consumed
if len(s) <= totalConsumed || s[totalConsumed] != '.' {
break
}
totalConsumed++
}
return parts, totalConsumed, nil
}
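// formatTableID normalizes identifier parts into a fully qualified
// "project.dataset.table" ID, prefixing defaultProjectID when only
// "dataset.table" is given. Part counts other than two or three are not
// tables (CTEs, columns, aliases) and yield an empty ID with no error.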
func formatTableID(parts []string, defaultProjectID string) (string, error) {
if len(parts) < 2 || len(parts) > 3 {
		// Not a table identifier (could be a CTE, column, etc.); return an
		// empty ID so the caller simply skips it.
return "", nil
}
var tableID string
if len(parts) == 3 { // project.dataset.table
tableID = strings.Join(parts, ".")
} else { // dataset.table
if defaultProjectID == "" {
return "", fmt.Errorf("query contains table '%s' without project ID, and no default project ID is provided", strings.Join(parts, "."))
}
tableID = fmt.Sprintf("%s.%s", defaultProjectID, strings.Join(parts, "."))
}
return tableID, nil
}
```
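The analyzer above walks the SQL byte by byte through an explicit state machine so that keywords inside string literals and comments are never mistaken for statements. As a minimal, self-contained sketch of the same technique (a reduced two-state variant, not part of the repository), the following strips `--` line comments while leaving single-quoted strings intact:
```go
package main

import (
	"fmt"
	"strings"
)

// stripLineComments removes "--" comments from a SQL snippet while leaving
// single-quoted string contents untouched, using the same explicit-state
// walk as the analyzer above, in reduced form.
func stripLineComments(sql string) string {
	const (
		stateNormal = iota
		stateInString
		stateInComment
	)
	state := stateNormal
	var b strings.Builder
	for i := 0; i < len(sql); i++ {
		ch := sql[i]
		switch state {
		case stateNormal:
			if strings.HasPrefix(sql[i:], "--") {
				state = stateInComment
				i++ // consume the second dash as well
				continue
			}
			if ch == '\'' {
				state = stateInString
			}
			b.WriteByte(ch)
		case stateInString:
			if ch == '\'' {
				state = stateNormal
			}
			b.WriteByte(ch)
		case stateInComment:
			if ch == '\n' {
				state = stateNormal
				b.WriteByte(ch)
			}
		}
	}
	return b.String()
}

func main() {
	// The literal '--kept' survives; the trailing comment does not.
	fmt.Println(stripLineComments("SELECT '--kept' -- dropped\nFROM t"))
}
```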
--------------------------------------------------------------------------------
/internal/tools/bigquery/bigqueryforecast/bigqueryforecast.go:
--------------------------------------------------------------------------------
```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package bigqueryforecast
import (
"context"
"fmt"
"strings"
bigqueryapi "cloud.google.com/go/bigquery"
yaml "github.com/goccy/go-yaml"
"github.com/googleapis/genai-toolbox/internal/sources"
bigqueryds "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
"github.com/googleapis/genai-toolbox/internal/tools"
bqutil "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerycommon"
"github.com/googleapis/genai-toolbox/internal/util"
bigqueryrestapi "google.golang.org/api/bigquery/v2"
"google.golang.org/api/iterator"
)
const kind string = "bigquery-forecast"
func init() {
if !tools.Register(kind, newConfig) {
panic(fmt.Sprintf("tool kind %q already registered", kind))
}
}
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
actual := Config{Name: name}
if err := decoder.DecodeContext(ctx, &actual); err != nil {
return nil, err
}
return actual, nil
}
type compatibleSource interface {
BigQueryClient() *bigqueryapi.Client
BigQueryRestService() *bigqueryrestapi.Service
BigQueryClientCreator() bigqueryds.BigqueryClientCreator
UseClientAuthorization() bool
IsDatasetAllowed(projectID, datasetID string) bool
BigQueryAllowedDatasets() []string
BigQuerySession() bigqueryds.BigQuerySessionProvider
}
// validate compatible sources are still compatible
var _ compatibleSource = &bigqueryds.Source{}
var compatibleSources = [...]string{bigqueryds.SourceKind}
type Config struct {
Name string `yaml:"name" validate:"required"`
Kind string `yaml:"kind" validate:"required"`
Source string `yaml:"source" validate:"required"`
Description string `yaml:"description" validate:"required"`
AuthRequired []string `yaml:"authRequired"`
}
// validate interface
var _ tools.ToolConfig = Config{}
func (cfg Config) ToolConfigKind() string {
return kind
}
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
// verify source exists
rawS, ok := srcs[cfg.Source]
if !ok {
return nil, fmt.Errorf("no source named %q configured", cfg.Source)
}
// verify the source is compatible
s, ok := rawS.(compatibleSource)
if !ok {
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
}
allowedDatasets := s.BigQueryAllowedDatasets()
	historyDataDescription := "The table ID or the query that provides the history time series data."
if len(allowedDatasets) > 0 {
datasetIDs := []string{}
for _, ds := range allowedDatasets {
datasetIDs = append(datasetIDs, fmt.Sprintf("`%s`", ds))
}
historyDataDescription += fmt.Sprintf(" The query or table must only access datasets from the following list: %s.", strings.Join(datasetIDs, ", "))
}
historyDataParameter := tools.NewStringParameter("history_data", historyDataDescription)
timestampColumnNameParameter := tools.NewStringParameter("timestamp_col",
"The name of the time series timestamp column.")
dataColumnNameParameter := tools.NewStringParameter("data_col",
"The name of the time series data column.")
	idColumnNameParameter := tools.NewArrayParameterWithDefault("id_cols", []any{},
		"An array of the time series ID column names.",
		tools.NewStringParameter("id_col", "The name of a time series ID column."))
horizonParameter := tools.NewIntParameterWithDefault("horizon", 10, "The number of forecasting steps.")
parameters := tools.Parameters{historyDataParameter,
timestampColumnNameParameter, dataColumnNameParameter, idColumnNameParameter, horizonParameter}
mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, parameters)
// finish tool setup
t := Tool{
Name: cfg.Name,
Kind: kind,
Parameters: parameters,
AuthRequired: cfg.AuthRequired,
UseClientOAuth: s.UseClientAuthorization(),
ClientCreator: s.BigQueryClientCreator(),
Client: s.BigQueryClient(),
RestService: s.BigQueryRestService(),
IsDatasetAllowed: s.IsDatasetAllowed,
SessionProvider: s.BigQuerySession(),
AllowedDatasets: allowedDatasets,
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
mcpManifest: mcpManifest,
}
return t, nil
}
// validate interface
var _ tools.Tool = Tool{}
type Tool struct {
Name string `yaml:"name"`
Kind string `yaml:"kind"`
AuthRequired []string `yaml:"authRequired"`
UseClientOAuth bool `yaml:"useClientOAuth"`
Parameters tools.Parameters `yaml:"parameters"`
Client *bigqueryapi.Client
RestService *bigqueryrestapi.Service
ClientCreator bigqueryds.BigqueryClientCreator
IsDatasetAllowed func(projectID, datasetID string) bool
AllowedDatasets []string
SessionProvider bigqueryds.BigQuerySessionProvider
manifest tools.Manifest
mcpManifest tools.McpManifest
}
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
paramsMap := params.AsMap()
historyData, ok := paramsMap["history_data"].(string)
if !ok {
return nil, fmt.Errorf("unable to cast history_data parameter %v", paramsMap["history_data"])
}
timestampCol, ok := paramsMap["timestamp_col"].(string)
if !ok {
return nil, fmt.Errorf("unable to cast timestamp_col parameter %v", paramsMap["timestamp_col"])
}
dataCol, ok := paramsMap["data_col"].(string)
if !ok {
return nil, fmt.Errorf("unable to cast data_col parameter %v", paramsMap["data_col"])
}
idColsRaw, ok := paramsMap["id_cols"].([]any)
if !ok {
return nil, fmt.Errorf("unable to cast id_cols parameter %v", paramsMap["id_cols"])
}
var idCols []string
for _, v := range idColsRaw {
s, ok := v.(string)
if !ok {
return nil, fmt.Errorf("id_cols contains non-string value: %v", v)
}
idCols = append(idCols, s)
}
horizon, ok := paramsMap["horizon"].(int)
if !ok {
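		// Parameters decoded from JSON numbers arrive as float64 rather than
		// int, so accept that representation before giving up.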
if h, ok := paramsMap["horizon"].(float64); ok {
horizon = int(h)
} else {
return nil, fmt.Errorf("unable to cast horizon parameter %v", paramsMap["horizon"])
}
}
bqClient := t.Client
restService := t.RestService
var err error
// Initialize new client if using user OAuth token
if t.UseClientOAuth {
tokenStr, err := accessToken.ParseBearerToken()
if err != nil {
return nil, fmt.Errorf("error parsing access token: %w", err)
}
bqClient, restService, err = t.ClientCreator(tokenStr, false)
if err != nil {
return nil, fmt.Errorf("error creating client from OAuth access token: %w", err)
}
}
var historyDataSource string
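	// history_data may be either an inline query or a table ID. Queries are
	// dry-run first so every referenced dataset can be checked against the
	// allowlist; table IDs are parsed and checked directly.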
trimmedUpperHistoryData := strings.TrimSpace(strings.ToUpper(historyData))
if strings.HasPrefix(trimmedUpperHistoryData, "SELECT") || strings.HasPrefix(trimmedUpperHistoryData, "WITH") {
if len(t.AllowedDatasets) > 0 {
var connProps []*bigqueryapi.ConnectionProperty
session, err := t.SessionProvider(ctx)
if err != nil {
return nil, fmt.Errorf("failed to get BigQuery session: %w", err)
}
if session != nil {
connProps = []*bigqueryapi.ConnectionProperty{
{Key: "session_id", Value: session.ID},
}
}
dryRunJob, err := bqutil.DryRunQuery(ctx, restService, t.Client.Project(), t.Client.Location, historyData, nil, connProps)
if err != nil {
return nil, fmt.Errorf("query validation failed: %w", err)
}
			// Check for nil before touching the query statistics, so an
			// unanalyzable dry run cannot cause a nil-pointer dereference.
			if dryRunJob.Statistics == nil || dryRunJob.Statistics.Query == nil {
				return nil, fmt.Errorf("could not analyze query in history_data to validate against allowed datasets")
			}
			queryStats := dryRunJob.Statistics.Query
			if queryStats.StatementType != "SELECT" {
				return nil, fmt.Errorf("the 'history_data' parameter only supports a table ID or a SELECT query. The provided query has statement type '%s'", queryStats.StatementType)
			}
			for _, tableRef := range queryStats.ReferencedTables {
				if !t.IsDatasetAllowed(tableRef.ProjectId, tableRef.DatasetId) {
					return nil, fmt.Errorf("query in history_data accesses dataset '%s.%s', which is not in the allowed list", tableRef.ProjectId, tableRef.DatasetId)
				}
			}
}
historyDataSource = fmt.Sprintf("(%s)", historyData)
} else {
if len(t.AllowedDatasets) > 0 {
parts := strings.Split(historyData, ".")
var projectID, datasetID string
switch len(parts) {
case 3: // project.dataset.table
projectID = parts[0]
datasetID = parts[1]
case 2: // dataset.table
projectID = t.Client.Project()
datasetID = parts[0]
default:
return nil, fmt.Errorf("invalid table ID format for 'history_data': %q. Expected 'dataset.table' or 'project.dataset.table'", historyData)
}
if !t.IsDatasetAllowed(projectID, datasetID) {
return nil, fmt.Errorf("access to dataset '%s.%s' (from table '%s') is not allowed", projectID, datasetID, historyData)
}
}
historyDataSource = fmt.Sprintf("TABLE `%s`", historyData)
}
idColsArg := ""
if len(idCols) > 0 {
idColsFormatted := fmt.Sprintf("['%s']", strings.Join(idCols, "', '"))
idColsArg = fmt.Sprintf(", id_cols => %s", idColsFormatted)
}
sql := fmt.Sprintf(`SELECT *
FROM AI.FORECAST(
%s,
data_col => '%s',
timestamp_col => '%s',
horizon => %d%s)`,
historyDataSource, dataCol, timestampCol, horizon, idColsArg)
query := bqClient.Query(sql)
query.Location = bqClient.Location
session, err := t.SessionProvider(ctx)
if err != nil {
return nil, fmt.Errorf("failed to get BigQuery session: %w", err)
}
if session != nil {
// Add session ID to the connection properties for subsequent calls.
query.ConnectionProperties = []*bigqueryapi.ConnectionProperty{
{Key: "session_id", Value: session.ID},
}
}
// Log the query executed for debugging.
logger, err := util.LoggerFromContext(ctx)
if err != nil {
return nil, fmt.Errorf("error getting logger: %s", err)
}
logger.DebugContext(ctx, fmt.Sprintf("executing `%s` tool query: %s", kind, sql))
// This block handles SELECT statements, which return a row set.
// We iterate through the results, convert each row into a map of
// column names to values, and return the collection of rows.
var out []any
job, err := query.Run(ctx)
if err != nil {
return nil, fmt.Errorf("unable to execute query: %w", err)
}
it, err := job.Read(ctx)
if err != nil {
return nil, fmt.Errorf("unable to read query results: %w", err)
}
for {
var row map[string]bigqueryapi.Value
err = it.Next(&row)
if err == iterator.Done {
break
}
if err != nil {
return nil, fmt.Errorf("unable to iterate through query results: %w", err)
}
vMap := make(map[string]any)
for key, value := range row {
vMap[key] = value
}
out = append(out, vMap)
}
// If the query returned any rows, return them directly.
if len(out) > 0 {
return out, nil
}
	// Handles the standard case of a SELECT query that succeeded but returned no rows.
return "The query returned 0 rows.", nil
}
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
return tools.ParseParams(t.Parameters, data, claims)
}
func (t Tool) Manifest() tools.Manifest {
return t.manifest
}
func (t Tool) McpManifest() tools.McpManifest {
return t.mcpManifest
}
func (t Tool) Authorized(verifiedAuthServices []string) bool {
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}
func (t Tool) RequiresClientAuthorization() bool {
return t.UseClientOAuth
}
```
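For reference, Invoke above assembles the AI.FORECAST call with fmt.Sprintf. A standalone sketch using hypothetical names (my-project.my_dataset.sales, amount, ts, store_id) shows the shape of the generated SQL; only the string construction is mirrored here:
```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical inputs standing in for the tool parameters.
	historyDataSource := "TABLE `my-project.my_dataset.sales`"
	dataCol, timestampCol, horizon := "amount", "ts", 30
	idCols := []string{"store_id"}

	// Same optional id_cols handling as Invoke.
	idColsArg := ""
	if len(idCols) > 0 {
		idColsArg = fmt.Sprintf(", id_cols => ['%s']", strings.Join(idCols, "', '"))
	}
	sql := fmt.Sprintf(`SELECT *
FROM AI.FORECAST(
    %s,
    data_col => '%s',
    timestamp_col => '%s',
    horizon => %d%s)`,
		historyDataSource, dataCol, timestampCol, horizon, idColsArg)
	fmt.Println(sql)
}
```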
--------------------------------------------------------------------------------
/internal/server/server.go:
--------------------------------------------------------------------------------
```go
// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package server
import (
"context"
"fmt"
"io"
"net"
"net/http"
"strconv"
"strings"
"sync"
"time"
"github.com/go-chi/chi/v5"
"github.com/go-chi/chi/v5/middleware"
"github.com/go-chi/httplog/v2"
"github.com/googleapis/genai-toolbox/internal/auth"
"github.com/googleapis/genai-toolbox/internal/log"
"github.com/googleapis/genai-toolbox/internal/sources"
"github.com/googleapis/genai-toolbox/internal/telemetry"
"github.com/googleapis/genai-toolbox/internal/tools"
"github.com/googleapis/genai-toolbox/internal/util"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/trace"
)
// Server contains info for running an instance of Toolbox. Should be instantiated with NewServer().
type Server struct {
version string
srv *http.Server
listener net.Listener
root chi.Router
logger log.Logger
instrumentation *telemetry.Instrumentation
sseManager *sseManager
ResourceMgr *ResourceManager
}
// ResourceManager contains available resources for the server. Should be initialized with NewResourceManager().
type ResourceManager struct {
mu sync.RWMutex
sources map[string]sources.Source
authServices map[string]auth.AuthService
tools map[string]tools.Tool
toolsets map[string]tools.Toolset
}
func NewResourceManager(
sourcesMap map[string]sources.Source,
authServicesMap map[string]auth.AuthService,
toolsMap map[string]tools.Tool, toolsetsMap map[string]tools.Toolset,
) *ResourceManager {
resourceMgr := &ResourceManager{
mu: sync.RWMutex{},
sources: sourcesMap,
authServices: authServicesMap,
tools: toolsMap,
toolsets: toolsetsMap,
}
return resourceMgr
}
func (r *ResourceManager) GetSource(sourceName string) (sources.Source, bool) {
r.mu.RLock()
defer r.mu.RUnlock()
source, ok := r.sources[sourceName]
return source, ok
}
func (r *ResourceManager) GetAuthService(authServiceName string) (auth.AuthService, bool) {
r.mu.RLock()
defer r.mu.RUnlock()
authService, ok := r.authServices[authServiceName]
return authService, ok
}
func (r *ResourceManager) GetTool(toolName string) (tools.Tool, bool) {
r.mu.RLock()
defer r.mu.RUnlock()
tool, ok := r.tools[toolName]
return tool, ok
}
func (r *ResourceManager) GetToolset(toolsetName string) (tools.Toolset, bool) {
r.mu.RLock()
defer r.mu.RUnlock()
toolset, ok := r.toolsets[toolsetName]
return toolset, ok
}
func (r *ResourceManager) SetResources(sourcesMap map[string]sources.Source, authServicesMap map[string]auth.AuthService, toolsMap map[string]tools.Tool, toolsetsMap map[string]tools.Toolset) {
r.mu.Lock()
defer r.mu.Unlock()
r.sources = sourcesMap
r.authServices = authServicesMap
r.tools = toolsMap
r.toolsets = toolsetsMap
}
func (r *ResourceManager) GetAuthServiceMap() map[string]auth.AuthService {
r.mu.RLock()
defer r.mu.RUnlock()
return r.authServices
}
func (r *ResourceManager) GetToolsMap() map[string]tools.Tool {
r.mu.RLock()
defer r.mu.RUnlock()
return r.tools
}
func InitializeConfigs(ctx context.Context, cfg ServerConfig) (
map[string]sources.Source,
map[string]auth.AuthService,
map[string]tools.Tool,
map[string]tools.Toolset,
error,
) {
ctx = util.WithUserAgent(ctx, cfg.Version)
instrumentation, err := util.InstrumentationFromContext(ctx)
if err != nil {
panic(err)
}
l, err := util.LoggerFromContext(ctx)
if err != nil {
panic(err)
}
// initialize and validate the sources from configs
sourcesMap := make(map[string]sources.Source)
for name, sc := range cfg.SourceConfigs {
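		// Initialize inside a closure so the tracing span's deferred End
		// runs per source instead of piling up until function return.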
s, err := func() (sources.Source, error) {
childCtx, span := instrumentation.Tracer.Start(
ctx,
"toolbox/server/source/init",
trace.WithAttributes(attribute.String("source_kind", sc.SourceConfigKind())),
trace.WithAttributes(attribute.String("source_name", name)),
)
defer span.End()
s, err := sc.Initialize(childCtx, instrumentation.Tracer)
if err != nil {
return nil, fmt.Errorf("unable to initialize source %q: %w", name, err)
}
return s, nil
}()
if err != nil {
return nil, nil, nil, nil, err
}
sourcesMap[name] = s
}
sourceNames := make([]string, 0, len(sourcesMap))
for name := range sourcesMap {
sourceNames = append(sourceNames, name)
}
l.InfoContext(ctx, fmt.Sprintf("Initialized %d sources: %s", len(sourcesMap), strings.Join(sourceNames, ", ")))
// initialize and validate the auth services from configs
authServicesMap := make(map[string]auth.AuthService)
for name, sc := range cfg.AuthServiceConfigs {
a, err := func() (auth.AuthService, error) {
_, span := instrumentation.Tracer.Start(
ctx,
"toolbox/server/auth/init",
trace.WithAttributes(attribute.String("auth_kind", sc.AuthServiceConfigKind())),
trace.WithAttributes(attribute.String("auth_name", name)),
)
defer span.End()
a, err := sc.Initialize()
if err != nil {
return nil, fmt.Errorf("unable to initialize auth service %q: %w", name, err)
}
return a, nil
}()
if err != nil {
return nil, nil, nil, nil, err
}
authServicesMap[name] = a
}
authServiceNames := make([]string, 0, len(authServicesMap))
for name := range authServicesMap {
authServiceNames = append(authServiceNames, name)
}
l.InfoContext(ctx, fmt.Sprintf("Initialized %d authServices: %s", len(authServicesMap), strings.Join(authServiceNames, ", ")))
// initialize and validate the tools from configs
toolsMap := make(map[string]tools.Tool)
for name, tc := range cfg.ToolConfigs {
t, err := func() (tools.Tool, error) {
_, span := instrumentation.Tracer.Start(
ctx,
"toolbox/server/tool/init",
trace.WithAttributes(attribute.String("tool_kind", tc.ToolConfigKind())),
trace.WithAttributes(attribute.String("tool_name", name)),
)
defer span.End()
t, err := tc.Initialize(sourcesMap)
if err != nil {
return nil, fmt.Errorf("unable to initialize tool %q: %w", name, err)
}
return t, nil
}()
if err != nil {
return nil, nil, nil, nil, err
}
toolsMap[name] = t
}
toolNames := make([]string, 0, len(toolsMap))
for name := range toolsMap {
toolNames = append(toolNames, name)
}
l.InfoContext(ctx, fmt.Sprintf("Initialized %d tools: %s", len(toolsMap), strings.Join(toolNames, ", ")))
// create a default toolset that contains all tools
allToolNames := make([]string, 0, len(toolsMap))
for name := range toolsMap {
allToolNames = append(allToolNames, name)
}
if cfg.ToolsetConfigs == nil {
cfg.ToolsetConfigs = make(ToolsetConfigs)
}
cfg.ToolsetConfigs[""] = tools.ToolsetConfig{Name: "", ToolNames: allToolNames}
// initialize and validate the toolsets from configs
toolsetsMap := make(map[string]tools.Toolset)
for name, tc := range cfg.ToolsetConfigs {
t, err := func() (tools.Toolset, error) {
_, span := instrumentation.Tracer.Start(
ctx,
"toolbox/server/toolset/init",
trace.WithAttributes(attribute.String("toolset_name", name)),
)
defer span.End()
t, err := tc.Initialize(cfg.Version, toolsMap)
if err != nil {
return tools.Toolset{}, fmt.Errorf("unable to initialize toolset %q: %w", name, err)
}
return t, err
}()
if err != nil {
return nil, nil, nil, nil, err
}
toolsetsMap[name] = t
}
toolsetNames := make([]string, 0, len(toolsetsMap))
for name := range toolsetsMap {
if name == "" {
toolsetNames = append(toolsetNames, "default")
} else {
toolsetNames = append(toolsetNames, name)
}
}
l.InfoContext(ctx, fmt.Sprintf("Initialized %d toolsets: %s", len(toolsetsMap), strings.Join(toolsetNames, ", ")))
return sourcesMap, authServicesMap, toolsMap, toolsetsMap, nil
}
// NewServer returns a Server object based on provided Config.
func NewServer(ctx context.Context, cfg ServerConfig) (*Server, error) {
instrumentation, err := util.InstrumentationFromContext(ctx)
if err != nil {
return nil, err
}
ctx, span := instrumentation.Tracer.Start(ctx, "toolbox/server/init")
defer span.End()
l, err := util.LoggerFromContext(ctx)
if err != nil {
return nil, err
}
// set up http serving
r := chi.NewRouter()
r.Use(middleware.Recoverer)
// logging
logLevel, err := log.SeverityToLevel(cfg.LogLevel.String())
if err != nil {
return nil, fmt.Errorf("unable to initialize http log: %w", err)
}
var httpOpts httplog.Options
switch cfg.LoggingFormat.String() {
case "json":
httpOpts = httplog.Options{
JSON: true,
LogLevel: logLevel,
Concise: true,
RequestHeaders: false,
MessageFieldName: "message",
SourceFieldName: "logging.googleapis.com/sourceLocation",
TimeFieldName: "timestamp",
LevelFieldName: "severity",
}
case "standard":
httpOpts = httplog.Options{
LogLevel: logLevel,
Concise: true,
RequestHeaders: false,
MessageFieldName: "message",
}
default:
return nil, fmt.Errorf("invalid Logging format: %q", cfg.LoggingFormat.String())
}
httpLogger := httplog.NewLogger("httplog", httpOpts)
r.Use(httplog.RequestLogger(httpLogger))
sourcesMap, authServicesMap, toolsMap, toolsetsMap, err := InitializeConfigs(ctx, cfg)
if err != nil {
return nil, fmt.Errorf("unable to initialize configs: %w", err)
}
addr := net.JoinHostPort(cfg.Address, strconv.Itoa(cfg.Port))
srv := &http.Server{Addr: addr, Handler: r}
sseManager := newSseManager(ctx)
resourceManager := NewResourceManager(sourcesMap, authServicesMap, toolsMap, toolsetsMap)
s := &Server{
version: cfg.Version,
srv: srv,
root: r,
logger: l,
instrumentation: instrumentation,
sseManager: sseManager,
ResourceMgr: resourceManager,
}
// control plane
apiR, err := apiRouter(s)
if err != nil {
return nil, err
}
r.Mount("/api", apiR)
mcpR, err := mcpRouter(s)
if err != nil {
return nil, err
}
r.Mount("/mcp", mcpR)
if cfg.UI {
webR, err := webRouter()
if err != nil {
return nil, err
}
r.Mount("/ui", webR)
}
// default endpoint for validating server is running
r.Get("/", func(w http.ResponseWriter, r *http.Request) {
_, _ = w.Write([]byte("🧰 Hello, World! 🧰"))
})
return s, nil
}
// Listen starts a listener for the given Server instance.
func (s *Server) Listen(ctx context.Context) error {
ctx, cancel := context.WithCancel(ctx)
defer cancel()
if s.listener != nil {
return fmt.Errorf("server is already listening: %s", s.listener.Addr().String())
}
lc := net.ListenConfig{KeepAlive: 30 * time.Second}
var err error
if s.listener, err = lc.Listen(ctx, "tcp", s.srv.Addr); err != nil {
return fmt.Errorf("failed to open listener for %q: %w", s.srv.Addr, err)
}
s.logger.DebugContext(ctx, fmt.Sprintf("server listening on %s", s.srv.Addr))
return nil
}
// Serve starts an HTTP server for the given Server instance.
func (s *Server) Serve(ctx context.Context) error {
s.logger.DebugContext(ctx, "Starting a HTTP server.")
return s.srv.Serve(s.listener)
}
// ServeStdio starts a new stdio session for mcp.
func (s *Server) ServeStdio(ctx context.Context, stdin io.Reader, stdout io.Writer) error {
stdioServer := NewStdioSession(s, stdin, stdout)
return stdioServer.Start(ctx)
}
// Shutdown gracefully shuts down the server without interrupting any active
// connections. It uses http.Server.Shutdown() and has the same functionality.
func (s *Server) Shutdown(ctx context.Context) error {
s.logger.DebugContext(ctx, "shutting down the server.")
return s.srv.Shutdown(ctx)
}
```
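ResourceManager above guards its four maps with a single RWMutex so request handlers can read concurrently while a configuration reload swaps every map at once through SetResources. A minimal standalone sketch of that pattern, with a placeholder string value type instead of the real tools.Tool interface:
```go
package main

import (
	"fmt"
	"sync"
)

// registry mirrors the ResourceManager locking pattern in miniature.
type registry struct {
	mu    sync.RWMutex
	tools map[string]string
}

// Get takes a read lock, so any number of lookups can proceed in parallel.
func (r *registry) Get(name string) (string, bool) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	t, ok := r.tools[name]
	return t, ok
}

// Set takes the write lock and replaces the whole map, so readers never
// observe a half-updated registry.
func (r *registry) Set(tools map[string]string) {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.tools = tools
}

func main() {
	r := &registry{tools: map[string]string{"search": "v1"}}
	r.Set(map[string]string{"search": "v2", "forecast": "v1"}) // hot reload
	if v, ok := r.Get("forecast"); ok {
		fmt.Println("forecast ->", v)
	}
}
```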
--------------------------------------------------------------------------------
/.hugo/static/js/w3.js:
--------------------------------------------------------------------------------
```javascript
/* W3.JS 1.04 April 2019 by w3schools.com */
"use strict";
var w3 = {};
w3.hide = function (sel) {
w3.hideElements(w3.getElements(sel));
};
w3.hideElements = function (elements) {
var i, l = elements.length;
for (i = 0; i < l; i++) {
w3.hideElement(elements[i]);
}
};
w3.hideElement = function (element) {
w3.styleElement(element, "display", "none");
};
w3.show = function (sel, a) {
var elements = w3.getElements(sel);
if (a) {w3.hideElements(elements);}
w3.showElements(elements);
};
w3.showElements = function (elements) {
var i, l = elements.length;
for (i = 0; i < l; i++) {
w3.showElement(elements[i]);
}
};
w3.showElement = function (element) {
w3.styleElement(element, "display", "block");
};
w3.addStyle = function (sel, prop, val) {
w3.styleElements(w3.getElements(sel), prop, val);
};
w3.styleElements = function (elements, prop, val) {
var i, l = elements.length;
for (i = 0; i < l; i++) {
w3.styleElement(elements[i], prop, val);
}
};
w3.styleElement = function (element, prop, val) {
element.style.setProperty(prop, val);
};
w3.toggleShow = function (sel) {
var i, x = w3.getElements(sel), l = x.length;
for (i = 0; i < l; i++) {
if (x[i].style.display == "none") {
w3.styleElement(x[i], "display", "block");
} else {
w3.styleElement(x[i], "display", "none");
}
}
};
w3.addClass = function (sel, name) {
w3.addClassElements(w3.getElements(sel), name);
};
w3.addClassElements = function (elements, name) {
var i, l = elements.length;
for (i = 0; i < l; i++) {
w3.addClassElement(elements[i], name);
}
};
w3.addClassElement = function (element, name) {
var i, arr1, arr2;
arr1 = element.className.split(" ");
arr2 = name.split(" ");
for (i = 0; i < arr2.length; i++) {
if (arr1.indexOf(arr2[i]) == -1) {element.className += " " + arr2[i];}
}
};
w3.removeClass = function (sel, name) {
w3.removeClassElements(w3.getElements(sel), name);
};
w3.removeClassElements = function (elements, name) {
var i, l = elements.length, arr1, arr2, j;
for (i = 0; i < l; i++) {
w3.removeClassElement(elements[i], name);
}
};
w3.removeClassElement = function (element, name) {
var i, arr1, arr2;
arr1 = element.className.split(" ");
arr2 = name.split(" ");
for (i = 0; i < arr2.length; i++) {
while (arr1.indexOf(arr2[i]) > -1) {
arr1.splice(arr1.indexOf(arr2[i]), 1);
}
}
element.className = arr1.join(" ");
};
w3.toggleClass = function (sel, c1, c2) {
w3.toggleClassElements(w3.getElements(sel), c1, c2);
};
w3.toggleClassElements = function (elements, c1, c2) {
var i, l = elements.length;
for (i = 0; i < l; i++) {
w3.toggleClassElement(elements[i], c1, c2);
}
};
w3.toggleClassElement = function (element, c1, c2) {
var t1, t2, t1Arr, t2Arr, j, arr, allPresent;
t1 = (c1 || "");
t2 = (c2 || "");
t1Arr = t1.split(" ");
t2Arr = t2.split(" ");
arr = element.className.split(" ");
if (t2Arr.length == 0) {
allPresent = true;
for (j = 0; j < t1Arr.length; j++) {
if (arr.indexOf(t1Arr[j]) == -1) {allPresent = false;}
}
if (allPresent) {
w3.removeClassElement(element, t1);
} else {
w3.addClassElement(element, t1);
}
} else {
allPresent = true;
for (j = 0; j < t1Arr.length; j++) {
if (arr.indexOf(t1Arr[j]) == -1) {allPresent = false;}
}
if (allPresent) {
w3.removeClassElement(element, t1);
w3.addClassElement(element, t2);
} else {
w3.removeClassElement(element, t2);
w3.addClassElement(element, t1);
}
}
};
w3.getElements = function (id) {
if (typeof id == "object") {
return [id];
} else {
return document.querySelectorAll(id);
}
};
w3.filterHTML = function(id, sel, filter) {
var a, b, c, i, ii, iii, hit;
a = w3.getElements(id);
for (i = 0; i < a.length; i++) {
b = a[i].querySelectorAll(sel);
for (ii = 0; ii < b.length; ii++) {
hit = 0;
if (b[ii].innerText.toUpperCase().indexOf(filter.toUpperCase()) > -1) {
hit = 1;
}
c = b[ii].getElementsByTagName("*");
for (iii = 0; iii < c.length; iii++) {
if (c[iii].innerText.toUpperCase().indexOf(filter.toUpperCase()) > -1) {
hit = 1;
}
}
if (hit == 1) {
b[ii].style.display = "";
} else {
b[ii].style.display = "none";
}
}
}
};
w3.sortHTML = function(id, sel, sortvalue) {
var a, b, i, ii, y, bytt, v1, v2, cc, j;
a = w3.getElements(id);
for (i = 0; i < a.length; i++) {
for (j = 0; j < 2; j++) {
cc = 0;
y = 1;
while (y == 1) {
y = 0;
b = a[i].querySelectorAll(sel);
for (ii = 0; ii < (b.length - 1); ii++) {
bytt = 0;
if (sortvalue) {
v1 = b[ii].querySelector(sortvalue).innerText;
v2 = b[ii + 1].querySelector(sortvalue).innerText;
} else {
v1 = b[ii].innerText;
v2 = b[ii + 1].innerText;
}
v1 = v1.toLowerCase();
v2 = v2.toLowerCase();
if ((j == 0 && (v1 > v2)) || (j == 1 && (v1 < v2))) {
bytt = 1;
break;
}
}
if (bytt == 1) {
b[ii].parentNode.insertBefore(b[ii + 1], b[ii]);
y = 1;
cc++;
}
}
if (cc > 0) {break;}
}
}
};
w3.slideshow = function (sel, ms, func) {
var i, ss, x = w3.getElements(sel), l = x.length;
ss = {};
ss.current = 1;
ss.x = x;
ss.ondisplaychange = func;
if (!isNaN(ms) || ms == 0) {
ss.milliseconds = ms;
} else {
ss.milliseconds = 1000;
}
ss.start = function() {
ss.display(ss.current)
if (ss.ondisplaychange) {ss.ondisplaychange();}
if (ss.milliseconds > 0) {
window.clearTimeout(ss.timeout);
ss.timeout = window.setTimeout(ss.next, ss.milliseconds);
}
};
ss.next = function() {
ss.current += 1;
if (ss.current > ss.x.length) {ss.current = 1;}
ss.start();
};
ss.previous = function() {
ss.current -= 1;
if (ss.current < 1) {ss.current = ss.x.length;}
ss.start();
};
ss.display = function (n) {
w3.styleElements(ss.x, "display", "none");
w3.styleElement(ss.x[n - 1], "display", "block");
}
ss.start();
return ss;
};
w3.includeHTML = function(cb) {
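  // Replaces one w3-include-html element per invocation: the XHR callback
  // swaps in the fetched (or fallback) HTML, then calls includeHTML again
  // until no such attributes remain; only then does cb (if provided) run.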
var z, i, elmnt, file, xhttp;
z = document.getElementsByTagName("*");
for (i = 0; i < z.length; i++) {
elmnt = z[i];
file = elmnt.getAttribute("w3-include-html");
if (file) {
xhttp = new XMLHttpRequest();
xhttp.onreadystatechange = function() {
if (this.readyState == 4) {
if (this.status == 200) {elmnt.innerHTML = this.responseText;}
if (this.status == 404) {
if (elmnt.getAttribute("w3-include-html-default")) {
elmnt.innerHTML = elmnt.getAttribute("w3-include-html-default");
}
else { elmnt.innerHTML = "Page not found."; }
}
elmnt.removeAttribute("w3-include-html");
w3.includeHTML(cb);
}
}
xhttp.open("GET", file, true);
xhttp.send();
return;
}
}
if (cb) cb();
};
w3.getHttpData = function (file, func) {
w3.http(file, function () {
if (this.readyState == 4 && this.status == 200) {
func(this.responseText);
}
});
};
w3.getHttpObject = function (file, func) {
w3.http(file, function () {
if (this.readyState == 4 && this.status == 200) {
func(JSON.parse(this.responseText));
}
});
};
w3.displayHttp = function (id, file) {
w3.http(file, function () {
if (this.readyState == 4 && this.status == 200) {
w3.displayObject(id, JSON.parse(this.responseText));
}
});
};
w3.http = function (target, readyfunc, xml, method) {
var httpObj;
if (!method) {method = "GET"; }
if (window.XMLHttpRequest) {
httpObj = new XMLHttpRequest();
} else if (window.ActiveXObject) {
httpObj = new ActiveXObject("Microsoft.XMLHTTP");
}
if (httpObj) {
if (readyfunc) {httpObj.onreadystatechange = readyfunc;}
httpObj.open(method, target, true);
httpObj.send(xml);
}
};
w3.getElementsByAttribute = function (x, att) {
var arr = [], arrCount = -1, i, l, y = x.getElementsByTagName("*"), z = att.toUpperCase();
l = y.length;
for (i = -1; i < l; i += 1) {
if (i == -1) {y[i] = x;}
if (y[i].getAttribute(z) !== null) {arrCount += 1; arr[arrCount] = y[i];}
}
return arr;
};
w3.dataObject = {},
w3.displayObject = function (id, data) {
var htmlObj, htmlTemplate, html, arr = [], a, l, rowClone, x, j, i, ii, cc, repeat, repeatObj, repeatX = "";
htmlObj = document.getElementById(id);
htmlTemplate = init_template(id, htmlObj);
html = htmlTemplate.cloneNode(true);
arr = w3.getElementsByAttribute(html, "w3-repeat");
l = arr.length;
for (j = (l - 1); j >= 0; j -= 1) {
cc = arr[j].getAttribute("w3-repeat").split(" ");
if (cc.length == 1) {
repeat = cc[0];
} else {
repeatX = cc[0];
repeat = cc[2];
}
arr[j].removeAttribute("w3-repeat");
repeatObj = data[repeat];
if (repeatObj && typeof repeatObj == "object" && repeatObj.length != "undefined") {
i = 0;
for (x in repeatObj) {
i += 1;
rowClone = arr[j];
rowClone = w3_replace_curly(rowClone, "element", repeatX, repeatObj[x]);
a = rowClone.attributes;
for (ii = 0; ii < a.length; ii += 1) {
a[ii].value = w3_replace_curly(a[ii], "attribute", repeatX, repeatObj[x]).value;
}
(i === repeatObj.length) ? arr[j].parentNode.replaceChild(rowClone, arr[j]) : arr[j].parentNode.insertBefore(rowClone, arr[j]);
}
} else {
console.log("w3-repeat must be an array. " + repeat + " is not an array.");
continue;
}
}
html = w3_replace_curly(html, "element");
htmlObj.parentNode.replaceChild(html, htmlObj);
function init_template(id, obj) {
var template;
template = obj.cloneNode(true);
if (w3.dataObject.hasOwnProperty(id)) {return w3.dataObject[id];}
w3.dataObject[id] = template;
return template;
}
function w3_replace_curly(elmnt, typ, repeatX, x) {
var value, rowClone, pos1, pos2, originalHTML, lookFor, lookForARR = [], i, cc, r;
rowClone = elmnt.cloneNode(true);
pos1 = 0;
while (pos1 > -1) {
originalHTML = (typ == "attribute") ? rowClone.value : rowClone.innerHTML;
pos1 = originalHTML.indexOf("{{", pos1);
if (pos1 === -1) {break;}
pos2 = originalHTML.indexOf("}}", pos1 + 1);
lookFor = originalHTML.substring(pos1 + 2, pos2);
lookForARR = lookFor.split("||");
value = undefined;
for (i = 0; i < lookForARR.length; i += 1) {
lookForARR[i] = lookForARR[i].replace(/^\s+|\s+$/gm, ''); //trim
if (x) {value = x[lookForARR[i]];}
if (value == undefined && data) {value = data[lookForARR[i]];}
if (value == undefined) {
cc = lookForARR[i].split(".");
if (cc[0] == repeatX) {value = x[cc[1]]; }
}
if (value == undefined) {
if (lookForARR[i] == repeatX) {value = x;}
}
if (value == undefined) {
if (lookForARR[i].substr(0, 1) == '"') {
value = lookForARR[i].replace(/"/g, "");
} else if (lookForARR[i].substr(0,1) == "'") {
value = lookForARR[i].replace(/'/g, "");
}
}
if (value != undefined) {break;}
}
if (value != undefined) {
r = "{{" + lookFor + "}}";
if (typ == "attribute") {
rowClone.value = rowClone.value.replace(r, value);
} else {
w3_replace_html(rowClone, r, value);
}
}
pos1 = pos1 + 1;
}
return rowClone;
}
function w3_replace_html(a, r, result) {
var b, l, i, a, x, j;
if (a.hasAttributes()) {
b = a.attributes;
l = b.length;
for (i = 0; i < l; i += 1) {
if (b[i].value.indexOf(r) > -1) {b[i].value = b[i].value.replace(r, result);}
}
}
x = a.getElementsByTagName("*");
l = x.length;
a.innerHTML = a.innerHTML.replace(r, result);
}
};
```
--------------------------------------------------------------------------------
/internal/tools/neo4j/neo4jschema/helpers/helpers.go:
--------------------------------------------------------------------------------
```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package helpers provides utility functions for transforming and processing Neo4j
// schema data. It includes functions for converting raw query results from both
// APOC and native Cypher queries into a standardized, structured format.
package helpers
import (
"fmt"
"sort"
"github.com/goccy/go-yaml"
"github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema/types"
"github.com/neo4j/neo4j-go-driver/v5/neo4j"
)
// ConvertToStringSlice converts a slice of any type to a slice of strings.
// It uses fmt.Sprintf to perform the conversion for each element.
// Example:
//
// input: []any{"user", 123, true}
// output: []string{"user", "123", "true"}
func ConvertToStringSlice(slice []any) []string {
result := make([]string, len(slice))
for i, v := range slice {
result[i] = fmt.Sprintf("%v", v)
}
return result
}
// GetStringValue safely converts any value to its string representation.
// If the input value is nil, it returns an empty string.
func GetStringValue(val any) string {
if val == nil {
return ""
}
return fmt.Sprintf("%v", val)
}
// MapToAPOCSchema converts a raw map from a Cypher query into a structured
// APOCSchemaResult. This is a workaround for database drivers that may return
// complex nested structures as `map[string]any` instead of unmarshalling
// directly into a struct. It achieves this by marshalling the map to YAML and
// then unmarshalling into the target struct.
func MapToAPOCSchema(schemaMap map[string]any) (*types.APOCSchemaResult, error) {
schemaBytes, err := yaml.Marshal(schemaMap)
if err != nil {
return nil, fmt.Errorf("failed to marshal schema map: %w", err)
}
var entities map[string]types.APOCEntity
if err = yaml.Unmarshal(schemaBytes, &entities); err != nil {
return nil, fmt.Errorf("failed to unmarshal schema map into entities: %w", err)
}
return &types.APOCSchemaResult{Value: entities}, nil
}
// ProcessAPOCSchema transforms the nested result from the `apoc.meta.schema()`
// procedure into flat lists of node labels and relationships, along with
// aggregated database statistics. It iterates through entities, processes nodes,
// and extracts outgoing relationship information nested within those nodes.
func ProcessAPOCSchema(apocSchema *types.APOCSchemaResult) ([]types.NodeLabel, []types.Relationship, *types.Statistics) {
var nodeLabels []types.NodeLabel
relMap := make(map[string]*types.Relationship)
stats := &types.Statistics{
NodesByLabel: make(map[string]int64),
RelationshipsByType: make(map[string]int64),
PropertiesByLabel: make(map[string]int64),
PropertiesByRelType: make(map[string]int64),
}
for name, entity := range apocSchema.Value {
// We only process top-level entities of type "node". Relationship info is
// derived from the "relationships" field within each node entity.
if entity.Type != "node" {
continue
}
nodeLabel := types.NodeLabel{
Name: name,
Count: entity.Count,
Properties: extractAPOCProperties(entity.Properties),
}
nodeLabels = append(nodeLabels, nodeLabel)
// Aggregate statistics for the node.
stats.NodesByLabel[name] = entity.Count
stats.TotalNodes += entity.Count
propCount := int64(len(nodeLabel.Properties))
stats.PropertiesByLabel[name] = propCount
stats.TotalProperties += propCount * entity.Count
// Extract relationship information from the node.
for relName, relInfo := range entity.Relationships {
// Only process outgoing relationships to avoid double-counting.
if relInfo.Direction != "out" {
continue
}
rel, exists := relMap[relName]
if !exists {
rel = &types.Relationship{
Type: relName,
Properties: extractAPOCProperties(relInfo.Properties),
}
if len(relInfo.Labels) > 0 {
rel.EndNode = relInfo.Labels[0]
}
rel.StartNode = name
relMap[relName] = rel
}
rel.Count += relInfo.Count
}
}
// Consolidate the relationships from the map into a slice and update stats.
relationships := make([]types.Relationship, 0, len(relMap))
for _, rel := range relMap {
relationships = append(relationships, *rel)
stats.RelationshipsByType[rel.Type] = rel.Count
stats.TotalRelationships += rel.Count
propCount := int64(len(rel.Properties))
stats.PropertiesByRelType[rel.Type] = propCount
stats.TotalProperties += propCount * rel.Count
}
sortAndClean(nodeLabels, relationships, stats)
// Set empty maps and lists to nil for cleaner output.
if len(nodeLabels) == 0 {
nodeLabels = nil
}
if len(relationships) == 0 {
relationships = nil
}
return nodeLabels, relationships, stats
}
// ProcessNonAPOCSchema serves as an alternative to ProcessAPOCSchema for environments
// where APOC procedures are not available. It converts schema data gathered from
// multiple separate, native Cypher queries (providing node counts, property maps, etc.)
// into the same standardized, structured format.
func ProcessNonAPOCSchema(
nodeCounts map[string]int64,
nodePropsMap map[string]map[string]map[string]bool,
relCounts map[string]int64,
relPropsMap map[string]map[string]map[string]bool,
relConnectivity map[string]types.RelConnectivityInfo,
) ([]types.NodeLabel, []types.Relationship, *types.Statistics) {
stats := &types.Statistics{
NodesByLabel: make(map[string]int64),
RelationshipsByType: make(map[string]int64),
PropertiesByLabel: make(map[string]int64),
PropertiesByRelType: make(map[string]int64),
}
// Process node information.
nodeLabels := make([]types.NodeLabel, 0, len(nodeCounts))
for label, count := range nodeCounts {
properties := make([]types.PropertyInfo, 0)
if props, ok := nodePropsMap[label]; ok {
for key, typeSet := range props {
typeList := make([]string, 0, len(typeSet))
for tp := range typeSet {
typeList = append(typeList, tp)
}
sort.Strings(typeList)
properties = append(properties, types.PropertyInfo{Name: key, Types: typeList})
}
}
sort.Slice(properties, func(i, j int) bool { return properties[i].Name < properties[j].Name })
nodeLabels = append(nodeLabels, types.NodeLabel{Name: label, Count: count, Properties: properties})
// Aggregate node statistics.
stats.NodesByLabel[label] = count
stats.TotalNodes += count
propCount := int64(len(properties))
stats.PropertiesByLabel[label] = propCount
stats.TotalProperties += propCount * count
}
// Process relationship information.
relationships := make([]types.Relationship, 0, len(relCounts))
for relType, count := range relCounts {
properties := make([]types.PropertyInfo, 0)
if props, ok := relPropsMap[relType]; ok {
for key, typeSet := range props {
typeList := make([]string, 0, len(typeSet))
for tp := range typeSet {
typeList = append(typeList, tp)
}
sort.Strings(typeList)
properties = append(properties, types.PropertyInfo{Name: key, Types: typeList})
}
}
sort.Slice(properties, func(i, j int) bool { return properties[i].Name < properties[j].Name })
conn := relConnectivity[relType]
relationships = append(relationships, types.Relationship{
Type: relType,
Count: count,
StartNode: conn.StartNode,
EndNode: conn.EndNode,
Properties: properties,
})
// Aggregate relationship statistics.
stats.RelationshipsByType[relType] = count
stats.TotalRelationships += count
propCount := int64(len(properties))
stats.PropertiesByRelType[relType] = propCount
stats.TotalProperties += propCount * count
}
sortAndClean(nodeLabels, relationships, stats)
// Set empty maps and lists to nil for cleaner output.
if len(nodeLabels) == 0 {
nodeLabels = nil
}
if len(relationships) == 0 {
relationships = nil
}
return nodeLabels, relationships, stats
}
// extractAPOCProperties is a helper that converts a map of APOC property
// information into a slice of standardized PropertyInfo structs. The resulting
// slice is sorted by property name for consistent ordering.
func extractAPOCProperties(props map[string]types.APOCProperty) []types.PropertyInfo {
properties := make([]types.PropertyInfo, 0, len(props))
for name, info := range props {
properties = append(properties, types.PropertyInfo{
Name: name,
Types: []string{info.Type},
Indexed: info.Indexed,
Unique: info.Unique,
Mandatory: info.Existence,
})
}
sort.Slice(properties, func(i, j int) bool {
return properties[i].Name < properties[j].Name
})
return properties
}
// sortAndClean performs final processing on the schema data. It sorts node and
// relationship slices for consistent output, primarily by count (descending) and
// secondarily by name/type. It also sets any empty maps in the statistics
// struct to nil, which can simplify downstream serialization (e.g., omitting
// empty fields in JSON).
func sortAndClean(nodeLabels []types.NodeLabel, relationships []types.Relationship, stats *types.Statistics) {
// Sort nodes by count (desc) then name (asc).
sort.Slice(nodeLabels, func(i, j int) bool {
if nodeLabels[i].Count != nodeLabels[j].Count {
return nodeLabels[i].Count > nodeLabels[j].Count
}
return nodeLabels[i].Name < nodeLabels[j].Name
})
// Sort relationships by count (desc) then type (asc).
sort.Slice(relationships, func(i, j int) bool {
if relationships[i].Count != relationships[j].Count {
return relationships[i].Count > relationships[j].Count
}
return relationships[i].Type < relationships[j].Type
})
// Nil out empty maps for cleaner output.
if len(stats.NodesByLabel) == 0 {
stats.NodesByLabel = nil
}
if len(stats.RelationshipsByType) == 0 {
stats.RelationshipsByType = nil
}
if len(stats.PropertiesByLabel) == 0 {
stats.PropertiesByLabel = nil
}
if len(stats.PropertiesByRelType) == 0 {
stats.PropertiesByRelType = nil
}
}
// ConvertValue recursively converts a Neo4j driver value into a JSON-compatible value.
func ConvertValue(value any) any {
switch v := value.(type) {
case nil, neo4j.InvalidValue:
return nil
case bool, string, int, int8, int16, int32, int64, float32, float64:
return v
case neo4j.Date, neo4j.LocalTime, neo4j.Time,
neo4j.LocalDateTime, neo4j.Duration:
if iv, ok := v.(types.ValueType); ok {
return iv.String()
}
case neo4j.Node:
return map[string]any{
"elementId": v.GetElementId(),
"labels": v.Labels,
"properties": ConvertValue(v.GetProperties()),
}
case neo4j.Relationship:
return map[string]any{
"elementId": v.GetElementId(),
"type": v.Type,
"startElementId": v.StartElementId,
"endElementId": v.EndElementId,
"properties": ConvertValue(v.GetProperties()),
}
case neo4j.Entity:
return map[string]any{
"elementId": v.GetElementId(),
"properties": ConvertValue(v.GetProperties()),
}
case neo4j.Path:
var nodes []any
var relationships []any
for _, r := range v.Relationships {
relationships = append(relationships, ConvertValue(r))
}
for _, n := range v.Nodes {
nodes = append(nodes, ConvertValue(n))
}
return map[string]any{
"nodes": nodes,
"relationships": relationships,
}
case neo4j.Record:
m := make(map[string]any)
for i, key := range v.Keys {
m[key] = ConvertValue(v.Values[i])
}
return m
case neo4j.Point2D:
return map[string]any{"x": v.X, "y": v.Y, "srid": v.SpatialRefId}
case neo4j.Point3D:
return map[string]any{"x": v.X, "y": v.Y, "z": v.Z, "srid": v.SpatialRefId}
case []any:
arr := make([]any, len(v))
for i, elem := range v {
arr[i] = ConvertValue(elem)
}
return arr
case map[string]any:
m := make(map[string]any)
for key, val := range v {
m[key] = ConvertValue(val)
}
return m
}
return fmt.Sprintf("%v", value)
}
```
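The triple-nested map arguments to ProcessNonAPOCSchema are easy to misread: the property maps are keyed label → property name → observed type → presence. A standalone sketch building a hypothetical input for a single Person label (names and types are illustrative):
```go
package main

import "fmt"

func main() {
	// label -> property name -> observed Cypher type -> seen
	nodePropsMap := map[string]map[string]map[string]bool{
		"Person": {
			"name": {"STRING": true},
			"age":  {"INTEGER": true, "FLOAT": true}, // mixed types are allowed
		},
	}
	for label, props := range nodePropsMap {
		for prop, typeSet := range props {
			for tp := range typeSet {
				fmt.Printf("%s.%s observed as %s\n", label, prop, tp)
			}
		}
	}
}
```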
--------------------------------------------------------------------------------
/internal/prebuiltconfigs/tools/spanner-postgres.yaml:
--------------------------------------------------------------------------------
```yaml
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
sources:
spanner-source:
kind: "spanner"
project: ${SPANNER_PROJECT}
instance: ${SPANNER_INSTANCE}
database: ${SPANNER_DATABASE}
dialect: "postgresql"
tools:
execute_sql:
kind: spanner-execute-sql
source: spanner-source
description: Use this tool to execute DML SQL. Please use the PostgreSQL interface for Spanner.
execute_sql_dql:
kind: spanner-execute-sql
source: spanner-source
description: Use this tool to execute DQL SQL. Please use the PostgreSQL interface for Spanner.
readOnly: true
list_tables:
kind: spanner-sql
source: spanner-source
readOnly: true
description: "Lists detailed schema information (object type, columns, constraints, indexes, triggers, owner, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
statement: |
WITH table_info_cte AS (
SELECT
T.TABLE_SCHEMA,
T.TABLE_NAME,
T.TABLE_TYPE,
T.PARENT_TABLE_NAME,
T.ON_DELETE_ACTION
FROM INFORMATION_SCHEMA.TABLES AS T
WHERE
T.TABLE_SCHEMA = 'public'
AND T.TABLE_TYPE = 'BASE TABLE'
AND (
NULLIF(TRIM($1), '') IS NULL OR
T.TABLE_NAME IN (
SELECT table_name
FROM UNNEST(regexp_split_to_array($1, '\s*,\s*')) AS table_name)
)
),
columns_info_cte AS (
SELECT
C.TABLE_SCHEMA,
C.TABLE_NAME,
ARRAY_AGG(
CONCAT(
'{',
'"column_name":"', COALESCE(REPLACE(C.COLUMN_NAME, '"', '\"'), ''), '",',
'"data_type":"', COALESCE(REPLACE(C.SPANNER_TYPE, '"', '\"'), ''), '",',
'"ordinal_position":', C.ORDINAL_POSITION::TEXT, ',',
'"is_not_nullable":', CASE WHEN C.IS_NULLABLE = 'NO' THEN 'true' ELSE 'false' END, ',',
'"column_default":', CASE WHEN C.COLUMN_DEFAULT IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(C.COLUMN_DEFAULT::text, '"', '\"'), '"') END,
'}'
) ORDER BY C.ORDINAL_POSITION
) AS columns_json_array_elements
FROM INFORMATION_SCHEMA.COLUMNS AS C
WHERE C.TABLE_SCHEMA = 'public'
AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE C.TABLE_SCHEMA = TI.TABLE_SCHEMA AND C.TABLE_NAME = TI.TABLE_NAME)
GROUP BY C.TABLE_SCHEMA, C.TABLE_NAME
),
constraint_columns_agg_cte AS (
SELECT
CONSTRAINT_CATALOG,
CONSTRAINT_SCHEMA,
CONSTRAINT_NAME,
ARRAY_AGG('"' || REPLACE(COLUMN_NAME, '"', '\"') || '"' ORDER BY ORDINAL_POSITION) AS column_names_json_list
FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
WHERE CONSTRAINT_SCHEMA = 'public'
GROUP BY CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME
),
constraints_info_cte AS (
SELECT
TC.TABLE_SCHEMA,
TC.TABLE_NAME,
ARRAY_AGG(
CONCAT(
'{',
'"constraint_name":"', COALESCE(REPLACE(TC.CONSTRAINT_NAME, '"', '\"'), ''), '",',
'"constraint_type":"', COALESCE(REPLACE(TC.CONSTRAINT_TYPE, '"', '\"'), ''), '",',
'"constraint_definition":',
CASE TC.CONSTRAINT_TYPE
WHEN 'CHECK' THEN CASE WHEN CC.CHECK_CLAUSE IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(CC.CHECK_CLAUSE, '"', '\"'), '"') END
WHEN 'PRIMARY KEY' THEN CONCAT('"', 'PRIMARY KEY (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
WHEN 'UNIQUE' THEN CONCAT('"', 'UNIQUE (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
WHEN 'FOREIGN KEY' THEN CONCAT('"', 'FOREIGN KEY (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ') REFERENCES ',
COALESCE(REPLACE(RefKeyTable.TABLE_NAME, '"', '\"'), ''),
' (', array_to_string(COALESCE(RefKeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
ELSE 'null'
END, ',',
'"constraint_columns":[', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ','), '],',
'"foreign_key_referenced_table":', CASE WHEN RefKeyTable.TABLE_NAME IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(RefKeyTable.TABLE_NAME, '"', '\"'), '"') END, ',',
'"foreign_key_referenced_columns":[', array_to_string(COALESCE(RefKeyCols.column_names_json_list, ARRAY[]::text[]), ','), ']',
'}'
) ORDER BY TC.CONSTRAINT_NAME
) AS constraints_json_array_elements
FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC
LEFT JOIN INFORMATION_SCHEMA.CHECK_CONSTRAINTS AS CC
ON TC.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS RC
ON TC.CONSTRAINT_CATALOG = RC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = RC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = RC.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS RefConstraint
ON RC.UNIQUE_CONSTRAINT_CATALOG = RefConstraint.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefConstraint.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefConstraint.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.TABLES AS RefKeyTable
ON RefConstraint.TABLE_CATALOG = RefKeyTable.TABLE_CATALOG AND RefConstraint.TABLE_SCHEMA = RefKeyTable.TABLE_SCHEMA AND RefConstraint.TABLE_NAME = RefKeyTable.TABLE_NAME
LEFT JOIN constraint_columns_agg_cte AS KeyCols
ON TC.CONSTRAINT_CATALOG = KeyCols.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = KeyCols.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = KeyCols.CONSTRAINT_NAME
LEFT JOIN constraint_columns_agg_cte AS RefKeyCols
ON RC.UNIQUE_CONSTRAINT_CATALOG = RefKeyCols.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefKeyCols.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefKeyCols.CONSTRAINT_NAME AND TC.CONSTRAINT_TYPE = 'FOREIGN KEY'
WHERE TC.TABLE_SCHEMA = 'public'
AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE TC.TABLE_SCHEMA = TI.TABLE_SCHEMA AND TC.TABLE_NAME = TI.TABLE_NAME)
GROUP BY TC.TABLE_SCHEMA, TC.TABLE_NAME
),
index_key_columns_agg_cte AS (
SELECT
TABLE_CATALOG,
TABLE_SCHEMA,
TABLE_NAME,
INDEX_NAME,
ARRAY_AGG(
CONCAT(
'{"column_name":"', COALESCE(REPLACE(COLUMN_NAME, '"', '\"'), ''), '",',
'"ordering":"', COALESCE(REPLACE(COLUMN_ORDERING, '"', '\"'), ''), '"}'
) ORDER BY ORDINAL_POSITION
) AS key_column_json_details
FROM INFORMATION_SCHEMA.INDEX_COLUMNS
WHERE ORDINAL_POSITION IS NOT NULL
AND TABLE_SCHEMA = 'public'
GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
),
index_storing_columns_agg_cte AS (
SELECT
TABLE_CATALOG,
TABLE_SCHEMA,
TABLE_NAME,
INDEX_NAME,
ARRAY_AGG(CONCAT('"', REPLACE(COLUMN_NAME, '"', '\"'), '"') ORDER BY COLUMN_NAME) AS storing_column_json_names
FROM INFORMATION_SCHEMA.INDEX_COLUMNS
WHERE ORDINAL_POSITION IS NULL
AND TABLE_SCHEMA = 'public'
GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
),
indexes_info_cte AS (
SELECT
I.TABLE_SCHEMA,
I.TABLE_NAME,
ARRAY_AGG(
CONCAT(
'{',
'"index_name":"', COALESCE(REPLACE(I.INDEX_NAME, '"', '\"'), ''), '",',
'"index_type":"', COALESCE(REPLACE(I.INDEX_TYPE, '"', '\"'), ''), '",',
'"is_unique":', CASE WHEN I.IS_UNIQUE = 'YES' THEN 'true' ELSE 'false' END, ',',
'"is_null_filtered":', CASE WHEN I.IS_NULL_FILTERED = 'YES' THEN 'true' ELSE 'false' END, ',',
'"interleaved_in_table":', CASE WHEN I.PARENT_TABLE_NAME IS NULL OR I.PARENT_TABLE_NAME = '' THEN 'null' ELSE CONCAT('"', REPLACE(I.PARENT_TABLE_NAME, '"', '\"'), '"') END, ',',
'"index_key_columns":[', COALESCE(array_to_string(KeyIndexCols.key_column_json_details, ','), ''), '],',
'"storing_columns":[', COALESCE(array_to_string(StoringIndexCols.storing_column_json_names, ','), ''), ']',
'}'
) ORDER BY I.INDEX_NAME
) AS indexes_json_array_elements
FROM INFORMATION_SCHEMA.INDEXES AS I
LEFT JOIN index_key_columns_agg_cte AS KeyIndexCols
ON I.TABLE_CATALOG = KeyIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = KeyIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = KeyIndexCols.TABLE_NAME AND I.INDEX_NAME = KeyIndexCols.INDEX_NAME
LEFT JOIN index_storing_columns_agg_cte AS StoringIndexCols
ON I.TABLE_CATALOG = StoringIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = StoringIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = StoringIndexCols.TABLE_NAME AND I.INDEX_NAME = StoringIndexCols.INDEX_NAME
AND I.INDEX_TYPE IN ('LOCAL', 'GLOBAL')
WHERE I.TABLE_SCHEMA = 'public'
AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE I.TABLE_SCHEMA = TI.TABLE_SCHEMA AND I.TABLE_NAME = TI.TABLE_NAME)
GROUP BY I.TABLE_SCHEMA, I.TABLE_NAME
)
SELECT
TI.TABLE_SCHEMA AS schema_name,
TI.TABLE_NAME AS object_name,
CASE
WHEN $2 = 'simple' THEN
-- IF format is 'simple', return basic JSON
CONCAT('{"name":"', COALESCE(REPLACE(TI.TABLE_NAME, '"', '\"'), ''), '"}')
ELSE
CONCAT(
'{',
'"schema_name":"', COALESCE(REPLACE(TI.TABLE_SCHEMA, '"', '\"'), ''), '",',
'"object_name":"', COALESCE(REPLACE(TI.TABLE_NAME, '"', '\"'), ''), '",',
'"object_type":"', COALESCE(REPLACE(TI.TABLE_TYPE, '"', '\"'), ''), '",',
'"columns":[', COALESCE(array_to_string(CI.columns_json_array_elements, ','), ''), '],',
'"constraints":[', COALESCE(array_to_string(CONSI.constraints_json_array_elements, ','), ''), '],',
'"indexes":[', COALESCE(array_to_string(II.indexes_json_array_elements, ','), ''), ']',
'}'
)
END AS object_details
FROM table_info_cte AS TI
LEFT JOIN columns_info_cte AS CI
ON TI.TABLE_SCHEMA = CI.TABLE_SCHEMA AND TI.TABLE_NAME = CI.TABLE_NAME
LEFT JOIN constraints_info_cte AS CONSI
ON TI.TABLE_SCHEMA = CONSI.TABLE_SCHEMA AND TI.TABLE_NAME = CONSI.TABLE_NAME
LEFT JOIN indexes_info_cte AS II
ON TI.TABLE_SCHEMA = II.TABLE_SCHEMA AND TI.TABLE_NAME = II.TABLE_NAME
ORDER BY TI.TABLE_SCHEMA, TI.TABLE_NAME;
parameters:
- name: table_names
type: string
description: "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed."
default: ""
- name: output_format
type: string
description: "Optional: Use 'simple' to return table names only or use 'detailed' to return the full information schema."
default: "detailed"
toolsets:
spanner_postgres_database_tools:
- execute_sql
- execute_sql_dql
- list_tables
```
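A minimal sketch of invoking the `list_tables` tool above over the Toolbox HTTP API. It assumes a local server on the default port 5000 (the endpoint pattern matches the integration tests elsewhere in this repo); the table name `hotels` is only an illustrative value:
```go
package main
import (
	"bytes"
	"fmt"
	"io"
	"log"
	"net/http"
)
func main() {
	// Both parameters are optional; an empty table_names lists all tables.
	body := bytes.NewBufferString(`{"table_names": "hotels", "output_format": "simple"}`)
	resp, err := http.Post("http://127.0.0.1:5000/api/tool/list_tables/invoke", "application/json", body)
	if err != nil {
		log.Fatalf("request failed: %v", err)
	}
	defer resp.Body.Close()
	out, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(out))
}
```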
--------------------------------------------------------------------------------
/tests/bigtable/bigtable_integration_test.go:
--------------------------------------------------------------------------------
```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package bigtable
import (
"bytes"
"context"
"encoding/binary"
"fmt"
"log"
"os"
"regexp"
"slices"
"strings"
"testing"
"time"
"cloud.google.com/go/bigtable"
"github.com/google/uuid"
"github.com/googleapis/genai-toolbox/internal/testutils"
"github.com/googleapis/genai-toolbox/internal/tools"
"github.com/googleapis/genai-toolbox/tests"
)
var (
BigtableSourceKind = "bigtable"
BigtableToolKind = "bigtable-sql"
BigtableProject = os.Getenv("BIGTABLE_PROJECT")
BigtableInstance = os.Getenv("BIGTABLE_INSTANCE")
)
func getBigtableVars(t *testing.T) map[string]any {
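// Switching on the empty string matches the first required environment
// variable that is unset, failing fast with a message naming it.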
switch "" {
case BigtableProject:
t.Fatal("'BIGTABLE_PROJECT' not set")
case BigtableInstance:
t.Fatal("'BIGTABLE_INSTANCE' not set")
}
return map[string]any{
"kind": BigtableSourceKind,
"project": BigtableProject,
"instance": BigtableInstance,
}
}
type TestRow struct {
RowKey string
ColumnName string
Data []byte
}
func TestBigtableToolEndpoints(t *testing.T) {
sourceConfig := getBigtableVars(t)
ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
defer cancel()
var args []string
tableName := "param_table" + strings.ReplaceAll(uuid.New().String(), "-", "")
tableNameAuth := "auth_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
tableNameTemplateParam := "tmpl_param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
columnFamilyName := "cf"
muts, rowKeys := getTestData(columnFamilyName)
// Do not change the shape of the statement without checking tests/common_test.go.
// The structure and values of the seed data have to match https://github.com/googleapis/genai-toolbox/blob/4dba0df12dc438eca3cb476ef52aa17cdf232c12/tests/common_test.go#L200-L251
paramTestStatement := fmt.Sprintf("SELECT TO_INT64(cf['id']) as id, CAST(cf['name'] AS string) as name, FROM %s WHERE TO_INT64(cf['id']) = @id OR CAST(cf['name'] AS string) = @name;", tableName)
idParamTestStatement := fmt.Sprintf("SELECT TO_INT64(cf['id']) as id, CAST(cf['name'] AS string) as name, FROM %s WHERE TO_INT64(cf['id']) = @id;", tableName)
nameParamTestStatement := fmt.Sprintf("SELECT TO_INT64(cf['id']) as id, CAST(cf['name'] AS string) as name, FROM %s WHERE CAST(cf['name'] AS string) = @name;", tableName)
arrayTestStatement := fmt.Sprintf(
"SELECT TO_INT64(cf['id']) AS id, CAST(cf['name'] AS string) AS name FROM %s WHERE TO_INT64(cf['id']) IN UNNEST(@idArray) AND CAST(cf['name'] AS string) IN UNNEST(@nameArray);",
tableName,
)
teardownTable1 := setupBtTable(t, ctx, sourceConfig["project"].(string), sourceConfig["instance"].(string), tableName, columnFamilyName, muts, rowKeys)
defer teardownTable1(t)
// Do not change the shape of the statement without checking tests/common_test.go.
// The structure and values of the seed data have to match https://github.com/googleapis/genai-toolbox/blob/4dba0df12dc438eca3cb476ef52aa17cdf232c12/tests/common_test.go#L200-L251
authToolStatement := fmt.Sprintf("SELECT CAST(cf['name'] AS string) as name FROM %s WHERE CAST(cf['email'] AS string) = @email;", tableNameAuth)
teardownTable2 := setupBtTable(t, ctx, sourceConfig["project"].(string), sourceConfig["instance"].(string), tableNameAuth, columnFamilyName, muts, rowKeys)
defer teardownTable2(t)
mutsTmpl, rowKeysTmpl := getTestDataTemplateParam(columnFamilyName)
teardownTableTmpl := setupBtTable(t, ctx, sourceConfig["project"].(string), sourceConfig["instance"].(string), tableNameTemplateParam, columnFamilyName, mutsTmpl, rowKeysTmpl)
defer teardownTableTmpl(t)
// Write config into a file and pass it to command
toolsFile := tests.GetToolsConfig(sourceConfig, BigtableToolKind, paramTestStatement, idParamTestStatement, nameParamTestStatement, arrayTestStatement, authToolStatement)
toolsFile = addTemplateParamConfig(t, toolsFile)
cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
if err != nil {
t.Fatalf("command initialization returned an error: %s", err)
}
defer cleanup()
waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
defer cancel()
out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
if err != nil {
t.Logf("toolbox command logs: \n%s", out)
t.Fatalf("toolbox didn't start successfully: %s", err)
}
// Get configs for tests
// Actual test parameters are set in https://github.com/googleapis/genai-toolbox/blob/52b09a67cb40ac0c5f461598b4673136699a3089/tests/tool_test.go#L250
select1Want := "[{\"$col1\":1}]"
myToolById4Want := `[{"id":4,"name":""}]`
mcpMyFailToolWant := `{"jsonrpc":"2.0","id":"invoke-fail-tool","result":{"content":[{"type":"text","text":"unable to prepare statement: rpc error: code = InvalidArgument desc = Syntax error: Unexpected identifier \"SELEC\" [at 1:1]"}],"isError":true}}`
mcpSelect1Want := `{"jsonrpc":"2.0","id":"invoke my-auth-required-tool","result":{"content":[{"type":"text","text":"{\"$col1\":1}"}]}}`
nameFieldArray := `["CAST(cf['name'] AS string) as name"]`
nameColFilter := "CAST(cf['name'] AS string)"
// Run tests
tests.RunToolGetTest(t)
tests.RunToolInvokeTest(t, select1Want,
tests.WithMyToolById4Want(myToolById4Want),
)
tests.RunMCPToolCallMethod(t, mcpMyFailToolWant, mcpSelect1Want)
tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam,
tests.WithNameFieldArray(nameFieldArray),
tests.WithNameColFilter(nameColFilter),
tests.DisableDdlTest(),
tests.DisableInsertTest(),
)
}
func convertToBytes(v int) []byte {
binary1 := new(bytes.Buffer)
if err := binary.Write(binary1, binary.BigEndian, int64(v)); err != nil {
log.Fatalf("Unable to encode id: %v", err)
}
return binary1.Bytes()
}
func getTestData(columnFamilyName string) ([]*bigtable.Mutation, []string) {
muts := []*bigtable.Mutation{}
rowKeys := []string{}
var ids [4][]byte
for i := range ids {
ids[i] = convertToBytes(i + 1)
}
now := bigtable.Time(time.Now())
for rowKey, mutData := range map[string]map[string][]byte{
// Do not change the test data without checking tests/common_test.go.
// The structure and values of the seed data have to match https://github.com/googleapis/genai-toolbox/blob/4dba0df12dc438eca3cb476ef52aa17cdf232c12/tests/common_test.go#L200-L251
// Expected values are defined in https://github.com/googleapis/genai-toolbox/blob/52b09a67cb40ac0c5f461598b4673136699a3089/tests/tool_test.go#L229-L310
"row-01": {
"name": []byte("Alice"),
"email": []byte(tests.ServiceAccountEmail),
"id": ids[0],
},
"row-02": {
"name": []byte("Jane"),
"email": []byte("[email protected]"),
"id": ids[1],
},
"row-03": {
"name": []byte("Sid"),
"id": ids[2],
},
"row-04": {
"name": nil,
"id": ids[3],
},
} {
mut := bigtable.NewMutation()
for col, v := range mutData {
mut.Set(columnFamilyName, col, now, v)
}
muts = append(muts, mut)
rowKeys = append(rowKeys, rowKey)
}
return muts, rowKeys
}
func getTestDataTemplateParam(columnFamilyName string) ([]*bigtable.Mutation, []string) {
muts := []*bigtable.Mutation{}
rowKeys := []string{}
var ids [2][]byte
for i := range ids {
ids[i] = convertToBytes(i + 1)
}
now := bigtable.Time(time.Now())
for rowKey, mutData := range map[string]map[string][]byte{
// Do not change the test data without checking tests/common_test.go.
// The structure and values of the seed data have to match https://github.com/googleapis/genai-toolbox/blob/4dba0df12dc438eca3cb476ef52aa17cdf232c12/tests/common_test.go#L200-L251
// Expected values are defined in https://github.com/googleapis/genai-toolbox/blob/52b09a67cb40ac0c5f461598b4673136699a3089/tests/tool_test.go#L229-L310
"row-01": {
"name": []byte("Alex"),
"age": convertToBytes(21),
"id": ids[0],
},
"row-02": {
"name": []byte("Alice"),
"age": convertToBytes(100),
"id": ids[1],
},
} {
mut := bigtable.NewMutation()
for col, v := range mutData {
mut.Set(columnFamilyName, col, now, v)
}
muts = append(muts, mut)
rowKeys = append(rowKeys, rowKey)
}
return muts, rowKeys
}
func setupBtTable(t *testing.T, ctx context.Context, projectId string, instance string, tableName string, columnFamilyName string, muts []*bigtable.Mutation, rowKeys []string) func(*testing.T) {
// Creating clients
adminClient, err := bigtable.NewAdminClient(ctx, projectId, instance)
if err != nil {
t.Fatalf("NewAdminClient: %v", err)
}
client, err := bigtable.NewClient(ctx, projectId, instance)
if err != nil {
log.Fatalf("Could not create data operations client: %v", err)
}
defer client.Close()
// Creating tables
tables, err := adminClient.Tables(ctx)
if err != nil {
log.Fatalf("Could not fetch table list: %v", err)
}
if !slices.Contains(tables, tableName) {
log.Printf("Creating table %s", tableName)
if err := adminClient.CreateTable(ctx, tableName); err != nil {
log.Fatalf("Could not create table %s: %v", tableName, err)
}
}
tblInfo, err := adminClient.TableInfo(ctx, tableName)
if err != nil {
log.Fatalf("Could not read info for table %s: %v", tableName, err)
}
// Creating column family
if !slices.Contains(tblInfo.Families, columnFamilyName) {
if err := adminClient.CreateColumnFamily(ctx, tableName, columnFamilyName); err != nil {
log.Fatalf("Could not create column family %s: %v", columnFamilyName, err)
}
}
tbl := client.Open(tableName)
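// ApplyBulk reports failures in two ways: a per-row error slice and an
// overall error. Both are checked below.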
rowErrs, err := tbl.ApplyBulk(ctx, rowKeys, muts)
if err != nil {
log.Fatalf("Could not apply bulk row mutation: %v", err)
}
if rowErrs != nil {
for _, rowErr := range rowErrs {
log.Printf("Error writing row: %v", rowErr)
}
log.Fatalf("Could not write some rows")
}
// Return a teardown function that deletes the table.
return func(t *testing.T) {
// tear down test
if err = adminClient.DeleteTable(ctx, tableName); err != nil {
log.Fatalf("Teardown failed. Could not delete table %s: %v", tableName, err)
}
defer adminClient.Close()
}
}
func addTemplateParamConfig(t *testing.T, config map[string]any) map[string]any {
toolsMap, ok := config["tools"].(map[string]any)
if !ok {
t.Fatalf("unable to get tools from config")
}
toolsMap["select-templateParams-tool"] = map[string]any{
"kind": "bigtable-sql",
"source": "my-instance",
"description": "Create table tool with template parameters",
"statement": "SELECT TO_INT64(cf['age']) as age, TO_INT64(cf['id']) as id, CAST(cf['name'] AS string) as name, FROM {{.tableName}};",
"templateParameters": []tools.Parameter{
tools.NewStringParameter("tableName", "some description"),
},
}
toolsMap["select-templateParams-combined-tool"] = map[string]any{
"kind": "bigtable-sql",
"source": "my-instance",
"description": "Create table tool with template parameters",
"statement": "SELECT TO_INT64(cf['age']) as age, TO_INT64(cf['id']) as id, CAST(cf['name'] AS string) as name, FROM {{.tableName}} WHERE TO_INT64(cf['id']) = @id;",
"parameters": []tools.Parameter{tools.NewIntParameter("id", "the id of the user")},
"templateParameters": []tools.Parameter{
tools.NewStringParameter("tableName", "some description"),
},
}
toolsMap["select-fields-templateParams-tool"] = map[string]any{
"kind": "bigtable-sql",
"source": "my-instance",
"description": "Create table tool with template parameters",
"statement": "SELECT {{array .fields}}, FROM {{.tableName}};",
"templateParameters": []tools.Parameter{
tools.NewStringParameter("tableName", "some description"),
tools.NewArrayParameter("fields", "The fields to select from", tools.NewStringParameter("field", "A field that will be returned from the query.")),
},
}
toolsMap["select-filter-templateParams-combined-tool"] = map[string]any{
"kind": "bigtable-sql",
"source": "my-instance",
"description": "Create table tool with template parameters",
"statement": "SELECT TO_INT64(cf['age']) as age, TO_INT64(cf['id']) as id, CAST(cf['name'] AS string) as name, FROM {{.tableName}} WHERE {{.columnFilter}} = @name;",
"parameters": []tools.Parameter{tools.NewStringParameter("name", "the name of the user")},
"templateParameters": []tools.Parameter{
tools.NewStringParameter("tableName", "some description"),
tools.NewStringParameter("columnFilter", "some description"),
},
}
config["tools"] = toolsMap
return config
}
```
--------------------------------------------------------------------------------
/docs/en/resources/tools/firestore/firestore-query.md:
--------------------------------------------------------------------------------
```markdown
---
title: "firestore-query"
type: docs
weight: 1
description: >
Query a Firestore collection with parameterizable filters and Firestore native JSON value types
aliases:
- /resources/tools/firestore-query
---
## Overview
The `firestore-query` tool allows you to query Firestore collections with
dynamic, parameterizable filters that support Firestore's native JSON value
types. The tool is designed to query a single collection, which is the
standard pattern in Firestore. The collection path itself can be parameterized,
making it flexible for various use cases. This tool is particularly useful when
you need to create reusable query templates with parameters that can be
substituted at runtime.
**Developer Note**: This tool serves as the general querying foundation that
developers can use to create custom tools with specific query patterns.
## Key Features
- **Parameterizable Queries**: Use Go template syntax to create dynamic queries
- **Dynamic Collection Paths**: The collection path can be parameterized for
flexibility
- **Native JSON Value Types**: Support for Firestore's typed values
(stringValue, integerValue, doubleValue, etc.)
- **Complex Filter Logic**: Support for AND/OR logical operators in filters
- **Template Substitution**: Dynamic collection paths, filters, and ordering
- **Query Analysis**: Optional query performance analysis with explain metrics
(non-parameterizable)
## Configuration
### Basic Configuration
```yaml
tools:
query_countries:
kind: firestore-query
source: my-firestore-source
description: Query countries with dynamic filters
collectionPath: "countries"
filters: |
{
"field": "continent",
"op": "==",
"value": {"stringValue": "{{.continent}}"}
}
parameters:
- name: continent
type: string
description: Continent to filter by
required: true
```
### Advanced Configuration with Complex Filters
```yaml
tools:
advanced_query:
kind: firestore-query
source: my-firestore-source
description: Advanced query with complex filters
collectionPath: "{{.collection}}"
filters: |
{
"or": [
{"field": "status", "op": "==", "value": {"stringValue": "{{.status}}"}},
{
"and": [
{"field": "priority", "op": ">", "value": {"integerValue": "{{.priority}}"}},
{"field": "area", "op": "<", "value": {"doubleValue": {{.maxArea}}}},
{"field": "active", "op": "==", "value": {"booleanValue": {{.isActive}}}}
]
}
]
}
select:
- name
- status
- priority
orderBy:
field: "{{.sortField}}"
direction: "{{.sortDirection}}"
limit: 100
analyzeQuery: true
parameters:
- name: collection
type: string
description: Collection to query
required: true
- name: status
type: string
description: Status to filter by
required: true
- name: priority
type: string
description: Minimum priority value
required: true
- name: maxArea
type: float
description: Maximum area value
required: true
- name: isActive
type: boolean
description: Filter by active status
required: true
- name: sortField
type: string
description: Field to sort by
required: false
default: "createdAt"
- name: sortDirection
type: string
description: Sort direction (ASCENDING or DESCENDING)
required: false
default: "DESCENDING"
```
## Parameters
### Configuration Parameters
| Parameter | Type | Required | Description |
|------------------|---------|----------|-------------------------------------------------------------------------------------------------------------|
| `kind` | string | Yes | Must be `firestore-query` |
| `source` | string | Yes | Name of the Firestore source to use |
| `description` | string | Yes | Description of what this tool does |
| `collectionPath` | string | Yes | Path to the collection to query (supports templates) |
| `filters` | string | No | JSON string defining query filters (supports templates) |
| `select`         | array   | No       | Fields to select from documents (supports templates; string or array)                                        |
| `orderBy`        | object  | No       | Ordering configuration with `field` and `direction` (supports templates for the value of field or direction) |
| `limit` | integer | No | Maximum number of documents to return (default: 100) (supports templates) |
| `analyzeQuery` | boolean | No | Whether to analyze query performance (default: false) |
| `parameters` | array | Yes | Parameter definitions for template substitution |
### Runtime Parameters
Runtime parameters are defined in the `parameters` array and can be used in
templates throughout the configuration.
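The substitution uses Go template syntax. Below is a minimal, illustrative sketch of how a runtime parameter is rendered into the `filters` string before it is parsed as JSON (this mirrors the mechanics, not the tool's actual implementation):
```go
package main
import (
	"fmt"
	"log"
	"os"
	"text/template"
)
func main() {
	// The filter template exactly as it appears in the tool configuration.
	const filters = `{"field": "continent", "op": "==", "value": {"stringValue": "{{.continent}}"}}`
	// Runtime parameter values, as supplied in the invoke request body.
	params := map[string]any{"continent": "Europe"}
	tmpl, err := template.New("filters").Parse(filters)
	if err != nil {
		log.Fatal(err)
	}
	// Prints the rendered JSON filter with the parameter substituted.
	if err := tmpl.Execute(os.Stdout, params); err != nil {
		log.Fatal(err)
	}
	fmt.Println()
}
```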
## Filter Format
### Simple Filter
```json
{
"field": "age",
"op": ">",
"value": {"integerValue": "25"}
}
```
### AND Filter
```json
{
"and": [
{"field": "status", "op": "==", "value": {"stringValue": "active"}},
{"field": "age", "op": ">=", "value": {"integerValue": "18"}}
]
}
```
### OR Filter
```json
{
"or": [
{"field": "role", "op": "==", "value": {"stringValue": "admin"}},
{"field": "role", "op": "==", "value": {"stringValue": "moderator"}}
]
}
```
### Nested Filters
```json
{
"or": [
{"field": "type", "op": "==", "value": {"stringValue": "premium"}},
{
"and": [
{"field": "type", "op": "==", "value": {"stringValue": "standard"}},
{"field": "credits", "op": ">", "value": {"integerValue": "1000"}}
]
}
]
}
```
## Firestore Native Value Types
The tool supports all Firestore native JSON value types:
| Type | Format | Example |
|-----------|------------------------------------------------------|----------------------------------------------------------------|
| String | `{"stringValue": "text"}` | `{"stringValue": "{{.name}}"}` |
| Integer | `{"integerValue": "123"}` or `{"integerValue": 123}` | `{"integerValue": "{{.age}}"}` or `{"integerValue": {{.age}}}` |
| Double | `{"doubleValue": 45.67}` | `{"doubleValue": {{.price}}}` |
| Boolean | `{"booleanValue": true}` | `{"booleanValue": {{.active}}}` |
| Null | `{"nullValue": null}` | `{"nullValue": null}` |
| Timestamp | `{"timestampValue": "RFC3339"}` | `{"timestampValue": "{{.date}}"}` |
| GeoPoint | `{"geoPointValue": {"latitude": 0, "longitude": 0}}` | See below |
| Array | `{"arrayValue": {"values": [...]}}` | See below |
| Map | `{"mapValue": {"fields": {...}}}` | See below |
### Complex Type Examples
**GeoPoint:**
```json
{
"field": "location",
"op": "==",
"value": {
"geoPointValue": {
"latitude": 37.7749,
"longitude": -122.4194
}
}
}
```
**Array:**
```json
{
"field": "tags",
"op": "array-contains",
"value": {"stringValue": "{{.tag}}"}
}
```
## Supported Operators
- `<` - Less than
- `<=` - Less than or equal
- `>` - Greater than
- `>=` - Greater than or equal
- `==` - Equal
- `!=` - Not equal
- `array-contains` - Array contains value
- `array-contains-any` - Array contains any of the values
- `in` - Value is in array
- `not-in` - Value is not in array
## Examples
### Example 1: Query with Dynamic Collection Path
```yaml
tools:
user_documents:
kind: firestore-query
source: my-firestore
description: Query user-specific documents
collectionPath: "users/{{.userId}}/documents"
filters: |
{
"field": "type",
"op": "==",
"value": {"stringValue": "{{.docType}}"}
}
parameters:
- name: userId
type: string
description: User ID
required: true
- name: docType
type: string
description: Document type to filter
required: true
```
### Example 2: Complex Geographic Query
```yaml
tools:
location_search:
kind: firestore-query
source: my-firestore
description: Search locations by area and population
collectionPath: "cities"
filters: |
{
"and": [
{"field": "country", "op": "==", "value": {"stringValue": "{{.country}}"}},
{"field": "population", "op": ">", "value": {"integerValue": "{{.minPopulation}}"}},
{"field": "area", "op": "<", "value": {"doubleValue": {{.maxArea}}}}
]
}
orderBy:
field: "population"
direction: "DESCENDING"
limit: 50
parameters:
- name: country
type: string
description: Country code
required: true
- name: minPopulation
type: string
description: Minimum population (as string for large numbers)
required: true
- name: maxArea
type: float
description: Maximum area in square kilometers
required: true
```
### Example 3: Time-based Query with Analysis
```yaml
tools:
activity_log:
kind: firestore-query
source: my-firestore
description: Query activity logs within time range
collectionPath: "logs"
filters: |
{
"and": [
{"field": "timestamp", "op": ">=", "value": {"timestampValue": "{{.startTime}}"}},
{"field": "timestamp", "op": "<=", "value": {"timestampValue": "{{.endTime}}"}},
{"field": "severity", "op": "in", "value": {"arrayValue": {"values": [
{"stringValue": "ERROR"},
{"stringValue": "CRITICAL"}
]}}}
]
}
select:
- timestamp
- message
- severity
- userId
orderBy:
field: "timestamp"
direction: "DESCENDING"
analyzeQuery: true
parameters:
- name: startTime
type: string
description: Start time in RFC3339 format
required: true
- name: endTime
type: string
description: End time in RFC3339 format
required: true
```
## Usage
### Invoking the Tool
```bash
# Using curl
curl -X POST http://localhost:5000/api/tool/your-tool-name/invoke \
-H "Content-Type: application/json" \
-d '{
"continent": "Europe",
"minPopulation": "1000000",
"maxArea": 500000.5,
"isActive": true
}'
```
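The same invocation from Go, for callers embedding the request in a test or service (a sketch; the tool name and parameter values are placeholders, as in the curl example above):
```go
package main
import (
	"bytes"
	"fmt"
	"io"
	"log"
	"net/http"
)
func main() {
	payload := []byte(`{"continent": "Europe", "minPopulation": "1000000", "maxArea": 500000.5, "isActive": true}`)
	req, err := http.NewRequest(http.MethodPost, "http://localhost:5000/api/tool/your-tool-name/invoke", bytes.NewBuffer(payload))
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	respBody, _ := io.ReadAll(resp.Body)
	fmt.Println(string(respBody))
}
```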
### Response Format
**Without analyzeQuery:**
```json
[
{
"id": "doc1",
"path": "countries/doc1",
"data": {
"name": "France",
"continent": "Europe",
"population": 67000000,
"area": 551695
},
"createTime": "2024-01-01T00:00:00Z",
"updateTime": "2024-01-15T10:30:00Z"
}
]
```
**With analyzeQuery:**
```json
{
"documents": [...],
"explainMetrics": {
"planSummary": {
"indexesUsed": [...]
},
"executionStats": {
"resultsReturned": 10,
"executionDuration": "15ms",
"readOperations": 10
}
}
}
```
## Best Practices
1. **Use Typed Values**: Always use Firestore's native JSON value types for
proper type handling
2. **String Numbers for Large Integers**: Use string representation for large
integers to avoid precision loss
3. **Template Security**: Validate all template parameters to prevent injection
attacks
4. **Index Optimization**: Use `analyzeQuery` to identify missing indexes
5. **Limit Results**: Always set a reasonable `limit` to prevent excessive data
retrieval
6. **Field Selection**: Use `select` to retrieve only necessary fields
## Technical Notes
- Queries operate on a single collection (the standard Firestore pattern)
- Maximum of 100 filters per query (configurable)
- Template parameters must be properly escaped in JSON contexts (see the sketch below)
- Complex nested queries may require composite indexes
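One way a caller might pre-escape a string before substituting it into a JSON filter template; `jsonEscape` is a hypothetical helper sketched here for illustration, not part of the tool:
```go
package main
import (
	"encoding/json"
	"fmt"
	"log"
)
// jsonEscape (hypothetical helper) returns s encoded as a JSON string
// with the surrounding quotes stripped, safe to splice into a
// {"stringValue": "..."} slot in a filter template.
func jsonEscape(s string) (string, error) {
	b, err := json.Marshal(s)
	if err != nil {
		return "", err
	}
	return string(b[1 : len(b)-1]), nil
}
func main() {
	escaped, err := jsonEscape(`He said "hi" \ bye`)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(escaped) // He said \"hi\" \\ bye
}
```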
## See Also
- [firestore-query-collection](firestore-query-collection.md) -
Non-parameterizable query tool
- [Firestore Source Configuration](../../sources/firestore.md)
- [Firestore Query
Documentation](https://firebase.google.com/docs/firestore/query-data/queries)
```
--------------------------------------------------------------------------------
/tests/alloydbainl/alloydb_ai_nl_integration_test.go:
--------------------------------------------------------------------------------
```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package alloydbainl
import (
"bytes"
"context"
"encoding/json"
"io"
"net/http"
"os"
"reflect"
"regexp"
"strings"
"testing"
"time"
"github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc"
"github.com/googleapis/genai-toolbox/internal/testutils"
"github.com/googleapis/genai-toolbox/tests"
)
var (
AlloyDBAINLSourceKind = "alloydb-postgres"
AlloyDBAINLToolKind = "alloydb-ai-nl"
AlloyDBAINLProject = os.Getenv("ALLOYDB_AI_NL_PROJECT")
AlloyDBAINLRegion = os.Getenv("ALLOYDB_AI_NL_REGION")
AlloyDBAINLCluster = os.Getenv("ALLOYDB_AI_NL_CLUSTER")
AlloyDBAINLInstance = os.Getenv("ALLOYDB_AI_NL_INSTANCE")
AlloyDBAINLDatabase = os.Getenv("ALLOYDB_AI_NL_DATABASE")
AlloyDBAINLUser = os.Getenv("ALLOYDB_AI_NL_USER")
AlloyDBAINLPass = os.Getenv("ALLOYDB_AI_NL_PASS")
)
func getAlloyDBAINLVars(t *testing.T) map[string]any {
switch "" {
case AlloyDBAINLProject:
t.Fatal("'ALLOYDB_AI_NL_PROJECT' not set")
case AlloyDBAINLRegion:
t.Fatal("'ALLOYDB_AI_NL_REGION' not set")
case AlloyDBAINLCluster:
t.Fatal("'ALLOYDB_AI_NL_CLUSTER' not set")
case AlloyDBAINLInstance:
t.Fatal("'ALLOYDB_AI_NL_INSTANCE' not set")
case AlloyDBAINLDatabase:
t.Fatal("'ALLOYDB_AI_NL_DATABASE' not set")
case AlloyDBAINLUser:
t.Fatal("'ALLOYDB_AI_NL_USER' not set")
case AlloyDBAINLPass:
t.Fatal("'ALLOYDB_AI_NL_PASS' not set")
}
return map[string]any{
"kind": AlloyDBAINLSourceKind,
"project": AlloyDBAINLProject,
"cluster": AlloyDBAINLCluster,
"instance": AlloyDBAINLInstance,
"region": AlloyDBAINLRegion,
"database": AlloyDBAINLDatabase,
"user": AlloyDBAINLUser,
"password": AlloyDBAINLPass,
}
}
func TestAlloyDBAINLToolEndpoints(t *testing.T) {
sourceConfig := getAlloyDBAINLVars(t)
ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
defer cancel()
var args []string
// Write config into a file and pass it to command
toolsFile := getAINLToolsConfig(sourceConfig)
cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
if err != nil {
t.Fatalf("command initialization returned an error: %s", err)
}
defer cleanup()
waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
defer cancel()
out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
if err != nil {
t.Logf("toolbox command logs: \n%s", out)
t.Fatalf("toolbox didn't start successfully: %s", err)
}
runAINLToolGetTest(t)
runAINLToolInvokeTest(t)
runAINLMCPToolCallMethod(t)
}
func runAINLToolGetTest(t *testing.T) {
// Test tool get endpoint
tcs := []struct {
name string
api string
want map[string]any
}{
{
name: "get my-simple-tool",
api: "http://127.0.0.1:5000/api/tool/my-simple-tool/",
want: map[string]any{
"my-simple-tool": map[string]any{
"description": "Simple tool to test end to end functionality.",
"parameters": []any{
map[string]any{
"name": "question",
"type": "string",
"required": true,
"description": "The natural language question to ask.",
"authSources": []any{},
},
},
"authRequired": []any{},
},
},
},
}
for _, tc := range tcs {
t.Run(tc.name, func(t *testing.T) {
resp, err := http.Get(tc.api)
if err != nil {
t.Fatalf("error when sending a request: %s", err)
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
t.Fatalf("response status code is not 200")
}
var body map[string]interface{}
err = json.NewDecoder(resp.Body).Decode(&body)
if err != nil {
t.Fatalf("error parsing response body")
}
got, ok := body["tools"]
if !ok {
t.Fatalf("unable to find tools in response body")
}
if !reflect.DeepEqual(got, tc.want) {
t.Fatalf("got %q, want %q", got, tc.want)
}
})
}
}
func runAINLToolInvokeTest(t *testing.T) {
// Get ID token
idToken, err := tests.GetGoogleIdToken(tests.ClientId)
if err != nil {
t.Fatalf("error getting Google ID token: %s", err)
}
// Test tool invoke endpoint
invokeTcs := []struct {
name string
api string
requestHeader map[string]string
requestBody io.Reader
want string
isErr bool
}{
{
name: "invoke my-simple-tool",
api: "http://127.0.0.1:5000/api/tool/my-simple-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(`{"question": "return the number 1"}`)),
want: "[{\"execute_nl_query\":{\"?column?\":1}}]",
isErr: false,
},
{
name: "Invoke my-tool without parameters",
api: "http://127.0.0.1:5000/api/tool/my-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(`{}`)),
isErr: true,
},
{
name: "Invoke my-auth-tool with auth token",
api: "http://127.0.0.1:5000/api/tool/my-auth-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": idToken},
requestBody: bytes.NewBuffer([]byte(`{"question": "can you show me the name of this user?"}`)),
want: "[{\"execute_nl_query\":{\"name\":\"Alice\"}}]",
isErr: false,
},
{
name: "Invoke my-auth-tool with invalid auth token",
api: "http://127.0.0.1:5000/api/tool/my-auth-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"},
requestBody: bytes.NewBuffer([]byte(`{"question": "return the number 1"}`)),
isErr: true,
},
{
name: "Invoke my-auth-tool without auth token",
api: "http://127.0.0.1:5000/api/tool/my-auth-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(`{"question": "return the number 1"}`)),
isErr: true,
},
{
name: "Invoke my-auth-required-tool with auth token",
api: "http://127.0.0.1:5000/api/tool/my-auth-required-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": idToken},
requestBody: bytes.NewBuffer([]byte(`{"question": "return the number 1"}`)),
isErr: false,
want: "[{\"execute_nl_query\":{\"?column?\":1}}]",
},
{
name: "Invoke my-auth-required-tool with invalid auth token",
api: "http://127.0.0.1:5000/api/tool/my-auth-required-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"},
requestBody: bytes.NewBuffer([]byte(`{"question": "return the number 1"}`)),
isErr: true,
},
{
name: "Invoke my-auth-required-tool without auth token",
api:           "http://127.0.0.1:5000/api/tool/my-auth-required-tool/invoke",
requestHeader: map[string]string{},
requestBody: bytes.NewBuffer([]byte(`{"question": "return the number 1"}`)),
isErr: true,
},
}
for _, tc := range invokeTcs {
t.Run(tc.name, func(t *testing.T) {
// Send Tool invocation request
req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
if err != nil {
t.Fatalf("unable to create request: %s", err)
}
req.Header.Add("Content-type", "application/json")
for k, v := range tc.requestHeader {
req.Header.Add(k, v)
}
resp, err := http.DefaultClient.Do(req)
if err != nil {
t.Fatalf("unable to send request: %s", err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
if tc.isErr == true {
return
}
bodyBytes, _ := io.ReadAll(resp.Body)
t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
}
// Check response body
var body map[string]interface{}
err = json.NewDecoder(resp.Body).Decode(&body)
if err != nil {
t.Fatalf("error parsing response body")
}
got, ok := body["result"].(string)
if !ok {
t.Fatalf("unable to find result in response body")
}
if got != tc.want {
t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
}
})
}
}
func getAINLToolsConfig(sourceConfig map[string]any) map[string]any {
// Write config into a file and pass it to command
toolsFile := map[string]any{
"sources": map[string]any{
"my-instance": sourceConfig,
},
"authServices": map[string]any{
"my-google-auth": map[string]any{
"kind": "google",
"clientId": tests.ClientId,
},
},
"tools": map[string]any{
"my-simple-tool": map[string]any{
"kind": AlloyDBAINLToolKind,
"source": "my-instance",
"description": "Simple tool to test end to end functionality.",
"nlConfig": "my_nl_config",
},
"my-auth-tool": map[string]any{
"kind": AlloyDBAINLToolKind,
"source": "my-instance",
"description": "Tool to test authenticated parameters.",
"nlConfig": "my_nl_config",
"nlConfigParameters": []map[string]any{
{
"name": "email",
"type": "string",
"description": "user email",
"authServices": []map[string]string{
{
"name": "my-google-auth",
"field": "email",
},
},
},
},
},
"my-auth-required-tool": map[string]any{
"kind": AlloyDBAINLToolKind,
"source": "my-instance",
"description": "Tool to test auth required invocation.",
"nlConfig": "my_nl_config",
"authRequired": []string{
"my-google-auth",
},
},
},
}
return toolsFile
}
func runAINLMCPToolCallMethod(t *testing.T) {
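// "2024-11-05" is the MCP protocol version string sent during the
// initialize handshake; the returned session ID, if any, must accompany
// subsequent requests in the Mcp-Session-Id header.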
sessionId := tests.RunInitialize(t, "2024-11-05")
header := map[string]string{}
if sessionId != "" {
header["Mcp-Session-Id"] = sessionId
}
// Test tool invoke endpoint
invokeTcs := []struct {
name string
api string
requestBody jsonrpc.JSONRPCRequest
requestHeader map[string]string
want string
}{
{
name: "MCP Invoke my-simple-tool",
api: "http://127.0.0.1:5000/mcp",
requestHeader: map[string]string{},
requestBody: jsonrpc.JSONRPCRequest{
Jsonrpc: "2.0",
Id: "my-simple-tool",
Request: jsonrpc.Request{
Method: "tools/call",
},
Params: map[string]any{
"name": "my-simple-tool",
"arguments": map[string]any{
"question": "return the number 1",
},
},
},
want: `{"jsonrpc":"2.0","id":"my-simple-tool","result":{"content":[{"type":"text","text":"{\"execute_nl_query\":{\"?column?\":1}}"}]}}`,
},
{
name: "MCP Invoke invalid tool",
api: "http://127.0.0.1:5000/mcp",
requestHeader: map[string]string{},
requestBody: jsonrpc.JSONRPCRequest{
Jsonrpc: "2.0",
Id: "invalid-tool",
Request: jsonrpc.Request{
Method: "tools/call",
},
Params: map[string]any{
"name": "foo",
"arguments": map[string]any{},
},
},
want: `{"jsonrpc":"2.0","id":"invalid-tool","error":{"code":-32602,"message":"invalid tool name: tool with name \"foo\" does not exist"}}`,
},
{
name: "MCP Invoke my-auth-tool without parameters",
api: "http://127.0.0.1:5000/mcp",
requestHeader: map[string]string{},
requestBody: jsonrpc.JSONRPCRequest{
Jsonrpc: "2.0",
Id: "invoke-without-parameter",
Request: jsonrpc.Request{
Method: "tools/call",
},
Params: map[string]any{
"name": "my-auth-tool",
"arguments": map[string]any{},
},
},
want: `{"jsonrpc":"2.0","id":"invoke-without-parameter","error":{"code":-32602,"message":"provided parameters were invalid: parameter question is required"}}`,
},
}
for _, tc := range invokeTcs {
t.Run(tc.name, func(t *testing.T) {
reqMarshal, err := json.Marshal(tc.requestBody)
if err != nil {
t.Fatalf("unexpected error during marshaling of request body")
}
// Send Tool invocation request
req, err := http.NewRequest(http.MethodPost, tc.api, bytes.NewBuffer(reqMarshal))
if err != nil {
t.Fatalf("unable to create request: %s", err)
}
req.Header.Add("Content-type", "application/json")
for k, v := range header {
req.Header.Add(k, v)
}
resp, err := http.DefaultClient.Do(req)
if err != nil {
t.Fatalf("unable to send request: %s", err)
}
respBody, err := io.ReadAll(resp.Body)
if err != nil {
t.Fatalf("unable to read request body: %s", err)
}
defer resp.Body.Close()
got := string(bytes.TrimSpace(respBody))
// Remove `\` and `"` for string comparison
got = strings.ReplaceAll(got, "\\", "")
want := strings.ReplaceAll(tc.want, "\\", "")
got = strings.ReplaceAll(got, "\"", "")
want = strings.ReplaceAll(want, "\"", "")
if !strings.Contains(got, want) {
t.Fatalf("Expected substring not found:\ngot: %q\nwant: %q (to be contained within got)", got, want)
}
})
}
}
```