# googleapis/genai-toolbox
This is page 56 of 76. Use http://codebase.md/googleapis/genai-toolbox?lines=true&page={x} to view the full context.

# Directory Structure

```
├── .ci
│   ├── continuous.release.cloudbuild.yaml
│   ├── generate_release_table.sh
│   ├── integration.cloudbuild.yaml
│   ├── quickstart_test
│   │   ├── go.integration.cloudbuild.yaml
│   │   ├── js.integration.cloudbuild.yaml
│   │   ├── py.integration.cloudbuild.yaml
│   │   ├── run_go_tests.sh
│   │   ├── run_js_tests.sh
│   │   ├── run_py_tests.sh
│   │   └── setup_hotels_sample.sql
│   ├── test_prompts_with_coverage.sh
│   ├── test_with_coverage.sh
│   └── versioned.release.cloudbuild.yaml
├── .gemini
│   └── config.yaml
├── .github
│   ├── auto-label.yaml
│   ├── blunderbuss.yml
│   ├── CODEOWNERS
│   ├── header-checker-lint.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.yml
│   │   ├── config.yml
│   │   ├── feature_request.yml
│   │   └── question.yml
│   ├── label-sync.yml
│   ├── labels.yaml
│   ├── PULL_REQUEST_TEMPLATE.md
│   ├── release-please.yml
│   ├── renovate.json5
│   ├── sync-repo-settings.yaml
│   ├── trusted-contribution.yml
│   └── workflows
│       ├── cloud_build_failure_reporter.yml
│       ├── deploy_dev_docs.yaml
│       ├── deploy_previous_version_docs.yaml
│       ├── deploy_versioned_docs.yaml
│       ├── docs_preview_clean.yaml
│       ├── docs_preview_deploy.yaml
│       ├── link_checker_workflow.yaml
│       ├── lint.yaml
│       ├── publish-mcp.yml
│       ├── schedule_reporter.yml
│       ├── sync-labels.yaml
│       └── tests.yaml
├── .gitignore
├── .gitmodules
├── .golangci.yaml
├── .hugo
│   ├── archetypes
│   │   └── default.md
│   ├── assets
│   │   ├── icons
│   │   │   └── logo.svg
│   │   └── scss
│   │       ├── _styles_project.scss
│   │       └── _variables_project.scss
│   ├── go.mod
│   ├── go.sum
│   ├── hugo.toml
│   ├── layouts
│   │   ├── _default
│   │   │   └── home.releases.releases
│   │   ├── index.llms-full.txt
│   │   ├── index.llms.txt
│   │   ├── partials
│   │   │   ├── hooks
│   │   │   │   └── head-end.html
│   │   │   ├── navbar-version-selector.html
│   │   │   ├── page-meta-links.html
│   │   │   └── td
│   │   │       └── render-heading.html
│   │   ├── robot.txt
│   │   └── shortcodes
│   │       ├── include.html
│   │       ├── ipynb.html
│   │       └── regionInclude.html
│   ├── package-lock.json
│   ├── package.json
│   └── static
│       ├── favicons
│       │   ├── android-chrome-192x192.png
│       │   ├── android-chrome-512x512.png
│       │   ├── apple-touch-icon.png
│       │   ├── favicon-16x16.png
│       │   ├── favicon-32x32.png
│       │   └── favicon.ico
│       └── js
│           └── w3.js
├── .lycheeignore
├── CHANGELOG.md
├── cmd
│   ├── options_test.go
│   ├── options.go
│   ├── root_test.go
│   ├── root.go
│   └── version.txt
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── DEVELOPER.md
├── Dockerfile
├── docs
│   ├── ALLOYDBADMIN_README.md
│   ├── ALLOYDBPG_README.md
│   ├── BIGQUERY_README.md
│   ├── CLOUDSQLMSSQL_README.md
│   ├── CLOUDSQLMSSQLADMIN_README.md
│   ├── CLOUDSQLMYSQL_README.md
│   ├── CLOUDSQLMYSQLADMIN_README.md
│   ├── CLOUDSQLPG_README.md
│   ├── CLOUDSQLPGADMIN_README.md
│   ├── DATAPLEX_README.md
│   ├── en
│   │   ├── _index.md
│   │   ├── about
│   │   │   ├── _index.md
│   │   │   └── faq.md
│   │   ├── blogs
│   │   │   └── _index.md
│   │   ├── concepts
│   │   │   ├── _index.md
│   │   │   └── telemetry
│   │   │       ├── index.md
│   │   │       ├── telemetry_flow.png
│   │   │       └── telemetry_traces.png
│   │   ├── getting-started
│   │   │   ├── _index.md
│   │   │   ├── colab_quickstart.ipynb
│   │   │   ├── configure.md
│   │   │   ├── introduction
│   │   │   │   ├── _index.md
│   │   │   │   └── architecture.png
│   │   │   ├── local_quickstart_go.md
│   │   │   ├── local_quickstart_js.md
│   │   │   ├── local_quickstart.md
│   │   │   ├── mcp_quickstart
│   │   │   │   ├── _index.md
│   │   │   │   ├── inspector_tools.png
│   │   │   │   └── inspector.png
│   │   │   ├── prompts_quickstart_gemini_cli.md
│   │   │   └── quickstart
│   │   │       ├── go
│   │   │       │   ├── adkgo
│   │   │       │   │   ├── go.mod
│   │   │       │   │   ├── go.sum
│   │   │       │   │   └── quickstart.go
│   │   │       │   ├── genAI
│   │   │       │   │   ├── go.mod
│   │   │       │   │   ├── go.sum
│   │   │       │   │   └── quickstart.go
│   │   │       │   ├── genkit
│   │   │       │   │   ├── go.mod
│   │   │       │   │   ├── go.sum
│   │   │       │   │   └── quickstart.go
│   │   │       │   ├── langchain
│   │   │       │   │   ├── go.mod
│   │   │       │   │   ├── go.sum
│   │   │       │   │   └── quickstart.go
│   │   │       │   ├── openAI
│   │   │       │   │   ├── go.mod
│   │   │       │   │   ├── go.sum
│   │   │       │   │   └── quickstart.go
│   │   │       │   └── quickstart_test.go
│   │   │       ├── golden.txt
│   │   │       ├── js
│   │   │       │   ├── adk
│   │   │       │   │   ├── package-lock.json
│   │   │       │   │   ├── package.json
│   │   │       │   │   └── quickstart.js
│   │   │       │   ├── genAI
│   │   │       │   │   ├── package-lock.json
│   │   │       │   │   ├── package.json
│   │   │       │   │   └── quickstart.js
│   │   │       │   ├── genkit
│   │   │       │   │   ├── package-lock.json
│   │   │       │   │   ├── package.json
│   │   │       │   │   └── quickstart.js
│   │   │       │   ├── langchain
│   │   │       │   │   ├── package-lock.json
│   │   │       │   │   ├── package.json
│   │   │       │   │   └── quickstart.js
│   │   │       │   ├── llamaindex
│   │   │       │   │   ├── package-lock.json
│   │   │       │   │   ├── package.json
│   │   │       │   │   └── quickstart.js
│   │   │       │   └── quickstart.test.js
│   │   │       ├── python
│   │   │       │   ├── __init__.py
│   │   │       │   ├── adk
│   │   │       │   │   ├── quickstart.py
│   │   │       │   │   └── requirements.txt
│   │   │       │   ├── core
│   │   │       │   │   ├── quickstart.py
│   │   │       │   │   └── requirements.txt
│   │   │       │   ├── langchain
│   │   │       │   │   ├── quickstart.py
│   │   │       │   │   └── requirements.txt
│   │   │       │   ├── llamaindex
│   │   │       │   │   ├── quickstart.py
│   │   │       │   │   └── requirements.txt
│   │   │       │   └── quickstart_test.py
│   │   │       └── shared
│   │   │           ├── cloud_setup.md
│   │   │           ├── configure_toolbox.md
│   │   │           └── database_setup.md
│   │   ├── how-to
│   │   │   ├── _index.md
│   │   │   ├── connect_via_geminicli.md
│   │   │   ├── connect_via_mcp.md
│   │   │   ├── connect-ide
│   │   │   │   ├── _index.md
│   │   │   │   ├── alloydb_pg_admin_mcp.md
│   │   │   │   ├── alloydb_pg_mcp.md
│   │   │   │   ├── bigquery_mcp.md
│   │   │   │   ├── cloud_sql_mssql_admin_mcp.md
│   │   │   │   ├── cloud_sql_mssql_mcp.md
│   │   │   │   ├── cloud_sql_mysql_admin_mcp.md
│   │   │   │   ├── cloud_sql_mysql_mcp.md
│   │   │   │   ├── cloud_sql_pg_admin_mcp.md
│   │   │   │   ├── cloud_sql_pg_mcp.md
│   │   │   │   ├── firestore_mcp.md
│   │   │   │   ├── looker_mcp.md
│   │   │   │   ├── mssql_mcp.md
│   │   │   │   ├── mysql_mcp.md
│   │   │   │   ├── neo4j_mcp.md
│   │   │   │   ├── postgres_mcp.md
│   │   │   │   ├── spanner_mcp.md
│   │   │   │   └── sqlite_mcp.md
│   │   │   ├── deploy_adk_agent.md
│   │   │   ├── deploy_docker.md
│   │   │   ├── deploy_gke.md
│   │   │   ├── deploy_toolbox.md
│   │   │   ├── export_telemetry.md
│   │   │   └── toolbox-ui
│   │   │       ├── edit-headers.gif
│   │   │       ├── edit-headers.png
│   │   │       ├── index.md
│   │   │       ├── optional-param-checked.png
│   │   │       ├── optional-param-unchecked.png
│   │   │       ├── run-tool.gif
│   │   │       ├── tools.png
│   │   │       └── toolsets.png
│   │   ├── reference
│   │   │   ├── _index.md
│   │   │   ├── cli.md
│   │   │   └── prebuilt-tools.md
│   │   ├── resources
│   │   │   ├── _index.md
│   │   │   ├── authServices
│   │   │   │   ├── _index.md
│   │   │   │   └── google.md
│   │   │   ├── embeddingModels
│   │   │   │   ├── _index.md
│   │   │   │   └── gemini.md
│   │   │   ├── prompts
│   │   │   │   ├── _index.md
│   │   │   │   └── custom
│   │   │   │       └── _index.md
│   │   │   ├── sources
│   │   │   │   ├── _index.md
│   │   │   │   ├── alloydb-admin.md
│   │   │   │   ├── alloydb-pg.md
│   │   │   │   ├── bigquery.md
│   │   │   │   ├── bigtable.md
│   │   │   │   ├── cassandra.md
│   │   │   │   ├── clickhouse.md
│   │   │   │   ├── cloud-gda.md
│   │   │   │   ├── cloud-healthcare.md
│   │   │   │   ├── cloud-monitoring.md
│   │   │   │   ├── cloud-sql-admin.md
│   │   │   │   ├── cloud-sql-mssql.md
│   │   │   │   ├── cloud-sql-mysql.md
│   │   │   │   ├── cloud-sql-pg.md
│   │   │   │   ├── couchbase.md
│   │   │   │   ├── dataplex.md
│   │   │   │   ├── dgraph.md
│   │   │   │   ├── elasticsearch.md
│   │   │   │   ├── firebird.md
│   │   │   │   ├── firestore.md
│   │   │   │   ├── http.md
│   │   │   │   ├── looker.md
│   │   │   │   ├── mariadb.md
│   │   │   │   ├── mindsdb.md
│   │   │   │   ├── mongodb.md
│   │   │   │   ├── mssql.md
│   │   │   │   ├── mysql.md
│   │   │   │   ├── neo4j.md
│   │   │   │   ├── oceanbase.md
│   │   │   │   ├── oracle.md
│   │   │   │   ├── postgres.md
│   │   │   │   ├── redis.md
│   │   │   │   ├── serverless-spark.md
│   │   │   │   ├── singlestore.md
│   │   │   │   ├── snowflake.md
│   │   │   │   ├── spanner.md
│   │   │   │   ├── sqlite.md
│   │   │   │   ├── tidb.md
│   │   │   │   ├── trino.md
│   │   │   │   ├── valkey.md
│   │   │   │   └── yugabytedb.md
│   │   │   └── tools
│   │   │       ├── _index.md
│   │   │       ├── alloydb
│   │   │       │   ├── _index.md
│   │   │       │   ├── alloydb-create-cluster.md
│   │   │       │   ├── alloydb-create-instance.md
│   │   │       │   ├── alloydb-create-user.md
│   │   │       │   ├── alloydb-get-cluster.md
│   │   │       │   ├── alloydb-get-instance.md
│   │   │       │   ├── alloydb-get-user.md
│   │   │       │   ├── alloydb-list-clusters.md
│   │   │       │   ├── alloydb-list-instances.md
│   │   │       │   ├── alloydb-list-users.md
│   │   │       │   └── alloydb-wait-for-operation.md
│   │   │       ├── alloydbainl
│   │   │       │   ├── _index.md
│   │   │       │   └── alloydb-ai-nl.md
│   │   │       ├── bigquery
│   │   │       │   ├── _index.md
│   │   │       │   ├── bigquery-analyze-contribution.md
│   │   │       │   ├── bigquery-conversational-analytics.md
│   │   │       │   ├── bigquery-execute-sql.md
│   │   │       │   ├── bigquery-forecast.md
│   │   │       │   ├── bigquery-get-dataset-info.md
│   │   │       │   ├── bigquery-get-table-info.md
│   │   │       │   ├── bigquery-list-dataset-ids.md
│   │   │       │   ├── bigquery-list-table-ids.md
│   │   │       │   ├── bigquery-search-catalog.md
│   │   │       │   └── bigquery-sql.md
│   │   │       ├── bigtable
│   │   │       │   ├── _index.md
│   │   │       │   └── bigtable-sql.md
│   │   │       ├── cassandra
│   │   │       │   ├── _index.md
│   │   │       │   └── cassandra-cql.md
│   │   │       ├── clickhouse
│   │   │       │   ├── _index.md
│   │   │       │   ├── clickhouse-execute-sql.md
│   │   │       │   ├── clickhouse-list-databases.md
│   │   │       │   ├── clickhouse-list-tables.md
│   │   │       │   └── clickhouse-sql.md
│   │   │       ├── cloudgda
│   │   │       │   ├── _index.md
│   │   │       │   └── cloud-gda-query.md
│   │   │       ├── cloudhealthcare
│   │   │       │   ├── _index.md
│   │   │       │   ├── cloud-healthcare-fhir-fetch-page.md
│   │   │       │   ├── cloud-healthcare-fhir-patient-everything.md
│   │   │       │   ├── cloud-healthcare-fhir-patient-search.md
│   │   │       │   ├── cloud-healthcare-get-dataset.md
│   │   │       │   ├── cloud-healthcare-get-dicom-store-metrics.md
│   │   │       │   ├── cloud-healthcare-get-dicom-store.md
│   │   │       │   ├── cloud-healthcare-get-fhir-resource.md
│   │   │       │   ├── cloud-healthcare-get-fhir-store-metrics.md
│   │   │       │   ├── cloud-healthcare-get-fhir-store.md
│   │   │       │   ├── cloud-healthcare-list-dicom-stores.md
│   │   │       │   ├── cloud-healthcare-list-fhir-stores.md
│   │   │       │   ├── cloud-healthcare-retrieve-rendered-dicom-instance.md
│   │   │       │   ├── cloud-healthcare-search-dicom-instances.md
│   │   │       │   ├── cloud-healthcare-search-dicom-series.md
│   │   │       │   └── cloud-healthcare-search-dicom-studies.md
│   │   │       ├── cloudmonitoring
│   │   │       │   ├── _index.md
│   │   │       │   └── cloud-monitoring-query-prometheus.md
│   │   │       ├── cloudsql
│   │   │       │   ├── _index.md
│   │   │       │   ├── cloudsqlcloneinstance.md
│   │   │       │   ├── cloudsqlcreatedatabase.md
│   │   │       │   ├── cloudsqlcreateusers.md
│   │   │       │   ├── cloudsqlgetinstances.md
│   │   │       │   ├── cloudsqllistdatabases.md
│   │   │       │   ├── cloudsqllistinstances.md
│   │   │       │   ├── cloudsqlmssqlcreateinstance.md
│   │   │       │   ├── cloudsqlmysqlcreateinstance.md
│   │   │       │   ├── cloudsqlpgcreateinstances.md
│   │   │       │   ├── cloudsqlpgupgradeprecheck.md
│   │   │       │   └── cloudsqlwaitforoperation.md
│   │   │       ├── couchbase
│   │   │       │   ├── _index.md
│   │   │       │   └── couchbase-sql.md
│   │   │       ├── dataform
│   │   │       │   ├── _index.md
│   │   │       │   └── dataform-compile-local.md
│   │   │       ├── dataplex
│   │   │       │   ├── _index.md
│   │   │       │   ├── dataplex-lookup-entry.md
│   │   │       │   ├── dataplex-search-aspect-types.md
│   │   │       │   └── dataplex-search-entries.md
│   │   │       ├── dgraph
│   │   │       │   ├── _index.md
│   │   │       │   └── dgraph-dql.md
│   │   │       ├── elasticsearch
│   │   │       │   ├── _index.md
│   │   │       │   └── elasticsearch-esql.md
│   │   │       ├── firebird
│   │   │       │   ├── _index.md
│   │   │       │   ├── firebird-execute-sql.md
│   │   │       │   └── firebird-sql.md
│   │   │       ├── firestore
│   │   │       │   ├── _index.md
│   │   │       │   ├── firestore-add-documents.md
│   │   │       │   ├── firestore-delete-documents.md
│   │   │       │   ├── firestore-get-documents.md
│   │   │       │   ├── firestore-get-rules.md
│   │   │       │   ├── firestore-list-collections.md
│   │   │       │   ├── firestore-query-collection.md
│   │   │       │   ├── firestore-query.md
│   │   │       │   ├── firestore-update-document.md
│   │   │       │   └── firestore-validate-rules.md
│   │   │       ├── http
│   │   │       │   ├── _index.md
│   │   │       │   └── http.md
│   │   │       ├── looker
│   │   │       │   ├── _index.md
│   │   │       │   ├── looker-add-dashboard-element.md
│   │   │       │   ├── looker-add-dashboard-filter.md
│   │   │       │   ├── looker-conversational-analytics.md
│   │   │       │   ├── looker-create-project-file.md
│   │   │       │   ├── looker-delete-project-file.md
│   │   │       │   ├── looker-dev-mode.md
│   │   │       │   ├── looker-generate-embed-url.md
│   │   │       │   ├── looker-get-connection-databases.md
│   │   │       │   ├── looker-get-connection-schemas.md
│   │   │       │   ├── looker-get-connection-table-columns.md
│   │   │       │   ├── looker-get-connection-tables.md
│   │   │       │   ├── looker-get-connections.md
│   │   │       │   ├── looker-get-dashboards.md
│   │   │       │   ├── looker-get-dimensions.md
│   │   │       │   ├── looker-get-explores.md
│   │   │       │   ├── looker-get-filters.md
│   │   │       │   ├── looker-get-looks.md
│   │   │       │   ├── looker-get-measures.md
│   │   │       │   ├── looker-get-models.md
│   │   │       │   ├── looker-get-parameters.md
│   │   │       │   ├── looker-get-project-file.md
│   │   │       │   ├── looker-get-project-files.md
│   │   │       │   ├── looker-get-projects.md
│   │   │       │   ├── looker-health-analyze.md
│   │   │       │   ├── looker-health-pulse.md
│   │   │       │   ├── looker-health-vacuum.md
│   │   │       │   ├── looker-make-dashboard.md
│   │   │       │   ├── looker-make-look.md
│   │   │       │   ├── looker-query-sql.md
│   │   │       │   ├── looker-query-url.md
│   │   │       │   ├── looker-query.md
│   │   │       │   ├── looker-run-dashboard.md
│   │   │       │   ├── looker-run-look.md
│   │   │       │   └── looker-update-project-file.md
│   │   │       ├── mindsdb
│   │   │       │   ├── _index.md
│   │   │       │   ├── mindsdb-execute-sql.md
│   │   │       │   └── mindsdb-sql.md
│   │   │       ├── mongodb
│   │   │       │   ├── _index.md
│   │   │       │   ├── mongodb-aggregate.md
│   │   │       │   ├── mongodb-delete-many.md
│   │   │       │   ├── mongodb-delete-one.md
│   │   │       │   ├── mongodb-find-one.md
│   │   │       │   ├── mongodb-find.md
│   │   │       │   ├── mongodb-insert-many.md
│   │   │       │   ├── mongodb-insert-one.md
│   │   │       │   ├── mongodb-update-many.md
│   │   │       │   └── mongodb-update-one.md
│   │   │       ├── mssql
│   │   │       │   ├── _index.md
│   │   │       │   ├── mssql-execute-sql.md
│   │   │       │   ├── mssql-list-tables.md
│   │   │       │   └── mssql-sql.md
│   │   │       ├── mysql
│   │   │       │   ├── _index.md
│   │   │       │   ├── mysql-execute-sql.md
│   │   │       │   ├── mysql-get-query-plan.md
│   │   │       │   ├── mysql-list-active-queries.md
│   │   │       │   ├── mysql-list-table-fragmentation.md
│   │   │       │   ├── mysql-list-tables-missing-unique-indexes.md
│   │   │       │   ├── mysql-list-tables.md
│   │   │       │   └── mysql-sql.md
│   │   │       ├── neo4j
│   │   │       │   ├── _index.md
│   │   │       │   ├── neo4j-cypher.md
│   │   │       │   ├── neo4j-execute-cypher.md
│   │   │       │   └── neo4j-schema.md
│   │   │       ├── oceanbase
│   │   │       │   ├── _index.md
│   │   │       │   ├── oceanbase-execute-sql.md
│   │   │       │   └── oceanbase-sql.md
│   │   │       ├── oracle
│   │   │       │   ├── _index.md
│   │   │       │   ├── oracle-execute-sql.md
│   │   │       │   └── oracle-sql.md
│   │   │       ├── postgres
│   │   │       │   ├── _index.md
│   │   │       │   ├── postgres-database-overview.md
│   │   │       │   ├── postgres-execute-sql.md
│   │   │       │   ├── postgres-get-column-cardinality.md
│   │   │       │   ├── postgres-list-active-queries.md
│   │   │       │   ├── postgres-list-available-extensions.md
│   │   │       │   ├── postgres-list-database-stats.md
│   │   │       │   ├── postgres-list-indexes.md
│   │   │       │   ├── postgres-list-installed-extensions.md
│   │   │       │   ├── postgres-list-locks.md
│   │   │       │   ├── postgres-list-pg-settings.md
│   │   │       │   ├── postgres-list-publication-tables.md
│   │   │       │   ├── postgres-list-query-stats.md
│   │   │       │   ├── postgres-list-roles.md
│   │   │       │   ├── postgres-list-schemas.md
│   │   │       │   ├── postgres-list-sequences.md
│   │   │       │   ├── postgres-list-stored-procedure.md
│   │   │       │   ├── postgres-list-table-stats.md
│   │   │       │   ├── postgres-list-tables.md
│   │   │       │   ├── postgres-list-tablespaces.md
│   │   │       │   ├── postgres-list-triggers.md
│   │   │       │   ├── postgres-list-views.md
│   │   │       │   ├── postgres-long-running-transactions.md
│   │   │       │   ├── postgres-replication-stats.md
│   │   │       │   └── postgres-sql.md
│   │   │       ├── redis
│   │   │       │   ├── _index.md
│   │   │       │   └── redis.md
│   │   │       ├── serverless-spark
│   │   │       │   ├── _index.md
│   │   │       │   ├── serverless-spark-cancel-batch.md
│   │   │       │   ├── serverless-spark-create-pyspark-batch.md
│   │   │       │   ├── serverless-spark-create-spark-batch.md
│   │   │       │   ├── serverless-spark-get-batch.md
│   │   │       │   └── serverless-spark-list-batches.md
│   │   │       ├── singlestore
│   │   │       │   ├── _index.md
│   │   │       │   ├── singlestore-execute-sql.md
│   │   │       │   └── singlestore-sql.md
│   │   │       ├── snowflake
│   │   │       │   ├── _index.md
│   │   │       │   ├── snowflake-execute-sql.md
│   │   │       │   └── snowflake-sql.md
│   │   │       ├── spanner
│   │   │       │   ├── _index.md
│   │   │       │   ├── spanner-execute-sql.md
│   │   │       │   ├── spanner-list-graphs.md
│   │   │       │   ├── spanner-list-tables.md
│   │   │       │   └── spanner-sql.md
│   │   │       ├── sqlite
│   │   │       │   ├── _index.md
│   │   │       │   ├── sqlite-execute-sql.md
│   │   │       │   └── sqlite-sql.md
│   │   │       ├── tidb
│   │   │       │   ├── _index.md
│   │   │       │   ├── tidb-execute-sql.md
│   │   │       │   └── tidb-sql.md
│   │   │       ├── trino
│   │   │       │   ├── _index.md
│   │   │       │   ├── trino-execute-sql.md
│   │   │       │   └── trino-sql.md
│   │   │       ├── utility
│   │   │       │   ├── _index.md
│   │   │       │   └── wait.md
│   │   │       ├── valkey
│   │   │       │   ├── _index.md
│   │   │       │   └── valkey.md
│   │   │       └── yuagbytedb
│   │   │           ├── _index.md
│   │   │           └── yugabytedb-sql.md
│   │   ├── samples
│   │   │   ├── _index.md
│   │   │   ├── alloydb
│   │   │   │   ├── _index.md
│   │   │   │   ├── ai-nl
│   │   │   │   │   ├── alloydb_ai_nl.ipynb
│   │   │   │   │   └── index.md
│   │   │   │   └── mcp_quickstart.md
│   │   │   ├── bigquery
│   │   │   │   ├── _index.md
│   │   │   │   ├── colab_quickstart_bigquery.ipynb
│   │   │   │   ├── local_quickstart.md
│   │   │   │   └── mcp_quickstart
│   │   │   │       ├── _index.md
│   │   │   │       ├── inspector_tools.png
│   │   │   │       └── inspector.png
│   │   │   ├── looker
│   │   │   │   ├── _index.md
│   │   │   │   ├── looker_gemini_oauth
│   │   │   │   │   ├── _index.md
│   │   │   │   │   ├── authenticated.png
│   │   │   │   │   ├── authorize.png
│   │   │   │   │   └── registration.png
│   │   │   │   ├── looker_gemini.md
│   │   │   │   └── looker_mcp_inspector
│   │   │   │       ├── _index.md
│   │   │   │       ├── inspector_tools.png
│   │   │   │       └── inspector.png
│   │   │   └── snowflake
│   │   │       ├── _index.md
│   │   │       ├── runme.py
│   │   │       ├── snowflake-config.yaml
│   │   │       ├── snowflake-env.sh
│   │   │       └── test-snowflake.sh
│   │   └── sdks
│   │       ├── _index.md
│   │       ├── go-sdk.md
│   │       ├── js-sdk.md
│   │       └── python-sdk.md
│   ├── LOOKER_README.md
│   ├── SPANNER_README.md
│   └── TOOLBOX_README.md
├── gemini-extension.json
├── go.mod
├── go.sum
├── internal
│   ├── auth
│   │   ├── auth.go
│   │   └── google
│   │       └── google.go
│   ├── embeddingmodels
│   │   ├── embeddingmodels.go
│   │   └── gemini
│   │       ├── gemini_test.go
│   │       └── gemini.go
│   ├── log
│   │   ├── handler.go
│   │   ├── log_test.go
│   │   ├── log.go
│   │   └── logger.go
│   ├── prebuiltconfigs
│   │   ├── prebuiltconfigs_test.go
│   │   ├── prebuiltconfigs.go
│   │   └── tools
│   │       ├── alloydb-postgres-admin.yaml
│   │       ├── alloydb-postgres-observability.yaml
│   │       ├── alloydb-postgres.yaml
│   │       ├── bigquery.yaml
│   │       ├── clickhouse.yaml
│   │       ├── cloud-healthcare.yaml
│   │       ├── cloud-sql-mssql-admin.yaml
│   │       ├── cloud-sql-mssql-observability.yaml
│   │       ├── cloud-sql-mssql.yaml
│   │       ├── cloud-sql-mysql-admin.yaml
│   │       ├── cloud-sql-mysql-observability.yaml
│   │       ├── cloud-sql-mysql.yaml
│   │       ├── cloud-sql-postgres-admin.yaml
│   │       ├── cloud-sql-postgres-observability.yaml
│   │       ├── cloud-sql-postgres.yaml
│   │       ├── dataplex.yaml
│   │       ├── elasticsearch.yaml
│   │       ├── firestore.yaml
│   │       ├── looker-conversational-analytics.yaml
│   │       ├── looker.yaml
│   │       ├── mindsdb.yaml
│   │       ├── mssql.yaml
│   │       ├── mysql.yaml
│   │       ├── neo4j.yaml
│   │       ├── oceanbase.yaml
│   │       ├── postgres.yaml
│   │       ├── serverless-spark.yaml
│   │       ├── singlestore.yaml
│   │       ├── snowflake.yaml
│   │       ├── spanner-postgres.yaml
│   │       ├── spanner.yaml
│   │       └── sqlite.yaml
│   ├── prompts
│   │   ├── arguments_test.go
│   │   ├── arguments.go
│   │   ├── custom
│   │   │   ├── custom_test.go
│   │   │   └── custom.go
│   │   ├── messages_test.go
│   │   ├── messages.go
│   │   ├── prompts_test.go
│   │   ├── prompts.go
│   │   ├── promptsets_test.go
│   │   └── promptsets.go
│   ├── server
│   │   ├── api_test.go
│   │   ├── api.go
│   │   ├── common_test.go
│   │   ├── config.go
│   │   ├── mcp
│   │   │   ├── jsonrpc
│   │   │   │   ├── jsonrpc_test.go
│   │   │   │   └── jsonrpc.go
│   │   │   ├── mcp.go
│   │   │   ├── util
│   │   │   │   └── lifecycle.go
│   │   │   ├── v20241105
│   │   │   │   ├── method.go
│   │   │   │   └── types.go
│   │   │   ├── v20250326
│   │   │   │   ├── method.go
│   │   │   │   └── types.go
│   │   │   └── v20250618
│   │   │       ├── method.go
│   │   │       └── types.go
│   │   ├── mcp_test.go
│   │   ├── mcp.go
│   │   ├── resources
│   │   │   ├── resources_test.go
│   │   │   └── resources.go
│   │   ├── server_test.go
│   │   ├── server.go
│   │   ├── static
│   │   │   ├── assets
│   │   │   │   └── mcptoolboxlogo.png
│   │   │   ├── css
│   │   │   │   └── style.css
│   │   │   ├── index.html
│   │   │   ├── js
│   │   │   │   ├── auth.js
│   │   │   │   ├── loadTools.js
│   │   │   │   ├── mainContent.js
│   │   │   │   ├── navbar.js
│   │   │   │   ├── runTool.js
│   │   │   │   ├── toolDisplay.js
│   │   │   │   ├── tools.js
│   │   │   │   └── toolsets.js
│   │   │   ├── tools.html
│   │   │   └── toolsets.html
│   │   ├── web_test.go
│   │   └── web.go
│   ├── sources
│   │   ├── alloydbadmin
│   │   │   ├── alloydbadmin_test.go
│   │   │   └── alloydbadmin.go
│   │   ├── alloydbpg
│   │   │   ├── alloydb_pg_test.go
│   │   │   └── alloydb_pg.go
│   │   ├── bigquery
│   │   │   ├── bigquery_test.go
│   │   │   ├── bigquery.go
│   │   │   └── cache.go
│   │   ├── bigtable
│   │   │   ├── bigtable_test.go
│   │   │   └── bigtable.go
│   │   ├── cassandra
│   │   │   ├── cassandra_test.go
│   │   │   └── cassandra.go
│   │   ├── clickhouse
│   │   │   ├── clickhouse_test.go
│   │   │   └── clickhouse.go
│   │   ├── cloudgda
│   │   │   ├── cloud_gda_test.go
│   │   │   └── cloud_gda.go
│   │   ├── cloudhealthcare
│   │   │   ├── cloud_healthcare_test.go
│   │   │   └── cloud_healthcare.go
│   │   ├── cloudmonitoring
│   │   │   ├── cloud_monitoring_test.go
│   │   │   └── cloud_monitoring.go
│   │   ├── cloudsqladmin
│   │   │   ├── cloud_sql_admin_test.go
│   │   │   └── cloud_sql_admin.go
│   │   ├── cloudsqlmssql
│   │   │   ├── cloud_sql_mssql_test.go
│   │   │   └── cloud_sql_mssql.go
│   │   ├── cloudsqlmysql
│   │   │   ├── cloud_sql_mysql_test.go
│   │   │   └── cloud_sql_mysql.go
│   │   ├── cloudsqlpg
│   │   │   ├── cloud_sql_pg_test.go
│   │   │   └── cloud_sql_pg.go
│   │   ├── couchbase
│   │   │   ├── couchbase_test.go
│   │   │   └── couchbase.go
│   │   ├── dataplex
│   │   │   ├── dataplex_test.go
│   │   │   └── dataplex.go
│   │   ├── dgraph
│   │   │   ├── dgraph_test.go
│   │   │   └── dgraph.go
│   │   ├── dialect.go
│   │   ├── elasticsearch
│   │   │   ├── elasticsearch_test.go
│   │   │   └── elasticsearch.go
│   │   ├── firebird
│   │   │   ├── firebird_test.go
│   │   │   └── firebird.go
│   │   ├── firestore
│   │   │   ├── firestore_test.go
│   │   │   └── firestore.go
│   │   ├── http
│   │   │   ├── http_test.go
│   │   │   └── http.go
│   │   ├── ip_type.go
│   │   ├── looker
│   │   │   ├── looker_test.go
│   │   │   └── looker.go
│   │   ├── mindsdb
│   │   │   ├── mindsdb_test.go
│   │   │   └── mindsdb.go
│   │   ├── mongodb
│   │   │   ├── mongodb_test.go
│   │   │   └── mongodb.go
│   │   ├── mssql
│   │   │   ├── mssql_test.go
│   │   │   └── mssql.go
│   │   ├── mysql
│   │   │   ├── mysql_test.go
│   │   │   └── mysql.go
│   │   ├── neo4j
│   │   │   ├── neo4j_test.go
│   │   │   └── neo4j.go
│   │   ├── oceanbase
│   │   │   ├── oceanbase_test.go
│   │   │   └── oceanbase.go
│   │   ├── oracle
│   │   │   ├── oracle_test.go
│   │   │   └── oracle.go
│   │   ├── postgres
│   │   │   ├── postgres_test.go
│   │   │   └── postgres.go
│   │   ├── redis
│   │   │   ├── redis_test.go
│   │   │   └── redis.go
│   │   ├── serverlessspark
│   │   │   ├── serverlessspark_test.go
│   │   │   ├── serverlessspark.go
│   │   │   ├── url_test.go
│   │   │   └── url.go
│   │   ├── singlestore
│   │   │   ├── singlestore_test.go
│   │   │   └── singlestore.go
│   │   ├── snowflake
│   │   │   ├── snowflake_test.go
│   │   │   └── snowflake.go
│   │   ├── sources.go
│   │   ├── spanner
│   │   │   ├── spanner_test.go
│   │   │   └── spanner.go
│   │   ├── sqlite
│   │   │   ├── sqlite_test.go
│   │   │   └── sqlite.go
│   │   ├── tidb
│   │   │   ├── tidb_test.go
│   │   │   └── tidb.go
│   │   ├── trino
│   │   │   ├── trino_test.go
│   │   │   └── trino.go
│   │   ├── util.go
│   │   ├── valkey
│   │   │   ├── valkey_test.go
│   │   │   └── valkey.go
│   │   └── yugabytedb
│   │       ├── yugabytedb_test.go
│   │       └── yugabytedb.go
│   ├── telemetry
│   │   ├── instrumentation.go
│   │   └── telemetry.go
│   ├── testutils
│   │   └── testutils.go
│   ├── tools
│   │   ├── alloydb
│   │   │   ├── alloydbcreatecluster
│   │   │   │   ├── alloydbcreatecluster_test.go
│   │   │   │   └── alloydbcreatecluster.go
│   │   │   ├── alloydbcreateinstance
│   │   │   │   ├── alloydbcreateinstance_test.go
│   │   │   │   └── alloydbcreateinstance.go
│   │   │   ├── alloydbcreateuser
│   │   │   │   ├── alloydbcreateuser_test.go
│   │   │   │   └── alloydbcreateuser.go
│   │   │   ├── alloydbgetcluster
│   │   │   │   ├── alloydbgetcluster_test.go
│   │   │   │   └── alloydbgetcluster.go
│   │   │   ├── alloydbgetinstance
│   │   │   │   ├── alloydbgetinstance_test.go
│   │   │   │   └── alloydbgetinstance.go
│   │   │   ├── alloydbgetuser
│   │   │   │   ├── alloydbgetuser_test.go
│   │   │   │   └── alloydbgetuser.go
│   │   │   ├── alloydblistclusters
│   │   │   │   ├── alloydblistclusters_test.go
│   │   │   │   └── alloydblistclusters.go
│   │   │   ├── alloydblistinstances
│   │   │   │   ├── alloydblistinstances_test.go
│   │   │   │   └── alloydblistinstances.go
│   │   │   ├── alloydblistusers
│   │   │   │   ├── alloydblistusers_test.go
│   │   │   │   └── alloydblistusers.go
│   │   │   └── alloydbwaitforoperation
│   │   │       ├── alloydbwaitforoperation_test.go
│   │   │       └── alloydbwaitforoperation.go
│   │   ├── alloydbainl
│   │   │   ├── alloydbainl_test.go
│   │   │   └── alloydbainl.go
│   │   ├── bigquery
│   │   │   ├── bigqueryanalyzecontribution
│   │   │   │   ├── bigqueryanalyzecontribution_test.go
│   │   │   │   └── bigqueryanalyzecontribution.go
│   │   │   ├── bigquerycommon
│   │   │   │   ├── table_name_parser_test.go
│   │   │   │   ├── table_name_parser.go
│   │   │   │   └── util.go
│   │   │   ├── bigqueryconversationalanalytics
│   │   │   │   ├── bigqueryconversationalanalytics_test.go
│   │   │   │   └── bigqueryconversationalanalytics.go
│   │   │   ├── bigqueryexecutesql
│   │   │   │   ├── bigqueryexecutesql_test.go
│   │   │   │   └── bigqueryexecutesql.go
│   │   │   ├── bigqueryforecast
│   │   │   │   ├── bigqueryforecast_test.go
│   │   │   │   └── bigqueryforecast.go
│   │   │   ├── bigquerygetdatasetinfo
│   │   │   │   ├── bigquerygetdatasetinfo_test.go
│   │   │   │   └── bigquerygetdatasetinfo.go
│   │   │   ├── bigquerygettableinfo
│   │   │   │   ├── bigquerygettableinfo_test.go
│   │   │   │   └── bigquerygettableinfo.go
│   │   │   ├── bigquerylistdatasetids
│   │   │   │   ├── bigquerylistdatasetids_test.go
│   │   │   │   └── bigquerylistdatasetids.go
│   │   │   ├── bigquerylisttableids
│   │   │   │   ├── bigquerylisttableids_test.go
│   │   │   │   └── bigquerylisttableids.go
│   │   │   ├── bigquerysearchcatalog
│   │   │   │   ├── bigquerysearchcatalog_test.go
│   │   │   │   └── bigquerysearchcatalog.go
│   │   │   └── bigquerysql
│   │   │       ├── bigquerysql_test.go
│   │   │       └── bigquerysql.go
│   │   ├── bigtable
│   │   │   ├── bigtable_test.go
│   │   │   └── bigtable.go
│   │   ├── cassandra
│   │   │   └── cassandracql
│   │   │       ├── cassandracql_test.go
│   │   │       └── cassandracql.go
│   │   ├── clickhouse
│   │   │   ├── clickhouseexecutesql
│   │   │   │   ├── clickhouseexecutesql_test.go
│   │   │   │   └── clickhouseexecutesql.go
│   │   │   ├── clickhouselistdatabases
│   │   │   │   ├── clickhouselistdatabases_test.go
│   │   │   │   └── clickhouselistdatabases.go
│   │   │   ├── clickhouselisttables
│   │   │   │   ├── clickhouselisttables_test.go
│   │   │   │   └── clickhouselisttables.go
│   │   │   └── clickhousesql
│   │   │       ├── clickhousesql_test.go
│   │   │       └── clickhousesql.go
│   │   ├── cloudgda
│   │   │   ├── cloudgda_test.go
│   │   │   ├── cloudgda.go
│   │   │   └── types.go
│   │   ├── cloudhealthcare
│   │   │   ├── cloudhealthcarefhirfetchpage
│   │   │   │   ├── cloudhealthcarefhirfetchpage_test.go
│   │   │   │   └── cloudhealthcarefhirfetchpage.go
│   │   │   ├── cloudhealthcarefhirpatienteverything
│   │   │   │   ├── cloudhealthcarefhirpatienteverything_test.go
│   │   │   │   └── cloudhealthcarefhirpatienteverything.go
│   │   │   ├── cloudhealthcarefhirpatientsearch
│   │   │   │   ├── cloudhealthcarefhirpatientsearch_test.go
│   │   │   │   └── cloudhealthcarefhirpatientsearch.go
│   │   │   ├── cloudhealthcaregetdataset
│   │   │   │   ├── cloudhealthcaregetdataset_test.go
│   │   │   │   └── cloudhealthcaregetdataset.go
│   │   │   ├── cloudhealthcaregetdicomstore
│   │   │   │   ├── cloudhealthcaregetdicomstore_test.go
│   │   │   │   └── cloudhealthcaregetdicomstore.go
│   │   │   ├── cloudhealthcaregetdicomstoremetrics
│   │   │   │   ├── cloudhealthcaregetdicomstoremetrics_test.go
│   │   │   │   └── cloudhealthcaregetdicomstoremetrics.go
│   │   │   ├── cloudhealthcaregetfhirresource
│   │   │   │   ├── cloudhealthcaregetfhirresource_test.go
│   │   │   │   └── cloudhealthcaregetfhirresource.go
│   │   │   ├── cloudhealthcaregetfhirstore
│   │   │   │   ├── cloudhealthcaregetfhirstore_test.go
│   │   │   │   └── cloudhealthcaregetfhirstore.go
│   │   │   ├── cloudhealthcaregetfhirstoremetrics
│   │   │   │   ├── cloudhealthcaregetfhirstoremetrics_test.go
│   │   │   │   └── cloudhealthcaregetfhirstoremetrics.go
│   │   │   ├── cloudhealthcarelistdicomstores
│   │   │   │   ├── cloudhealthcarelistdicomstores_test.go
│   │   │   │   └── cloudhealthcarelistdicomstores.go
│   │   │   ├── cloudhealthcarelistfhirstores
│   │   │   │   ├── cloudhealthcarelistfhirstores_test.go
│   │   │   │   └── cloudhealthcarelistfhirstores.go
│   │   │   ├── cloudhealthcareretrieverendereddicominstance
│   │   │   │   ├── cloudhealthcareretrieverendereddicominstance_test.go
│   │   │   │   └── cloudhealthcareretrieverendereddicominstance.go
│   │   │   ├── cloudhealthcaresearchdicominstances
│   │   │   │   ├── cloudhealthcaresearchdicominstances_test.go
│   │   │   │   └── cloudhealthcaresearchdicominstances.go
│   │   │   ├── cloudhealthcaresearchdicomseries
│   │   │   │   ├── cloudhealthcaresearchdicomseries_test.go
│   │   │   │   └── cloudhealthcaresearchdicomseries.go
│   │   │   ├── cloudhealthcaresearchdicomstudies
│   │   │   │   ├── cloudhealthcaresearchdicomstudies_test.go
│   │   │   │   └── cloudhealthcaresearchdicomstudies.go
│   │   │   └── common
│   │   │       └── util.go
│   │   ├── cloudmonitoring
│   │   │   ├── cloudmonitoring_test.go
│   │   │   └── cloudmonitoring.go
│   │   ├── cloudsql
│   │   │   ├── cloudsqlcloneinstance
│   │   │   │   ├── cloudsqlcloneinstance_test.go
│   │   │   │   └── cloudsqlcloneinstance.go
│   │   │   ├── cloudsqlcreatedatabase
│   │   │   │   ├── cloudsqlcreatedatabase_test.go
│   │   │   │   └── cloudsqlcreatedatabase.go
│   │   │   ├── cloudsqlcreateusers
│   │   │   │   ├── cloudsqlcreateusers_test.go
│   │   │   │   └── cloudsqlcreateusers.go
│   │   │   ├── cloudsqlgetinstances
│   │   │   │   ├── cloudsqlgetinstances_test.go
│   │   │   │   └── cloudsqlgetinstances.go
│   │   │   ├── cloudsqllistdatabases
│   │   │   │   ├── cloudsqllistdatabases_test.go
│   │   │   │   └── cloudsqllistdatabases.go
│   │   │   ├── cloudsqllistinstances
│   │   │   │   ├── cloudsqllistinstances_test.go
│   │   │   │   └── cloudsqllistinstances.go
│   │   │   └── cloudsqlwaitforoperation
│   │   │       ├── cloudsqlwaitforoperation_test.go
│   │   │       └── cloudsqlwaitforoperation.go
│   │   ├── cloudsqlmssql
│   │   │   └── cloudsqlmssqlcreateinstance
│   │   │       ├── cloudsqlmssqlcreateinstance_test.go
│   │   │       └── cloudsqlmssqlcreateinstance.go
│   │   ├── cloudsqlmysql
│   │   │   └── cloudsqlmysqlcreateinstance
│   │   │       ├── cloudsqlmysqlcreateinstance_test.go
│   │   │       └── cloudsqlmysqlcreateinstance.go
│   │   ├── cloudsqlpg
│   │   │   ├── cloudsqlpgcreateinstances
│   │   │   │   ├── cloudsqlpgcreateinstances_test.go
│   │   │   │   └── cloudsqlpgcreateinstances.go
│   │   │   └── cloudsqlpgupgradeprecheck
│   │   │       ├── cloudsqlpgupgradeprecheck_test.go
│   │   │       └── cloudsqlpgupgradeprecheck.go
│   │   ├── couchbase
│   │   │   ├── couchbase_test.go
│   │   │   └── couchbase.go
│   │   ├── dataform
│   │   │   └── dataformcompilelocal
│   │   │       ├── dataformcompilelocal_test.go
│   │   │       └── dataformcompilelocal.go
│   │   ├── dataplex
│   │   │   ├── dataplexlookupentry
│   │   │   │   ├── dataplexlookupentry_test.go
│   │   │   │   └── dataplexlookupentry.go
│   │   │   ├── dataplexsearchaspecttypes
│   │   │   │   ├── dataplexsearchaspecttypes_test.go
│   │   │   │   └── dataplexsearchaspecttypes.go
│   │   │   └── dataplexsearchentries
│   │   │       ├── dataplexsearchentries_test.go
│   │   │       └── dataplexsearchentries.go
│   │   ├── dgraph
│   │   │   ├── dgraph_test.go
│   │   │   └── dgraph.go
│   │   ├── elasticsearch
│   │   │   └── elasticsearchesql
│   │   │       ├── elasticsearchesql_test.go
│   │   │       └── elasticsearchesql.go
│   │   ├── firebird
│   │   │   ├── firebirdexecutesql
│   │   │   │   ├── firebirdexecutesql_test.go
│   │   │   │   └── firebirdexecutesql.go
│   │   │   └── firebirdsql
│   │   │       ├── firebirdsql_test.go
│   │   │       └── firebirdsql.go
│   │   ├── firestore
│   │   │   ├── firestoreadddocuments
│   │   │   │   ├── firestoreadddocuments_test.go
│   │   │   │   └── firestoreadddocuments.go
│   │   │   ├── firestoredeletedocuments
│   │   │   │   ├── firestoredeletedocuments_test.go
│   │   │   │   └── firestoredeletedocuments.go
│   │   │   ├── firestoregetdocuments
│   │   │   │   ├── firestoregetdocuments_test.go
│   │   │   │   └── firestoregetdocuments.go
│   │   │   ├── firestoregetrules
│   │   │   │   ├── firestoregetrules_test.go
│   │   │   │   └── firestoregetrules.go
│   │   │   ├── firestorelistcollections
│   │   │   │   ├── firestorelistcollections_test.go
│   │   │   │   └── firestorelistcollections.go
│   │   │   ├── firestorequery
│   │   │   │   ├── firestorequery_test.go
│   │   │   │   └── firestorequery.go
│   │   │   ├── firestorequerycollection
│   │   │   │   ├── firestorequerycollection_test.go
│   │   │   │   └── firestorequerycollection.go
│   │   │   ├── firestoreupdatedocument
│   │   │   │   ├── firestoreupdatedocument_test.go
│   │   │   │   └── firestoreupdatedocument.go
│   │   │   ├── firestorevalidaterules
│   │   │   │   ├── firestorevalidaterules_test.go
│   │   │   │   └── firestorevalidaterules.go
│   │   │   └── util
│   │   │       ├── converter_test.go
│   │   │       ├── converter.go
│   │   │       ├── validator_test.go
│   │   │       └── validator.go
│   │   ├── http
│   │   │   ├── http_test.go
│   │   │   └── http.go
│   │   ├── http_method.go
│   │   ├── looker
│   │   │   ├── lookeradddashboardelement
│   │   │   │   ├── lookeradddashboardelement_test.go
│   │   │   │   └── lookeradddashboardelement.go
│   │   │   ├── lookeradddashboardfilter
│   │   │   │   ├── lookeradddashboardfilter_test.go
│   │   │   │   └── lookeradddashboardfilter.go
│   │   │   ├── lookercommon
│   │   │   │   ├── lookercommon_test.go
│   │   │   │   └── lookercommon.go
│   │   │   ├── lookerconversationalanalytics
│   │   │   │   ├── lookerconversationalanalytics_test.go
│   │   │   │   └── lookerconversationalanalytics.go
│   │   │   ├── lookercreateprojectfile
│   │   │   │   ├── lookercreateprojectfile_test.go
│   │   │   │   └── lookercreateprojectfile.go
│   │   │   ├── lookerdeleteprojectfile
│   │   │   │   ├── lookerdeleteprojectfile_test.go
│   │   │   │   └── lookerdeleteprojectfile.go
│   │   │   ├── lookerdevmode
│   │   │   │   ├── lookerdevmode_test.go
│   │   │   │   └── lookerdevmode.go
│   │   │   ├── lookergenerateembedurl
│   │   │   │   ├── lookergenerateembedurl_test.go
│   │   │   │   └── lookergenerateembedurl.go
│   │   │   ├── lookergetconnectiondatabases
│   │   │   │   ├── lookergetconnectiondatabases_test.go
│   │   │   │   └── lookergetconnectiondatabases.go
│   │   │   ├── lookergetconnections
│   │   │   │   ├── lookergetconnections_test.go
│   │   │   │   └── lookergetconnections.go
│   │   │   ├── lookergetconnectionschemas
│   │   │   │   ├── lookergetconnectionschemas_test.go
│   │   │   │   └── lookergetconnectionschemas.go
│   │   │   ├── lookergetconnectiontablecolumns
│   │   │   │   ├── lookergetconnectiontablecolumns_test.go
│   │   │   │   └── lookergetconnectiontablecolumns.go
│   │   │   ├── lookergetconnectiontables
│   │   │   │   ├── lookergetconnectiontables_test.go
│   │   │   │   └── lookergetconnectiontables.go
│   │   │   ├── lookergetdashboards
│   │   │   │   ├── lookergetdashboards_test.go
│   │   │   │   └── lookergetdashboards.go
│   │   │   ├── lookergetdimensions
│   │   │   │   ├── lookergetdimensions_test.go
│   │   │   │   └── lookergetdimensions.go
│   │   │   ├── lookergetexplores
│   │   │   │   ├── lookergetexplores_test.go
│   │   │   │   └── lookergetexplores.go
│   │   │   ├── lookergetfilters
│   │   │   │   ├── lookergetfilters_test.go
│   │   │   │   └── lookergetfilters.go
│   │   │   ├── lookergetlooks
│   │   │   │   ├── lookergetlooks_test.go
│   │   │   │   └── lookergetlooks.go
│   │   │   ├── lookergetmeasures
│   │   │   │   ├── lookergetmeasures_test.go
│   │   │   │   └── lookergetmeasures.go
│   │   │   ├── lookergetmodels
│   │   │   │   ├── lookergetmodels_test.go
│   │   │   │   └── lookergetmodels.go
│   │   │   ├── lookergetparameters
│   │   │   │   ├── lookergetparameters_test.go
│   │   │   │   └── lookergetparameters.go
│   │   │   ├── lookergetprojectfile
│   │   │   │   ├── lookergetprojectfile_test.go
│   │   │   │   └── lookergetprojectfile.go
│   │   │   ├── lookergetprojectfiles
│   │   │   │   ├── lookergetprojectfiles_test.go
│   │   │   │   └── lookergetprojectfiles.go
│   │   │   ├── lookergetprojects
│   │   │   │   ├── lookergetprojects_test.go
│   │   │   │   └── lookergetprojects.go
│   │   │   ├── lookerhealthanalyze
│   │   │   │   ├── lookerhealthanalyze_test.go
│   │   │   │   └── lookerhealthanalyze.go
│   │   │   ├── lookerhealthpulse
│   │   │   │   ├── lookerhealthpulse_test.go
│   │   │   │   └── lookerhealthpulse.go
│   │   │   ├── lookerhealthvacuum
│   │   │   │   ├── lookerhealthvacuum_test.go
│   │   │   │   └── lookerhealthvacuum.go
│   │   │   ├── lookermakedashboard
│   │   │   │   ├── lookermakedashboard_test.go
│   │   │   │   └── lookermakedashboard.go
│   │   │   ├── lookermakelook
│   │   │   │   ├── lookermakelook_test.go
│   │   │   │   └── lookermakelook.go
│   │   │   ├── lookerquery
│   │   │   │   ├── lookerquery_test.go
│   │   │   │   └── lookerquery.go
│   │   │   ├── lookerquerysql
│   │   │   │   ├── lookerquerysql_test.go
│   │   │   │   └── lookerquerysql.go
│   │   │   ├── lookerqueryurl
│   │   │   │   ├── lookerqueryurl_test.go
│   │   │   │   └── lookerqueryurl.go
│   │   │   ├── lookerrundashboard
│   │   │   │   ├── lookerrundashboard_test.go
│   │   │   │   └── lookerrundashboard.go
│   │   │   ├── lookerrunlook
│   │   │   │   ├── lookerrunlook_test.go
│   │   │   │   └── lookerrunlook.go
│   │   │   └── lookerupdateprojectfile
│   │   │       ├── lookerupdateprojectfile_test.go
│   │   │       └── lookerupdateprojectfile.go
│   │   ├── mindsdb
│   │   │   ├── mindsdbexecutesql
│   │   │   │   ├── mindsdbexecutesql_test.go
│   │   │   │   └── mindsdbexecutesql.go
│   │   │   └── mindsdbsql
│   │   │       ├── mindsdbsql_test.go
│   │   │       └── mindsdbsql.go
│   │   ├── mongodb
│   │   │   ├── mongodbaggregate
│   │   │   │   ├── mongodbaggregate_test.go
│   │   │   │   └── mongodbaggregate.go
│   │   │   ├── mongodbdeletemany
│   │   │   │   ├── mongodbdeletemany_test.go
│   │   │   │   └── mongodbdeletemany.go
│   │   │   ├── mongodbdeleteone
│   │   │   │   ├── mongodbdeleteone_test.go
│   │   │   │   └── mongodbdeleteone.go
│   │   │   ├── mongodbfind
│   │   │   │   ├── mongodbfind_test.go
│   │   │   │   └── mongodbfind.go
│   │   │   ├── mongodbfindone
│   │   │   │   ├── mongodbfindone_test.go
│   │   │   │   └── mongodbfindone.go
│   │   │   ├── mongodbinsertmany
│   │   │   │   ├── mongodbinsertmany_test.go
│   │   │   │   └── mongodbinsertmany.go
│   │   │   ├── mongodbinsertone
│   │   │   │   ├── mongodbinsertone_test.go
│   │   │   │   └── mongodbinsertone.go
│   │   │   ├── mongodbupdatemany
│   │   │   │   ├── mongodbupdatemany_test.go
│   │   │   │   └── mongodbupdatemany.go
│   │   │   └── mongodbupdateone
│   │   │       ├── mongodbupdateone_test.go
│   │   │       └── mongodbupdateone.go
│   │   ├── mssql
│   │   │   ├── mssqlexecutesql
│   │   │   │   ├── mssqlexecutesql_test.go
│   │   │   │   └── mssqlexecutesql.go
│   │   │   ├── mssqllisttables
│   │   │   │   ├── mssqllisttables_test.go
│   │   │   │   └── mssqllisttables.go
│   │   │   └── mssqlsql
│   │   │       ├── mssqlsql_test.go
│   │   │       └── mssqlsql.go
│   │   ├── mysql
│   │   │   ├── mysqlcommon
│   │   │   │   └── mysqlcommon.go
│   │   │   ├── mysqlexecutesql
│   │   │   │   ├── mysqlexecutesql_test.go
│   │   │   │   └── mysqlexecutesql.go
│   │   │   ├── mysqlgetqueryplan
│   │   │   │   ├── mysqlgetqueryplan_test.go
│   │   │   │   └── mysqlgetqueryplan.go
│   │   │   ├── mysqllistactivequeries
│   │   │   │   ├── mysqllistactivequeries_test.go
│   │   │   │   └── mysqllistactivequeries.go
│   │   │   ├── mysqllisttablefragmentation
│   │   │   │   ├── mysqllisttablefragmentation_test.go
│   │   │   │   └── mysqllisttablefragmentation.go
│   │   │   ├── mysqllisttables
│   │   │   │   ├── mysqllisttables_test.go
│   │   │   │   └── mysqllisttables.go
│   │   │   ├── mysqllisttablesmissinguniqueindexes
│   │   │   │   ├── mysqllisttablesmissinguniqueindexes_test.go
│   │   │   │   └── mysqllisttablesmissinguniqueindexes.go
│   │   │   └── mysqlsql
│   │   │       ├── mysqlsql_test.go
│   │   │       └── mysqlsql.go
│   │   ├── neo4j
│   │   │   ├── neo4jcypher
│   │   │   │   ├── neo4jcypher_test.go
│   │   │   │   └── neo4jcypher.go
│   │   │   ├── neo4jexecutecypher
│   │   │   │   ├── classifier
│   │   │   │   │   ├── classifier_test.go
│   │   │   │   │   └── classifier.go
│   │   │   │   ├── neo4jexecutecypher_test.go
│   │   │   │   └── neo4jexecutecypher.go
│   │   │   └── neo4jschema
│   │   │       ├── cache
│   │   │       │   ├── cache_test.go
│   │   │       │   └── cache.go
│   │   │       ├── helpers
│   │   │       │   ├── helpers_test.go
│   │   │       │   └── helpers.go
│   │   │       ├── neo4jschema_test.go
│   │   │       ├── neo4jschema.go
│   │   │       └── types
│   │   │           └── types.go
│   │   ├── oceanbase
│   │   │   ├── oceanbaseexecutesql
│   │   │   │   ├── oceanbaseexecutesql_test.go
│   │   │   │   └── oceanbaseexecutesql.go
│   │   │   └── oceanbasesql
│   │   │       ├── oceanbasesql_test.go
│   │   │       └── oceanbasesql.go
│   │   ├── oracle
│   │   │   ├── oracleexecutesql
│   │   │   │   ├── oracleexecutesql_test.go
│   │   │   │   └── oracleexecutesql.go
│   │   │   └── oraclesql
│   │   │       ├── oraclesql_test.go
│   │   │       └── oraclesql.go
│   │   ├── postgres
│   │   │   ├── postgresdatabaseoverview
│   │   │   │   ├── postgresdatabaseoverview_test.go
│   │   │   │   └── postgresdatabaseoverview.go
│   │   │   ├── postgresexecutesql
│   │   │   │   ├── postgresexecutesql_test.go
│   │   │   │   └── postgresexecutesql.go
│   │   │   ├── postgresgetcolumncardinality
│   │   │   │   ├── postgresgetcolumncardinality_test.go
│   │   │   │   └── postgresgetcolumncardinality.go
│   │   │   ├── postgreslistactivequeries
│   │   │   │   ├── postgreslistactivequeries_test.go
│   │   │   │   └── postgreslistactivequeries.go
│   │   │   ├── postgreslistavailableextensions
│   │   │   │   ├── postgreslistavailableextensions_test.go
│   │   │   │   └── postgreslistavailableextensions.go
│   │   │   ├── postgreslistdatabasestats
│   │   │   │   ├── postgreslistdatabasestats_test.go
│   │   │   │   └── postgreslistdatabasestats.go
│   │   │   ├── postgreslistindexes
│   │   │   │   ├── postgreslistindexes_test.go
│   │   │   │   └── postgreslistindexes.go
│   │   │   ├── postgreslistinstalledextensions
│   │   │   │   ├── postgreslistinstalledextensions_test.go
│   │   │   │   └── postgreslistinstalledextensions.go
│   │   │   ├── postgreslistlocks
│   │   │   │   ├── postgreslistlocks_test.go
│   │   │   │   └── postgreslistlocks.go
│   │   │   ├── postgreslistpgsettings
│   │   │   │   ├── postgreslistpgsettings_test.go
│   │   │   │   └── postgreslistpgsettings.go
│   │   │   ├── postgreslistpublicationtables
│   │   │   │   ├── postgreslistpublicationtables_test.go
│   │   │   │   └── postgreslistpublicationtables.go
│   │   │   ├── postgreslistquerystats
│   │   │   │   ├── postgreslistquerystats_test.go
│   │   │   │   └── postgreslistquerystats.go
│   │   │   ├── postgreslistroles
│   │   │   │   ├── postgreslistroles_test.go
│   │   │   │   └── postgreslistroles.go
│   │   │   ├── postgreslistschemas
│   │   │   │   ├── postgreslistschemas_test.go
│   │   │   │   └── postgreslistschemas.go
│   │   │   ├── postgreslistsequences
│   │   │   │   ├── postgreslistsequences_test.go
│   │   │   │   └── postgreslistsequences.go
│   │   │   ├── postgresliststoredprocedure
│   │   │   │   ├── postgresliststoredprocedure_test.go
│   │   │   │   └── postgresliststoredprocedure.go
│   │   │   ├── postgreslisttables
│   │   │   │   ├── postgreslisttables_test.go
│   │   │   │   └── postgreslisttables.go
│   │   │   ├── postgreslisttablespaces
│   │   │   │   ├── postgreslisttablespaces_test.go
│   │   │   │   └── postgreslisttablespaces.go
│   │   │   ├── postgreslisttablestats
│   │   │   │   ├── postgreslisttablestats_test.go
│   │   │   │   └── postgreslisttablestats.go
│   │   │   ├── postgreslisttriggers
│   │   │   │   ├── postgreslisttriggers_test.go
│   │   │   │   └── postgreslisttriggers.go
│   │   │   ├── postgreslistviews
│   │   │   │   ├── postgreslistviews_test.go
│   │   │   │   └── postgreslistviews.go
│   │   │   ├── postgreslongrunningtransactions
│   │   │   │   ├── postgreslongrunningtransactions_test.go
│   │   │   │   └── postgreslongrunningtransactions.go
│   │   │   ├── postgresreplicationstats
│   │   │   │   ├── postgresreplicationstats_test.go
│   │   │   │   └── postgresreplicationstats.go
│   │   │   └── postgressql
│   │   │       ├── postgressql_test.go
│   │   │       └── postgressql.go
│   │   ├── redis
│   │   │   ├── redis_test.go
│   │   │   └── redis.go
│   │   ├── serverlessspark
│   │   │   ├── createbatch
│   │   │   │   ├── config.go
│   │   │   │   └── tool.go
│   │   │   ├── serverlesssparkcancelbatch
│   │   │   │   ├── serverlesssparkcancelbatch_test.go
│   │   │   │   └── serverlesssparkcancelbatch.go
│   │   │   ├── serverlesssparkcreatepysparkbatch
│   │   │   │   ├── serverlesssparkcreatepysparkbatch_test.go
│   │   │   │   └── serverlesssparkcreatepysparkbatch.go
│   │   │   ├── serverlesssparkcreatesparkbatch
│   │   │   │   ├── serverlesssparkcreatesparkbatch_test.go
│   │   │   │   └── serverlesssparkcreatesparkbatch.go
│   │   │   ├── serverlesssparkgetbatch
│   │   │   │   ├── serverlesssparkgetbatch_test.go
│   │   │   │   └── serverlesssparkgetbatch.go
│   │   │   ├── serverlesssparklistbatches
│   │   │   │   ├── serverlesssparklistbatches_test.go
│   │   │   │   └── serverlesssparklistbatches.go
│   │   │   └── testutils
│   │   │       └── testutils.go
│   │   ├── singlestore
│   │   │   ├── singlestoreexecutesql
│   │   │   │   ├── singlestoreexecutesql_test.go
│   │   │   │   └── singlestoreexecutesql.go
│   │   │   └── singlestoresql
│   │   │       ├── singlestoresql_test.go
│   │   │       └── singlestoresql.go
│   │   ├── snowflake
│   │   │   ├── snowflakeexecutesql
│   │   │   │   ├── snowflakeexecutesql_test.go
│   │   │   │   └── snowflakeexecutesql.go
│   │   │   └── snowflakesql
│   │   │       ├── snowflakesql_test.go
│   │   │       └── snowflakesql.go
│   │   ├── spanner
│   │   │   ├── spannerexecutesql
│   │   │   │   ├── spannerexecutesql_test.go
│   │   │   │   └── spannerexecutesql.go
│   │   │   ├── spannerlistgraphs
│   │   │   │   ├── spannerlistgraphs_test.go
│   │   │   │   └── spannerlistgraphs.go
│   │   │   ├── spannerlisttables
│   │   │   │   ├── spannerlisttables_test.go
│   │   │   │   └── spannerlisttables.go
│   │   │   └── spannersql
│   │   │       ├── spanner_test.go
│   │   │       └── spannersql.go
│   │   ├── sqlite
│   │   │   ├── sqliteexecutesql
│   │   │   │   ├── sqliteexecutesql_test.go
│   │   │   │   └── sqliteexecutesql.go
│   │   │   └── sqlitesql
│   │   │       ├── sqlitesql_test.go
│   │   │       └── sqlitesql.go
│   │   ├── tidb
│   │   │   ├── tidbexecutesql
│   │   │   │   ├── tidbexecutesql_test.go
│   │   │   │   └── tidbexecutesql.go
│   │   │   └── tidbsql
│   │   │       ├── tidbsql_test.go
│   │   │       └── tidbsql.go
│   │   ├── tools_test.go
│   │   ├── tools.go
│   │   ├── toolsets.go
│   │   ├── trino
│   │   │   ├── trinoexecutesql
│   │   │   │   ├── trinoexecutesql_test.go
│   │   │   │   └── trinoexecutesql.go
│   │   │   └── trinosql
│   │   │       ├── trinosql_test.go
│   │   │       └── trinosql.go
│   │   ├── utility
│   │   │   └── wait
│   │   │       ├── wait_test.go
│   │   │       └── wait.go
│   │   ├── valkey
│   │   │   ├── valkey_test.go
│   │   │   └── valkey.go
│   │   └── yugabytedbsql
│   │       ├── yugabytedbsql_test.go
│   │       └── yugabytedbsql.go
│   └── util
│       ├── orderedmap
│       │   ├── orderedmap_test.go
│       │   └── orderedmap.go
│       ├── parameters
│       │   ├── common_test.go
│       │   ├── common.go
│       │   ├── parameters_test.go
│       │   └── parameters.go
│       └── util.go
├── LICENSE
├── logo.png
├── main.go
├── MCP-TOOLBOX-EXTENSION.md
├── README.md
├── server.json
└── tests
    ├── alloydb
    │   ├── alloydb_integration_test.go
    │   └── alloydb_wait_for_operation_test.go
    ├── alloydbainl
    │   └── alloydb_ai_nl_integration_test.go
    ├── alloydbpg
    │   └── alloydb_pg_integration_test.go
    ├── auth.go
    ├── bigquery
    │   └── bigquery_integration_test.go
    ├── bigtable
    │   └── bigtable_integration_test.go
    ├── cassandra
    │   └── cassandra_integration_test.go
    ├── clickhouse
    │   └── clickhouse_integration_test.go
    ├── cloudgda
    │   └── cloud_gda_integration_test.go
    ├── cloudhealthcare
    │   └── cloud_healthcare_integration_test.go
    ├── cloudmonitoring
    │   └── cloud_monitoring_integration_test.go
    ├── cloudsql
    │   ├── cloud_sql_clone_instance_test.go
    │   ├── cloud_sql_create_database_test.go
    │   ├── cloud_sql_create_users_test.go
    │   ├── cloud_sql_get_instances_test.go
    │   ├── cloud_sql_list_databases_test.go
    │   ├── cloudsql_list_instances_test.go
    │   └── cloudsql_wait_for_operation_test.go
    ├── cloudsqlmssql
    │   ├── cloud_sql_mssql_create_instance_integration_test.go
    │   └── cloud_sql_mssql_integration_test.go
    ├── cloudsqlmysql
    │   ├── cloud_sql_mysql_create_instance_integration_test.go
    │   └── cloud_sql_mysql_integration_test.go
    ├── cloudsqlpg
    │   ├── cloud_sql_pg_create_instances_test.go
    │   ├── cloud_sql_pg_integration_test.go
    │   └── cloud_sql_pg_upgrade_precheck_test.go
    ├── common.go
    ├── couchbase
    │   └── couchbase_integration_test.go
    ├── dataform
    │   └── dataform_integration_test.go
    ├── dataplex
    │   └── dataplex_integration_test.go
    ├── dgraph
    │   └── dgraph_integration_test.go
    ├── elasticsearch
    │   └── elasticsearch_integration_test.go
    ├── firebird
    │   └── firebird_integration_test.go
    ├── firestore
    │   └── firestore_integration_test.go
    ├── http
    │   └── http_integration_test.go
    ├── looker
    │   └── looker_integration_test.go
    ├── mariadb
    │   └── mariadb_integration_test.go
    ├── mindsdb
    │   └── mindsdb_integration_test.go
    ├── mongodb
    │   └── mongodb_integration_test.go
    ├── mssql
    │   └── mssql_integration_test.go
    ├── mysql
    │   └── mysql_integration_test.go
    ├── neo4j
    │   └── neo4j_integration_test.go
    ├── oceanbase
    │   └── oceanbase_integration_test.go
    ├── option.go
    ├── oracle
    │   └── oracle_integration_test.go
    ├── postgres
    │   └── postgres_integration_test.go
    ├── prompts
    │   └── custom
    │       └── prompts_integration_test.go
    ├── redis
    │   └── redis_test.go
    ├── server.go
    ├── serverlessspark
    │   └── serverless_spark_integration_test.go
    ├── singlestore
    │   └── singlestore_integration_test.go
    ├── snowflake
    │   └── snowflake_integration_test.go
    ├── source.go
    ├── spanner
    │   └── spanner_integration_test.go
    ├── sqlite
    │   └── sqlite_integration_test.go
    ├── tidb
    │   └── tidb_integration_test.go
    ├── tool.go
    ├── trino
    │   └── trino_integration_test.go
    ├── utility
    │   └── wait_integration_test.go
    ├── valkey
    │   └── valkey_test.go
    └── yugabytedb
        └── yugabytedb_integration_test.go
```

# Files

--------------------------------------------------------------------------------
/internal/prebuiltconfigs/tools/alloydb-postgres-observability.yaml:
--------------------------------------------------------------------------------

```yaml
  1 | # Copyright 2025 Google LLC
  2 | #
  3 | # Licensed under the Apache License, Version 2.0 (the "License");
  4 | # you may not use this file except in compliance with the License.
  5 | # You may obtain a copy of the License at
  6 | #
  7 | #     http://www.apache.org/licenses/LICENSE-2.0
  8 | #
  9 | # Unless required by applicable law or agreed to in writing, software
 10 | # distributed under the License is distributed on an "AS IS" BASIS,
 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 12 | # See the License for the specific language governing permissions and
 13 | # limitations under the License.
 14 | sources:
 15 |   cloud-monitoring-source:
 16 |     kind: cloud-monitoring
 17 | tools:
 18 |   get_system_metrics:
 19 |     kind: cloud-monitoring-query-prometheus
 20 |     source: cloud-monitoring-source
 21 |     description: |
 22 |       Fetches system-level Cloud Monitoring data (timeseries metrics) for an AlloyDB cluster or instance.
 23 |       To use this tool, you must provide the Google Cloud `projectId` and a PromQL `query`.
 24 | 
 25 |       Generate the PromQL `query` for AlloyDB system metrics using the provided metrics and rules. Get labels like `cluster_id` and `instance_id` from the user's intent.
 26 | 
 27 |       Defaults:
 28 |       1. Interval: Use a default interval of `5m` for `_over_time` aggregation functions unless a different window is specified by the user.
 29 | 
 30 |       PromQL Query Examples:
 31 |       1. Basic Time Series: `avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance"}[5m])`
 32 |       2. Top K: `topk(30, avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance"}[5m]))`
 33 |       3. Mean: `avg(avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="my-instance","cluster_id"="my-cluster"}[5m]))`
 34 |       4. Minimum: `min(min_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
 35 |       5. Maximum: `max(max_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
 36 |       6. Sum: `sum(avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
 37 |       7. Count streams: `count(avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
 38 |       8. Percentile with groupby on instanceid, clusterid: `quantile by ("instance_id","cluster_id")(0.99,avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","cluster_id"="my-cluster","instance_id"="my-instance"}[5m]))`
 39 | 
 40 |       Available Metrics List: metricname. description. monitored resource. labels
 41 |       1. `alloydb.googleapis.com/instance/cpu/average_utilization`: The percentage of CPU being used on an instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
 42 |       2. `alloydb.googleapis.com/instance/cpu/maximum_utilization`: Maximum CPU utilization across all currently serving nodes of the instance from 0 to 100. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
 43 |       3. `alloydb.googleapis.com/cluster/storage/usage`: The total AlloyDB storage in bytes across the entire cluster. `alloydb.googleapis.com/Cluster`. `cluster_id`.
 44 |       4. `alloydb.googleapis.com/instance/postgres/replication/replicas`: The number of read replicas connected to the primary instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `state`, `replica_instance_id`.
 45 |       5. `alloydb.googleapis.com/instance/postgres/replication/maximum_lag`: The maximum replication time lag calculated across all serving read replicas of the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `replica_instance_id`.
 46 |       6. `alloydb.googleapis.com/instance/memory/min_available_memory`: The minimum available memory across all currently serving nodes of the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
 47 |       7. `alloydb.googleapis.com/instance/postgres/instances`: The number of nodes in the instance, along with their status, which can be either up or down. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `status`.
 48 |       8. `alloydb.googleapis.com/database/postgresql/tuples`: Number of tuples (rows) by state per database in the instance. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`, `state`.
 49 |       9. `alloydb.googleapis.com/database/postgresql/temp_bytes_written_for_top_databases`: The total amount of data (in bytes) written to temporary files by the queries per database for top 500 dbs. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
 50 |       10. `alloydb.googleapis.com/database/postgresql/temp_files_written_for_top_databases`: The number of temporary files used for writing data per database while performing internal algorithms like join, sort etc for top 500 dbs. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
 51 |       11. `alloydb.googleapis.com/database/postgresql/inserted_tuples_count_for_top_databases`: The total number of rows inserted per db for top 500 dbs as a result of the queries in the instance. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
 52 |       12. `alloydb.googleapis.com/database/postgresql/updated_tuples_count_for_top_databases`: The total number of rows updated per db for top 500 dbs as a result of the queries in the instance. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
 53 |       13. `alloydb.googleapis.com/database/postgresql/deleted_tuples_count_for_top_databases`: The total number of rows deleted per db for top 500 dbs as a result of the queries in the instance. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
 54 |       14. `alloydb.googleapis.com/database/postgresql/backends_for_top_databases`: The current number of connections per database to the instance for top 500 dbs. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
 55 |       15. `alloydb.googleapis.com/instance/postgresql/backends_by_state`: The current number of connections to the instance grouped by the state like idle, active, idle_in_transaction, idle_in_transaction_aborted, disabled, and fastpath_function_call. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `state`.
 56 |       16. `alloydb.googleapis.com/instance/postgresql/backends_for_top_applications`: The current number of connections to the AlloyDB instance, grouped by applications for top 500 applications. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `application_name`.
 57 |       17. `alloydb.googleapis.com/database/postgresql/new_connections_for_top_databases`: Total number of new connections added to the instance per database for top 500 databases. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
 58 |       18. `alloydb.googleapis.com/database/postgresql/deadlock_count_for_top_databases`: Total number of deadlocks detected in the instance per database for top 500 dbs. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`.
 59 |       19. `alloydb.googleapis.com/database/postgresql/statements_executed_count`: Total count of statements executed in the instance per database per operation_type. `alloydb.googleapis.com/Database`. `cluster_id`, `instance_id`, `database`, `operation_type`.
 60 |       20. `alloydb.googleapis.com/instance/postgresql/returned_tuples_count`: Number of rows scanned while processing the queries in the instance since the last sample. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
 61 |       21. `alloydb.googleapis.com/instance/postgresql/fetched_tuples_count`: Number of rows fetched while processing the queries in the instance since the last sample. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
 62 |       22. `alloydb.googleapis.com/instance/postgresql/updated_tuples_count`: Number of rows updated while processing the queries in the instance since the last sample. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
 63 |       23. `alloydb.googleapis.com/instance/postgresql/inserted_tuples_count`: Number of rows inserted while processing the queries in the instance since the last sample. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
 64 |       24. `alloydb.googleapis.com/instance/postgresql/deleted_tuples_count`: Number of rows deleted while processing the queries in the instance since the last sample. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
 65 |       25. `alloydb.googleapis.com/instance/postgresql/written_tuples_count`: Number of rows written while processing the queries in the instance since the last sample. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
 66 |       26. `alloydb.googleapis.com/instance/postgresql/deadlock_count`: Number of deadlocks detected in the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
 67 |       27. `alloydb.googleapis.com/instance/postgresql/blks_read`: Number of blocks read by Postgres that were not in the buffer cache. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
 68 |       28. `alloydb.googleapis.com/instance/postgresql/blks_hit`: Number of times Postgres found the requested block in the buffer cache. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
 69 |       29. `alloydb.googleapis.com/instance/postgresql/temp_bytes_written_count`: The total amount of data (in bytes) written to temporary files by the queries while performing internal algorithms like join, sort etc. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
 70 |       30. `alloydb.googleapis.com/instance/postgresql/temp_files_written_count`: The number of temporary files used for writing data in the instance while performing internal algorithms like join, sort etc. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
 71 |       31. `alloydb.googleapis.com/instance/postgresql/new_connections_count`: The number of new connections added to the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
 72 |       32. `alloydb.googleapis.com/instance/postgresql/wait_count`: Total number of times processes waited for each wait event in the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `wait_event_type`, `wait_event_name`.
 73 |       33. `alloydb.googleapis.com/instance/postgresql/wait_time`: Total elapsed wait time for each wait event in the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`, `wait_event_type`, `wait_event_name`.
 74 |       34. `alloydb.googleapis.com/instance/postgres/transaction_count`: The number of committed and rolled back transactions across all serving nodes of the instance. `alloydb.googleapis.com/Instance`. `cluster_id`, `instance_id`.
 75 | 
 76 |   get_query_metrics:
 77 |     kind: cloud-monitoring-query-prometheus
 78 |     source: cloud-monitoring-source
 79 |     description: |
 80 |       Fetches query-level Cloud Monitoring data (timeseries metrics) for queries running in an AlloyDB instance.
 81 |       To use this tool, you must provide the Google Cloud `projectId` and a PromQL `query`.
 82 | 
 83 |       Generate the PromQL `query` for AlloyDB query metrics using the provided metrics and rules. Get labels like `cluster_id`, `instance_id`, and `query_hash` from the user's intent. If `query_hash` is provided, use the per-query metrics.
 84 | 
 85 |       Defaults:
 86 |       1. Interval: Use a default interval of `5m` for `_over_time` aggregation functions unless a different window is specified by the user.
 87 | 
 88 |       PromQL Query Examples:
 89 |       1. Basic Time Series: `avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance"}[5m])`
 90 |       2. Top K: `topk(30, avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance"}[5m]))`
 91 |       3. Mean: `avg(avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="my-instance","cluster_id"="my-cluster"}[5m]))`
 92 |       4. Minimum: `min(min_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
 93 |       5. Maximum: `max(max_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
 94 |       6. Sum: `sum(avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
 95 |       7. Count streams: `count(avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance","cluster_id"="alloydb-cluster"}[5m]))`
 96 |       8. Percentile with groupby on instanceid, clusterid: `quantile by ("instance_id","cluster_id")(0.99,avg_over_time({"__name__"="alloydb.googleapis.com/instance/cpu/average_utilization","monitored_resource"="alloydb.googleapis.com/Instance","cluster_id"="my-cluster","instance_id"="my-instance"}[5m]))`
 97 | 
 98 |       Available Metrics List: metricname. description. monitored resource. labels. The aggregate metrics hold aggregated values across all query stats; use the aggregate metrics if a query id is not provided. For perquery metrics, do not fetch the querystring unless the user specifically asks for it; aggregate on query hash to avoid fetching the querystring. Do not use latency metrics for anything.
 99 |       1. `alloydb.googleapis.com/database/postgresql/insights/aggregate/latencies`: Aggregated query latency distribution. `alloydb.googleapis.com/Database`. `user`, `client_addr`.
100 |       2. `alloydb.googleapis.com/database/postgresql/insights/aggregate/execution_time`: Accumulated aggregated query execution time since the last sample. `alloydb.googleapis.com/Database`. `user`, `client_addr`.
101 |       3. `alloydb.googleapis.com/database/postgresql/insights/aggregate/io_time`: Accumulated aggregated IO time since the last sample. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `io_type`.
102 |       4. `alloydb.googleapis.com/database/postgresql/insights/aggregate/lock_time`: Accumulated aggregated lock wait time since the last sample. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `lock_type`.
103 |       5. `alloydb.googleapis.com/database/postgresql/insights/aggregate/row_count`: Aggregated number of retrieved or affected rows since the last sample. `alloydb.googleapis.com/Database`. `user`, `client_addr`.
104 |       6. `alloydb.googleapis.com/database/postgresql/insights/aggregate/shared_blk_access_count`: Aggregated shared blocks accessed by statement execution. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `access_type`.
105 |       7. `alloydb.googleapis.com/database/postgresql/insights/perquery/latencies`: Per query latency distribution. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `querystring`, `query_hash`.
106 |       8. `alloydb.googleapis.com/database/postgresql/insights/perquery/execution_time`: Accumulated execution times per user per database per query. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `querystring`, `query_hash`.
107 |       9. `alloydb.googleapis.com/database/postgresql/insights/perquery/io_time`: Accumulated IO time since the last sample per query. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `io_type`, `querystring`, `query_hash`.
108 |       10. `alloydb.googleapis.com/database/postgresql/insights/perquery/lock_time`: Accumulated lock wait time since the last sample per query. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `lock_type`, `querystring`, `query_hash`.
109 |       11. `alloydb.googleapis.com/database/postgresql/insights/perquery/row_count`: The number of retrieved or affected rows since the last sample per query. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `querystring`, `query_hash`.
110 |       12. `alloydb.googleapis.com/database/postgresql/insights/perquery/shared_blk_access_count`: Shared blocks accessed by statement execution per query. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `access_type`, `querystring`, `query_hash`.
111 |       13. `alloydb.googleapis.com/database/postgresql/insights/pertag/latencies`: Query latency distribution. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `tag_hash`.
112 |       14. `alloydb.googleapis.com/database/postgresql/insights/pertag/execution_time`: Accumulated execution times since the last sample. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `tag_hash`.
113 |       15. `alloydb.googleapis.com/database/postgresql/insights/pertag/io_time`: Accumulated IO time since the last sample per tag. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `io_type`, `tag_hash`.
114 |       16. `alloydb.googleapis.com/database/postgresql/insights/pertag/lock_time`: Accumulated lock wait time since the last sample per tag. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `lock_type`, `tag_hash`.
115 |       17. `alloydb.googleapis.com/database/postgresql/insights/pertag/shared_blk_access_count`: Shared blocks accessed by statement execution per tag. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `access_type`, `tag_hash`.
116 |       18. `alloydb.googleapis.com/database/postgresql/insights/pertag/row_count`: The number of retrieved or affected rows since the last sample per tag. `alloydb.googleapis.com/Database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `tag_hash`.
117 | 
118 | toolsets:
119 |   alloydb_postgres_cloud_monitoring_tools:
120 |     - get_system_metrics
121 |     - get_query_metrics
122 | 
```
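
The tool descriptions above expect callers to assemble PromQL selectors from a metric name, the `alloydb.googleapis.com/Instance` or `alloydb.googleapis.com/Database` monitored resource, and labels such as `cluster_id` and `instance_id`, with a default `5m` window for `_over_time` functions. As a rough illustration (the `buildPromQL` helper below is hypothetical and not part of this repository), a query shaped like the "Basic Time Series" example could be assembled along these lines:

```go
package main

import (
	"fmt"
	"sort"
	"strings"
)

// buildPromQL is a hypothetical helper that assembles a PromQL expression of the
// same shape as the examples in the config above: an _over_time aggregation over
// a label-matched series, e.g.
//   avg_over_time({"__name__"="<metric>","monitored_resource"="<resource>","instance_id"="..."}[5m])
func buildPromQL(fn, metric, resource string, labels map[string]string, window string) string {
	matchers := []string{
		fmt.Sprintf(`"__name__"=%q`, metric),
		fmt.Sprintf(`"monitored_resource"=%q`, resource),
	}
	// Sort label keys so the generated query is deterministic.
	keys := make([]string, 0, len(labels))
	for k := range labels {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	for _, k := range keys {
		matchers = append(matchers, fmt.Sprintf("%q=%q", k, labels[k]))
	}
	return fmt.Sprintf("%s({%s}[%s])", fn, strings.Join(matchers, ","), window)
}

func main() {
	// Reproduces the "Basic Time Series" example with the default 5m window.
	q := buildPromQL(
		"avg_over_time",
		"alloydb.googleapis.com/instance/cpu/average_utilization",
		"alloydb.googleapis.com/Instance",
		map[string]string{"instance_id": "alloydb-instance"},
		"5m",
	)
	fmt.Println(q)
}
```

Running the sketch prints the same selector string as example 1 in the description; the remaining examples differ only in the outer aggregation function and grouping.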

--------------------------------------------------------------------------------
/internal/tools/bigquery/bigquerycommon/table_name_parser_test.go:
--------------------------------------------------------------------------------

```go
  1 | // Copyright 2025 Google LLC
  2 | //
  3 | // Licensed under the Apache License, Version 2.0 (the "License");
  4 | // you may not use this file except in compliance with the License.
  5 | // You may obtain a copy of the License at
  6 | //
  7 | //     http://www.apache.org/licenses/LICENSE-2.0
  8 | //
  9 | // Unless required by applicable law or agreed to in writing, software
 10 | // distributed under the License is distributed on an "AS IS" BASIS,
 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 12 | // See the License for the specific language governing permissions and
 13 | // limitations under the License.
 14 | 
 15 | package bigquerycommon_test
 16 | 
 17 | import (
 18 | 	"sort"
 19 | 	"strings"
 20 | 	"testing"
 21 | 
 22 | 	"github.com/google/go-cmp/cmp"
 23 | 	"github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerycommon"
 24 | )
 25 | 
 26 | func TestTableParser(t *testing.T) {
 27 | 	testCases := []struct {
 28 | 		name             string
 29 | 		sql              string
 30 | 		defaultProjectID string
 31 | 		want             []string
 32 | 		wantErr          bool
 33 | 		wantErrMsg       string
 34 | 	}{
 35 | 		{
 36 | 			name:             "single fully qualified table",
 37 | 			sql:              "SELECT * FROM `my-project.my_dataset.my_table`",
 38 | 			defaultProjectID: "default-proj",
 39 | 			want:             []string{"my-project.my_dataset.my_table"},
 40 | 			wantErr:          false,
 41 | 		},
 42 | 		{
 43 | 			name:             "multiple statements with same table",
 44 | 			sql:              "select * from proj1.data1.tbl1 limit 1; select A.b from proj1.data1.tbl1 as A limit 1;",
 45 | 			defaultProjectID: "default-proj",
 46 | 			want:             []string{"proj1.data1.tbl1"},
 47 | 			wantErr:          false,
 48 | 		},
 49 | 		{
 50 | 			name:             "multiple fully qualified tables",
 51 | 			sql:              "SELECT * FROM `proj1.data1`.`tbl1` JOIN proj2.`data2.tbl2` ON id",
 52 | 			defaultProjectID: "default-proj",
 53 | 			want:             []string{"proj1.data1.tbl1", "proj2.data2.tbl2"},
 54 | 			wantErr:          false,
 55 | 		},
 56 | 		{
 57 | 			name:             "duplicate tables",
 58 | 			sql:              "SELECT * FROM `proj1.data1.tbl1` JOIN proj1.data1.tbl1 ON id",
 59 | 			defaultProjectID: "default-proj",
 60 | 			want:             []string{"proj1.data1.tbl1"},
 61 | 			wantErr:          false,
 62 | 		},
 63 | 		{
 64 | 			name:             "partial table with default project",
 65 | 			sql:              "SELECT * FROM `my_dataset`.my_table",
 66 | 			defaultProjectID: "default-proj",
 67 | 			want:             []string{"default-proj.my_dataset.my_table"},
 68 | 			wantErr:          false,
 69 | 		},
 70 | 		{
 71 | 			name:             "partial table without default project",
 72 | 			sql:              "SELECT * FROM `my_dataset.my_table`",
 73 | 			defaultProjectID: "",
 74 | 			want:             nil,
 75 | 			wantErr:          true,
 76 | 		},
 77 | 		{
 78 | 			name:             "mixed fully qualified and partial tables",
 79 | 			sql:              "SELECT t1.*, t2.* FROM `proj1.data1.tbl1` AS t1 JOIN `data2.tbl2` AS t2 ON t1.id = t2.id",
 80 | 			defaultProjectID: "default-proj",
 81 | 			want:             []string{"proj1.data1.tbl1", "default-proj.data2.tbl2"},
 82 | 			wantErr:          false,
 83 | 		},
 84 | 		{
 85 | 			name:             "no tables",
 86 | 			sql:              "SELECT 1+1",
 87 | 			defaultProjectID: "default-proj",
 88 | 			want:             []string{},
 89 | 			wantErr:          false,
 90 | 		},
 91 | 		{
 92 | 			name:             "ignore single part identifiers (like CTEs)",
 93 | 			sql:              "WITH my_cte AS (SELECT 1) SELECT * FROM `my_cte`",
 94 | 			defaultProjectID: "default-proj",
 95 | 			want:             []string{},
 96 | 			wantErr:          false,
 97 | 		},
 98 | 		{
 99 | 			name:             "complex CTE",
100 | 			sql:              "WITH cte1 AS (SELECT * FROM `real.table.one`), cte2 AS (SELECT * FROM cte1) SELECT * FROM cte2 JOIN `real.table.two` ON true",
101 | 			defaultProjectID: "default-proj",
102 | 			want:             []string{"real.table.one", "real.table.two"},
103 | 			wantErr:          false,
104 | 		},
105 | 		{
106 | 			name:             "nested subquery should be parsed",
107 | 			sql:              "SELECT * FROM (SELECT a FROM (SELECT A.b FROM `real.table.nested` AS A))",
108 | 			defaultProjectID: "default-proj",
109 | 			want:             []string{"real.table.nested"},
110 | 			wantErr:          false,
111 | 		},
112 | 		{
113 | 			name:             "from clause with unnest",
114 | 			sql:              "SELECT event.name FROM `my-project.my_dataset.my_table` AS A, UNNEST(A.events) AS event",
115 | 			defaultProjectID: "default-proj",
116 | 			want:             []string{"my-project.my_dataset.my_table"},
117 | 			wantErr:          false,
118 | 		},
119 | 		{
120 | 			name:             "ignore more than 3 parts",
121 | 			sql:              "SELECT * FROM `proj.data.tbl.col`",
122 | 			defaultProjectID: "default-proj",
123 | 			want:             []string{},
124 | 			wantErr:          false,
125 | 		},
126 | 		{
127 | 			name:             "complex query",
128 | 			sql:              "SELECT name FROM (SELECT name FROM `proj1.data1.tbl1`) UNION ALL SELECT name FROM `data2.tbl2`",
129 | 			defaultProjectID: "default-proj",
130 | 			want:             []string{"proj1.data1.tbl1", "default-proj.data2.tbl2"},
131 | 			wantErr:          false,
132 | 		},
133 | 		{
134 | 			name:             "empty sql",
135 | 			sql:              "",
136 | 			defaultProjectID: "default-proj",
137 | 			want:             []string{},
138 | 			wantErr:          false,
139 | 		},
140 | 		{
141 | 			name:             "with comments",
142 | 			sql:              "SELECT * FROM `proj1.data1.tbl1`; -- comment `fake.table.one` \n SELECT * FROM `proj2.data2.tbl2`; # comment `fake.table.two`",
143 | 			defaultProjectID: "default-proj",
144 | 			want:             []string{"proj1.data1.tbl1", "proj2.data2.tbl2"},
145 | 			wantErr:          false,
146 | 		},
147 | 		{
148 | 			name:             "multi-statement with semicolon",
149 | 			sql:              "SELECT * FROM `proj1.data1.tbl1`; SELECT * FROM `proj2.data2.tbl2`",
150 | 			defaultProjectID: "default-proj",
151 | 			want:             []string{"proj1.data1.tbl1", "proj2.data2.tbl2"},
152 | 			wantErr:          false,
153 | 		},
154 | 		{
155 | 			name:             "simple execute immediate",
156 | 			sql:              "EXECUTE IMMEDIATE 'SELECT * FROM `exec.proj.tbl`'",
157 | 			defaultProjectID: "default-proj",
158 | 			want:             nil,
159 | 			wantErr:          true,
160 | 			wantErrMsg:       "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place",
161 | 		},
162 | 		{
163 | 			name:             "execute immediate with multiple spaces",
164 | 			sql:              "EXECUTE  IMMEDIATE 'SELECT 1'",
165 | 			defaultProjectID: "default-proj",
166 | 			want:             nil,
167 | 			wantErr:          true,
168 | 			wantErrMsg:       "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place",
169 | 		},
170 | 		{
171 | 			name:             "execute immediate with newline",
172 | 			sql:              "EXECUTE\nIMMEDIATE 'SELECT 1'",
173 | 			defaultProjectID: "default-proj",
174 | 			want:             nil,
175 | 			wantErr:          true,
176 | 			wantErrMsg:       "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place",
177 | 		},
178 | 		{
179 | 			name:             "execute immediate with comment",
180 | 			sql:              "EXECUTE -- some comment\n IMMEDIATE 'SELECT * FROM `exec.proj.tbl`'",
181 | 			defaultProjectID: "default-proj",
182 | 			want:             nil,
183 | 			wantErr:          true,
184 | 			wantErrMsg:       "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place",
185 | 		},
186 | 		{
187 | 			name:             "nested execute immediate",
188 | 			sql:              "EXECUTE IMMEDIATE \"EXECUTE IMMEDIATE '''SELECT * FROM `nested.exec.tbl`'''\"",
189 | 			defaultProjectID: "default-proj",
190 | 			want:             nil,
191 | 			wantErr:          true,
192 | 			wantErrMsg:       "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place",
193 | 		},
194 | 		{
195 | 			name:             "begin execute immediate",
196 | 			sql:              "BEGIN EXECUTE IMMEDIATE 'SELECT * FROM `exec.proj.tbl`'; END;",
197 | 			defaultProjectID: "default-proj",
198 | 			want:             nil,
199 | 			wantErr:          true,
200 | 			wantErrMsg:       "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place",
201 | 		},
202 | 		{
203 | 			name:             "table inside string literal should be ignored",
204 | 			sql:              "SELECT * FROM `real.table.one` WHERE name = 'select * from `fake.table.two`'",
205 | 			defaultProjectID: "default-proj",
206 | 			want:             []string{"real.table.one"},
207 | 			wantErr:          false,
208 | 		},
209 | 		{
210 | 			name:             "string with escaped single quote",
211 | 			sql:              "SELECT 'this is a string with an escaped quote \\' and a fake table `fake.table.one`' FROM `real.table.two`",
212 | 			defaultProjectID: "default-proj",
213 | 			want:             []string{"real.table.two"},
214 | 			wantErr:          false,
215 | 		},
216 | 		{
217 | 			name:             "string with escaped double quote",
218 | 			sql:              `SELECT "this is a string with an escaped quote \" and a fake table ` + "`fake.table.one`" + `" FROM ` + "`real.table.two`",
219 | 			defaultProjectID: "default-proj",
220 | 			want:             []string{"real.table.two"},
221 | 			wantErr:          false,
222 | 		},
223 | 		{
224 | 			name:             "multi-line comment",
225 | 			sql:              "/* `fake.table.1` */ SELECT * FROM `real.table.2`",
226 | 			defaultProjectID: "default-proj",
227 | 			want:             []string{"real.table.2"},
228 | 			wantErr:          false,
229 | 		},
230 | 		{
231 | 			name:             "raw string with backslash should be ignored",
232 | 			sql:              "SELECT * FROM `real.table.one` WHERE name = r'a raw string with a \\ and a fake table `fake.table.two`'",
233 | 			defaultProjectID: "default-proj",
234 | 			want:             []string{"real.table.one"},
235 | 			wantErr:          false,
236 | 		},
237 | 		{
238 | 			name:             "capital R raw string with quotes inside should be ignored",
239 | 			sql:              `SELECT * FROM ` + "`real.table.one`" + ` WHERE name = R"""a raw string with a ' and a " and a \ and a fake table ` + "`fake.table.two`" + `"""`,
240 | 			defaultProjectID: "default-proj",
241 | 			want:             []string{"real.table.one"},
242 | 			wantErr:          false,
243 | 		},
244 | 		{
245 | 			name:             "triple quoted raw string should be ignored",
246 | 			sql:              "SELECT * FROM `real.table.one` WHERE name = r'''a raw string with a ' and a \" and a \\ and a fake table `fake.table.two`'''",
247 | 			defaultProjectID: "default-proj",
248 | 			want:             []string{"real.table.one"},
249 | 			wantErr:          false,
250 | 		},
251 | 		{
252 | 			name:             "triple quoted capital R raw string should be ignored",
253 | 			sql:              `SELECT * FROM ` + "`real.table.one`" + ` WHERE name = R"""a raw string with a ' and a " and a \ and a fake table ` + "`fake.table.two`" + `"""`,
254 | 			defaultProjectID: "default-proj",
255 | 			want:             []string{"real.table.one"},
256 | 			wantErr:          false,
257 | 		},
258 | 		{
259 | 			name:             "unquoted fully qualified table",
260 | 			sql:              "SELECT * FROM my-project.my_dataset.my_table",
261 | 			defaultProjectID: "default-proj",
262 | 			want:             []string{"my-project.my_dataset.my_table"},
263 | 			wantErr:          false,
264 | 		},
265 | 		{
266 | 			name:             "unquoted partial table with default project",
267 | 			sql:              "SELECT * FROM my_dataset.my_table",
268 | 			defaultProjectID: "default-proj",
269 | 			want:             []string{"default-proj.my_dataset.my_table"},
270 | 			wantErr:          false,
271 | 		},
272 | 		{
273 | 			name:             "unquoted partial table without default project",
274 | 			sql:              "SELECT * FROM my_dataset.my_table",
275 | 			defaultProjectID: "",
276 | 			want:             nil,
277 | 			wantErr:          true,
278 | 		},
279 | 		{
280 | 			name:             "mixed quoting style 1",
281 | 			sql:              "SELECT * FROM `my-project`.my_dataset.my_table",
282 | 			defaultProjectID: "default-proj",
283 | 			want:             []string{"my-project.my_dataset.my_table"},
284 | 			wantErr:          false,
285 | 		},
286 | 		{
287 | 			name:             "mixed quoting style 2",
288 | 			sql:              "SELECT * FROM `my-project`.`my_dataset`.my_table",
289 | 			defaultProjectID: "default-proj",
290 | 			want:             []string{"my-project.my_dataset.my_table"},
291 | 			wantErr:          false,
292 | 		},
293 | 		{
294 | 			name:             "mixed quoting style 3",
295 | 			sql:              "SELECT * FROM `my-project`.`my_dataset`.`my_table`",
296 | 			defaultProjectID: "default-proj",
297 | 			want:             []string{"my-project.my_dataset.my_table"},
298 | 			wantErr:          false,
299 | 		},
300 | 		{
301 | 			name:             "mixed quoted and unquoted tables",
302 | 			sql:              "SELECT * FROM `proj1.data1.tbl1` JOIN proj2.data2.tbl2 ON id",
303 | 			defaultProjectID: "default-proj",
304 | 			want:             []string{"proj1.data1.tbl1", "proj2.data2.tbl2"},
305 | 			wantErr:          false,
306 | 		},
307 | 		{
308 | 			name:             "create table statement",
309 | 			sql:              "CREATE TABLE `my-project.my_dataset.my_table` (x INT64)",
310 | 			defaultProjectID: "default-proj",
311 | 			want:             []string{"my-project.my_dataset.my_table"},
312 | 			wantErr:          false,
313 | 		},
314 | 		{
315 | 			name:             "insert into statement",
316 | 			sql:              "INSERT INTO `my-project.my_dataset.my_table` (x) VALUES (1)",
317 | 			defaultProjectID: "default-proj",
318 | 			want:             []string{"my-project.my_dataset.my_table"},
319 | 			wantErr:          false,
320 | 		},
321 | 		{
322 | 			name:             "update statement",
323 | 			sql:              "UPDATE `my-project.my_dataset.my_table` SET x = 2 WHERE true",
324 | 			defaultProjectID: "default-proj",
325 | 			want:             []string{"my-project.my_dataset.my_table"},
326 | 			wantErr:          false,
327 | 		},
328 | 		{
329 | 			name:             "delete from statement",
330 | 			sql:              "DELETE FROM `my-project.my_dataset.my_table` WHERE true",
331 | 			defaultProjectID: "default-proj",
332 | 			want:             []string{"my-project.my_dataset.my_table"},
333 | 			wantErr:          false,
334 | 		},
335 | 		{
336 | 			name:             "merge into statement",
337 | 			sql:              "MERGE `proj.data.target` T USING `proj.data.source` S ON T.id = S.id WHEN NOT MATCHED THEN INSERT ROW",
338 | 			defaultProjectID: "default-proj",
339 | 			want:             []string{"proj.data.source", "proj.data.target"},
340 | 			wantErr:          false,
341 | 		},
342 | 		{
343 | 			name:             "create schema statement",
344 | 			sql:              "CREATE SCHEMA `my-project.my_dataset`",
345 | 			defaultProjectID: "default-proj",
346 | 			want:             nil,
347 | 			wantErr:          true,
348 | 			wantErrMsg:       "dataset-level operations like 'CREATE SCHEMA' are not allowed",
349 | 		},
350 | 		{
351 | 			name:             "create dataset statement",
352 | 			sql:              "CREATE DATASET `my-project.my_dataset`",
353 | 			defaultProjectID: "default-proj",
354 | 			want:             nil,
355 | 			wantErr:          true,
356 | 			wantErrMsg:       "dataset-level operations like 'CREATE DATASET' are not allowed",
357 | 		},
358 | 		{
359 | 			name:             "drop schema statement",
360 | 			sql:              "DROP SCHEMA `my-project.my_dataset`",
361 | 			defaultProjectID: "default-proj",
362 | 			want:             nil,
363 | 			wantErr:          true,
364 | 			wantErrMsg:       "dataset-level operations like 'DROP SCHEMA' are not allowed",
365 | 		},
366 | 		{
367 | 			name:             "drop dataset statement",
368 | 			sql:              "DROP DATASET `my-project.my_dataset`",
369 | 			defaultProjectID: "default-proj",
370 | 			want:             nil,
371 | 			wantErr:          true,
372 | 			wantErrMsg:       "dataset-level operations like 'DROP DATASET' are not allowed",
373 | 		},
374 | 		{
375 | 			name:             "alter schema statement",
376 | 			sql:              "ALTER SCHEMA my_dataset SET OPTIONS(description='new description')",
377 | 			defaultProjectID: "default-proj",
378 | 			want:             nil,
379 | 			wantErr:          true,
380 | 			wantErrMsg:       "dataset-level operations like 'ALTER SCHEMA' are not allowed",
381 | 		},
382 | 		{
383 | 			name:             "alter dataset statement",
384 | 			sql:              "ALTER DATASET my_dataset SET OPTIONS(description='new description')",
385 | 			defaultProjectID: "default-proj",
386 | 			want:             nil,
387 | 			wantErr:          true,
388 | 			wantErrMsg:       "dataset-level operations like 'ALTER DATASET' are not allowed",
389 | 		},
390 | 		{
391 | 			name:             "begin...end block",
392 | 			sql:              "BEGIN CREATE TABLE `proj.data.tbl1` (x INT64); INSERT `proj.data.tbl2` (y) VALUES (1); END;",
393 | 			defaultProjectID: "default-proj",
394 | 			want:             []string{"proj.data.tbl1", "proj.data.tbl2"},
395 | 			wantErr:          false,
396 | 		},
397 | 		{
398 | 			name: "complex begin...end block with comments and different quoting",
399 | 			sql: `
400 | 				BEGIN
401 | 					-- Create a new table
402 | 					CREATE TABLE proj.data.tbl1 (x INT64);
403 | 					/* Insert some data from another table */
404 | 					INSERT INTO ` + "`proj.data.tbl2`" + ` (y) SELECT y FROM proj.data.source;
405 | 				END;`,
406 | 			defaultProjectID: "default-proj",
407 | 			want:             []string{"proj.data.source", "proj.data.tbl1", "proj.data.tbl2"},
408 | 			wantErr:          false,
409 | 		},
410 | 		{
411 | 			name:             "call fully qualified procedure",
412 | 			sql:              "CALL my-project.my_dataset.my_procedure()",
413 | 			defaultProjectID: "default-proj",
414 | 			want:             nil,
415 | 			wantErr:          true,
416 | 			wantErrMsg:       "CALL is not allowed when dataset restrictions are in place",
417 | 		},
418 | 		{
419 | 			name:             "call partially qualified procedure",
420 | 			sql:              "CALL my_dataset.my_procedure()",
421 | 			defaultProjectID: "default-proj",
422 | 			want:             nil,
423 | 			wantErr:          true,
424 | 			wantErrMsg:       "CALL is not allowed when dataset restrictions are in place",
425 | 		},
426 | 		{
427 | 			name:             "call procedure in begin...end block",
428 | 			sql:              "BEGIN CALL proj.data.proc1(); SELECT * FROM proj.data.tbl1; END;",
429 | 			defaultProjectID: "default-proj",
430 | 			want:             nil,
431 | 			wantErr:          true,
432 | 			wantErrMsg:       "CALL is not allowed when dataset restrictions are in place",
433 | 		},
434 | 		{
435 | 			name:             "call procedure with newline",
436 | 			sql:              "CALL\nmy_dataset.my_procedure()",
437 | 			defaultProjectID: "default-proj",
438 | 			want:             nil,
439 | 			wantErr:          true,
440 | 			wantErrMsg:       "CALL is not allowed when dataset restrictions are in place",
441 | 		},
442 | 		{
443 | 			name:             "call procedure without default project should fail",
444 | 			sql:              "CALL my_dataset.my_procedure()",
445 | 			defaultProjectID: "",
446 | 			want:             nil,
447 | 			wantErr:          true,
448 | 			wantErrMsg:       "CALL is not allowed when dataset restrictions are in place",
449 | 		},
450 | 		{
451 | 			name:             "create procedure statement",
452 | 			sql:              "CREATE PROCEDURE my_dataset.my_procedure() BEGIN SELECT 1; END;",
453 | 			defaultProjectID: "default-proj",
454 | 			want:             nil,
455 | 			wantErr:          true,
456 | 			wantErrMsg:       "unanalyzable statements like 'CREATE PROCEDURE' are not allowed",
457 | 		},
458 | 		{
459 | 			name:             "create or replace procedure statement",
460 | 			sql:              "CREATE\n OR \nREPLACE \nPROCEDURE my_dataset.my_procedure() BEGIN SELECT 1; END;",
461 | 			defaultProjectID: "default-proj",
462 | 			want:             nil,
463 | 			wantErr:          true,
464 | 			wantErrMsg:       "unanalyzable statements like 'CREATE OR REPLACE PROCEDURE' are not allowed",
465 | 		},
466 | 		{
467 | 			name:             "create function statement",
468 | 			sql:              "CREATE FUNCTION my_dataset.my_function() RETURNS INT64 AS (1);",
469 | 			defaultProjectID: "default-proj",
470 | 			want:             nil,
471 | 			wantErr:          true,
472 | 			wantErrMsg:       "unanalyzable statements like 'CREATE FUNCTION' are not allowed",
473 | 		},
474 | 	}
475 | 
476 | 	for _, tc := range testCases {
477 | 		t.Run(tc.name, func(t *testing.T) {
478 | 			got, err := bigquerycommon.TableParser(tc.sql, tc.defaultProjectID)
479 | 			if (err != nil) != tc.wantErr {
480 | 				t.Errorf("TableParser() error = %v, wantErr %v", err, tc.wantErr)
481 | 				return
482 | 			}
483 | 			if tc.wantErr && tc.wantErrMsg != "" {
484 | 				if err == nil || !strings.Contains(err.Error(), tc.wantErrMsg) {
485 | 					t.Errorf("TableParser() error = %v, want err containing %q", err, tc.wantErrMsg)
486 | 				}
487 | 			}
488 | 			// Sort slices to ensure comparison is order-independent.
489 | 			sort.Strings(got)
490 | 			sort.Strings(tc.want)
491 | 			if diff := cmp.Diff(tc.want, got); diff != "" {
492 | 				t.Errorf("TableParser() mismatch (-want +got):\n%s", diff)
493 | 			}
494 | 		})
495 | 	}
496 | }
497 | 
```
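
The cases above pin down the contract of `TableParser`: it returns the distinct, fully qualified tables referenced across one or more statements, expands partially qualified names with the default project, and rejects `EXECUTE IMMEDIATE`, `CALL`, and dataset-level DDL when restrictions are in place. A minimal, hypothetical caller (not part of the repository; it assumes the signature exercised by the test and would only compile inside this module, since the package is `internal`) might look like:

```go
package main

import (
	"fmt"
	"log"

	"github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerycommon"
)

func main() {
	// Partially qualified names are expanded with the default project,
	// mirroring the "mixed fully qualified and partial tables" case above.
	sql := "SELECT * FROM `proj1.data1.tbl1` JOIN data2.tbl2 ON id"

	tables, err := bigquerycommon.TableParser(sql, "default-proj")
	if err != nil {
		// e.g. EXECUTE IMMEDIATE, CALL, or CREATE SCHEMA statements land here.
		log.Fatalf("TableParser: %v", err)
	}
	// Expected to contain proj1.data1.tbl1 and default-proj.data2.tbl2
	// (the tests sort before comparing, so ordering is not guaranteed).
	fmt.Println(tables)
}
```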

--------------------------------------------------------------------------------
/docs/en/resources/sources/dataplex.md:
--------------------------------------------------------------------------------

```markdown
  1 | ---
  2 | title: "Dataplex"
  3 | type: docs
  4 | weight: 1
  5 | description: >
  6 |   Dataplex Universal Catalog is a unified, intelligent governance solution for data and AI assets in Google Cloud. Dataplex Universal Catalog powers AI, analytics, and business intelligence at scale.
  7 | ---
  8 | 
  9 | # Dataplex Source
 10 | 
 11 | [Dataplex][dataplex-docs] Universal Catalog is a unified, intelligent governance
 12 | solution for data and AI assets in Google Cloud. Dataplex Universal Catalog
 13 | powers AI, analytics, and business intelligence at scale.
 14 | 
 15 | At the heart of these governance capabilities is a catalog that contains a
 16 | centralized inventory of the data assets in your organization. Dataplex
 17 | Universal Catalog holds business, technical, and runtime metadata for all of
 18 | your data. It helps you discover relationships and semantics in the metadata by
 19 | applying artificial intelligence and machine learning.
 20 | 
 21 | [dataplex-docs]: https://cloud.google.com/dataplex/docs
 22 | 
 23 | ## Example
 24 | 
 25 | ```yaml
 26 | sources:
 27 |   my-dataplex-source:
 28 |     kind: "dataplex"
 29 |     project: "my-project-id"
 30 | ```
 31 | 
 32 | ## Sample System Prompt
 33 | 
 34 | You can use the following system prompt as "Custom Instructions" in your client
 35 | application.
 36 | 
 37 | ```
 38 | # Objective
 39 | Your primary objective is to help discover, organize and manage metadata related to data assets. 
 40 | 
 41 | # Tone and Style
 42 | 1. Adopt the persona of a senior subject matter expert
 43 | 2. Your communication style must be:
 44 |     1. Concise: Always favor brevity.
 45 |     2. Direct: Avoid greetings (e.g., "Hi there!", "Certainly!"). Get straight to the point.  
 46 |         Example (Incorrect): Hi there! I see that you are looking for...  
 47 |         Example (Correct): This problem likely stems from...
 48 | 3. Do not reiterate or summarize the question in the answer.
 49 | 4. Crucially, always convey a tone of uncertainty and caution. Since you are interpreting metadata and have no way to externally verify your answers, never express complete confidence. Frame your responses as interpretations based solely on the provided metadata. Use a suggestive tone, not a prescriptive one:
 50 |     Example (Correct): "The entry describes..."  
 51 |     Example (Correct): "According to the catalog,..."  
 52 |     Example (Correct): "Based on the metadata,..."  
 53 |     Example (Correct): "Based on the search results,..."  
 54 | 5. Do not make assumptions
 55 | 
 56 | # Data Model
 57 | ## Entries
 58 | Entry represents a specific data asset. Entry acts as a metadata record for something that is managed by Catalog, such as:
 59 | 
 60 | - A BigQuery table or dataset
 61 | - A Cloud Storage bucket or folder
 62 | - An on-premises SQL table
 63 | 
 64 | ## Aspects
 65 | While the Entry itself is a container, the rich descriptive information about the asset (e.g., schema, data types, business descriptions, classifications) is stored in associated components called Aspects. Aspects are created based on pre-defined blueprints known as Aspect Types.
 66 | 
 67 | ## Aspect Types
 68 | Aspect Type is a reusable template that defines the schema for a set of metadata fields. Think of an Aspect Type as a structure for the kind of metadata that is organized in the catalog within the Entry. 
 69 | 
 70 | Examples:
 71 | - projects/dataplex-types/locations/global/aspectTypes/analytics-hub-exchange
 72 | - projects/dataplex-types/locations/global/aspectTypes/analytics-hub
 73 | - projects/dataplex-types/locations/global/aspectTypes/analytics-hub-listing
 74 | - projects/dataplex-types/locations/global/aspectTypes/bigquery-connection
 75 | - projects/dataplex-types/locations/global/aspectTypes/bigquery-data-policy
 76 | - projects/dataplex-types/locations/global/aspectTypes/bigquery-dataset
 77 | - projects/dataplex-types/locations/global/aspectTypes/bigquery-model
 78 | - projects/dataplex-types/locations/global/aspectTypes/bigquery-policy
 79 | - projects/dataplex-types/locations/global/aspectTypes/bigquery-routine
 80 | - projects/dataplex-types/locations/global/aspectTypes/bigquery-row-access-policy
 81 | - projects/dataplex-types/locations/global/aspectTypes/bigquery-table
 82 | - projects/dataplex-types/locations/global/aspectTypes/bigquery-view
 83 | - projects/dataplex-types/locations/global/aspectTypes/cloud-bigtable-instance
 84 | - projects/dataplex-types/locations/global/aspectTypes/cloud-bigtable-table
 85 | - projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-database
 86 | - projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-instance
 87 | - projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-table
 88 | - projects/dataplex-types/locations/global/aspectTypes/cloud-spanner-view
 89 | - projects/dataplex-types/locations/global/aspectTypes/cloudsql-database
 90 | - projects/dataplex-types/locations/global/aspectTypes/cloudsql-instance
 91 | - projects/dataplex-types/locations/global/aspectTypes/cloudsql-schema
 92 | - projects/dataplex-types/locations/global/aspectTypes/cloudsql-table
 93 | - projects/dataplex-types/locations/global/aspectTypes/cloudsql-view
 94 | - projects/dataplex-types/locations/global/aspectTypes/contacts
 95 | - projects/dataplex-types/locations/global/aspectTypes/dataform-code-asset
 96 | - projects/dataplex-types/locations/global/aspectTypes/dataform-repository
 97 | - projects/dataplex-types/locations/global/aspectTypes/dataform-workspace
 98 | - projects/dataplex-types/locations/global/aspectTypes/dataproc-metastore-database
 99 | - projects/dataplex-types/locations/global/aspectTypes/dataproc-metastore-service
100 | - projects/dataplex-types/locations/global/aspectTypes/dataproc-metastore-table
101 | - projects/dataplex-types/locations/global/aspectTypes/data-product
102 | - projects/dataplex-types/locations/global/aspectTypes/data-quality-scorecard
103 | - projects/dataplex-types/locations/global/aspectTypes/external-connection
104 | - projects/dataplex-types/locations/global/aspectTypes/overview
105 | - projects/dataplex-types/locations/global/aspectTypes/pubsub-topic
106 | - projects/dataplex-types/locations/global/aspectTypes/schema
107 | - projects/dataplex-types/locations/global/aspectTypes/sensitive-data-protection-job-result
108 | - projects/dataplex-types/locations/global/aspectTypes/sensitive-data-protection-profile
109 | - projects/dataplex-types/locations/global/aspectTypes/sql-access
110 | - projects/dataplex-types/locations/global/aspectTypes/storage-bucket
111 | - projects/dataplex-types/locations/global/aspectTypes/storage-folder
112 | - projects/dataplex-types/locations/global/aspectTypes/storage
113 | - projects/dataplex-types/locations/global/aspectTypes/usage
114 | 
115 | ## Entry Types
116 | Every Entry must conform to an Entry Type. The Entry Type acts as a template, defining the structure, required aspects, and constraints for Entries of that type. 
117 | 
118 | Examples:
119 | - projects/dataplex-types/locations/global/entryTypes/analytics-hub-exchange
120 | - projects/dataplex-types/locations/global/entryTypes/analytics-hub-listing
121 | - projects/dataplex-types/locations/global/entryTypes/bigquery-connection
122 | - projects/dataplex-types/locations/global/entryTypes/bigquery-data-policy
123 | - projects/dataplex-types/locations/global/entryTypes/bigquery-dataset
124 | - projects/dataplex-types/locations/global/entryTypes/bigquery-model
125 | - projects/dataplex-types/locations/global/entryTypes/bigquery-routine
126 | - projects/dataplex-types/locations/global/entryTypes/bigquery-row-access-policy
127 | - projects/dataplex-types/locations/global/entryTypes/bigquery-table
128 | - projects/dataplex-types/locations/global/entryTypes/bigquery-view
129 | - projects/dataplex-types/locations/global/entryTypes/cloud-bigtable-instance
130 | - projects/dataplex-types/locations/global/entryTypes/cloud-bigtable-table
131 | - projects/dataplex-types/locations/global/entryTypes/cloud-spanner-database
132 | - projects/dataplex-types/locations/global/entryTypes/cloud-spanner-instance
133 | - projects/dataplex-types/locations/global/entryTypes/cloud-spanner-table
134 | - projects/dataplex-types/locations/global/entryTypes/cloud-spanner-view
135 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-database
136 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-instance
137 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-table
138 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-mysql-view
139 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-database
140 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-instance
141 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-schema
142 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-table
143 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-postgresql-view
144 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-database
145 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-instance
146 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-schema
147 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-table
148 | - projects/dataplex-types/locations/global/entryTypes/cloudsql-sqlserver-view
149 | - projects/dataplex-types/locations/global/entryTypes/dataform-code-asset
150 | - projects/dataplex-types/locations/global/entryTypes/dataform-repository
151 | - projects/dataplex-types/locations/global/entryTypes/dataform-workspace
152 | - projects/dataplex-types/locations/global/entryTypes/dataproc-metastore-database
153 | - projects/dataplex-types/locations/global/entryTypes/dataproc-metastore-service
154 | - projects/dataplex-types/locations/global/entryTypes/dataproc-metastore-table
155 | - projects/dataplex-types/locations/global/entryTypes/pubsub-topic
156 | - projects/dataplex-types/locations/global/entryTypes/storage-bucket
157 | - projects/dataplex-types/locations/global/entryTypes/storage-folder
158 | - projects/dataplex-types/locations/global/entryTypes/vertexai-dataset
159 | - projects/dataplex-types/locations/global/entryTypes/vertexai-feature-group
160 | - projects/dataplex-types/locations/global/entryTypes/vertexai-feature-online-store
161 | 
162 | ## Entry Groups
163 | Entries are organized within Entry Groups, which are logical groupings of Entries. An Entry Group acts as a namespace for its Entries.
164 | 
165 | ## Entry Links
166 | Entries can be linked together using EntryLinks to represent relationships between data assets (e.g. foreign keys).
167 | 
168 | # Tool instructions
169 | ## Tool: dataplex_search_entries
170 | ## General
171 | - Do not try to search within search results on your own.
172 | - Do not fetch multiple pages of results unless explicitly asked.
173 | 
174 | ## Search syntax
175 | 
176 | ### Simple search
177 | In its simplest form, a search query consists of a single predicate. Such a predicate can match several pieces of metadata:
178 | 
179 | - A substring of a name, display name, or description of a resource
180 | - A substring of the type of a resource
181 | - A substring of a column name (or nested column name) in the schema of a resource
182 | - A substring of a project ID
183 | - A string from an overview description
184 | 
185 | For example, the predicate foo matches the following resources:
186 | - Resource with the name foo.bar
187 | - Resource with the display name Foo Bar
188 | - Resource with the description This is the foo script
189 | - Resource with the exact type foo
190 | - Column foo_bar in the schema of a resource
191 | - Nested column foo_bar in the schema of a resource
192 | - Project prod-foo-bar
193 | - Resource with an overview containing the word foo
194 | 
195 | 
196 | ### Qualified predicates
197 | You can qualify a predicate by prefixing it with a key that restricts the matching to a specific piece of metadata:
198 | - An equal sign (=) restricts the search to an exact match.
199 | - A colon (:) after the key matches the predicate to either a substring or a token within the value in the search results.
200 | 
201 | Tokenization splits the stream of text into a series of tokens, with each token usually corresponding to a single word. For example:
202 | - name:foo selects resources with names that contain the foo substring, like foo1 and barfoo.
203 | - description:foo selects resources with the foo token in the description, like bar and foo.
204 | - location=foo matches resources in a specified location with foo as the location name.
205 | 
206 | The predicate keys type, system, location, and orgid support only the exact match (=) qualifier, not the substring qualifier (:). For example, type=foo or orgid=number.
207 | 
208 | Search syntax supports the following qualifiers:
209 | - "name:x" - Matches x as a substring of the resource ID.
210 | - "displayname:x" - Match x as a substring of the resource display name.
211 | - "column:x" - Matches x as a substring of the column name (or nested column name) in the schema of the resource.
212 | - "description:x" - Matches x as a token in the resource description.
213 | - "label:bar" - Matches BigQuery resources that have a label (with some value) and the label key has bar as a substring.
214 | - "label=bar" - Matches BigQuery resources that have a label (with some value) and the label key equals bar as a string.
215 | - "label:bar:x" - Matches x as a substring in the value of a label with a key bar attached to a BigQuery resource.
216 | - "label=foo:bar" - Matches BigQuery resources where the key equals foo and the key value equals bar.
217 | - "label.foo=bar" - Matches BigQuery resources where the key equals foo and the key value equals bar.
218 | - "label.foo" - Matches BigQuery resources that have a label whose key equals foo as a string.
219 | - "type=TYPE" - Matches resources of a specific entry type or its type alias.
220 | - "projectid:bar" - Matches resources within Google Cloud projects that match bar as a substring in the ID.
221 | - "parent:x" - Matches x as a substring of the hierarchical path of a resource. It supports same syntax as `name` predicate.
222 | - "orgid=number" - Matches resources within a Google Cloud organization with the exact ID value of the number.
223 | - "system=SYSTEM" - Matches resources from a specified system. For example, system=bigquery matches BigQuery resources.
224 | - "location=LOCATION" - Matches resources in a specified location with an exact name. For example, location=us-central1 matches assets hosted in Iowa. BigQuery Omni assets support this qualifier by using the BigQuery Omni location name. For example, location=aws-us-east-1 matches BigQuery Omni assets in Northern Virginia.
225 | - "createtime" -
226 | Finds resources that were created within, before, or after a given date or time. For example "createtime:2019-01-01" matches resources created on 2019-01-01. 
227 | - "updatetime" - Finds resources that were updated within, before, or after a given date or time. For example "updatetime>2019-01-01" matches resources updated after 2019-01-01.
228 | 
229 | ### Aspect Search
230 | To search for entries based on their attached aspects, use the following query syntax.
231 | 
232 | `has:x`
233 | Matches `x` as a substring of the full path to the aspect type of an aspect that is attached to the entry, in the format `projectid.location.ASPECT_TYPE_ID`
234 | 
235 | `has=x`
236 | Matches `x` as the full path to the aspect type of an aspect that is attached to the entry, in the format `projectid.location.ASPECT_TYPE_ID`
237 | 
238 | `xOPERATORvalue`
239 | Searches for aspect field values. Matches x as a substring of the full path to the aspect type and field name of an aspect that is attached to the entry, in the format `projectid.location.ASPECT_TYPE_ID.FIELD_NAME`
240 | 
241 | The list of supported operators depends on the type of field in the aspect, as follows:
242 | * **String**: `=` (exact match)
243 | * **All number types**: `=`, `:`, `<`, `>`, `<=`, `>=`, `=>`, `=<`
244 | * **Enum**: `=` (exact match only)
245 | * **Datetime**: same as for numbers, but the values to compare are treated as datetimes instead of numbers
246 | * **Boolean**: `=`
247 | 
248 | Only top-level fields of the aspect are searchable.
249 | 
250 | * Syntax for system aspect types:
251 |     * `ASPECT_TYPE_ID.FIELD_NAME`
252 |     * `dataplex-types.ASPECT_TYPE_ID.FIELD_NAME`
253 |     * `dataplex-types.LOCATION.ASPECT_TYPE_ID.FIELD_NAME`
254 |     For example, the following queries match entries where the value of the `type` field in the `bigquery-dataset` aspect is `default`:
255 |     * `bigquery-dataset.type=default`
256 |     * `dataplex-types.bigquery-dataset.type=default`
257 |     * `dataplex-types.global.bigquery-dataset.type=default`
258 | * Syntax for custom aspect types:
259 |     * If the aspect is created in the global region: `PROJECT_ID.ASPECT_TYPE_ID.FIELD_NAME`
260 |     * If the aspect is created in a specific region: `PROJECT_ID.REGION.ASPECT_TYPE_ID.FIELD_NAME`
261 |     For example, the following queries match entries where the value of the `is-enrolled` field in the `employee-info` aspect is `true`:
262 |     * `example-project.us-central1.employee-info.is-enrolled=true`
263 |     * `example-project.employee-info.is-enrolled=true`
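
Numeric and datetime fields also support the range operators listed above. For example (hypothetical project, aspect type, and field names), `example-project.data-quality.row-count>1000` matches entries whose `row-count` value exceeds 1000.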
264 | 
265 | Example:
266 | You can use the following filters:
267 | - dataplex-types.global.bigquery-table.type={BIGLAKE_TABLE, BIGLAKE_OBJECT_TABLE, EXTERNAL_TABLE, TABLE}
268 | - dataplex-types.global.storage.type={STRUCTURED, UNSTRUCTURED}
269 | 
270 | ### Logical operators
271 | A query can consist of several predicates with logical operators. If you don't specify an operator, logical AND is implied. For example, foo bar returns resources that match both predicate foo and predicate bar.
272 | Logical AND and logical OR are supported. For example, foo OR bar.
273 | 
274 | You can negate a predicate with a - (hyphen) or NOT prefix. For example, -name:foo returns resources with names that don't match the predicate foo.
275 | Logical operators are case-sensitive. `OR` and `AND` are acceptable whereas `or` and `and` are not.
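
For instance, a query combining these operators (the predicate values here are purely illustrative) is `system=bigquery AND column:customer_id -description:deprecated`, which returns BigQuery resources that have a column matching customer_id and whose description does not contain the token deprecated.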
276 | 
277 | ### Abbreviated syntax
278 | 
279 | An abbreviated search syntax is also available, using `|` (vertical bar) for `OR` operators and `,` (comma) for `AND` operators.
280 | 
281 | For example, to search for entries inside one of many projects using the `OR` operator, you can use the following abbreviated syntax:
282 | 
283 | `projectid:(id1|id2|id3|id4)`
284 | 
285 | The same search without using abbreviated syntax looks like the following:
286 | 
287 | `projectid:id1 OR projectid:id2 OR projectid:id3 OR projectid:id4`
288 | 
289 | To search for entries with matching column names, use the following:
290 | 
291 | * **AND**: `column:(name1,name2,name3)`
292 | * **OR**: `column:(name1|name2|name3)`
293 | 
294 | This abbreviated syntax works for the qualified predicates except for `label` in keyword search.
295 | 
296 | ### Request
297 | 1. Always try to rewrite the prompt using search syntax, as in the example below.
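
    For example (an illustrative rewrite; the project ID is hypothetical), the prompt "find tables related to customer orders in project sales-prod" could be rewritten as `projectid:sales-prod AND orders`, or more narrowly as `projectid:sales-prod AND column:order_id` when a column name is known.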
298 | 
299 | ### Response
300 | 1. If there are multiple search results found
301 |     1. Present the list of search results
302 |     2. Format the output as a nested markdown list, for example:  
303 |     Given
304 |     ```
305 |     {
306 |         results: [
307 |             {
308 |                 name: "projects/test-project/locations/us/entryGroups/@bigquery-aws-us-east-1/entries/users"
309 |                 entrySource: {
310 |                     displayName: "Users"
311 |                     description: "Table contains list of users."
312 |                     location: "aws-us-east-1"
313 |                     system: "BigQuery"
314 |                 }
315 |             },
316 |             {
317 |                 name: "projects/another_project/locations/us-central1/entryGroups/@bigquery/entries/top_customers"
318 |                 entrySource: {
319 |                     displayName: "Top customers"
320 |                     description: "Table contains list of best customers."
321 |                     location: "us-central1"
322 |                     system: "BigQuery"
323 |                 }
324 |             },
325 |         ]
326 |     }
327 |     ```
328 |     Return the output formatted as a nested markdown list:
329 |     ```
330 |     * Users:
331 |         - projectId: test_project
332 |         - location: aws-us-east-1
333 |         - description: Table contains list of users.
334 |     * Top customers:
335 |         - projectId: another_project
336 |         - location: us-central1
337 |         - description: Table contains list of best customers.
338 |     ```
339 |     3. Ask the user to select one of the presented search results
340 | 2. If there is only one search result found
341 |     1. Present the search result immediately.
342 | 3. If there are no search results found
343 |     1. Explain that no results were found
344 |     2. Suggest providing a more specific search query.
345 | 
346 | ## Tool: dataplex_lookup_entry
347 | ### Request
348 | 1. Always try to limit the size of the response by specifying the `aspect_types` parameter. Make sure to set `view=CUSTOM` when using the `aspect_types` parameter. If you do not know the name of the aspect type, use the `dataplex_search_aspect_types` tool.
349 | 2. If you do not know the name of the entry, use the `dataplex_search_entries` tool.
350 | ### Response
351 | 1. Unless asked for a specific aspect, respond with all aspects attached to the entry.
352 | ```
353 | 
354 | ## Reference
355 | 
356 | | **field** | **type** | **required** | **description**                                                                  |
357 | |-----------|:--------:|:------------:|----------------------------------------------------------------------------------|
358 | | kind      |  string  |     true     | Must be "dataplex".                                                              |
359 | | project   |  string  |     true     | ID of the GCP project used for quota and billing purposes (e.g. "my-project-id").|
360 | 
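361 | Based on the fields above, a minimal source entry in `tools.yaml` might look like the
362 | following sketch (the source name `my-dataplex-source` is illustrative):
363 | 
364 | ```yaml
365 | sources:
366 |   my-dataplex-source:
367 |     kind: dataplex
368 |     project: my-project-id
369 | ```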
```

--------------------------------------------------------------------------------
/docs/en/getting-started/introduction/_index.md:
--------------------------------------------------------------------------------

```markdown
  1 | ---
  2 | title: "Introduction"
  3 | type: docs
  4 | weight: 1
  5 | description: >
  6 |   An introduction to MCP Toolbox for Databases.
  7 | ---
  8 | 
  9 | MCP Toolbox for Databases is an open source MCP server for databases. It enables
 10 | you to develop tools more easily, quickly, and securely by handling complexities
 11 | such as connection pooling, authentication, and more.
 12 | 
 13 | {{< notice note >}}
 14 | This solution was originally named “Gen AI Toolbox for
 15 | Databases” as its initial development predated MCP, but was renamed to align
 16 | with recently added MCP compatibility.
 17 | {{< /notice >}}
 18 | 
 19 | ## Why Toolbox?
 20 | 
 21 | Toolbox helps you build Gen AI tools that let your agents access data in your
 22 | database. Toolbox provides:
 23 | 
 24 | - **Simplified development**: Integrate tools into your agent in less than 10
 25 |   lines of code, reuse tools between multiple agents or frameworks, and deploy
 26 |   new versions of tools more easily.
 27 | - **Better performance**: Best practices such as connection pooling,
 28 |   authentication, and more.
 29 | - **Enhanced security**: Integrated auth for more secure access to your data.
 30 | - **End-to-end observability**: Out of the box metrics and tracing with built-in
 31 |   support for OpenTelemetry.
 32 | 
 33 | **⚡ Supercharge Your Workflow with an AI Database Assistant ⚡**
 34 | 
 35 | Stop context-switching and let your AI assistant become a true co-developer. By
 36 | [connecting your IDE to your databases with MCP Toolbox][connect-ide], you can
 37 | delegate complex and time-consuming database tasks, allowing you to build faster
 38 | and focus on what matters. This isn't just about code completion; it's about
 39 | giving your AI the context it needs to handle the entire development lifecycle.
 40 | 
 41 | Here’s how it will save you time:
 42 | 
 43 | - **Query in Plain English**: Interact with your data using natural language
 44 |   right from your IDE. Ask complex questions like, *"How many orders were
 45 |   delivered in 2024, and what items were in them?"* without writing any SQL.
 46 | - **Automate Database Management**: Simply describe your data needs, and let the
 47 |   AI assistant manage your database for you. It can handle generating queries,
 48 |   creating tables, adding indexes, and more.
 49 | - **Generate Context-Aware Code**: Empower your AI assistant to generate
 50 |   application code and tests with a deep understanding of your real-time
 51 |   database schema.  This accelerates the development cycle by ensuring the
 52 |   generated code is directly usable.
 53 | - **Slash Development Overhead**: Radically reduce the time spent on manual
 54 |   setup and boilerplate. MCP Toolbox helps streamline lengthy database
 55 |   configurations, repetitive code, and error-prone schema migrations.
 56 | 
 57 | Learn [how to connect your AI tools (IDEs) to Toolbox using MCP][connect-ide].
 58 | 
 59 | [connect-ide]: ../../how-to/connect-ide/
 60 | 
 61 | ## General Architecture
 62 | 
 63 | Toolbox sits between your application's orchestration framework and your
 64 | database, providing a control plane that is used to modify, distribute, or
 65 | invoke tools. It simplifies the management of your tools by providing you with a
 66 | centralized location to store and update tools, allowing you to share tools
 67 | between agents and applications and update those tools without necessarily
 68 | redeploying your application.
 69 | 
 70 | ![architecture](./architecture.png)
 71 | 
 72 | ## Getting Started
 73 | 
 74 | ### (Non-production) Running Toolbox
 75 | 
 76 | You can run Toolbox directly with a [configuration file](../configure.md):
 77 | 
 78 | ```sh
 79 | npx @toolbox-sdk/server --tools-file tools.yaml
 80 | ```
 81 | 
 82 | This runs the latest version of the Toolbox server with your configuration file.
 83 | 
 84 | {{< notice note >}}
 85 | This method should only be used for non-production use cases such as
 86 | experimentation. For any production use cases, please consider [Installing the
 87 | server](#installing-the-server) and then [running it](#running-the-server).
 88 | {{< /notice >}}
 89 | 
 90 | ### Installing the server
 91 | 
 92 | For the latest version, check the [releases page][releases] and use the
 93 | following instructions for your OS and CPU architecture.
 94 | 
 95 | [releases]: https://github.com/googleapis/genai-toolbox/releases
 96 | 
 97 | <!-- {x-release-please-start-version} -->
 98 | {{< tabpane text=true >}}
 99 | {{% tab header="Binary" lang="en" %}}
100 | {{< tabpane text=true >}}
101 | {{% tab header="Linux (AMD64)" lang="en" %}}
102 | To install Toolbox as a binary on Linux (AMD64):
103 | 
104 | ```sh
105 | # see releases page for other versions
106 | export VERSION=0.25.0
107 | curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
108 | chmod +x toolbox
109 | ```
110 | 
111 | {{% /tab %}}
112 | {{% tab header="macOS (Apple Silicon)" lang="en" %}}
113 | To install Toolbox as a binary on macOS (Apple Silicon):
114 | 
115 | ```sh
116 | # see releases page for other versions
117 | export VERSION=0.25.0
118 | curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
119 | chmod +x toolbox
120 | ```
121 | 
122 | {{% /tab %}}
123 | {{% tab header="macOS (Intel)" lang="en" %}}
124 | To install Toolbox as a binary on macOS (Intel):
125 | 
126 | ```sh
127 | # see releases page for other versions
128 | export VERSION=0.25.0
129 | curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
130 | chmod +x toolbox
131 | ```
132 | 
133 | {{% /tab %}}
134 | {{% tab header="Windows (Command Prompt)" lang="en" %}}
135 | To install Toolbox as a binary on Windows (Command Prompt):
136 | 
137 | ```cmd
138 | :: see releases page for other versions
139 | set VERSION=0.25.0
140 | curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
141 | ```
142 | 
143 | {{% /tab %}}
144 | {{% tab header="Windows (PowerShell)" lang="en" %}}
145 | To install Toolbox as a binary on Windows (PowerShell):
146 | 
147 | ```powershell
148 | # see releases page for other versions
149 | $VERSION = "0.25.0"
150 | curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
151 | ```
152 | 
153 | {{% /tab %}}
154 | {{< /tabpane >}}
155 | {{% /tab %}}
156 | {{% tab header="Container image" lang="en" %}}
157 | You can also install Toolbox as a container:
158 | 
159 | ```sh
160 | # see releases page for other versions
161 | export VERSION=0.25.0
162 | docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
163 | ```
164 | 
165 | {{% /tab %}}
166 | {{% tab header="Homebrew" lang="en" %}}
167 | To install Toolbox using Homebrew on macOS or Linux:
168 | 
169 | ```sh
170 | brew install mcp-toolbox
171 | ```
172 | 
173 | {{% /tab %}}
174 | {{% tab header="Compile from source" lang="en" %}}
175 | 
176 | To install from source, ensure you have the latest version of
177 | [Go installed](https://go.dev/doc/install), and then run the following command:
178 | 
179 | ```sh
180 | go install github.com/googleapis/[email protected]
181 | ```
182 | 
183 | {{% /tab %}}
184 | {{< /tabpane >}}
185 | <!-- {x-release-please-end} -->
186 | 
187 | ### Running the server
188 | 
189 | [Configure](../configure.md) a `tools.yaml` to define your tools, and then
190 | execute `toolbox` to start the server:
191 | 
192 | ```sh
193 | ./toolbox --tools-file "tools.yaml"
194 | ```
195 | 
196 | {{< notice note >}}
197 | Toolbox enables dynamic reloading by default. To disable, use the
198 | `--disable-reload` flag.
199 | {{< /notice >}}
200 | 
201 | #### Launching Toolbox UI
202 | 
203 | To launch Toolbox's interactive UI, use the `--ui` flag. This allows you to test
204 | tools and toolsets with features such as authorized parameters. To learn more,
205 | visit [Toolbox UI](../../how-to/toolbox-ui/index.md).
206 | 
207 | ```sh
208 | ./toolbox --ui
209 | ```
210 | 
211 | #### Homebrew Users
212 | 
213 | If you installed Toolbox using Homebrew, the `toolbox` binary is available in
214 | your system path. You can start the server with the same command:
215 | 
216 | ```sh
217 | toolbox --tools-file "tools.yaml"
218 | ```
219 | 
220 | You can use `toolbox help` for a full list of flags! To stop the server, send a
221 | terminate signal (`ctrl+c` on most platforms).
222 | 
223 | For more detailed documentation on deploying to different environments, check
224 | out the resources in the [How-to section](../../how-to/).
225 | 
226 | ### Integrating your application
227 | 
228 | Once your server is up and running, you can load the tools into your
229 | application. See the list of Client SDKs below for using various frameworks:
230 | 
231 | #### Python
232 | 
233 | {{< tabpane text=true persist=header >}}
234 | {{% tab header="Core" lang="en" %}}
235 | 
236 | Once you've installed the [Toolbox Core
237 | SDK](https://pypi.org/project/toolbox-core/), you can load
238 | tools:
239 | 
240 | {{< highlight python >}}
241 | from toolbox_core import ToolboxClient
242 | 
243 | # update the url to point to your server
244 | 
245 | async with ToolboxClient("http://127.0.0.1:5000") as client:
246 | 
247 |     # these tools can be passed to your application!
248 |     tools = await client.load_toolset("toolset_name")
249 | {{< /highlight >}}
250 | 
251 | For more detailed instructions on using the Toolbox Core SDK, see the
252 | [project's
253 | README](https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/packages/toolbox-core/README.md).
254 | 
255 | {{% /tab %}}
256 | {{% tab header="LangChain" lang="en" %}}
257 | 
258 | Once you've installed the [Toolbox LangChain
259 | SDK](https://pypi.org/project/toolbox-langchain/), you can load
260 | tools:
261 | 
262 | {{< highlight python >}}
263 | from toolbox_langchain import ToolboxClient
264 | 
265 | # update the url to point to your server
266 | 
267 | async with ToolboxClient("http://127.0.0.1:5000") as client:
268 | 
269 |     # these tools can be passed to your application!
270 |     tools = client.load_toolset()
271 | {{< /highlight >}}
272 | 
273 | For more detailed instructions on using the Toolbox LangChain SDK, see the
274 | [project's
275 | README](https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/packages/toolbox-langchain/README.md).
276 | 
277 | {{% /tab %}}
278 | {{% tab header="Llamaindex" lang="en" %}}
279 | 
280 | Once you've installed the [Toolbox Llamaindex
281 | SDK](https://github.com/googleapis/genai-toolbox-llamaindex-python), you can load
282 | tools:
283 | 
284 | {{< highlight python >}}
285 | from toolbox_llamaindex import ToolboxClient
286 | 
287 | # update the url to point to your server
288 | 
289 | async with ToolboxClient("http://127.0.0.1:5000") as client:
290 | 
291 |     # these tools can be passed to your application
292 | 
293 |     tools = client.load_toolset()
294 | {{< /highlight >}}
295 | 
296 | For more detailed instructions on using the Toolbox Llamaindex SDK, see the
297 | [project's
298 | README](https://github.com/googleapis/genai-toolbox-llamaindex-python/blob/main/README.md).
299 | 
300 | {{% /tab %}}
301 | {{< /tabpane >}}
302 | 
303 | #### JavaScript/TypeScript
304 | 
305 | Once you've installed the [Toolbox Core
306 | SDK](https://www.npmjs.com/package/@toolbox-sdk/core), you can load
307 | tools:
308 | 
309 | {{< tabpane text=true persist=header >}}
310 | {{% tab header="Core" lang="en" %}}
311 | 
312 | {{< highlight javascript >}}
313 | import { ToolboxClient } from '@toolbox-sdk/core';
314 | 
315 | // update the url to point to your server
316 | const URL = 'http://127.0.0.1:5000';
317 | let client = new ToolboxClient(URL);
318 | 
319 | // these tools can be passed to your application!
320 | const toolboxTools = await client.loadToolset('toolsetName');
321 | {{< /highlight >}}
322 | 
323 | For more detailed instructions on using the Toolbox Core SDK, see the
324 | [project's
325 | README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
326 | 
327 | {{% /tab %}}
328 | {{% tab header="LangChain/LangGraph" lang="en" %}}
329 | 
330 | {{< highlight javascript >}}
331 | import { ToolboxClient } from '@toolbox-sdk/core';
332 | import { tool } from '@langchain/core/tools';
333 | // update the url to point to your server
334 | const URL = 'http://127.0.0.1:5000';
335 | let client = new ToolboxClient(URL);
336 | 
337 | // these tools can be passed to your application!
338 | const toolboxTools = await client.loadToolset('toolsetName');
339 | 
340 | // Define the basics of the tool: name, description, schema and core logic
341 | const getTool = (toolboxTool) => tool(toolboxTool, {
342 |     name: toolboxTool.getName(),
343 |     description: toolboxTool.getDescription(),
344 |     schema: toolboxTool.getParamSchema()
345 | });
346 | 
347 | // Use these tools in your LangChain/LangGraph applications
348 | const tools = toolboxTools.map(getTool);
349 | {{< /highlight >}}
350 | 
351 | For more detailed instructions on using the Toolbox Core SDK, see the
352 | [project's
353 | README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
354 | 
355 | {{% /tab %}}
356 | {{% tab header="Genkit" lang="en" %}}
357 | 
358 | {{< highlight javascript >}}
359 | import { ToolboxClient } from '@toolbox-sdk/core';
360 | import { genkit } from 'genkit';
361 | import { googleAI } from '@genkit-ai/googleai';
362 | // Initialise genkit
363 | const ai = genkit({
364 |     plugins: [
365 |         googleAI({
366 |             apiKey: process.env.GEMINI_API_KEY || process.env.GOOGLE_API_KEY
367 |         })
368 |     ],
369 |     model: googleAI.model('gemini-2.0-flash'),
370 | });
371 | 
372 | // update the url to point to your server
373 | const URL = 'http://127.0.0.1:5000';
374 | let client = new ToolboxClient(URL);
375 | 
376 | // these tools can be passed to your application!
377 | const toolboxTools = await client.loadToolset('toolsetName');
378 | 
379 | // Define the basics of the tool: name, description, schema and core logic
380 | const getTool = (toolboxTool) => ai.defineTool({
381 |     name: toolboxTool.getName(),
382 |     description: toolboxTool.getDescription(),
383 |     schema: toolboxTool.getParamSchema()
384 | }, toolboxTool)
385 | 
386 | // Use these tools in your Genkit applications
387 | const tools = toolboxTools.map(getTool);
388 | {{< /highlight >}}
389 | 
390 | For more detailed instructions on using the Toolbox Core SDK, see the
391 | [project's
392 | README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
393 | 
394 | {{% /tab %}}
395 | {{% tab header="LlamaIndex" lang="en" %}}
396 | 
397 | {{< highlight javascript >}}
398 | import { ToolboxClient } from '@toolbox-sdk/core';
399 | import { tool } from "llamaindex";
400 | 
401 | // update the url to point to your server
402 | const URL = 'http://127.0.0.1:5000';
403 | let client = new ToolboxClient(URL);
404 | 
405 | // these tools can be passed to your application!
406 | const toolboxTools = await client.loadToolset('toolsetName');
407 | 
408 | // Define the basics of the tool: name, description, schema and core logic
409 | const getTool = (toolboxTool) => tool({
410 |     name: toolboxTool.getName(),
411 |     description: toolboxTool.getDescription(),
412 |     parameters: toolboxTool.getParamSchema(),
413 |     execute: toolboxTool
414 | });
415 | 
416 | // Use these tools in your LlamaIndex applications
417 | const tools = toolboxTools.map(getTool);
418 | 
419 | {{< /highlight >}}
420 | 
421 | For more detailed instructions on using the Toolbox Core SDK, see the
422 | [project's
423 | README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
424 | 
425 | {{% /tab %}}
426 | {{% tab header="ADK TS" lang="en" %}}
427 | 
428 | {{< highlight javascript >}}
429 | import { ToolboxClient } from '@toolbox-sdk/adk';
430 | 
431 | // Replace with the actual URL where your Toolbox service is running
432 | const URL = 'http://127.0.0.1:5000';
433 | 
434 | let client = new ToolboxClient(URL);
435 | const tools = await client.loadToolset();
436 | 
437 | // Use the client and tools as needed
438 | 
439 | {{< /highlight >}}
440 | 
441 | For detailed samples on using the Toolbox JS SDK with ADK JS, see the [project's
442 | README](https://github.com/googleapis/mcp-toolbox-sdk-js/tree/main/packages/toolbox-adk/README.md).
443 | 
444 | {{% /tab %}}
445 | {{< /tabpane >}}
446 | 
447 | 
448 | #### Go
449 | 
450 | Once you've installed the [Toolbox Go
451 | SDK](https://pkg.go.dev/github.com/googleapis/mcp-toolbox-sdk-go/core), you can load
452 | tools:
453 | 
454 | {{< tabpane text=true persist=header >}}
455 | {{% tab header="Core" lang="en" %}}
456 | 
457 | {{< highlight go >}}
458 | package main
459 | 
460 | import (
461 | 	"context"
462 | 	"log"
463 | 
464 | 	"github.com/googleapis/mcp-toolbox-sdk-go/core"
465 | )
466 | 
467 | func main() {
468 | 	// update the url to point to your server
469 | 	URL := "http://127.0.0.1:5000"
470 | 	ctx := context.Background()
471 | 
472 | 	client, err := core.NewToolboxClient(URL)
473 | 	if err != nil {
474 | 		log.Fatalf("Failed to create Toolbox client: %v", err)
475 | 	}
476 | 
477 | 	// Framework agnostic tools
478 | 	tools, err := client.LoadToolset("toolsetName", ctx)
479 | 	if err != nil {
480 | 		log.Fatalf("Failed to load tools: %v", err)
481 | 	}
482 | }
483 | {{< /highlight >}}
484 | 
485 | {{% /tab %}}
486 | {{% tab header="LangChain Go" lang="en" %}}
487 | 
488 | {{< highlight go >}}
489 | package main
490 | 
491 | import (
492 | 	"context"
493 | 	"encoding/json"
494 | 	"log"
495 | 
496 | 	"github.com/googleapis/mcp-toolbox-sdk-go/core"
497 | 	"github.com/tmc/langchaingo/llms"
498 | )
499 | 
500 | func main() {
501 | 	// Make sure to add the error checks
502 | 	// update the url to point to your server
503 | 	URL := "http://127.0.0.1:5000"
504 | 	ctx := context.Background()
505 | 
506 | 	client, err := core.NewToolboxClient(URL)
507 | 	if err != nil {
508 | 		log.Fatalf("Failed to create Toolbox client: %v", err)
509 | 	}
510 | 
511 | 	// Framework agnostic tool
512 | 	tool, err := client.LoadTool("toolName", ctx)
513 | 	if err != nil {
514 | 		log.Fatalf("Failed to load tools: %v", err)
515 | 	}
516 | 
517 | 	// Fetch the tool's input schema
518 | 	inputschema, err := tool.InputSchema()
519 | 	if err != nil {
520 | 		log.Fatalf("Failed to fetch inputSchema: %v", err)
521 | 	}
522 | 
523 | 	var paramsSchema map[string]any
524 | 	_ = json.Unmarshal(inputschema, &paramsSchema)
525 | 
526 | 	// Use this tool with LangChainGo
527 | 	langChainTool := llms.Tool{
528 | 		Type: "function",
529 | 		Function: &llms.FunctionDefinition{
530 | 			Name:        tool.Name(),
531 | 			Description: tool.Description(),
532 | 			Parameters:  paramsSchema,
533 | 		},
534 | 	}
535 | }
536 | {{< /highlight >}}
537 | For end-to-end samples on using the Toolbox Go SDK with LangChain Go, see the [project's
538 | samples](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main/core/samples)
539 | 
540 | {{% /tab %}}
541 | {{% tab header="Genkit Go" lang="en" %}}
542 | 
543 | {{< highlight go >}}
544 | package main
545 | import (
546 | 	"context"
547 | 	"encoding/json"
548 | 	"log"
549 | 
550 | 	"github.com/firebase/genkit/go/ai"
551 | 	"github.com/firebase/genkit/go/genkit"
552 | 	"github.com/googleapis/mcp-toolbox-sdk-go/core"
553 | 	"github.com/googleapis/mcp-toolbox-sdk-go/tbgenkit"
554 | 	"github.com/invopop/jsonschema"
555 | )
556 | 
557 | func main() {
558 | 	// Make sure to add the error checks
559 | 	// Update the url to point to your server
560 | 	URL := "http://127.0.0.1:5000"
561 | 	ctx := context.Background()
562 | 	g, err := genkit.Init(ctx)
563 | 
564 | 	client, err := core.NewToolboxClient(URL)
565 | 	if err != nil {
566 | 		log.Fatalf("Failed to create Toolbox client: %v", err)
567 | 	}
568 | 
569 | 	// Framework agnostic tool
570 | 	tool, err := client.LoadTool("toolName", ctx)
571 | 	if err != nil {
572 | 		log.Fatalf("Failed to load tools: %v", err)
573 | 	}
574 | 
575 | 	// Convert the tool using the tbgenkit package
576 |  	// Use this tool with Genkit Go
577 | 	genkitTool, err := tbgenkit.ToGenkitTool(tool, g)
578 | 	if err != nil {
579 | 		log.Fatalf("Failed to convert tool: %v\n", err)
580 | 	}
581 | }
582 | {{< /highlight >}}
583 | For end-to-end samples on using the Toolbox Go SDK with Genkit Go, see the [project's
584 | samples](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main/tbgenkit/samples)
585 | 
586 | {{% /tab %}}
587 | {{% tab header="Go GenAI" lang="en" %}}
588 | 
589 | {{< highlight go >}}
590 | package main
591 | 
592 | import (
593 | 	"context"
594 | 	"encoding/json"
595 | 	"log"
596 | 
597 | 	"github.com/googleapis/mcp-toolbox-sdk-go/core"
598 | 	"google.golang.org/genai"
599 | )
600 | 
601 | func main() {
602 | 	// Make sure to add the error checks
603 | 	// Update the url to point to your server
604 | 	URL := "http://127.0.0.1:5000"
605 | 	ctx := context.Background()
606 | 
607 | 	client, err := core.NewToolboxClient(URL)
608 | 	if err != nil {
609 | 		log.Fatalf("Failed to create Toolbox client: %v", err)
610 | 	}
611 | 
612 | 	// Framework agnostic tool
613 | 	tool, err := client.LoadTool("toolName", ctx)
614 | 	if err != nil {
615 | 		log.Fatalf("Failed to load tools: %v", err)
616 | 	}
617 | 
618 | 	// Fetch the tool's input schema
619 | 	inputschema, err := tool.InputSchema()
620 | 	if err != nil {
621 | 		log.Fatalf("Failed to fetch inputSchema: %v", err)
622 | 	}
623 | 
624 | 	var schema *genai.Schema
625 | 	_ = json.Unmarshal(inputschema, &schema)
626 | 
627 | 	funcDeclaration := &genai.FunctionDeclaration{
628 | 		Name:        tool.Name(),
629 | 		Description: tool.Description(),
630 | 		Parameters:  schema,
631 | 	}
632 | 
633 | 	// Use this tool with Go GenAI
634 | 	genAITool := &genai.Tool{
635 | 		FunctionDeclarations: []*genai.FunctionDeclaration{funcDeclaration},
636 | 	}
637 | }
638 | {{< /highlight >}}
639 | For end-to-end samples on using the Toolbox Go SDK with Go GenAI, see the [project's
640 | samples](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main/core/samples)
641 | 
642 | {{% /tab %}}
643 | 
644 | {{% tab header="OpenAI Go" lang="en" %}}
645 | 
646 | {{< highlight go >}}
647 | package main
648 | 
649 | import (
650 | 	"context"
651 | 	"encoding/json"
652 | 	"log"
653 | 
654 | 	"github.com/googleapis/mcp-toolbox-sdk-go/core"
655 | 	openai "github.com/openai/openai-go"
656 | )
657 | 
658 | func main() {
659 | 	// Make sure to add the error checks
660 | 	// Update the url to point to your server
661 | 	URL := "http://127.0.0.1:5000"
662 | 	ctx := context.Background()
663 | 
664 | 	client, err := core.NewToolboxClient(URL)
665 | 	if err != nil {
666 | 		log.Fatalf("Failed to create Toolbox client: %v", err)
667 | 	}
668 | 
669 | 	// Framework agnostic tool
670 | 	tool, err := client.LoadTool("toolName", ctx)
671 | 	if err != nil {
672 | 		log.Fatalf("Failed to load tools: %v", err)
673 | 	}
674 | 
675 | 	// Fetch the tool's input schema
676 | 	inputschema, err := tool.InputSchema()
677 | 	if err != nil {
678 | 		log.Fatalf("Failed to fetch inputSchema: %v", err)
679 | 	}
680 | 
681 | 	var paramsSchema openai.FunctionParameters
682 | 	_ = json.Unmarshal(inputschema, &paramsSchema)
683 | 
684 | 	// Use this tool with OpenAI Go
685 | 	openAITool := openai.ChatCompletionToolParam{
686 | 		Function: openai.FunctionDefinitionParam{
687 | 			Name:        tool.Name(),
688 | 			Description: openai.String(tool.Description()),
689 | 			Parameters:  paramsSchema,
690 | 		},
691 | 	}
692 | }
693 | {{< /highlight >}}
694 | For end-to-end samples on using the Toolbox Go SDK with OpenAI Go, see the [project's
695 | samples](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main/core/samples)
696 | 
697 | {{% /tab %}}
698 | 
699 | {{% tab header="ADK Go" lang="en" %}}
700 | 
701 | {{< highlight go >}}
702 | package main
703 | 
704 | import (
705 |   	"context"
706 |   	"log"
707 |   	"github.com/googleapis/mcp-toolbox-sdk-go/tbadk"
708 | )
709 | 
710 | func main() {
711 |   	// Make sure to add the error checks
712 | 	// Update the url to point to your server
713 | 	URL := "http://127.0.0.1:5000"
714 | 	ctx := context.Background()
715 |   	client, err := tbadk.NewToolboxClient(URL)
716 |   	if err != nil {
717 | 		log.Fatalln("Could not start Toolbox Client", err)
718 |   	}
719 | 
720 |   	// Use this tool with ADK Go
721 |   	tool, err := client.LoadTool("toolName", ctx)
722 |   	if err != nil {
723 | 		log.Fatalln("Could not load Toolbox Tool", err)
724 |   	}
725 | }
726 | 
727 | {{< /highlight >}}
728 | 
729 | For end-to-end samples on using the Toolbox Go SDK with ADK Go, see the [project's
730 | samples](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main/tbadk/samples)
731 | 
732 | {{% /tab %}}
733 | {{< /tabpane >}}
734 | 
735 | For more detailed instructions on using the Toolbox Go SDK, see the
736 | [project's
737 | README](https://github.com/googleapis/mcp-toolbox-sdk-go/blob/main/core/README.md).
738 | 
```
Page 56/76FirstPrevNextLast