# googleapis/genai-toolbox — repository directory structure
tokens: 47940/50000 · 6/784 files (page 31/45)
This is page 31 of 45. Use http://codebase.md/googleapis/genai-toolbox?lines=true&page={x} to view the full context.

# Directory Structure

```
├── .ci
│   ├── continuous.release.cloudbuild.yaml
│   ├── generate_release_table.sh
│   ├── integration.cloudbuild.yaml
│   ├── quickstart_test
│   │   ├── go.integration.cloudbuild.yaml
│   │   ├── js.integration.cloudbuild.yaml
│   │   ├── py.integration.cloudbuild.yaml
│   │   ├── run_go_tests.sh
│   │   ├── run_js_tests.sh
│   │   ├── run_py_tests.sh
│   │   └── setup_hotels_sample.sql
│   ├── test_with_coverage.sh
│   └── versioned.release.cloudbuild.yaml
├── .github
│   ├── auto-label.yaml
│   ├── blunderbuss.yml
│   ├── CODEOWNERS
│   ├── header-checker-lint.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.yml
│   │   ├── config.yml
│   │   ├── feature_request.yml
│   │   └── question.yml
│   ├── label-sync.yml
│   ├── labels.yaml
│   ├── PULL_REQUEST_TEMPLATE.md
│   ├── release-please.yml
│   ├── renovate.json5
│   ├── sync-repo-settings.yaml
│   └── workflows
│       ├── cloud_build_failure_reporter.yml
│       ├── deploy_dev_docs.yaml
│       ├── deploy_previous_version_docs.yaml
│       ├── deploy_versioned_docs.yaml
│       ├── docs_deploy.yaml
│       ├── docs_preview_clean.yaml
│       ├── docs_preview_deploy.yaml
│       ├── lint.yaml
│       ├── schedule_reporter.yml
│       ├── sync-labels.yaml
│       └── tests.yaml
├── .gitignore
├── .gitmodules
├── .golangci.yaml
├── .hugo
│   ├── archetypes
│   │   └── default.md
│   ├── assets
│   │   ├── icons
│   │   │   └── logo.svg
│   │   └── scss
│   │       ├── _styles_project.scss
│   │       └── _variables_project.scss
│   ├── go.mod
│   ├── go.sum
│   ├── hugo.toml
│   ├── layouts
│   │   ├── _default
│   │   │   └── home.releases.releases
│   │   ├── index.llms-full.txt
│   │   ├── index.llms.txt
│   │   ├── partials
│   │   │   ├── hooks
│   │   │   │   └── head-end.html
│   │   │   ├── navbar-version-selector.html
│   │   │   ├── page-meta-links.html
│   │   │   └── td
│   │   │       └── render-heading.html
│   │   ├── robot.txt
│   │   └── shortcodes
│   │       ├── include.html
│   │       ├── ipynb.html
│   │       └── regionInclude.html
│   ├── package-lock.json
│   ├── package.json
│   └── static
│       ├── favicons
│       │   ├── android-chrome-192x192.png
│       │   ├── android-chrome-512x512.png
│       │   ├── apple-touch-icon.png
│       │   ├── favicon-16x16.png
│       │   ├── favicon-32x32.png
│       │   └── favicon.ico
│       └── js
│           └── w3.js
├── CHANGELOG.md
├── cmd
│   ├── options_test.go
│   ├── options.go
│   ├── root_test.go
│   ├── root.go
│   └── version.txt
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── DEVELOPER.md
├── Dockerfile
├── docs
│   └── en
│       ├── _index.md
│       ├── about
│       │   ├── _index.md
│       │   └── faq.md
│       ├── concepts
│       │   ├── _index.md
│       │   └── telemetry
│       │       ├── index.md
│       │       ├── telemetry_flow.png
│       │       └── telemetry_traces.png
│       ├── getting-started
│       │   ├── _index.md
│       │   ├── colab_quickstart.ipynb
│       │   ├── configure.md
│       │   ├── introduction
│       │   │   ├── _index.md
│       │   │   └── architecture.png
│       │   ├── local_quickstart_go.md
│       │   ├── local_quickstart_js.md
│       │   ├── local_quickstart.md
│       │   ├── mcp_quickstart
│       │   │   ├── _index.md
│       │   │   ├── inspector_tools.png
│       │   │   └── inspector.png
│       │   └── quickstart
│       │       ├── go
│       │       │   ├── genAI
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── genkit
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── langchain
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── openAI
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   └── quickstart_test.go
│       │       ├── golden.txt
│       │       ├── js
│       │       │   ├── genAI
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── genkit
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── langchain
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── llamaindex
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   └── quickstart.test.js
│       │       ├── python
│       │       │   ├── __init__.py
│       │       │   ├── adk
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── core
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── langchain
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── llamaindex
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   └── quickstart_test.py
│       │       └── shared
│       │           ├── cloud_setup.md
│       │           ├── configure_toolbox.md
│       │           └── database_setup.md
│       ├── how-to
│       │   ├── _index.md
│       │   ├── connect_via_geminicli.md
│       │   ├── connect_via_mcp.md
│       │   ├── connect-ide
│       │   │   ├── _index.md
│       │   │   ├── alloydb_pg_admin_mcp.md
│       │   │   ├── alloydb_pg_mcp.md
│       │   │   ├── bigquery_mcp.md
│       │   │   ├── cloud_sql_mssql_admin_mcp.md
│       │   │   ├── cloud_sql_mssql_mcp.md
│       │   │   ├── cloud_sql_mysql_admin_mcp.md
│       │   │   ├── cloud_sql_mysql_mcp.md
│       │   │   ├── cloud_sql_pg_admin_mcp.md
│       │   │   ├── cloud_sql_pg_mcp.md
│       │   │   ├── firestore_mcp.md
│       │   │   ├── looker_mcp.md
│       │   │   ├── mssql_mcp.md
│       │   │   ├── mysql_mcp.md
│       │   │   ├── neo4j_mcp.md
│       │   │   ├── postgres_mcp.md
│       │   │   ├── spanner_mcp.md
│       │   │   └── sqlite_mcp.md
│       │   ├── deploy_docker.md
│       │   ├── deploy_gke.md
│       │   ├── deploy_toolbox.md
│       │   ├── export_telemetry.md
│       │   └── toolbox-ui
│       │       ├── edit-headers.gif
│       │       ├── edit-headers.png
│       │       ├── index.md
│       │       ├── optional-param-checked.png
│       │       ├── optional-param-unchecked.png
│       │       ├── run-tool.gif
│       │       ├── tools.png
│       │       └── toolsets.png
│       ├── reference
│       │   ├── _index.md
│       │   ├── cli.md
│       │   └── prebuilt-tools.md
│       ├── resources
│       │   ├── _index.md
│       │   ├── authServices
│       │   │   ├── _index.md
│       │   │   └── google.md
│       │   ├── sources
│       │   │   ├── _index.md
│       │   │   ├── alloydb-admin.md
│       │   │   ├── alloydb-pg.md
│       │   │   ├── bigquery.md
│       │   │   ├── bigtable.md
│       │   │   ├── cassandra.md
│       │   │   ├── clickhouse.md
│       │   │   ├── cloud-monitoring.md
│       │   │   ├── cloud-sql-admin.md
│       │   │   ├── cloud-sql-mssql.md
│       │   │   ├── cloud-sql-mysql.md
│       │   │   ├── cloud-sql-pg.md
│       │   │   ├── couchbase.md
│       │   │   ├── dataplex.md
│       │   │   ├── dgraph.md
│       │   │   ├── firebird.md
│       │   │   ├── firestore.md
│       │   │   ├── http.md
│       │   │   ├── looker.md
│       │   │   ├── mongodb.md
│       │   │   ├── mssql.md
│       │   │   ├── mysql.md
│       │   │   ├── neo4j.md
│       │   │   ├── oceanbase.md
│       │   │   ├── oracle.md
│       │   │   ├── postgres.md
│       │   │   ├── redis.md
│       │   │   ├── spanner.md
│       │   │   ├── sqlite.md
│       │   │   ├── tidb.md
│       │   │   ├── trino.md
│       │   │   ├── valkey.md
│       │   │   └── yugabytedb.md
│       │   └── tools
│       │       ├── _index.md
│       │       ├── alloydb
│       │       │   ├── _index.md
│       │       │   ├── alloydb-create-cluster.md
│       │       │   ├── alloydb-create-instance.md
│       │       │   ├── alloydb-create-user.md
│       │       │   ├── alloydb-get-cluster.md
│       │       │   ├── alloydb-get-instance.md
│       │       │   ├── alloydb-get-user.md
│       │       │   ├── alloydb-list-clusters.md
│       │       │   ├── alloydb-list-instances.md
│       │       │   ├── alloydb-list-users.md
│       │       │   └── alloydb-wait-for-operation.md
│       │       ├── alloydbainl
│       │       │   ├── _index.md
│       │       │   └── alloydb-ai-nl.md
│       │       ├── bigquery
│       │       │   ├── _index.md
│       │       │   ├── bigquery-analyze-contribution.md
│       │       │   ├── bigquery-conversational-analytics.md
│       │       │   ├── bigquery-execute-sql.md
│       │       │   ├── bigquery-forecast.md
│       │       │   ├── bigquery-get-dataset-info.md
│       │       │   ├── bigquery-get-table-info.md
│       │       │   ├── bigquery-list-dataset-ids.md
│       │       │   ├── bigquery-list-table-ids.md
│       │       │   ├── bigquery-search-catalog.md
│       │       │   └── bigquery-sql.md
│       │       ├── bigtable
│       │       │   ├── _index.md
│       │       │   └── bigtable-sql.md
│       │       ├── cassandra
│       │       │   ├── _index.md
│       │       │   └── cassandra-cql.md
│       │       ├── clickhouse
│       │       │   ├── _index.md
│       │       │   ├── clickhouse-execute-sql.md
│       │       │   ├── clickhouse-list-databases.md
│       │       │   ├── clickhouse-list-tables.md
│       │       │   └── clickhouse-sql.md
│       │       ├── cloudmonitoring
│       │       │   ├── _index.md
│       │       │   └── cloud-monitoring-query-prometheus.md
│       │       ├── cloudsql
│       │       │   ├── _index.md
│       │       │   ├── cloudsqlcreatedatabase.md
│       │       │   ├── cloudsqlcreateusers.md
│       │       │   ├── cloudsqlgetinstances.md
│       │       │   ├── cloudsqllistdatabases.md
│       │       │   ├── cloudsqllistinstances.md
│       │       │   ├── cloudsqlmssqlcreateinstance.md
│       │       │   ├── cloudsqlmysqlcreateinstance.md
│       │       │   ├── cloudsqlpgcreateinstances.md
│       │       │   └── cloudsqlwaitforoperation.md
│       │       ├── couchbase
│       │       │   ├── _index.md
│       │       │   └── couchbase-sql.md
│       │       ├── dataform
│       │       │   ├── _index.md
│       │       │   └── dataform-compile-local.md
│       │       ├── dataplex
│       │       │   ├── _index.md
│       │       │   ├── dataplex-lookup-entry.md
│       │       │   ├── dataplex-search-aspect-types.md
│       │       │   └── dataplex-search-entries.md
│       │       ├── dgraph
│       │       │   ├── _index.md
│       │       │   └── dgraph-dql.md
│       │       ├── firebird
│       │       │   ├── _index.md
│       │       │   ├── firebird-execute-sql.md
│       │       │   └── firebird-sql.md
│       │       ├── firestore
│       │       │   ├── _index.md
│       │       │   ├── firestore-add-documents.md
│       │       │   ├── firestore-delete-documents.md
│       │       │   ├── firestore-get-documents.md
│       │       │   ├── firestore-get-rules.md
│       │       │   ├── firestore-list-collections.md
│       │       │   ├── firestore-query-collection.md
│       │       │   ├── firestore-query.md
│       │       │   ├── firestore-update-document.md
│       │       │   └── firestore-validate-rules.md
│       │       ├── http
│       │       │   ├── _index.md
│       │       │   └── http.md
│       │       ├── looker
│       │       │   ├── _index.md
│       │       │   ├── looker-add-dashboard-element.md
│       │       │   ├── looker-conversational-analytics.md
│       │       │   ├── looker-get-dashboards.md
│       │       │   ├── looker-get-dimensions.md
│       │       │   ├── looker-get-explores.md
│       │       │   ├── looker-get-filters.md
│       │       │   ├── looker-get-looks.md
│       │       │   ├── looker-get-measures.md
│       │       │   ├── looker-get-models.md
│       │       │   ├── looker-get-parameters.md
│       │       │   ├── looker-health-analyze.md
│       │       │   ├── looker-health-pulse.md
│       │       │   ├── looker-health-vacuum.md
│       │       │   ├── looker-make-dashboard.md
│       │       │   ├── looker-make-look.md
│       │       │   ├── looker-query-sql.md
│       │       │   ├── looker-query-url.md
│       │       │   ├── looker-query.md
│       │       │   └── looker-run-look.md
│       │       ├── mongodb
│       │       │   ├── _index.md
│       │       │   ├── mongodb-aggregate.md
│       │       │   ├── mongodb-delete-many.md
│       │       │   ├── mongodb-delete-one.md
│       │       │   ├── mongodb-find-one.md
│       │       │   ├── mongodb-find.md
│       │       │   ├── mongodb-insert-many.md
│       │       │   ├── mongodb-insert-one.md
│       │       │   ├── mongodb-update-many.md
│       │       │   └── mongodb-update-one.md
│       │       ├── mssql
│       │       │   ├── _index.md
│       │       │   ├── mssql-execute-sql.md
│       │       │   ├── mssql-list-tables.md
│       │       │   └── mssql-sql.md
│       │       ├── mysql
│       │       │   ├── _index.md
│       │       │   ├── mysql-execute-sql.md
│       │       │   ├── mysql-list-active-queries.md
│       │       │   ├── mysql-list-table-fragmentation.md
│       │       │   ├── mysql-list-tables-missing-unique-indexes.md
│       │       │   ├── mysql-list-tables.md
│       │       │   └── mysql-sql.md
│       │       ├── neo4j
│       │       │   ├── _index.md
│       │       │   ├── neo4j-cypher.md
│       │       │   ├── neo4j-execute-cypher.md
│       │       │   └── neo4j-schema.md
│       │       ├── oceanbase
│       │       │   ├── _index.md
│       │       │   ├── oceanbase-execute-sql.md
│       │       │   └── oceanbase-sql.md
│       │       ├── oracle
│       │       │   ├── _index.md
│       │       │   ├── oracle-execute-sql.md
│       │       │   └── oracle-sql.md
│       │       ├── postgres
│       │       │   ├── _index.md
│       │       │   ├── postgres-execute-sql.md
│       │       │   ├── postgres-list-active-queries.md
│       │       │   ├── postgres-list-available-extensions.md
│       │       │   ├── postgres-list-installed-extensions.md
│       │       │   ├── postgres-list-tables.md
│       │       │   └── postgres-sql.md
│       │       ├── redis
│       │       │   ├── _index.md
│       │       │   └── redis.md
│       │       ├── spanner
│       │       │   ├── _index.md
│       │       │   ├── spanner-execute-sql.md
│       │       │   ├── spanner-list-tables.md
│       │       │   └── spanner-sql.md
│       │       ├── sqlite
│       │       │   ├── _index.md
│       │       │   ├── sqlite-execute-sql.md
│       │       │   └── sqlite-sql.md
│       │       ├── tidb
│       │       │   ├── _index.md
│       │       │   ├── tidb-execute-sql.md
│       │       │   └── tidb-sql.md
│       │       ├── trino
│       │       │   ├── _index.md
│       │       │   ├── trino-execute-sql.md
│       │       │   └── trino-sql.md
│       │       ├── utility
│       │       │   ├── _index.md
│       │       │   └── wait.md
│       │       ├── valkey
│       │       │   ├── _index.md
│       │       │   └── valkey.md
│       │       └── yuagbytedb
│       │           ├── _index.md
│       │           └── yugabytedb-sql.md
│       ├── samples
│       │   ├── _index.md
│       │   ├── alloydb
│       │   │   ├── _index.md
│       │   │   ├── ai-nl
│       │   │   │   ├── alloydb_ai_nl.ipynb
│       │   │   │   └── index.md
│       │   │   └── mcp_quickstart.md
│       │   ├── bigquery
│       │   │   ├── _index.md
│       │   │   ├── colab_quickstart_bigquery.ipynb
│       │   │   ├── local_quickstart.md
│       │   │   └── mcp_quickstart
│       │   │       ├── _index.md
│       │   │       ├── inspector_tools.png
│       │   │       └── inspector.png
│       │   └── looker
│       │       ├── _index.md
│       │       ├── looker_gemini_oauth
│       │       │   ├── _index.md
│       │       │   ├── authenticated.png
│       │       │   ├── authorize.png
│       │       │   └── registration.png
│       │       ├── looker_gemini.md
│       │       └── looker_mcp_inspector
│       │           ├── _index.md
│       │           ├── inspector_tools.png
│       │           └── inspector.png
│       └── sdks
│           ├── _index.md
│           ├── go-sdk.md
│           ├── js-sdk.md
│           └── python-sdk.md
├── go.mod
├── go.sum
├── internal
│   ├── auth
│   │   ├── auth.go
│   │   └── google
│   │       └── google.go
│   ├── log
│   │   ├── handler.go
│   │   ├── log_test.go
│   │   ├── log.go
│   │   └── logger.go
│   ├── prebuiltconfigs
│   │   ├── prebuiltconfigs_test.go
│   │   ├── prebuiltconfigs.go
│   │   └── tools
│   │       ├── alloydb-postgres-admin.yaml
│   │       ├── alloydb-postgres-observability.yaml
│   │       ├── alloydb-postgres.yaml
│   │       ├── bigquery.yaml
│   │       ├── clickhouse.yaml
│   │       ├── cloud-sql-mssql-admin.yaml
│   │       ├── cloud-sql-mssql-observability.yaml
│   │       ├── cloud-sql-mssql.yaml
│   │       ├── cloud-sql-mysql-admin.yaml
│   │       ├── cloud-sql-mysql-observability.yaml
│   │       ├── cloud-sql-mysql.yaml
│   │       ├── cloud-sql-postgres-admin.yaml
│   │       ├── cloud-sql-postgres-observability.yaml
│   │       ├── cloud-sql-postgres.yaml
│   │       ├── dataplex.yaml
│   │       ├── firestore.yaml
│   │       ├── looker-conversational-analytics.yaml
│   │       ├── looker.yaml
│   │       ├── mssql.yaml
│   │       ├── mysql.yaml
│   │       ├── neo4j.yaml
│   │       ├── oceanbase.yaml
│   │       ├── postgres.yaml
│   │       ├── spanner-postgres.yaml
│   │       ├── spanner.yaml
│   │       └── sqlite.yaml
│   ├── server
│   │   ├── api_test.go
│   │   ├── api.go
│   │   ├── common_test.go
│   │   ├── config.go
│   │   ├── mcp
│   │   │   ├── jsonrpc
│   │   │   │   ├── jsonrpc_test.go
│   │   │   │   └── jsonrpc.go
│   │   │   ├── mcp.go
│   │   │   ├── util
│   │   │   │   └── lifecycle.go
│   │   │   ├── v20241105
│   │   │   │   ├── method.go
│   │   │   │   └── types.go
│   │   │   ├── v20250326
│   │   │   │   ├── method.go
│   │   │   │   └── types.go
│   │   │   └── v20250618
│   │   │       ├── method.go
│   │   │       └── types.go
│   │   ├── mcp_test.go
│   │   ├── mcp.go
│   │   ├── server_test.go
│   │   ├── server.go
│   │   ├── static
│   │   │   ├── assets
│   │   │   │   └── mcptoolboxlogo.png
│   │   │   ├── css
│   │   │   │   └── style.css
│   │   │   ├── index.html
│   │   │   ├── js
│   │   │   │   ├── auth.js
│   │   │   │   ├── loadTools.js
│   │   │   │   ├── mainContent.js
│   │   │   │   ├── navbar.js
│   │   │   │   ├── runTool.js
│   │   │   │   ├── toolDisplay.js
│   │   │   │   ├── tools.js
│   │   │   │   └── toolsets.js
│   │   │   ├── tools.html
│   │   │   └── toolsets.html
│   │   ├── web_test.go
│   │   └── web.go
│   ├── sources
│   │   ├── alloydbadmin
│   │   │   ├── alloydbadmin_test.go
│   │   │   └── alloydbadmin.go
│   │   ├── alloydbpg
│   │   │   ├── alloydb_pg_test.go
│   │   │   └── alloydb_pg.go
│   │   ├── bigquery
│   │   │   ├── bigquery_test.go
│   │   │   └── bigquery.go
│   │   ├── bigtable
│   │   │   ├── bigtable_test.go
│   │   │   └── bigtable.go
│   │   ├── cassandra
│   │   │   ├── cassandra_test.go
│   │   │   └── cassandra.go
│   │   ├── clickhouse
│   │   │   ├── clickhouse_test.go
│   │   │   └── clickhouse.go
│   │   ├── cloudmonitoring
│   │   │   ├── cloud_monitoring_test.go
│   │   │   └── cloud_monitoring.go
│   │   ├── cloudsqladmin
│   │   │   ├── cloud_sql_admin_test.go
│   │   │   └── cloud_sql_admin.go
│   │   ├── cloudsqlmssql
│   │   │   ├── cloud_sql_mssql_test.go
│   │   │   └── cloud_sql_mssql.go
│   │   ├── cloudsqlmysql
│   │   │   ├── cloud_sql_mysql_test.go
│   │   │   └── cloud_sql_mysql.go
│   │   ├── cloudsqlpg
│   │   │   ├── cloud_sql_pg_test.go
│   │   │   └── cloud_sql_pg.go
│   │   ├── couchbase
│   │   │   ├── couchbase_test.go
│   │   │   └── couchbase.go
│   │   ├── dataplex
│   │   │   ├── dataplex_test.go
│   │   │   └── dataplex.go
│   │   ├── dgraph
│   │   │   ├── dgraph_test.go
│   │   │   └── dgraph.go
│   │   ├── dialect.go
│   │   ├── firebird
│   │   │   ├── firebird_test.go
│   │   │   └── firebird.go
│   │   ├── firestore
│   │   │   ├── firestore_test.go
│   │   │   └── firestore.go
│   │   ├── http
│   │   │   ├── http_test.go
│   │   │   └── http.go
│   │   ├── ip_type.go
│   │   ├── looker
│   │   │   ├── looker_test.go
│   │   │   └── looker.go
│   │   ├── mongodb
│   │   │   ├── mongodb_test.go
│   │   │   └── mongodb.go
│   │   ├── mssql
│   │   │   ├── mssql_test.go
│   │   │   └── mssql.go
│   │   ├── mysql
│   │   │   ├── mysql_test.go
│   │   │   └── mysql.go
│   │   ├── neo4j
│   │   │   ├── neo4j_test.go
│   │   │   └── neo4j.go
│   │   ├── oceanbase
│   │   │   ├── oceanbase_test.go
│   │   │   └── oceanbase.go
│   │   ├── oracle
│   │   │   └── oracle.go
│   │   ├── postgres
│   │   │   ├── postgres_test.go
│   │   │   └── postgres.go
│   │   ├── redis
│   │   │   ├── redis_test.go
│   │   │   └── redis.go
│   │   ├── sources.go
│   │   ├── spanner
│   │   │   ├── spanner_test.go
│   │   │   └── spanner.go
│   │   ├── sqlite
│   │   │   ├── sqlite_test.go
│   │   │   └── sqlite.go
│   │   ├── tidb
│   │   │   ├── tidb_test.go
│   │   │   └── tidb.go
│   │   ├── trino
│   │   │   ├── trino_test.go
│   │   │   └── trino.go
│   │   ├── util.go
│   │   ├── valkey
│   │   │   ├── valkey_test.go
│   │   │   └── valkey.go
│   │   └── yugabytedb
│   │       ├── yugabytedb_test.go
│   │       └── yugabytedb.go
│   ├── telemetry
│   │   ├── instrumentation.go
│   │   └── telemetry.go
│   ├── testutils
│   │   └── testutils.go
│   ├── tools
│   │   ├── alloydb
│   │   │   ├── alloydbcreatecluster
│   │   │   │   ├── alloydbcreatecluster_test.go
│   │   │   │   └── alloydbcreatecluster.go
│   │   │   ├── alloydbcreateinstance
│   │   │   │   ├── alloydbcreateinstance_test.go
│   │   │   │   └── alloydbcreateinstance.go
│   │   │   ├── alloydbcreateuser
│   │   │   │   ├── alloydbcreateuser_test.go
│   │   │   │   └── alloydbcreateuser.go
│   │   │   ├── alloydbgetcluster
│   │   │   │   ├── alloydbgetcluster_test.go
│   │   │   │   └── alloydbgetcluster.go
│   │   │   ├── alloydbgetinstance
│   │   │   │   ├── alloydbgetinstance_test.go
│   │   │   │   └── alloydbgetinstance.go
│   │   │   ├── alloydbgetuser
│   │   │   │   ├── alloydbgetuser_test.go
│   │   │   │   └── alloydbgetuser.go
│   │   │   ├── alloydblistclusters
│   │   │   │   ├── alloydblistclusters_test.go
│   │   │   │   └── alloydblistclusters.go
│   │   │   ├── alloydblistinstances
│   │   │   │   ├── alloydblistinstances_test.go
│   │   │   │   └── alloydblistinstances.go
│   │   │   ├── alloydblistusers
│   │   │   │   ├── alloydblistusers_test.go
│   │   │   │   └── alloydblistusers.go
│   │   │   └── alloydbwaitforoperation
│   │   │       ├── alloydbwaitforoperation_test.go
│   │   │       └── alloydbwaitforoperation.go
│   │   ├── alloydbainl
│   │   │   ├── alloydbainl_test.go
│   │   │   └── alloydbainl.go
│   │   ├── bigquery
│   │   │   ├── bigqueryanalyzecontribution
│   │   │   │   ├── bigqueryanalyzecontribution_test.go
│   │   │   │   └── bigqueryanalyzecontribution.go
│   │   │   ├── bigquerycommon
│   │   │   │   ├── table_name_parser_test.go
│   │   │   │   ├── table_name_parser.go
│   │   │   │   └── util.go
│   │   │   ├── bigqueryconversationalanalytics
│   │   │   │   ├── bigqueryconversationalanalytics_test.go
│   │   │   │   └── bigqueryconversationalanalytics.go
│   │   │   ├── bigqueryexecutesql
│   │   │   │   ├── bigqueryexecutesql_test.go
│   │   │   │   └── bigqueryexecutesql.go
│   │   │   ├── bigqueryforecast
│   │   │   │   ├── bigqueryforecast_test.go
│   │   │   │   └── bigqueryforecast.go
│   │   │   ├── bigquerygetdatasetinfo
│   │   │   │   ├── bigquerygetdatasetinfo_test.go
│   │   │   │   └── bigquerygetdatasetinfo.go
│   │   │   ├── bigquerygettableinfo
│   │   │   │   ├── bigquerygettableinfo_test.go
│   │   │   │   └── bigquerygettableinfo.go
│   │   │   ├── bigquerylistdatasetids
│   │   │   │   ├── bigquerylistdatasetids_test.go
│   │   │   │   └── bigquerylistdatasetids.go
│   │   │   ├── bigquerylisttableids
│   │   │   │   ├── bigquerylisttableids_test.go
│   │   │   │   └── bigquerylisttableids.go
│   │   │   ├── bigquerysearchcatalog
│   │   │   │   ├── bigquerysearchcatalog_test.go
│   │   │   │   └── bigquerysearchcatalog.go
│   │   │   └── bigquerysql
│   │   │       ├── bigquerysql_test.go
│   │   │       └── bigquerysql.go
│   │   ├── bigtable
│   │   │   ├── bigtable_test.go
│   │   │   └── bigtable.go
│   │   ├── cassandra
│   │   │   └── cassandracql
│   │   │       ├── cassandracql_test.go
│   │   │       └── cassandracql.go
│   │   ├── clickhouse
│   │   │   ├── clickhouseexecutesql
│   │   │   │   ├── clickhouseexecutesql_test.go
│   │   │   │   └── clickhouseexecutesql.go
│   │   │   ├── clickhouselistdatabases
│   │   │   │   ├── clickhouselistdatabases_test.go
│   │   │   │   └── clickhouselistdatabases.go
│   │   │   ├── clickhouselisttables
│   │   │   │   ├── clickhouselisttables_test.go
│   │   │   │   └── clickhouselisttables.go
│   │   │   └── clickhousesql
│   │   │       ├── clickhousesql_test.go
│   │   │       └── clickhousesql.go
│   │   ├── cloudmonitoring
│   │   │   ├── cloudmonitoring_test.go
│   │   │   └── cloudmonitoring.go
│   │   ├── cloudsql
│   │   │   ├── cloudsqlcreatedatabase
│   │   │   │   ├── cloudsqlcreatedatabase_test.go
│   │   │   │   └── cloudsqlcreatedatabase.go
│   │   │   ├── cloudsqlcreateusers
│   │   │   │   ├── cloudsqlcreateusers_test.go
│   │   │   │   └── cloudsqlcreateusers.go
│   │   │   ├── cloudsqlgetinstances
│   │   │   │   ├── cloudsqlgetinstances_test.go
│   │   │   │   └── cloudsqlgetinstances.go
│   │   │   ├── cloudsqllistdatabases
│   │   │   │   ├── cloudsqllistdatabases_test.go
│   │   │   │   └── cloudsqllistdatabases.go
│   │   │   ├── cloudsqllistinstances
│   │   │   │   ├── cloudsqllistinstances_test.go
│   │   │   │   └── cloudsqllistinstances.go
│   │   │   └── cloudsqlwaitforoperation
│   │   │       ├── cloudsqlwaitforoperation_test.go
│   │   │       └── cloudsqlwaitforoperation.go
│   │   ├── cloudsqlmssql
│   │   │   └── cloudsqlmssqlcreateinstance
│   │   │       ├── cloudsqlmssqlcreateinstance_test.go
│   │   │       └── cloudsqlmssqlcreateinstance.go
│   │   ├── cloudsqlmysql
│   │   │   └── cloudsqlmysqlcreateinstance
│   │   │       ├── cloudsqlmysqlcreateinstance_test.go
│   │   │       └── cloudsqlmysqlcreateinstance.go
│   │   ├── cloudsqlpg
│   │   │   └── cloudsqlpgcreateinstances
│   │   │       ├── cloudsqlpgcreateinstances_test.go
│   │   │       └── cloudsqlpgcreateinstances.go
│   │   ├── common_test.go
│   │   ├── common.go
│   │   ├── couchbase
│   │   │   ├── couchbase_test.go
│   │   │   └── couchbase.go
│   │   ├── dataform
│   │   │   └── dataformcompilelocal
│   │   │       ├── dataformcompilelocal_test.go
│   │   │       └── dataformcompilelocal.go
│   │   ├── dataplex
│   │   │   ├── dataplexlookupentry
│   │   │   │   ├── dataplexlookupentry_test.go
│   │   │   │   └── dataplexlookupentry.go
│   │   │   ├── dataplexsearchaspecttypes
│   │   │   │   ├── dataplexsearchaspecttypes_test.go
│   │   │   │   └── dataplexsearchaspecttypes.go
│   │   │   └── dataplexsearchentries
│   │   │       ├── dataplexsearchentries_test.go
│   │   │       └── dataplexsearchentries.go
│   │   ├── dgraph
│   │   │   ├── dgraph_test.go
│   │   │   └── dgraph.go
│   │   ├── firebird
│   │   │   ├── firebirdexecutesql
│   │   │   │   ├── firebirdexecutesql_test.go
│   │   │   │   └── firebirdexecutesql.go
│   │   │   └── firebirdsql
│   │   │       ├── firebirdsql_test.go
│   │   │       └── firebirdsql.go
│   │   ├── firestore
│   │   │   ├── firestoreadddocuments
│   │   │   │   ├── firestoreadddocuments_test.go
│   │   │   │   └── firestoreadddocuments.go
│   │   │   ├── firestoredeletedocuments
│   │   │   │   ├── firestoredeletedocuments_test.go
│   │   │   │   └── firestoredeletedocuments.go
│   │   │   ├── firestoregetdocuments
│   │   │   │   ├── firestoregetdocuments_test.go
│   │   │   │   └── firestoregetdocuments.go
│   │   │   ├── firestoregetrules
│   │   │   │   ├── firestoregetrules_test.go
│   │   │   │   └── firestoregetrules.go
│   │   │   ├── firestorelistcollections
│   │   │   │   ├── firestorelistcollections_test.go
│   │   │   │   └── firestorelistcollections.go
│   │   │   ├── firestorequery
│   │   │   │   ├── firestorequery_test.go
│   │   │   │   └── firestorequery.go
│   │   │   ├── firestorequerycollection
│   │   │   │   ├── firestorequerycollection_test.go
│   │   │   │   └── firestorequerycollection.go
│   │   │   ├── firestoreupdatedocument
│   │   │   │   ├── firestoreupdatedocument_test.go
│   │   │   │   └── firestoreupdatedocument.go
│   │   │   ├── firestorevalidaterules
│   │   │   │   ├── firestorevalidaterules_test.go
│   │   │   │   └── firestorevalidaterules.go
│   │   │   └── util
│   │   │       ├── converter_test.go
│   │   │       ├── converter.go
│   │   │       ├── validator_test.go
│   │   │       └── validator.go
│   │   ├── http
│   │   │   ├── http_test.go
│   │   │   └── http.go
│   │   ├── http_method.go
│   │   ├── looker
│   │   │   ├── lookeradddashboardelement
│   │   │   │   ├── lookeradddashboardelement_test.go
│   │   │   │   └── lookeradddashboardelement.go
│   │   │   ├── lookercommon
│   │   │   │   ├── lookercommon_test.go
│   │   │   │   └── lookercommon.go
│   │   │   ├── lookerconversationalanalytics
│   │   │   │   ├── lookerconversationalanalytics_test.go
│   │   │   │   └── lookerconversationalanalytics.go
│   │   │   ├── lookergetdashboards
│   │   │   │   ├── lookergetdashboards_test.go
│   │   │   │   └── lookergetdashboards.go
│   │   │   ├── lookergetdimensions
│   │   │   │   ├── lookergetdimensions_test.go
│   │   │   │   └── lookergetdimensions.go
│   │   │   ├── lookergetexplores
│   │   │   │   ├── lookergetexplores_test.go
│   │   │   │   └── lookergetexplores.go
│   │   │   ├── lookergetfilters
│   │   │   │   ├── lookergetfilters_test.go
│   │   │   │   └── lookergetfilters.go
│   │   │   ├── lookergetlooks
│   │   │   │   ├── lookergetlooks_test.go
│   │   │   │   └── lookergetlooks.go
│   │   │   ├── lookergetmeasures
│   │   │   │   ├── lookergetmeasures_test.go
│   │   │   │   └── lookergetmeasures.go
│   │   │   ├── lookergetmodels
│   │   │   │   ├── lookergetmodels_test.go
│   │   │   │   └── lookergetmodels.go
│   │   │   ├── lookergetparameters
│   │   │   │   ├── lookergetparameters_test.go
│   │   │   │   └── lookergetparameters.go
│   │   │   ├── lookerhealthanalyze
│   │   │   │   ├── lookerhealthanalyze_test.go
│   │   │   │   └── lookerhealthanalyze.go
│   │   │   ├── lookerhealthpulse
│   │   │   │   ├── lookerhealthpulse_test.go
│   │   │   │   └── lookerhealthpulse.go
│   │   │   ├── lookerhealthvacuum
│   │   │   │   ├── lookerhealthvacuum_test.go
│   │   │   │   └── lookerhealthvacuum.go
│   │   │   ├── lookermakedashboard
│   │   │   │   ├── lookermakedashboard_test.go
│   │   │   │   └── lookermakedashboard.go
│   │   │   ├── lookermakelook
│   │   │   │   ├── lookermakelook_test.go
│   │   │   │   └── lookermakelook.go
│   │   │   ├── lookerquery
│   │   │   │   ├── lookerquery_test.go
│   │   │   │   └── lookerquery.go
│   │   │   ├── lookerquerysql
│   │   │   │   ├── lookerquerysql_test.go
│   │   │   │   └── lookerquerysql.go
│   │   │   ├── lookerqueryurl
│   │   │   │   ├── lookerqueryurl_test.go
│   │   │   │   └── lookerqueryurl.go
│   │   │   └── lookerrunlook
│   │   │       ├── lookerrunlook_test.go
│   │   │       └── lookerrunlook.go
│   │   ├── mongodb
│   │   │   ├── mongodbaggregate
│   │   │   │   ├── mongodbaggregate_test.go
│   │   │   │   └── mongodbaggregate.go
│   │   │   ├── mongodbdeletemany
│   │   │   │   ├── mongodbdeletemany_test.go
│   │   │   │   └── mongodbdeletemany.go
│   │   │   ├── mongodbdeleteone
│   │   │   │   ├── mongodbdeleteone_test.go
│   │   │   │   └── mongodbdeleteone.go
│   │   │   ├── mongodbfind
│   │   │   │   ├── mongodbfind_test.go
│   │   │   │   └── mongodbfind.go
│   │   │   ├── mongodbfindone
│   │   │   │   ├── mongodbfindone_test.go
│   │   │   │   └── mongodbfindone.go
│   │   │   ├── mongodbinsertmany
│   │   │   │   ├── mongodbinsertmany_test.go
│   │   │   │   └── mongodbinsertmany.go
│   │   │   ├── mongodbinsertone
│   │   │   │   ├── mongodbinsertone_test.go
│   │   │   │   └── mongodbinsertone.go
│   │   │   ├── mongodbupdatemany
│   │   │   │   ├── mongodbupdatemany_test.go
│   │   │   │   └── mongodbupdatemany.go
│   │   │   └── mongodbupdateone
│   │   │       ├── mongodbupdateone_test.go
│   │   │       └── mongodbupdateone.go
│   │   ├── mssql
│   │   │   ├── mssqlexecutesql
│   │   │   │   ├── mssqlexecutesql_test.go
│   │   │   │   └── mssqlexecutesql.go
│   │   │   ├── mssqllisttables
│   │   │   │   ├── mssqllisttables_test.go
│   │   │   │   └── mssqllisttables.go
│   │   │   └── mssqlsql
│   │   │       ├── mssqlsql_test.go
│   │   │       └── mssqlsql.go
│   │   ├── mysql
│   │   │   ├── mysqlcommon
│   │   │   │   └── mysqlcommon.go
│   │   │   ├── mysqlexecutesql
│   │   │   │   ├── mysqlexecutesql_test.go
│   │   │   │   └── mysqlexecutesql.go
│   │   │   ├── mysqllistactivequeries
│   │   │   │   ├── mysqllistactivequeries_test.go
│   │   │   │   └── mysqllistactivequeries.go
│   │   │   ├── mysqllisttablefragmentation
│   │   │   │   ├── mysqllisttablefragmentation_test.go
│   │   │   │   └── mysqllisttablefragmentation.go
│   │   │   ├── mysqllisttables
│   │   │   │   ├── mysqllisttables_test.go
│   │   │   │   └── mysqllisttables.go
│   │   │   ├── mysqllisttablesmissinguniqueindexes
│   │   │   │   ├── mysqllisttablesmissinguniqueindexes_test.go
│   │   │   │   └── mysqllisttablesmissinguniqueindexes.go
│   │   │   └── mysqlsql
│   │   │       ├── mysqlsql_test.go
│   │   │       └── mysqlsql.go
│   │   ├── neo4j
│   │   │   ├── neo4jcypher
│   │   │   │   ├── neo4jcypher_test.go
│   │   │   │   └── neo4jcypher.go
│   │   │   ├── neo4jexecutecypher
│   │   │   │   ├── classifier
│   │   │   │   │   ├── classifier_test.go
│   │   │   │   │   └── classifier.go
│   │   │   │   ├── neo4jexecutecypher_test.go
│   │   │   │   └── neo4jexecutecypher.go
│   │   │   └── neo4jschema
│   │   │       ├── cache
│   │   │       │   ├── cache_test.go
│   │   │       │   └── cache.go
│   │   │       ├── helpers
│   │   │       │   ├── helpers_test.go
│   │   │       │   └── helpers.go
│   │   │       ├── neo4jschema_test.go
│   │   │       ├── neo4jschema.go
│   │   │       └── types
│   │   │           └── types.go
│   │   ├── oceanbase
│   │   │   ├── oceanbaseexecutesql
│   │   │   │   ├── oceanbaseexecutesql_test.go
│   │   │   │   └── oceanbaseexecutesql.go
│   │   │   └── oceanbasesql
│   │   │       ├── oceanbasesql_test.go
│   │   │       └── oceanbasesql.go
│   │   ├── oracle
│   │   │   ├── oracleexecutesql
│   │   │   │   └── oracleexecutesql.go
│   │   │   └── oraclesql
│   │   │       └── oraclesql.go
│   │   ├── parameters_test.go
│   │   ├── parameters.go
│   │   ├── postgres
│   │   │   ├── postgresexecutesql
│   │   │   │   ├── postgresexecutesql_test.go
│   │   │   │   └── postgresexecutesql.go
│   │   │   ├── postgreslistactivequeries
│   │   │   │   ├── postgreslistactivequeries_test.go
│   │   │   │   └── postgreslistactivequeries.go
│   │   │   ├── postgreslistavailableextensions
│   │   │   │   ├── postgreslistavailableextensions_test.go
│   │   │   │   └── postgreslistavailableextensions.go
│   │   │   ├── postgreslistinstalledextensions
│   │   │   │   ├── postgreslistinstalledextensions_test.go
│   │   │   │   └── postgreslistinstalledextensions.go
│   │   │   ├── postgreslisttables
│   │   │   │   ├── postgreslisttables_test.go
│   │   │   │   └── postgreslisttables.go
│   │   │   └── postgressql
│   │   │       ├── postgressql_test.go
│   │   │       └── postgressql.go
│   │   ├── redis
│   │   │   ├── redis_test.go
│   │   │   └── redis.go
│   │   ├── spanner
│   │   │   ├── spannerexecutesql
│   │   │   │   ├── spannerexecutesql_test.go
│   │   │   │   └── spannerexecutesql.go
│   │   │   ├── spannerlisttables
│   │   │   │   ├── spannerlisttables_test.go
│   │   │   │   └── spannerlisttables.go
│   │   │   └── spannersql
│   │   │       ├── spanner_test.go
│   │   │       └── spannersql.go
│   │   ├── sqlite
│   │   │   ├── sqliteexecutesql
│   │   │   │   ├── sqliteexecutesql_test.go
│   │   │   │   └── sqliteexecutesql.go
│   │   │   └── sqlitesql
│   │   │       ├── sqlitesql_test.go
│   │   │       └── sqlitesql.go
│   │   ├── tidb
│   │   │   ├── tidbexecutesql
│   │   │   │   ├── tidbexecutesql_test.go
│   │   │   │   └── tidbexecutesql.go
│   │   │   └── tidbsql
│   │   │       ├── tidbsql_test.go
│   │   │       └── tidbsql.go
│   │   ├── tools_test.go
│   │   ├── tools.go
│   │   ├── toolsets.go
│   │   ├── trino
│   │   │   ├── trinoexecutesql
│   │   │   │   ├── trinoexecutesql_test.go
│   │   │   │   └── trinoexecutesql.go
│   │   │   └── trinosql
│   │   │       ├── trinosql_test.go
│   │   │       └── trinosql.go
│   │   ├── utility
│   │   │   └── wait
│   │   │       ├── wait_test.go
│   │   │       └── wait.go
│   │   ├── valkey
│   │   │   ├── valkey_test.go
│   │   │   └── valkey.go
│   │   └── yugabytedbsql
│   │       ├── yugabytedbsql_test.go
│   │       └── yugabytedbsql.go
│   └── util
│       └── util.go
├── LICENSE
├── logo.png
├── main.go
├── README.md
└── tests
    ├── alloydb
    │   ├── alloydb_integration_test.go
    │   └── alloydb_wait_for_operation_test.go
    ├── alloydbainl
    │   └── alloydb_ai_nl_integration_test.go
    ├── alloydbpg
    │   └── alloydb_pg_integration_test.go
    ├── auth.go
    ├── bigquery
    │   └── bigquery_integration_test.go
    ├── bigtable
    │   └── bigtable_integration_test.go
    ├── cassandra
    │   └── cassandra_integration_test.go
    ├── clickhouse
    │   └── clickhouse_integration_test.go
    ├── cloudmonitoring
    │   └── cloud_monitoring_integration_test.go
    ├── cloudsql
    │   ├── cloud_sql_create_database_test.go
    │   ├── cloud_sql_create_users_test.go
    │   ├── cloud_sql_get_instances_test.go
    │   ├── cloud_sql_list_databases_test.go
    │   ├── cloudsql_list_instances_test.go
    │   └── cloudsql_wait_for_operation_test.go
    ├── cloudsqlmssql
    │   ├── cloud_sql_mssql_create_instance_integration_test.go
    │   └── cloud_sql_mssql_integration_test.go
    ├── cloudsqlmysql
    │   ├── cloud_sql_mysql_create_instance_integration_test.go
    │   └── cloud_sql_mysql_integration_test.go
    ├── cloudsqlpg
    │   ├── cloud_sql_pg_create_instances_test.go
    │   └── cloud_sql_pg_integration_test.go
    ├── common.go
    ├── couchbase
    │   └── couchbase_integration_test.go
    ├── dataform
    │   └── dataform_integration_test.go
    ├── dataplex
    │   └── dataplex_integration_test.go
    ├── dgraph
    │   └── dgraph_integration_test.go
    ├── firebird
    │   └── firebird_integration_test.go
    ├── firestore
    │   └── firestore_integration_test.go
    ├── http
    │   └── http_integration_test.go
    ├── looker
    │   └── looker_integration_test.go
    ├── mongodb
    │   └── mongodb_integration_test.go
    ├── mssql
    │   └── mssql_integration_test.go
    ├── mysql
    │   └── mysql_integration_test.go
    ├── neo4j
    │   └── neo4j_integration_test.go
    ├── oceanbase
    │   └── oceanbase_integration_test.go
    ├── option.go
    ├── oracle
    │   └── oracle_integration_test.go
    ├── postgres
    │   └── postgres_integration_test.go
    ├── redis
    │   └── redis_test.go
    ├── server.go
    ├── source.go
    ├── spanner
    │   └── spanner_integration_test.go
    ├── sqlite
    │   └── sqlite_integration_test.go
    ├── tidb
    │   └── tidb_integration_test.go
    ├── tool.go
    ├── trino
    │   └── trino_integration_test.go
    ├── utility
    │   └── wait_integration_test.go
    ├── valkey
    │   └── valkey_test.go
    └── yugabytedb
        └── yugabytedb_integration_test.go
```

# Files

--------------------------------------------------------------------------------
/internal/tools/mssql/mssqllisttables/mssqllisttables.go:
--------------------------------------------------------------------------------

```go
  1 | // Copyright 2025 Google LLC
  2 | //
  3 | // Licensed under the Apache License, Version 2.0 (the "License");
  4 | // you may not use this file except in compliance with the License.
  5 | // You may obtain a copy of the License at
  6 | //
  7 | //     http://www.apache.org/licenses/LICENSE-2.0
  8 | //
  9 | // Unless required by applicable law or agreed to in writing, software
 10 | // distributed under the License is distributed on an "AS IS" BASIS,
 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 12 | // See the License for the specific language governing permissions and
 13 | // limitations under the License.
 14 | 
 15 | package mssqllisttables
 16 | 
 17 | import (
 18 | 	"context"
 19 | 	"database/sql"
 20 | 	"fmt"
 21 | 
 22 | 	yaml "github.com/goccy/go-yaml"
 23 | 	"github.com/googleapis/genai-toolbox/internal/sources"
 24 | 	"github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmssql"
 25 | 	"github.com/googleapis/genai-toolbox/internal/sources/mssql"
 26 | 	"github.com/googleapis/genai-toolbox/internal/tools"
 27 | )
 28 | 
// kind is the registry identifier for this tool; it is the value users put
// in the `kind:` field of a tool definition.
const kind string = "mssql-list-tables"

// listTablesStatement is a single T-SQL query that returns one row per user
// table. It takes two named parameters:
//   - @table_names:   optional comma-separated list of table names; NULL or
//     blank means "all user tables".
//   - @output_format: 'simple' (name only) or 'detailed' (full JSON object
//     with columns, constraints, indexes and triggers).
// The per-table details are assembled server-side into a JSON document via
// FOR JSON, so each result row carries schema_name, object_name and a JSON
// string in object_details.
const listTablesStatement = `
	WITH table_info AS (
		SELECT
			t.object_id AS table_oid,
			s.name AS schema_name,
			t.name AS table_name,
			dp.name AS table_owner, -- Schema's owner principal name
			CAST(ep.value AS NVARCHAR(MAX)) AS table_comment, -- Cast for JSON compatibility
			CASE
				WHEN EXISTS ( -- Check if the table has more than one partition for any of its indexes or heap
					SELECT 1 FROM sys.partitions p
					WHERE p.object_id = t.object_id AND p.partition_number > 1
				) THEN 'PARTITIONED TABLE'
				ELSE 'TABLE'
			END AS object_type_detail
		FROM
			sys.tables t
		INNER JOIN
			sys.schemas s ON t.schema_id = s.schema_id
		LEFT JOIN
			sys.database_principals dp ON s.principal_id = dp.principal_id
		LEFT JOIN
			sys.extended_properties ep ON ep.major_id = t.object_id AND ep.minor_id = 0 AND ep.class = 1 AND ep.name = 'MS_Description'
		WHERE
			t.type = 'U' -- User tables
			AND s.name NOT IN ('sys', 'INFORMATION_SCHEMA', 'guest', 'db_owner', 'db_accessadmin', 'db_backupoperator', 'db_datareader', 'db_datawriter', 'db_ddladmin', 'db_denydatareader', 'db_denydatawriter', 'db_securityadmin')
			AND (@table_names IS NULL OR LTRIM(RTRIM(@table_names)) = '' OR t.name IN (SELECT LTRIM(RTRIM(value)) FROM STRING_SPLIT(@table_names, ',')))
	),
	columns_info AS (
		SELECT
			c.object_id AS table_oid,
			c.name AS column_name,
			CONCAT(
				UPPER(TY.name), -- Base type name
				CASE
					WHEN TY.name IN ('char', 'varchar', 'nchar', 'nvarchar', 'binary', 'varbinary') THEN
						CONCAT('(', IIF(c.max_length = -1, 'MAX', CAST(c.max_length / CASE WHEN TY.name IN ('nchar', 'nvarchar') THEN 2 ELSE 1 END AS VARCHAR(10))), ')')
					WHEN TY.name IN ('decimal', 'numeric') THEN
						CONCAT('(', c.precision, ',', c.scale, ')')
					WHEN TY.name IN ('datetime2', 'datetimeoffset', 'time') THEN
						CONCAT('(', c.scale, ')')
					ELSE ''
				END
			) AS data_type,
			c.column_id AS column_ordinal_position,
			IIF(c.is_nullable = 0, CAST(1 AS BIT), CAST(0 AS BIT)) AS is_not_nullable,
			dc.definition AS column_default,
			CAST(epc.value AS NVARCHAR(MAX)) AS column_comment
		FROM
			sys.columns c
		JOIN
			table_info ti ON c.object_id = ti.table_oid
		JOIN
			sys.types TY ON c.user_type_id = TY.user_type_id AND TY.is_user_defined = 0 -- Ensure we get base types
		LEFT JOIN
			sys.default_constraints dc ON c.object_id = dc.parent_object_id AND c.column_id = dc.parent_column_id
		LEFT JOIN
			sys.extended_properties epc ON epc.major_id = c.object_id AND epc.minor_id = c.column_id AND epc.class = 1 AND epc.name = 'MS_Description'
	),
	constraints_info AS (
		-- Primary Keys & Unique Constraints
		SELECT
			kc.parent_object_id AS table_oid,
			kc.name AS constraint_name,
			REPLACE(kc.type_desc, '_CONSTRAINT', '') AS constraint_type, -- 'PRIMARY_KEY', 'UNIQUE'
			STUFF((SELECT ', ' + col.name
				FROM sys.index_columns ic
				JOIN sys.columns col ON ic.object_id = col.object_id AND ic.column_id = col.column_id
				WHERE ic.object_id = kc.parent_object_id AND ic.index_id = kc.unique_index_id
				ORDER BY ic.key_ordinal
				FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 2, '') AS constraint_columns,
			NULL AS foreign_key_referenced_table,
			NULL AS foreign_key_referenced_columns,
			CASE kc.type
				WHEN 'PK' THEN 'PRIMARY KEY (' + STUFF((SELECT ', ' + col.name FROM sys.index_columns ic JOIN sys.columns col ON ic.object_id = col.object_id AND ic.column_id = col.column_id WHERE ic.object_id = kc.parent_object_id AND ic.index_id = kc.unique_index_id ORDER BY ic.key_ordinal FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 2, '') + ')'
				WHEN 'UQ' THEN 'UNIQUE (' + STUFF((SELECT ', ' + col.name FROM sys.index_columns ic JOIN sys.columns col ON ic.object_id = col.object_id AND ic.column_id = col.column_id WHERE ic.object_id = kc.parent_object_id AND ic.index_id = kc.unique_index_id ORDER BY ic.key_ordinal FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 2, '') + ')'
			END AS constraint_definition
		FROM sys.key_constraints kc
		JOIN table_info ti ON kc.parent_object_id = ti.table_oid
		UNION ALL
		-- Foreign Keys
		SELECT
			fk.parent_object_id AS table_oid,
			fk.name AS constraint_name,
			'FOREIGN KEY' AS constraint_type,
			STUFF((SELECT ', ' + pc.name
				FROM sys.foreign_key_columns fkc
				JOIN sys.columns pc ON fkc.parent_object_id = pc.object_id AND fkc.parent_column_id = pc.column_id
				WHERE fkc.constraint_object_id = fk.object_id
				ORDER BY fkc.constraint_column_id
				FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 2, '') AS constraint_columns,
			SCHEMA_NAME(rt.schema_id) + '.' + OBJECT_NAME(fk.referenced_object_id) AS foreign_key_referenced_table,
			STUFF((SELECT ', ' + rc.name
				FROM sys.foreign_key_columns fkc
				JOIN sys.columns rc ON fkc.referenced_object_id = rc.object_id AND fkc.referenced_column_id = rc.column_id
				WHERE fkc.constraint_object_id = fk.object_id
				ORDER BY fkc.constraint_column_id
				FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 2, '') AS foreign_key_referenced_columns,
			OBJECT_DEFINITION(fk.object_id) AS constraint_definition
		FROM sys.foreign_keys fk
		JOIN sys.tables rt ON fk.referenced_object_id = rt.object_id
		JOIN table_info ti ON fk.parent_object_id = ti.table_oid
		UNION ALL
		-- Check Constraints
		SELECT
			cc.parent_object_id AS table_oid,
			cc.name AS constraint_name,
			'CHECK' AS constraint_type,
			NULL AS constraint_columns, -- Definition includes column context
			NULL AS foreign_key_referenced_table,
			NULL AS foreign_key_referenced_columns,
			cc.definition AS constraint_definition
		FROM sys.check_constraints cc
		JOIN table_info ti ON cc.parent_object_id = ti.table_oid
	),
	indexes_info AS (
		SELECT
			i.object_id AS table_oid,
			i.name AS index_name,
			i.type_desc AS index_method, -- CLUSTERED, NONCLUSTERED, XML, etc.
			i.is_unique,
			i.is_primary_key AS is_primary,
			STUFF((SELECT ', ' + c.name
				FROM sys.index_columns ic
				JOIN sys.columns c ON i.object_id = c.object_id AND ic.column_id = c.column_id
				WHERE ic.object_id = i.object_id AND ic.index_id = i.index_id AND ic.is_included_column = 0
				ORDER BY ic.key_ordinal
				FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 2, '') AS index_columns,
			(
				'COLUMNS: (' + ISNULL(STUFF((SELECT ', ' + c.name + CASE WHEN ic.is_descending_key = 1 THEN ' DESC' ELSE '' END
										FROM sys.index_columns ic
										JOIN sys.columns c ON i.object_id = c.object_id AND ic.column_id = c.column_id
										WHERE ic.object_id = i.object_id AND ic.index_id = i.index_id AND ic.is_included_column = 0
										ORDER BY ic.key_ordinal FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 2, ''), 'N/A') + ')' +
				ISNULL(CHAR(13)+CHAR(10) + 'INCLUDE: (' + STUFF((SELECT ', ' + c.name
										FROM sys.index_columns ic
										JOIN sys.columns c ON i.object_id = c.object_id AND ic.column_id = c.column_id
										WHERE ic.object_id = i.object_id AND ic.index_id = i.index_id AND ic.is_included_column = 1
										ORDER BY ic.index_column_id FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 2, '') + ')', '') +
				ISNULL(CHAR(13)+CHAR(10) + 'FILTER: (' + i.filter_definition + ')', '')
			) AS index_definition_details
		FROM
			sys.indexes i
		JOIN
			table_info ti ON i.object_id = ti.table_oid
		WHERE i.type <> 0 -- Exclude Heaps
		AND i.name IS NOT NULL -- Exclude unnamed heap indexes; named indexes (PKs are often named) are preferred.
	),
	triggers_info AS (
		SELECT
			tr.parent_id AS table_oid,
			tr.name AS trigger_name,
			OBJECT_DEFINITION(tr.object_id) AS trigger_definition,
			CASE tr.is_disabled WHEN 0 THEN 'ENABLED' ELSE 'DISABLED' END AS trigger_enabled_state
		FROM
			sys.triggers tr
		JOIN
			table_info ti ON tr.parent_id = ti.table_oid
		WHERE
			tr.is_ms_shipped = 0
			AND tr.parent_class_desc = 'OBJECT_OR_COLUMN' -- DML Triggers on tables/views
	)
	SELECT
		ti.schema_name,
		ti.table_name AS object_name,
		CASE
			WHEN @output_format = 'simple' THEN
				(SELECT ti.table_name AS name FOR JSON PATH, WITHOUT_ARRAY_WRAPPER)
			ELSE
				(
					SELECT
						ti.schema_name AS schema_name,
						ti.table_name AS object_name,
						ti.object_type_detail AS object_type,
						ti.table_owner AS owner,
						ti.table_comment AS comment,
						JSON_QUERY(ISNULL((
							SELECT
								ci.column_name,
								ci.data_type,
								ci.column_ordinal_position,
								ci.is_not_nullable,
								ci.column_default,
								ci.column_comment
							FROM columns_info ci
							WHERE ci.table_oid = ti.table_oid
							ORDER BY ci.column_ordinal_position
							FOR JSON PATH
						), '[]')) AS columns,
						JSON_QUERY(ISNULL((
							SELECT
								cons.constraint_name,
								cons.constraint_type,
								cons.constraint_definition,
								JSON_QUERY(
									CASE
										WHEN cons.constraint_columns IS NOT NULL AND LTRIM(RTRIM(cons.constraint_columns)) <> ''
										THEN '[' + (SELECT STRING_AGG('"' + LTRIM(RTRIM(value)) + '"', ',') FROM STRING_SPLIT(cons.constraint_columns, ',')) + ']'
										ELSE '[]'
									END
								) AS constraint_columns,
								cons.foreign_key_referenced_table,
								JSON_QUERY(
									CASE
										WHEN cons.foreign_key_referenced_columns IS NOT NULL AND LTRIM(RTRIM(cons.foreign_key_referenced_columns)) <> ''
										THEN '[' + (SELECT STRING_AGG('"' + LTRIM(RTRIM(value)) + '"', ',') FROM STRING_SPLIT(cons.foreign_key_referenced_columns, ',')) + ']'
										ELSE '[]'
									END
								) AS foreign_key_referenced_columns
							FROM constraints_info cons
							WHERE cons.table_oid = ti.table_oid
							FOR JSON PATH
						), '[]')) AS constraints,
						JSON_QUERY(ISNULL((
							SELECT
								ii.index_name,
								ii.index_definition_details AS index_definition,
								ii.is_unique,
								ii.is_primary,
								ii.index_method,
								JSON_QUERY(
									CASE
										WHEN ii.index_columns IS NOT NULL AND LTRIM(RTRIM(ii.index_columns)) <> ''
										THEN '[' + (SELECT STRING_AGG('"' + LTRIM(RTRIM(value)) + '"', ',') FROM STRING_SPLIT(ii.index_columns, ',')) + ']'
										ELSE '[]'
									END
								) AS index_columns
							FROM indexes_info ii
							WHERE ii.table_oid = ti.table_oid
							FOR JSON PATH
						), '[]')) AS indexes,
						JSON_QUERY(ISNULL((
							SELECT
								tri.trigger_name,
								tri.trigger_definition,
								tri.trigger_enabled_state
							FROM triggers_info tri
							WHERE tri.table_oid = ti.table_oid
							FOR JSON PATH
						), '[]')) AS triggers
					FOR JSON PATH, WITHOUT_ARRAY_WRAPPER -- Creates a single JSON object for this table's details
				)
		END AS object_details
	FROM
		table_info ti
	ORDER BY
		ti.schema_name, ti.table_name;
`
279 | 
280 | func init() {
281 | 	if !tools.Register(kind, newConfig) {
282 | 		panic(fmt.Sprintf("tool kind %q already registered", kind))
283 | 	}
284 | }
285 | 
286 | func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
287 | 	actual := Config{Name: name}
288 | 	if err := decoder.DecodeContext(ctx, &actual); err != nil {
289 | 		return nil, err
290 | 	}
291 | 	return actual, nil
292 | }
293 | 
// compatibleSource is the narrow capability this tool needs from its source:
// anything that can hand out a *sql.DB connected to SQL Server.
type compatibleSource interface {
	MSSQLDB() *sql.DB
}

// validate compatible sources are still compatible
var _ compatibleSource = &cloudsqlmssql.Source{}
var _ compatibleSource = &mssql.Source{}

// compatibleSources names the accepted source kinds; used only in the error
// message produced when an incompatible source is configured.
var compatibleSources = [...]string{cloudsqlmssql.SourceKind, mssql.SourceKind}
303 | 
// Config is the YAML-declared configuration for one mssql-list-tables tool.
type Config struct {
	Name         string   `yaml:"name" validate:"required"`        // tool name (the map key in the tools section)
	Kind         string   `yaml:"kind" validate:"required"`        // must be "mssql-list-tables"
	Source       string   `yaml:"source" validate:"required"`      // name of a configured mssql/cloud-sql-mssql source
	Description  string   `yaml:"description" validate:"required"` // surfaced to clients in the tool manifest
	AuthRequired []string `yaml:"authRequired"`                    // auth services that must verify before invocation
}

// validate interface
var _ tools.ToolConfig = Config{}
314 | 
315 | func (cfg Config) ToolConfigKind() string {
316 | 	return kind
317 | }
318 | 
319 | func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
320 | 	// verify source exists
321 | 	rawS, ok := srcs[cfg.Source]
322 | 	if !ok {
323 | 		return nil, fmt.Errorf("no source named %q configured", cfg.Source)
324 | 	}
325 | 
326 | 	// verify the source is compatible
327 | 	s, ok := rawS.(compatibleSource)
328 | 	if !ok {
329 | 		return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
330 | 	}
331 | 
332 | 	allParameters := tools.Parameters{
333 | 		tools.NewStringParameterWithDefault("table_names", "", "Optional: A comma-separated list of table names. If empty, details for all tables will be listed."),
334 | 		tools.NewStringParameterWithDefault("output_format", "detailed", "Optional: Use 'simple' for names only or 'detailed' for full info."),
335 | 	}
336 | 	paramManifest := allParameters.Manifest()
337 | 	mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, allParameters)
338 | 
339 | 	// finish tool setup
340 | 	t := Tool{
341 | 		Name:         cfg.Name,
342 | 		Kind:         kind,
343 | 		AllParams:    allParameters,
344 | 		AuthRequired: cfg.AuthRequired,
345 | 		Db:           s.MSSQLDB(),
346 | 		manifest:     tools.Manifest{Description: cfg.Description, Parameters: paramManifest, AuthRequired: cfg.AuthRequired},
347 | 		mcpManifest:  mcpManifest,
348 | 	}
349 | 	return t, nil
350 | }
351 | 
// validate interface
var _ tools.Tool = Tool{}

// Tool is the runnable mssql-list-tables instance bound to a live database
// connection pool.
type Tool struct {
	Name         string           `yaml:"name"`
	Kind         string           `yaml:"kind"`
	AuthRequired []string         `yaml:"authRequired"`
	AllParams    tools.Parameters `yaml:"allParams"`

	// Db is borrowed from the configured source; the source owns its lifecycle.
	Db          *sql.DB
	manifest    tools.Manifest
	mcpManifest tools.McpManifest
}
365 | 
366 | func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
367 | 	paramsMap := params.AsMap()
368 | 
369 | 	outputFormat, _ := paramsMap["output_format"].(string)
370 | 	if outputFormat != "simple" && outputFormat != "detailed" {
371 | 		return nil, fmt.Errorf("invalid value for output_format: must be 'simple' or 'detailed', but got %q", outputFormat)
372 | 	}
373 | 
374 | 	namedArgs := []any{
375 | 		sql.Named("table_names", paramsMap["table_names"]),
376 | 		sql.Named("output_format", outputFormat),
377 | 	}
378 | 
379 | 	rows, err := t.Db.QueryContext(ctx, listTablesStatement, namedArgs...)
380 | 	if err != nil {
381 | 		return nil, fmt.Errorf("unable to execute query: %w", err)
382 | 	}
383 | 	defer rows.Close()
384 | 
385 | 	cols, err := rows.Columns()
386 | 	if err != nil {
387 | 		return nil, fmt.Errorf("unable to fetch column names: %w", err)
388 | 	}
389 | 
390 | 	// create an array of values for each column, which can be re-used to scan each row
391 | 	rawValues := make([]any, len(cols))
392 | 	values := make([]any, len(cols))
393 | 	for i := range rawValues {
394 | 		values[i] = &rawValues[i]
395 | 	}
396 | 
397 | 	var out []any
398 | 	for rows.Next() {
399 | 		err = rows.Scan(values...)
400 | 		if err != nil {
401 | 			return nil, fmt.Errorf("unable to parse row: %w", err)
402 | 		}
403 | 		vMap := make(map[string]any)
404 | 		for i, name := range cols {
405 | 			vMap[name] = rawValues[i]
406 | 		}
407 | 		out = append(out, vMap)
408 | 	}
409 | 
410 | 	// Check if error occurred during iteration
411 | 	if err := rows.Err(); err != nil {
412 | 		return nil, fmt.Errorf("errors encountered during row iteration: %w", err)
413 | 	}
414 | 
415 | 	return out, nil
416 | }
417 | 
418 | func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
419 | 	return tools.ParseParams(t.AllParams, data, claims)
420 | }
421 | 
422 | func (t Tool) Manifest() tools.Manifest {
423 | 	return t.manifest
424 | }
425 | 
426 | func (t Tool) McpManifest() tools.McpManifest {
427 | 	return t.mcpManifest
428 | }
429 | 
430 | func (t Tool) Authorized(verifiedAuthServices []string) bool {
431 | 	return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
432 | }
433 | 
434 | func (t Tool) RequiresClientAuthorization() bool {
435 | 	return false
436 | }
437 | 
```

--------------------------------------------------------------------------------
/internal/tools/firestore/firestorequery/firestorequery.go:
--------------------------------------------------------------------------------

```go
  1 | // Copyright 2025 Google LLC
  2 | //
  3 | // Licensed under the Apache License, Version 2.0 (the "License");
  4 | // you may not use this file except in compliance with the License.
  5 | // You may obtain a copy of the License at
  6 | //
  7 | //     http://www.apache.org/licenses/LICENSE-2.0
  8 | //
  9 | // Unless required by applicable law or agreed to in writing, software
 10 | // distributed under the License is distributed on an "AS IS" BASIS,
 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 12 | // See the License for the specific language governing permissions and
 13 | // limitations under the License.
 14 | 
 15 | package firestorequery
 16 | 
 17 | import (
 18 | 	"context"
 19 | 	"encoding/json"
 20 | 	"fmt"
 21 | 	"strconv"
 22 | 	"strings"
 23 | 
 24 | 	firestoreapi "cloud.google.com/go/firestore"
 25 | 	yaml "github.com/goccy/go-yaml"
 26 | 	"github.com/googleapis/genai-toolbox/internal/sources"
 27 | 	firestoreds "github.com/googleapis/genai-toolbox/internal/sources/firestore"
 28 | 	"github.com/googleapis/genai-toolbox/internal/tools"
 29 | 	"github.com/googleapis/genai-toolbox/internal/tools/firestore/util"
 30 | )
 31 | 
// Constants for tool configuration
const (
	kind         = "firestore-query" // registry identifier for this tool kind
	defaultLimit = 100               // result cap applied when no limit is configured
)
 37 | 
// validOperators is the set of comparison operators accepted in filter
// definitions; these mirror the operators Firestore structured queries allow.
var validOperators = map[string]bool{
	"<":                  true,
	"<=":                 true,
	">":                  true,
	">=":                 true,
	"==":                 true,
	"!=":                 true,
	"array-contains":     true,
	"array-contains-any": true,
	"in":                 true,
	"not-in":             true,
}
 51 | 
// Error messages
// Each constant is a fmt format string; the %w verbs preserve the wrapped
// cause for errors.Is/errors.As at higher layers.
const (
	errFilterParseFailed      = "failed to parse filters: %w"
	errQueryExecutionFailed   = "failed to execute query: %w"
	errTemplateParseFailed    = "failed to parse template: %w"
	errTemplateExecFailed     = "failed to execute template: %w"
	errLimitParseFailed       = "failed to parse limit value '%s': %w"
	errSelectFieldParseFailed = "failed to parse select field: %w"
)
 61 | 
 62 | func init() {
 63 | 	if !tools.Register(kind, newConfig) {
 64 | 		panic(fmt.Sprintf("tool kind %q already registered", kind))
 65 | 	}
 66 | }
 67 | 
 68 | func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
 69 | 	actual := Config{Name: name}
 70 | 	if err := decoder.DecodeContext(ctx, &actual); err != nil {
 71 | 		return nil, err
 72 | 	}
 73 | 	return actual, nil
 74 | }
 75 | 
// compatibleSource defines the interface for sources that can provide a Firestore client
type compatibleSource interface {
	FirestoreClient() *firestoreapi.Client
}

// validate compatible sources are still compatible
var _ compatibleSource = &firestoreds.Source{}

// compatibleSources names the accepted source kinds; used only in the error
// message emitted when an incompatible source is configured.
var compatibleSources = [...]string{firestoreds.SourceKind}
 85 | 
// Config represents the configuration for the Firestore query tool.
// CollectionPath, Filters and Limit are templates: placeholders in them are
// substituted with values of the declared Parameters at invocation time.
type Config struct {
	Name         string   `yaml:"name" validate:"required"`        // tool name (the map key in the tools section)
	Kind         string   `yaml:"kind" validate:"required"`        // must be "firestore-query"
	Source       string   `yaml:"source" validate:"required"`      // name of a configured firestore source
	Description  string   `yaml:"description" validate:"required"` // surfaced to clients in the tool manifest
	AuthRequired []string `yaml:"authRequired"`                    // auth services that must verify before invocation

	// Template fields
	CollectionPath string         `yaml:"collectionPath" validate:"required"`
	Filters        string         `yaml:"filters"`      // JSON string template
	Select         []string       `yaml:"select"`       // Fields to select
	OrderBy        map[string]any `yaml:"orderBy"`      // Order by configuration
	Limit          string         `yaml:"limit"`        // Limit template (can be a number or template)
	AnalyzeQuery   bool           `yaml:"analyzeQuery"` // Analyze query (boolean, not parameterizable)

	// Parameters for template substitution
	Parameters tools.Parameters `yaml:"parameters"`
}

// validate interface
var _ tools.ToolConfig = Config{}
108 | 
109 | // ToolConfigKind returns the kind of tool configuration
110 | func (cfg Config) ToolConfigKind() string {
111 | 	return kind
112 | }
113 | 
// Initialize creates a new Tool instance from the configuration.
// It resolves the named source, verifies it can supply a Firestore client,
// applies the default limit when none is configured, and assembles the tool
// together with its regular and MCP manifests.
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
	// verify source exists
	rawS, ok := srcs[cfg.Source]
	if !ok {
		return nil, fmt.Errorf("no source named %q configured", cfg.Source)
	}

	// verify the source is compatible
	s, ok := rawS.(compatibleSource)
	if !ok {
		return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
	}

	// Set default limit if not specified; the limit stays a string here
	// because it may itself be a template resolved per invocation.
	if cfg.Limit == "" {
		cfg.Limit = fmt.Sprintf("%d", defaultLimit)
	}

	// Create MCP manifest
	mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, cfg.Parameters)

	// finish tool setup
	t := Tool{
		Name:                   cfg.Name,
		Kind:                   kind,
		AuthRequired:           cfg.AuthRequired,
		Client:                 s.FirestoreClient(),
		CollectionPathTemplate: cfg.CollectionPath,
		FiltersTemplate:        cfg.Filters,
		SelectTemplate:         cfg.Select,
		OrderByTemplate:        cfg.OrderBy,
		LimitTemplate:          cfg.Limit,
		AnalyzeQuery:           cfg.AnalyzeQuery,
		Parameters:             cfg.Parameters,
		manifest:               tools.Manifest{Description: cfg.Description, Parameters: cfg.Parameters.Manifest(), AuthRequired: cfg.AuthRequired},
		mcpManifest:            mcpManifest,
	}
	return t, nil
}
154 | 
// validate interface
var _ tools.Tool = Tool{}

// Tool represents the Firestore query tool. The *Template fields hold the
// raw, un-substituted configuration values; parameter substitution happens
// per invocation in Invoke/buildQuery.
type Tool struct {
	Name         string   `yaml:"name"`
	Kind         string   `yaml:"kind"`
	AuthRequired []string `yaml:"authRequired"`

	Client                 *firestoreapi.Client
	CollectionPathTemplate string
	FiltersTemplate        string
	SelectTemplate         []string
	OrderByTemplate        map[string]any
	LimitTemplate          string
	AnalyzeQuery           bool
	Parameters             tools.Parameters

	manifest    tools.Manifest
	mcpManifest tools.McpManifest
}
176 | 
// SimplifiedFilter represents the simplified filter format parsed from the
// rendered `filters` template. A node is either a composite (And/Or, which
// may nest arbitrarily) or a leaf comparison (Field/Op/Value).
type SimplifiedFilter struct {
	And   []SimplifiedFilter `json:"and,omitempty"`
	Or    []SimplifiedFilter `json:"or,omitempty"`
	Field string             `json:"field,omitempty"`
	Op    string             `json:"op,omitempty"`
	Value interface{}        `json:"value,omitempty"`
}

// OrderByConfig represents ordering configuration for a single sort key.
type OrderByConfig struct {
	Field     string `json:"field"`
	Direction string `json:"direction"`
}
191 | 
192 | // GetDirection returns the Firestore direction constant
193 | func (o *OrderByConfig) GetDirection() firestoreapi.Direction {
194 | 	if strings.EqualFold(o.Direction, "DESCENDING") || strings.EqualFold(o.Direction, "DESC") {
195 | 		return firestoreapi.Desc
196 | 	}
197 | 	return firestoreapi.Asc
198 | }
199 | 
// QueryResult represents a document result from the query.
type QueryResult struct {
	ID         string         `json:"id"`
	Path       string         `json:"path"`
	Data       map[string]any `json:"data"`
	CreateTime interface{}    `json:"createTime,omitempty"`
	UpdateTime interface{}    `json:"updateTime,omitempty"`
	ReadTime   interface{}    `json:"readTime,omitempty"`
}

// QueryResponse represents the full response including optional metrics.
// It is only returned when AnalyzeQuery is enabled and metrics are available;
// otherwise Invoke returns the bare []QueryResult.
type QueryResponse struct {
	Documents      []QueryResult  `json:"documents"`
	ExplainMetrics map[string]any `json:"explainMetrics,omitempty"`
}
215 | 
// Invoke executes the Firestore query based on the provided parameters.
// It renders the collection-path template, builds the query from the
// configured templates, and runs it. accessToken is unused: this tool does
// not require client authorization (see RequiresClientAuthorization).
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
	paramsMap := params.AsMap()

	// Process collection path with template substitution
	collectionPath, err := tools.PopulateTemplate("collectionPath", t.CollectionPathTemplate, paramsMap)
	if err != nil {
		return nil, fmt.Errorf("failed to process collection path: %w", err)
	}

	// Build the query
	query, err := t.buildQuery(collectionPath, paramsMap)
	if err != nil {
		return nil, err
	}

	// Execute the query and return results
	return t.executeQuery(ctx, query)
}
235 | 
// buildQuery constructs the Firestore query from parameters, applying (in
// order) filters, field selection, ordering, limit, and explain options.
func (t Tool) buildQuery(collectionPath string, params map[string]any) (*firestoreapi.Query, error) {
	collection := t.Client.Collection(collectionPath)
	query := collection.Query

	// Process and apply filters if template is provided
	if t.FiltersTemplate != "" {
		// Apply template substitution to filters
		filtersJSON, err := tools.PopulateTemplateWithJSON("filters", t.FiltersTemplate, params)
		if err != nil {
			return nil, fmt.Errorf("failed to process filters template: %w", err)
		}

		// Parse the simplified filter format
		var simplifiedFilter SimplifiedFilter
		if err := json.Unmarshal([]byte(filtersJSON), &simplifiedFilter); err != nil {
			return nil, fmt.Errorf(errFilterParseFailed, err)
		}

		// Convert simplified filter to Firestore filter. A nil result
		// (e.g. unknown operator) silently yields an unfiltered query.
		if filter := t.convertToFirestoreFilter(simplifiedFilter); filter != nil {
			query = query.WhereEntity(filter)
		}
	}

	// Process select fields
	selectFields, err := t.processSelectFields(params)
	if err != nil {
		return nil, err
	}
	if len(selectFields) > 0 {
		query = query.Select(selectFields...)
	}

	// Process and apply ordering
	orderBy, err := t.getOrderBy(params)
	if err != nil {
		return nil, err
	}
	if orderBy != nil {
		query = query.OrderBy(orderBy.Field, orderBy.GetDirection())
	}

	// Process and apply limit (always set; defaults when unconfigured)
	limit, err := t.getLimit(params)
	if err != nil {
		return nil, err
	}
	query = query.Limit(limit)

	// Apply analyze options if enabled
	if t.AnalyzeQuery {
		query = query.WithRunOptions(firestoreapi.ExplainOptions{
			Analyze: true,
		})
	}

	return &query, nil
}
295 | 
// convertToFirestoreFilter converts simplified filter format to Firestore
// EntityFilter, recursing into And/Or composites. It returns nil when the
// node produces no usable filter; note that leaves with an operator not in
// validOperators, or with a nil Value (so "is null" checks are unsupported),
// are silently dropped rather than reported as errors.
func (t Tool) convertToFirestoreFilter(filter SimplifiedFilter) firestoreapi.EntityFilter {
	// Handle AND filters
	if len(filter.And) > 0 {
		filters := make([]firestoreapi.EntityFilter, 0, len(filter.And))
		for _, f := range filter.And {
			if converted := t.convertToFirestoreFilter(f); converted != nil {
				filters = append(filters, converted)
			}
		}
		if len(filters) > 0 {
			return firestoreapi.AndFilter{Filters: filters}
		}
		return nil
	}

	// Handle OR filters
	if len(filter.Or) > 0 {
		filters := make([]firestoreapi.EntityFilter, 0, len(filter.Or))
		for _, f := range filter.Or {
			if converted := t.convertToFirestoreFilter(f); converted != nil {
				filters = append(filters, converted)
			}
		}
		if len(filters) > 0 {
			return firestoreapi.OrFilter{Filters: filters}
		}
		return nil
	}

	// Handle simple property filter
	if filter.Field != "" && filter.Op != "" && filter.Value != nil {
		if validOperators[filter.Op] {
			// Convert the value using the Firestore native JSON converter
			convertedValue, err := util.JSONToFirestoreValue(filter.Value, t.Client)
			if err != nil {
				// If conversion fails, use the original value (best effort)
				convertedValue = filter.Value
			}

			return firestoreapi.PropertyFilter{
				Path:     filter.Field,
				Operator: filter.Op,
				Value:    convertedValue,
			}
		}
	}

	return nil
}
346 | 
347 | // processSelectFields processes the select fields with parameter substitution
348 | func (t Tool) processSelectFields(params map[string]any) ([]string, error) {
349 | 	var selectFields []string
350 | 
351 | 	// Process configured select fields with template substitution
352 | 	for _, field := range t.SelectTemplate {
353 | 		// Check if it's a template
354 | 		if strings.Contains(field, "{{") {
355 | 			processed, err := tools.PopulateTemplate("selectField", field, params)
356 | 			if err != nil {
357 | 				return nil, err
358 | 			}
359 | 			if processed != "" {
360 | 				// The processed field might be an array format [a b c] or a single value
361 | 				trimmedProcessed := strings.TrimSpace(processed)
362 | 
363 | 				// Check if it's in array format [a b c]
364 | 				if strings.HasPrefix(trimmedProcessed, "[") && strings.HasSuffix(trimmedProcessed, "]") {
365 | 					// Remove brackets and split by spaces
366 | 					arrayContent := strings.TrimPrefix(trimmedProcessed, "[")
367 | 					arrayContent = strings.TrimSuffix(arrayContent, "]")
368 | 					fields := strings.Fields(arrayContent) // Fields splits by any whitespace
369 | 					for _, f := range fields {
370 | 						if f != "" {
371 | 							selectFields = append(selectFields, f)
372 | 						}
373 | 					}
374 | 				} else {
375 | 					selectFields = append(selectFields, processed)
376 | 				}
377 | 			}
378 | 		} else {
379 | 			selectFields = append(selectFields, field)
380 | 		}
381 | 	}
382 | 
383 | 	return selectFields, nil
384 | }
385 | 
// getOrderBy processes the orderBy configuration with parameter substitution.
// It returns nil (no ordering) when no orderBy was configured or when the
// "field" key resolves to an empty string; a resolved direction alone is not
// enough to produce an ordering.
func (t Tool) getOrderBy(params map[string]any) (*OrderByConfig, error) {
	if t.OrderByTemplate == nil {
		return nil, nil
	}

	orderBy := &OrderByConfig{}

	// Process field
	field, err := t.getOrderByForKey("field", params)
	if err != nil {
		return nil, err
	}
	orderBy.Field = field

	// Process direction
	direction, err := t.getOrderByForKey("direction", params)
	if err != nil {
		return nil, err
	}
	orderBy.Direction = direction

	if orderBy.Field == "" {
		return nil, nil
	}

	return orderBy, nil
}

// getOrderByForKey renders the template stored under the given orderBy key.
// Missing keys and non-string values are treated as absent (empty string).
func (t Tool) getOrderByForKey(key string, params map[string]any) (string, error) {
	value, ok := t.OrderByTemplate[key].(string)
	if !ok {
		return "", nil
	}

	processedValue, err := tools.PopulateTemplate(fmt.Sprintf("orderBy%s", key), value, params)
	if err != nil {
		return "", err
	}

	return processedValue, nil
}
428 | 
429 | // processLimit processes the limit field with parameter substitution
430 | func (t Tool) getLimit(params map[string]any) (int, error) {
431 | 	limit := defaultLimit
432 | 	if t.LimitTemplate != "" {
433 | 		processedValue, err := tools.PopulateTemplate("limit", t.LimitTemplate, params)
434 | 		if err != nil {
435 | 			return 0, err
436 | 		}
437 | 
438 | 		// Try to parse as integer
439 | 		if processedValue != "" {
440 | 			parsedLimit, err := strconv.Atoi(processedValue)
441 | 			if err != nil {
442 | 				return 0, fmt.Errorf(errLimitParseFailed, processedValue, err)
443 | 			}
444 | 			limit = parsedLimit
445 | 		}
446 | 	}
447 | 	return limit, nil
448 | }
449 | 
// executeQuery runs the query and formats the results.
// Returns []QueryResult normally; when AnalyzeQuery is set and explain
// metrics are retrievable it returns a QueryResponse wrapping both. Any
// error while fetching metrics is swallowed and the plain results are
// returned instead.
func (t Tool) executeQuery(ctx context.Context, query *firestoreapi.Query) (any, error) {
	docIterator := query.Documents(ctx)
	docs, err := docIterator.GetAll()
	if err != nil {
		return nil, fmt.Errorf(errQueryExecutionFailed, err)
	}

	// Convert results to structured format
	results := make([]QueryResult, len(docs))
	for i, doc := range docs {
		results[i] = QueryResult{
			ID:         doc.Ref.ID,
			Path:       doc.Ref.Path,
			Data:       doc.Data(),
			CreateTime: doc.CreateTime,
			UpdateTime: doc.UpdateTime,
			ReadTime:   doc.ReadTime,
		}
	}

	// Return with explain metrics if requested
	if t.AnalyzeQuery {
		explainMetrics, err := t.getExplainMetrics(docIterator)
		if err == nil && explainMetrics != nil {
			response := QueryResponse{
				Documents:      results,
				ExplainMetrics: explainMetrics,
			}
			return response, nil
		}
	}

	return results, nil
}
485 | 
// getExplainMetrics extracts explain metrics from the query iterator and
// flattens them into a JSON-friendly map. Metrics must be read only after
// the iterator has been fully consumed. Returns (nil, nil) when the iterator
// exposes no metrics.
func (t Tool) getExplainMetrics(docIterator *firestoreapi.DocumentIterator) (map[string]any, error) {
	explainMetrics, err := docIterator.ExplainMetrics()
	if err != nil || explainMetrics == nil {
		return nil, err
	}

	metricsData := make(map[string]any)

	// Add plan summary if available
	if explainMetrics.PlanSummary != nil {
		planSummary := make(map[string]any)
		planSummary["indexesUsed"] = explainMetrics.PlanSummary.IndexesUsed
		metricsData["planSummary"] = planSummary
	}

	// Add execution stats if available
	if explainMetrics.ExecutionStats != nil {
		executionStats := make(map[string]any)
		executionStats["resultsReturned"] = explainMetrics.ExecutionStats.ResultsReturned
		executionStats["readOperations"] = explainMetrics.ExecutionStats.ReadOperations

		if explainMetrics.ExecutionStats.ExecutionDuration != nil {
			executionStats["executionDuration"] = explainMetrics.ExecutionStats.ExecutionDuration.String()
		}

		if explainMetrics.ExecutionStats.DebugStats != nil {
			executionStats["debugStats"] = *explainMetrics.ExecutionStats.DebugStats
		}

		metricsData["executionStats"] = executionStats
	}

	return metricsData, nil
}
521 | 
// ParseParams parses and validates input parameters against the tool's
// declared parameter schema, merging in any values sourced from auth claims.
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
	return tools.ParseParams(t.Parameters, data, claims)
}

// Manifest returns the tool manifest.
func (t Tool) Manifest() tools.Manifest {
	return t.manifest
}

// McpManifest returns the MCP manifest.
func (t Tool) McpManifest() tools.McpManifest {
	return t.mcpManifest
}

// Authorized checks if the tool is authorized based on verified auth services.
func (t Tool) Authorized(verifiedAuthServices []string) bool {
	return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}

// RequiresClientAuthorization reports whether invocations need a
// client-supplied access token; this tool never does.
func (t Tool) RequiresClientAuthorization() bool {
	return false
}
545 | 
```

--------------------------------------------------------------------------------
/internal/tools/neo4j/neo4jexecutecypher/classifier/classifier.go:
--------------------------------------------------------------------------------

```go
  1 | // Copyright 2025 Google LLC
  2 | //
  3 | // Licensed under the Apache License, Version 2.0 (the "License");
  4 | // you may not use this file except in compliance with the License.
  5 | // You may obtain a copy of the License at
  6 | //
  7 | //     http://www.apache.org/licenses/LICENSE-2.0
  8 | //
  9 | // Unless required by applicable law or agreed to in writing, software
 10 | // distributed under the License is distributed on an "AS IS" BASIS,
 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 12 | // See the License for the specific language governing permissions and
 13 | // limitations under the License.
 14 | 
 15 | /*
 16 | Package classifier provides tools to classify Cypher queries as either read-only or write operations.
 17 | 
 18 | It uses a keyword-based and procedure-based approach to determine the query's nature.
 19 | The main entry point is the `Classify` method on a `QueryClassifier` object. The classifier
 20 | is designed to be conservative, defaulting to classifying unknown procedures as write
 21 | operations to ensure safety in read-only environments.
 22 | 
 23 | It can handle:
 24 | - Standard Cypher keywords (MATCH, CREATE, MERGE, etc.).
 25 | - Multi-word keywords (DETACH DELETE, ORDER BY).
 26 | - Comments and string literals, which are ignored during classification.
 27 | - Procedure calls (CALL db.labels), with predefined lists of known read/write procedures.
 28 | - Subqueries (CALL { ... }), checking for write operations within the subquery block.
 29 | */
 30 | package classifier
 31 | 
 32 | import (
 33 | 	"regexp"
 34 | 	"sort"
 35 | 	"strings"
 36 | )
 37 | 
 38 | // QueryType represents the classification of a Cypher query as either read or write.
 39 | type QueryType int
 40 | 
 41 | const (
 42 | 	// ReadQuery indicates a query that only reads data.
 43 | 	ReadQuery QueryType = iota
 44 | 	// WriteQuery indicates a query that modifies data.
 45 | 	WriteQuery
 46 | )
 47 | 
 48 | // String provides a human-readable representation of the QueryType.
 49 | func (qt QueryType) String() string {
 50 | 	if qt == ReadQuery {
 51 | 		return "READ"
 52 | 	}
 53 | 	return "WRITE"
 54 | }
 55 | 
// QueryClassification represents the detailed result of a query classification.
type QueryClassification struct {
	// Type is the overall classification of the query (READ or WRITE).
	Type QueryType
	// Confidence is a score from 0.0 to 1.0 indicating the classifier's certainty.
	// 1.0 is fully confident. Lower scores may be assigned for ambiguous cases,
	// like unknown procedures (0.8) or mixed read/write queries (0.9).
	Confidence float64
	// WriteTokens is a list of keywords or procedures found that indicate a write operation.
	WriteTokens []string
	// ReadTokens is a list of keywords or procedures found that indicate a read operation.
	ReadTokens []string
	// HasSubquery is true if the query contains a `CALL { ... }` block.
	HasSubquery bool
	// Error holds any error that occurred during classification, though this is not
	// currently used in the implementation.
	Error error
}

// QueryClassifier contains the logic and data for classifying Cypher queries.
// It should be instantiated via the NewQueryClassifier() function. The maps
// and compiled patterns are read-only after construction.
type QueryClassifier struct {
	writeKeywords map[string]struct{}
	readKeywords  map[string]struct{}
	// writeProcedures is a map of known write procedure prefixes for quick lookup.
	writeProcedures map[string]struct{}
	// readProcedures is a map of known read procedure prefixes for quick lookup.
	readProcedures         map[string]struct{}
	multiWordWriteKeywords []string
	multiWordReadKeywords  []string
	commentPattern         *regexp.Regexp
	stringLiteralPattern   *regexp.Regexp
	procedureCallPattern   *regexp.Regexp
	subqueryPattern        *regexp.Regexp
	whitespacePattern      *regexp.Regexp
	tokenSplitPattern      *regexp.Regexp
}
 93 | 
// NewQueryClassifier creates and initializes a new QueryClassifier instance.
// It pre-compiles regular expressions and populates the internal lists of
// known Cypher keywords and procedures.
func NewQueryClassifier() *QueryClassifier {
	c := &QueryClassifier{
		writeKeywords:        make(map[string]struct{}),
		readKeywords:         make(map[string]struct{}),
		writeProcedures:      make(map[string]struct{}),
		readProcedures:       make(map[string]struct{}),
		commentPattern:       regexp.MustCompile(`(?m)//.*?$|/\*[\s\S]*?\*/`),
		stringLiteralPattern: regexp.MustCompile(`'[^']*'|"[^"]*"`),
		procedureCallPattern: regexp.MustCompile(`(?i)\bCALL\s+([a-zA-Z0-9_.]+)`),
		subqueryPattern:      regexp.MustCompile(`(?i)\bCALL\s*\{`),
		whitespacePattern:    regexp.MustCompile(`\s+`),
		tokenSplitPattern:    regexp.MustCompile(`[\s,(){}[\]]+`),
	}

	// Lists of known keywords that perform write operations.
	writeKeywordsList := []string{
		"CREATE", "MERGE", "DELETE", "DETACH DELETE", "SET", "REMOVE", "FOREACH",
		"CREATE INDEX", "DROP INDEX", "CREATE CONSTRAINT", "DROP CONSTRAINT",
	}
	// Lists of known keywords that perform read operations.
	readKeywordsList := []string{
		"MATCH", "OPTIONAL MATCH", "WITH", "WHERE", "RETURN", "ORDER BY", "SKIP", "LIMIT",
		"UNION", "UNION ALL", "UNWIND", "CASE", "WHEN", "THEN", "ELSE", "END",
		"SHOW", "PROFILE", "EXPLAIN",
	}
	// A list of procedure prefixes known to perform write operations.
	writeProceduresList := []string{
		"apoc.create", "apoc.merge", "apoc.refactor", "apoc.atomic", "apoc.trigger",
		"apoc.periodic.commit", "apoc.load.jdbc", "apoc.load.json", "apoc.load.csv",
		"apoc.export", "apoc.import", "db.create", "db.drop", "db.index.create",
		"db.constraints.create", "dbms.security.create", "gds.graph.create", "gds.graph.drop",
	}
	// A list of procedure prefixes known to perform read operations.
	readProceduresList := []string{
		"apoc.meta", "apoc.help", "apoc.version", "apoc.text", "apoc.math", "apoc.coll",
		"apoc.path", "apoc.algo", "apoc.date", "db.labels", "db.propertyKeys",
		"db.relationshipTypes", "db.schema", "db.indexes", "db.constraints",
		"dbms.components", "dbms.listConfig", "gds.graph.list", "gds.util",
	}

	c.populateKeywords(writeKeywordsList, c.writeKeywords, &c.multiWordWriteKeywords)
	c.populateKeywords(readKeywordsList, c.readKeywords, &c.multiWordReadKeywords)
	c.populateProcedures(writeProceduresList, c.writeProcedures)
	c.populateProcedures(readProceduresList, c.readProcedures)

	return c
}
144 | 
// populateKeywords processes a list of keyword strings. Every keyword is
// stored in keywordMap with spaces replaced by underscores (the token form
// produced by unifyMultiWordKeywords); keywords containing a space are
// additionally collected into multiWord for the replacement pass.
// Multi-word keywords are sorted by length descending so that longer
// keywords are replaced before any shorter keyword they contain.
func (c *QueryClassifier) populateKeywords(keywords []string, keywordMap map[string]struct{}, multiWord *[]string) {
	for _, kw := range keywords {
		if strings.Contains(kw, " ") {
			*multiWord = append(*multiWord, kw)
		}
		// Replace spaces with underscores for unified tokenization.
		keywordMap[strings.ReplaceAll(kw, " ", "_")] = struct{}{}
	}
	// Sort multi-word keywords by length (longest first) to prevent
	// a shorter overlapping keyword from being substituted first.
	sort.SliceStable(*multiWord, func(i, j int) bool {
		return len((*multiWord)[i]) > len((*multiWord)[j])
	})
}
163 | 
// populateProcedures adds a list of procedure prefixes to the given map,
// lower-casing each one so lookups can be case-insensitive.
func (c *QueryClassifier) populateProcedures(procedures []string, procedureMap map[string]struct{}) {
	for _, proc := range procedures {
		procedureMap[strings.ToLower(proc)] = struct{}{}
	}
}
170 | 
// Classify analyzes a Cypher query string and returns a QueryClassification result.
// It is the main method for this package.
//
// The process is as follows:
// 1. Normalize the query by removing comments and extra whitespace.
// 2. Replace string literals to prevent keywords inside them from being classified.
// 3. Unify multi-word keywords (e.g., "DETACH DELETE" becomes "DETACH_DELETE").
// 4. Extract all procedure calls (e.g., `CALL db.labels`).
// 5. Tokenize the remaining query string.
// 6. Check tokens and procedures against known read/write lists.
// 7. If a subquery `CALL { ... }` exists, check its contents for write operations.
// 8. Assign a final classification and confidence score.
//
// Usage example:
//
//	classifier := NewQueryClassifier()
//	query := "MATCH (n:Person) WHERE n.name = 'Alice' SET n.age = 30"
//	result := classifier.Classify(query)
//	fmt.Printf("Query is a %s query with confidence %f\n", result.Type, result.Confidence)
//	// Output: Query is a WRITE query with confidence 0.900000
//	fmt.Printf("Write tokens found: %v\n", result.WriteTokens)
//	// Output: Write tokens found: [SET]
func (c *QueryClassifier) Classify(query string) QueryClassification {
	result := QueryClassification{
		Type:       ReadQuery, // Default to read, upgrade to write if write tokens are found.
		Confidence: 1.0,
	}

	normalizedQuery := c.normalizeQuery(query)
	if normalizedQuery == "" {
		return result // Return default for empty queries.
	}

	// Early check for subqueries to set the flag.
	result.HasSubquery = c.subqueryPattern.MatchString(normalizedQuery)
	// NOTE: procedures are extracted from the query *before* string literals
	// are masked, so a `CALL` inside a literal would be picked up here.
	procedures := c.extractProcedureCalls(normalizedQuery)

	// Sanitize the query by replacing string literals to avoid misinterpreting their contents.
	sanitizedQuery := c.stringLiteralPattern.ReplaceAllString(normalizedQuery, "STRING_LITERAL")
	// Unify multi-word keywords to treat them as single tokens.
	unifiedQuery := c.unifyMultiWordKeywords(sanitizedQuery)
	tokens := c.extractTokens(unifiedQuery)

	// Classify based on standard keywords. A single write keyword is enough
	// to flip the result to WriteQuery; read keywords only accumulate.
	for _, token := range tokens {
		upperToken := strings.ToUpper(token)

		if _, isWrite := c.writeKeywords[upperToken]; isWrite {
			result.WriteTokens = append(result.WriteTokens, upperToken)
			result.Type = WriteQuery
		} else if _, isRead := c.readKeywords[upperToken]; isRead {
			result.ReadTokens = append(result.ReadTokens, upperToken)
		}
	}

	// Classify based on procedure calls.
	for _, proc := range procedures {
		if c.isWriteProcedure(proc) {
			result.WriteTokens = append(result.WriteTokens, "CALL "+proc)
			result.Type = WriteQuery
		} else if c.isReadProcedure(proc) {
			result.ReadTokens = append(result.ReadTokens, "CALL "+proc)
		} else {
			// CONSERVATIVE APPROACH: If a procedure is not in a known list,
			// we guess its type. If it looks like a read (get, list), we treat it as such.
			// Otherwise, we assume it's a write operation with lower confidence.
			if strings.Contains(proc, ".get") || strings.Contains(proc, ".list") ||
				strings.Contains(proc, ".show") || strings.Contains(proc, ".meta") {
				result.ReadTokens = append(result.ReadTokens, "CALL "+proc)
			} else {
				result.WriteTokens = append(result.WriteTokens, "CALL "+proc)
				result.Type = WriteQuery
				result.Confidence = 0.8 // Lower confidence for unknown procedures.
			}
		}
	}

	// If a subquery exists, explicitly check its contents for write operations.
	if result.HasSubquery && c.hasWriteInSubquery(unifiedQuery) {
		result.Type = WriteQuery
		// Add a specific token to indicate the reason for the write classification.
		found := false
		for _, t := range result.WriteTokens {
			if t == "WRITE_IN_SUBQUERY" {
				found = true
				break
			}
		}
		if !found {
			result.WriteTokens = append(result.WriteTokens, "WRITE_IN_SUBQUERY")
		}
	}

	// If a query contains both read and write operations (e.g., MATCH ... DELETE),
	// it's a write query. We lower the confidence slightly to reflect the mixed nature.
	// NOTE(review): this overwrites the 0.8 assigned above for unknown
	// procedures when read tokens are also present — confirm that is intended.
	if len(result.WriteTokens) > 0 && len(result.ReadTokens) > 0 {
		result.Confidence = 0.9
	}

	return result
}
272 | 
273 | // unifyMultiWordKeywords replaces multi-word keywords in a query with a single,
274 | // underscore-separated token. This simplifies the tokenization process.
275 | // Example: "DETACH DELETE" becomes "DETACH_DELETE".
276 | func (c *QueryClassifier) unifyMultiWordKeywords(query string) string {
277 | 	upperQuery := strings.ToUpper(query)
278 | 	// Combine all multi-word keywords for a single pass.
279 | 	allMultiWord := append(c.multiWordWriteKeywords, c.multiWordReadKeywords...)
280 | 
281 | 	for _, kw := range allMultiWord {
282 | 		placeholder := strings.ReplaceAll(kw, " ", "_")
283 | 		upperQuery = strings.ReplaceAll(upperQuery, kw, placeholder)
284 | 	}
285 | 	return upperQuery
286 | }
287 | 
288 | // normalizeQuery cleans a query string by removing comments and collapsing
289 | // all whitespace into single spaces.
290 | func (c *QueryClassifier) normalizeQuery(query string) string {
291 | 	// Remove single-line and multi-line comments.
292 | 	query = c.commentPattern.ReplaceAllString(query, " ")
293 | 	// Collapse consecutive whitespace characters into a single space.
294 | 	query = c.whitespacePattern.ReplaceAllString(query, " ")
295 | 	return strings.TrimSpace(query)
296 | }
297 | 
298 | // extractTokens splits a query string into a slice of individual tokens.
299 | // It splits on whitespace and various punctuation marks.
300 | func (c *QueryClassifier) extractTokens(query string) []string {
301 | 	tokens := c.tokenSplitPattern.Split(query, -1)
302 | 	// Filter out empty strings that can result from the split.
303 | 	result := make([]string, 0, len(tokens))
304 | 	for _, token := range tokens {
305 | 		if token != "" {
306 | 			result = append(result, token)
307 | 		}
308 | 	}
309 | 	return result
310 | }
311 | 
312 | // extractProcedureCalls finds all procedure calls (e.g., `CALL db.labels`)
313 | // in the query and returns a slice of their names.
314 | func (c *QueryClassifier) extractProcedureCalls(query string) []string {
315 | 	matches := c.procedureCallPattern.FindAllStringSubmatch(query, -1)
316 | 	procedures := make([]string, 0, len(matches))
317 | 	for _, match := range matches {
318 | 		if len(match) > 1 {
319 | 			procedures = append(procedures, strings.ToLower(match[1]))
320 | 		}
321 | 	}
322 | 	return procedures
323 | }
324 | 
325 | // isWriteProcedure checks if a given procedure name matches any of the known
326 | // write procedure prefixes.
327 | func (c *QueryClassifier) isWriteProcedure(procedure string) bool {
328 | 	procedure = strings.ToLower(procedure)
329 | 	for wp := range c.writeProcedures {
330 | 		if strings.HasPrefix(procedure, wp) {
331 | 			return true
332 | 		}
333 | 	}
334 | 	return false
335 | }
336 | 
337 | // isReadProcedure checks if a given procedure name matches any of the known
338 | // read procedure prefixes.
339 | func (c *QueryClassifier) isReadProcedure(procedure string) bool {
340 | 	procedure = strings.ToLower(procedure)
341 | 	for rp := range c.readProcedures {
342 | 		if strings.HasPrefix(procedure, rp) {
343 | 			return true
344 | 		}
345 | 	}
346 | 	return false
347 | }
348 | 
349 | // hasWriteInSubquery detects if a write keyword exists within a `CALL { ... }` block.
350 | // It correctly handles nested braces to find the content of the top-level subquery.
351 | func (c *QueryClassifier) hasWriteInSubquery(unifiedQuery string) bool {
352 | 	loc := c.subqueryPattern.FindStringIndex(unifiedQuery)
353 | 	if loc == nil {
354 | 		return false
355 | 	}
356 | 
357 | 	// The search starts from the beginning of the `CALL {` match.
358 | 	subqueryContent := unifiedQuery[loc[0]:]
359 | 	openBraces := 0
360 | 	startIndex := -1
361 | 	endIndex := -1
362 | 
363 | 	// Find the boundaries of the first complete `{...}` block.
364 | 	for i, char := range subqueryContent {
365 | 		if char == '{' {
366 | 			if openBraces == 0 {
367 | 				startIndex = i + 1
368 | 			}
369 | 			openBraces++
370 | 		} else if char == '}' {
371 | 			openBraces--
372 | 			if openBraces == 0 {
373 | 				endIndex = i
374 | 				break
375 | 			}
376 | 		}
377 | 	}
378 | 
379 | 	var block string
380 | 	if startIndex != -1 {
381 | 		if endIndex != -1 {
382 | 			// A complete `{...}` block was found.
383 | 			block = subqueryContent[startIndex:endIndex]
384 | 		} else {
385 | 			// An opening brace was found but no closing one; this indicates a
386 | 			// likely syntax error, but we check the rest of the string anyway.
387 | 			block = subqueryContent[startIndex:]
388 | 		}
389 | 
390 | 		// Check if any write keyword exists as a whole word within the subquery block.
391 | 		for writeOp := range c.writeKeywords {
392 | 			// Use regex to match the keyword as a whole word to avoid partial matches
393 | 			// (e.g., finding "SET" in "ASSET").
394 | 			re := regexp.MustCompile(`\b` + writeOp + `\b`)
395 | 			if re.MatchString(block) {
396 | 				return true
397 | 			}
398 | 		}
399 | 	}
400 | 
401 | 	return false
402 | }
403 | 
404 | // AddWriteProcedure allows users to dynamically add a custom procedure prefix to the
405 | // list of known write procedures. This is useful for environments with custom plugins.
406 | // The pattern is matched using `strings.HasPrefix`.
407 | //
408 | // Usage example:
409 | //
410 | //	classifier := NewQueryClassifier()
411 | //	classifier.AddWriteProcedure("my.custom.writer")
412 | //	result := classifier.Classify("CALL my.custom.writer.createUser()")
413 | //	// result.Type will be WriteQuery
414 | func (c *QueryClassifier) AddWriteProcedure(pattern string) {
415 | 	if pattern != "" {
416 | 		c.writeProcedures[strings.ToLower(pattern)] = struct{}{}
417 | 	}
418 | }
419 | 
420 | // AddReadProcedure allows users to dynamically add a custom procedure prefix to the
421 | // list of known read procedures.
422 | // The pattern is matched using `strings.HasPrefix`.
423 | //
424 | // Usage example:
425 | //
426 | //	classifier := NewQueryClassifier()
427 | //	classifier.AddReadProcedure("my.custom.reader")
428 | //	result := classifier.Classify("CALL my.custom.reader.getData()")
429 | //	// result.Type will be ReadQuery
430 | func (c *QueryClassifier) AddReadProcedure(pattern string) {
431 | 	if pattern != "" {
432 | 		c.readProcedures[strings.ToLower(pattern)] = struct{}{}
433 | 	}
434 | }
435 | 
```

--------------------------------------------------------------------------------
/internal/tools/looker/lookerhealthanalyze/lookerhealthanalyze.go:
--------------------------------------------------------------------------------

```go
  1 | // Copyright 2025 Google LLC
  2 | //
  3 | // Licensed under the Apache License, Version 2.0 (the "License");
  4 | // you may not use this file except in compliance with the License.
  5 | // You may obtain a copy of the License at
  6 | //
  7 | //	http://www.apache.org/licenses/LICENSE-2.0
  8 | //
  9 | // Unless required by applicable law or agreed to in writing, software
 10 | // distributed under the License is distributed on an "AS IS" BASIS,
 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 12 | // See the License for the specific language governing permissions and
 13 | // limitations under the License.
 14 | package lookerhealthanalyze
 15 | 
 16 | import (
 17 | 	"context"
 18 | 	"encoding/json"
 19 | 	"fmt"
 20 | 	"regexp"
 21 | 	"strings"
 22 | 
 23 | 	yaml "github.com/goccy/go-yaml"
 24 | 	"github.com/googleapis/genai-toolbox/internal/sources"
 25 | 	lookersrc "github.com/googleapis/genai-toolbox/internal/sources/looker"
 26 | 	"github.com/googleapis/genai-toolbox/internal/tools"
 27 | 	"github.com/googleapis/genai-toolbox/internal/tools/looker/lookercommon"
 28 | 	"github.com/googleapis/genai-toolbox/internal/util"
 29 | 	"github.com/looker-open-source/sdk-codegen/go/rtl"
 30 | 	v4 "github.com/looker-open-source/sdk-codegen/go/sdk/v4"
 31 | )
 32 | 
 33 | // =================================================================================================================
 34 | // START MCP SERVER CORE LOGIC
 35 | // =================================================================================================================
 36 | const kind string = "looker-health-analyze"
 37 | 
 38 | func init() {
 39 | 	if !tools.Register(kind, newConfig) {
 40 | 		panic(fmt.Sprintf("tool kind %q already registered", kind))
 41 | 	}
 42 | }
 43 | 
 44 | func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
 45 | 	actual := Config{Name: name}
 46 | 	if err := decoder.DecodeContext(ctx, &actual); err != nil {
 47 | 		return nil, err
 48 | 	}
 49 | 	return actual, nil
 50 | }
 51 | 
// Config is the YAML-declared configuration for a looker-health-analyze tool.
type Config struct {
	Name         string         `yaml:"name" validate:"required"`
	Kind         string         `yaml:"kind" validate:"required"`
	Source       string         `yaml:"source" validate:"required"`
	Description  string         `yaml:"description" validate:"required"`
	AuthRequired []string       `yaml:"authRequired"`
	Parameters   map[string]any `yaml:"parameters"` // NOTE(review): not read by Initialize in this file; parameters are built programmatically
}

// Compile-time check that Config implements tools.ToolConfig.
var _ tools.ToolConfig = Config{}
 62 | 
// ToolConfigKind returns the tool kind string ("looker-health-analyze").
func (cfg Config) ToolConfigKind() string {
	return kind
}
 66 | 
 67 | func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
 68 | 	rawS, ok := srcs[cfg.Source]
 69 | 	if !ok {
 70 | 		return nil, fmt.Errorf("no source named %q configured", cfg.Source)
 71 | 	}
 72 | 
 73 | 	s, ok := rawS.(*lookersrc.Source)
 74 | 	if !ok {
 75 | 		return nil, fmt.Errorf("invalid source for %q tool: source kind must be `looker`", kind)
 76 | 	}
 77 | 
 78 | 	actionParameter := tools.NewStringParameterWithRequired("action", "The analysis to run. Can be 'projects', 'models', or 'explores'.", true)
 79 | 	projectParameter := tools.NewStringParameterWithRequired("project", "The Looker project to analyze (optional).", false)
 80 | 	modelParameter := tools.NewStringParameterWithRequired("model", "The Looker model to analyze (optional).", false)
 81 | 	exploreParameter := tools.NewStringParameterWithRequired("explore", "The Looker explore to analyze (optional).", false)
 82 | 	timeframeParameter := tools.NewIntParameterWithDefault("timeframe", 90, "The timeframe in days to analyze.")
 83 | 	minQueriesParameter := tools.NewIntParameterWithDefault("min_queries", 0, "The minimum number of queries for a model or explore to be considered used.")
 84 | 
 85 | 	parameters := tools.Parameters{
 86 | 		actionParameter,
 87 | 		projectParameter,
 88 | 		modelParameter,
 89 | 		exploreParameter,
 90 | 		timeframeParameter,
 91 | 		minQueriesParameter,
 92 | 	}
 93 | 
 94 | 	mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, parameters)
 95 | 
 96 | 	return Tool{
 97 | 		Name:           cfg.Name,
 98 | 		Kind:           kind,
 99 | 		Parameters:     parameters,
100 | 		AuthRequired:   cfg.AuthRequired,
101 | 		UseClientOAuth: s.UseClientOAuth,
102 | 		Client:         s.Client,
103 | 		ApiSettings:    s.ApiSettings,
104 | 		manifest: tools.Manifest{
105 | 			Description:  cfg.Description,
106 | 			Parameters:   parameters.Manifest(),
107 | 			AuthRequired: cfg.AuthRequired,
108 | 		},
109 | 		mcpManifest: mcpManifest,
110 | 	}, nil
111 | }
112 | 
// Compile-time check that Tool implements tools.Tool.
var _ tools.Tool = Tool{}

// Tool runs Looker health-analysis actions against a Looker instance.
type Tool struct {
	Name           string `yaml:"name"`
	Kind           string `yaml:"kind"`
	UseClientOAuth bool
	Client         *v4.LookerSDK
	ApiSettings    *rtl.ApiSettings
	AuthRequired   []string `yaml:"authRequired"`
	Parameters     tools.Parameters
	manifest       tools.Manifest
	mcpManifest    tools.McpManifest
}
126 | 
127 | func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
128 | 	logger, err := util.LoggerFromContext(ctx)
129 | 	if err != nil {
130 | 		return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
131 | 	}
132 | 
133 | 	sdk, err := lookercommon.GetLookerSDK(t.UseClientOAuth, t.ApiSettings, t.Client, accessToken)
134 | 	if err != nil {
135 | 		return nil, fmt.Errorf("error getting sdk: %w", err)
136 | 	}
137 | 
138 | 	paramsMap := params.AsMap()
139 | 	timeframe, _ := paramsMap["timeframe"].(int)
140 | 	if timeframe == 0 {
141 | 		timeframe = 90
142 | 	}
143 | 	minQueries, _ := paramsMap["min_queries"].(int)
144 | 	if minQueries == 0 {
145 | 		minQueries = 1
146 | 	}
147 | 
148 | 	analyzeTool := &analyzeTool{
149 | 		SdkClient:  sdk,
150 | 		timeframe:  timeframe,
151 | 		minQueries: minQueries,
152 | 	}
153 | 
154 | 	action, ok := paramsMap["action"].(string)
155 | 	if !ok {
156 | 		return nil, fmt.Errorf("action parameter not found")
157 | 	}
158 | 
159 | 	switch action {
160 | 	case "projects":
161 | 		projectId, _ := paramsMap["project"].(string)
162 | 		result, err := analyzeTool.projects(ctx, projectId)
163 | 		if err != nil {
164 | 			return nil, fmt.Errorf("error analyzing projects: %w", err)
165 | 		}
166 | 		logger.DebugContext(ctx, "result = ", result)
167 | 		return result, nil
168 | 	case "models":
169 | 		projectName, _ := paramsMap["project"].(string)
170 | 		modelName, _ := paramsMap["model"].(string)
171 | 		result, err := analyzeTool.models(ctx, projectName, modelName)
172 | 		if err != nil {
173 | 			return nil, fmt.Errorf("error analyzing models: %w", err)
174 | 		}
175 | 		logger.DebugContext(ctx, "result = ", result)
176 | 		return result, nil
177 | 	case "explores":
178 | 		modelName, _ := paramsMap["model"].(string)
179 | 		exploreName, _ := paramsMap["explore"].(string)
180 | 		result, err := analyzeTool.explores(ctx, modelName, exploreName)
181 | 		if err != nil {
182 | 			return nil, fmt.Errorf("error analyzing explores: %w", err)
183 | 		}
184 | 		logger.DebugContext(ctx, "result = ", result)
185 | 		return result, nil
186 | 	default:
187 | 		return nil, fmt.Errorf("unknown action: %s", action)
188 | 	}
189 | }
// ParseParams validates raw arguments (and auth claims) against the tool's
// declared parameters.
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
	return tools.ParseParams(t.Parameters, data, claims)
}

// Manifest returns the tool manifest built during Initialize.
func (t Tool) Manifest() tools.Manifest {
	return t.manifest
}

// McpManifest returns the MCP manifest built during Initialize.
func (t Tool) McpManifest() tools.McpManifest {
	return t.mcpManifest
}

// Authorized reports whether the verified auth services satisfy the tool's
// authRequired list.
func (t Tool) Authorized(verifiedAuthServices []string) bool {
	return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}

// RequiresClientAuthorization reports whether the tool authenticates to
// Looker with the client's OAuth token (UseClientOAuth).
func (t Tool) RequiresClientAuthorization() bool {
	return t.UseClientOAuth
}
209 | 
210 | // =================================================================================================================
211 | // END MCP SERVER CORE LOGIC
212 | // =================================================================================================================
213 | 
214 | // =================================================================================================================
215 | // START LOOKER HEALTH ANALYZE CORE LOGIC
216 | // =================================================================================================================
// analyzeTool bundles the Looker SDK client with the analysis window
// (timeframe, in days) and the usage threshold (minQueries).
type analyzeTool struct {
	SdkClient  *v4.LookerSDK
	timeframe  int
	minQueries int
}
222 | 
223 | func (t *analyzeTool) projects(ctx context.Context, id string) ([]map[string]interface{}, error) {
224 | 	logger, err := util.LoggerFromContext(ctx)
225 | 	if err != nil {
226 | 		return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
227 | 	}
228 | 
229 | 	var projects []*v4.Project
230 | 	if id != "" {
231 | 		p, err := t.SdkClient.Project(id, "", nil)
232 | 		if err != nil {
233 | 			return nil, fmt.Errorf("error fetching project %s: %w", id, err)
234 | 		}
235 | 		projects = append(projects, &p)
236 | 	} else {
237 | 		allProjects, err := t.SdkClient.AllProjects("", nil)
238 | 		if err != nil {
239 | 			return nil, fmt.Errorf("error fetching all projects: %w", err)
240 | 		}
241 | 		for i := range allProjects {
242 | 			projects = append(projects, &allProjects[i])
243 | 		}
244 | 	}
245 | 
246 | 	var results []map[string]interface{}
247 | 	for _, p := range projects {
248 | 		pName := *p.Name
249 | 		pID := *p.Id
250 | 		logger.InfoContext(ctx, fmt.Sprintf("Analyzing project: %s", pName))
251 | 
252 | 		projectFiles, err := t.SdkClient.AllProjectFiles(pID, "", nil)
253 | 		if err != nil {
254 | 			return nil, fmt.Errorf("error fetching files for project %s: %w", pName, err)
255 | 		}
256 | 
257 | 		modelCount := 0
258 | 		viewFileCount := 0
259 | 		for _, f := range projectFiles {
260 | 			if f.Type != nil {
261 | 				if *f.Type == "model" {
262 | 					modelCount++
263 | 				}
264 | 				if *f.Type == "view" {
265 | 					viewFileCount++
266 | 				}
267 | 			}
268 | 		}
269 | 
270 | 		gitConnectionStatus := "OK"
271 | 		if p.GitRemoteUrl == nil {
272 | 			gitConnectionStatus = "No repo found"
273 | 		} else if strings.Contains(*p.GitRemoteUrl, "/bare_models/") {
274 | 			gitConnectionStatus = "Bare repo, no tests required"
275 | 		}
276 | 
277 | 		results = append(results, map[string]interface{}{
278 | 			"Project":                pName,
279 | 			"# Models":               modelCount,
280 | 			"# View Files":           viewFileCount,
281 | 			"Git Connection Status":  gitConnectionStatus,
282 | 			"PR Mode":                string(*p.PullRequestMode),
283 | 			"Is Validation Required": *p.ValidationRequired,
284 | 		})
285 | 	}
286 | 	return results, nil
287 | }
288 | 
289 | func (t *analyzeTool) models(ctx context.Context, project, model string) ([]map[string]interface{}, error) {
290 | 	logger, err := util.LoggerFromContext(ctx)
291 | 	if err != nil {
292 | 		return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
293 | 	}
294 | 	logger.InfoContext(ctx, "Analyzing models...")
295 | 
296 | 	usedModels, err := t.getUsedModels(ctx)
297 | 	if err != nil {
298 | 		return nil, err
299 | 	}
300 | 
301 | 	lookmlModels, err := t.SdkClient.AllLookmlModels(v4.RequestAllLookmlModels{}, nil)
302 | 	if err != nil {
303 | 		return nil, fmt.Errorf("error fetching LookML models: %w", err)
304 | 	}
305 | 
306 | 	var results []map[string]interface{}
307 | 	for _, m := range lookmlModels {
308 | 		if (project == "" || (m.ProjectName != nil && *m.ProjectName == project)) &&
309 | 			(model == "" || (m.Name != nil && *m.Name == model)) {
310 | 
311 | 			queryCount := 0
312 | 			if qc, ok := usedModels[*m.Name]; ok {
313 | 				queryCount = qc
314 | 			}
315 | 
316 | 			exploreCount := 0
317 | 			if m.Explores != nil {
318 | 				exploreCount = len(*m.Explores)
319 | 			}
320 | 
321 | 			results = append(results, map[string]interface{}{
322 | 				"Project":     *m.ProjectName,
323 | 				"Model":       *m.Name,
324 | 				"# Explores":  exploreCount,
325 | 				"Query Count": queryCount,
326 | 			})
327 | 		}
328 | 	}
329 | 	return results, nil
330 | }
331 | 
332 | func (t *analyzeTool) getUsedModels(ctx context.Context) (map[string]int, error) {
333 | 	limit := "5000"
334 | 	query := &v4.WriteQuery{
335 | 		Model:  "system__activity",
336 | 		View:   "history",
337 | 		Fields: &[]string{"history.query_run_count", "query.model"},
338 | 		Filters: &map[string]any{
339 | 			"history.created_date":    fmt.Sprintf("%d days", t.timeframe),
340 | 			"query.model":             "-system__activity, -i__looker",
341 | 			"history.query_run_count": fmt.Sprintf(">%d", t.minQueries-1),
342 | 			"user.dev_branch_name":    "NULL",
343 | 		},
344 | 		Limit: &limit,
345 | 	}
346 | 	raw, err := lookercommon.RunInlineQuery(ctx, t.SdkClient, query, "json", nil)
347 | 	if err != nil {
348 | 		return nil, err
349 | 	}
350 | 
351 | 	var data []map[string]interface{}
352 | 	_ = json.Unmarshal([]byte(raw), &data)
353 | 
354 | 	results := make(map[string]int)
355 | 	for _, row := range data {
356 | 		model, _ := row["query.model"].(string)
357 | 		count, _ := row["history.query_run_count"].(float64)
358 | 		results[model] = int(count)
359 | 	}
360 | 	return results, nil
361 | }
362 | 
363 | func (t *analyzeTool) getUsedExploreFields(ctx context.Context, model, explore string) (map[string]int, error) {
364 | 	limit := "5000"
365 | 	query := &v4.WriteQuery{
366 | 		Model:  "system__activity",
367 | 		View:   "history",
368 | 		Fields: &[]string{"query.formatted_fields", "query.filters", "history.query_run_count"},
369 | 		Filters: &map[string]any{
370 | 			"history.created_date":   fmt.Sprintf("%d days", t.timeframe),
371 | 			"query.model":            strings.ReplaceAll(model, "_", "^_"),
372 | 			"query.view":             strings.ReplaceAll(explore, "_", "^_"),
373 | 			"query.formatted_fields": "-NULL",
374 | 			"history.workspace_id":   "production",
375 | 		},
376 | 		Limit: &limit,
377 | 	}
378 | 	raw, err := lookercommon.RunInlineQuery(ctx, t.SdkClient, query, "json", nil)
379 | 	if err != nil {
380 | 		return nil, err
381 | 	}
382 | 
383 | 	var data []map[string]interface{}
384 | 	_ = json.Unmarshal([]byte(raw), &data)
385 | 
386 | 	results := make(map[string]int)
387 | 	fieldRegex := regexp.MustCompile(`(\w+\.\w+)`)
388 | 
389 | 	for _, row := range data {
390 | 		count, _ := row["history.query_run_count"].(float64)
391 | 		formattedFields, _ := row["query.formatted_fields"].(string)
392 | 		filters, _ := row["query.filters"].(string)
393 | 
394 | 		usedFields := make(map[string]bool)
395 | 
396 | 		for _, field := range fieldRegex.FindAllString(formattedFields, -1) {
397 | 			results[field] += int(count)
398 | 			usedFields[field] = true
399 | 		}
400 | 
401 | 		for _, field := range fieldRegex.FindAllString(filters, -1) {
402 | 			if _, ok := usedFields[field]; !ok {
403 | 				results[field] += int(count)
404 | 			}
405 | 		}
406 | 	}
407 | 	return results, nil
408 | }
409 | 
// explores analyzes explores (optionally filtered by model and explore name),
// reporting field/join counts, unused fields and joins based on query history,
// and the explore's production query count.
func (t *analyzeTool) explores(ctx context.Context, model, explore string) ([]map[string]interface{}, error) {
	logger, err := util.LoggerFromContext(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
	}
	logger.InfoContext(ctx, "Analyzing explores...")

	lookmlModels, err := t.SdkClient.AllLookmlModels(v4.RequestAllLookmlModels{}, nil)
	if err != nil {
		return nil, fmt.Errorf("error fetching LookML models: %w", err)
	}

	var results []map[string]interface{}
	for _, m := range lookmlModels {
		// Skip models that don't match the filter or have no explores.
		if model != "" && (m.Name == nil || *m.Name != model) {
			continue
		}
		if m.Explores == nil {
			continue
		}

		for _, e := range *m.Explores {
			if explore != "" && (e.Name == nil || *e.Name != explore) {
				continue
			}
			if e.Name == nil {
				continue
			}

			// Get detailed explore info to count fields and joins
			req := v4.RequestLookmlModelExplore{
				LookmlModelName: *m.Name,
				ExploreName:     *e.Name,
			}
			exploreDetail, err := t.SdkClient.LookmlModelExplore(req, nil)
			if err != nil {
				// Log the error but continue to the next explore if possible
				logger.ErrorContext(ctx, fmt.Sprintf("Error fetching detail for explore %s.%s: %v", *m.Name, *e.Name, err))
				continue
			}

			// NOTE(review): the derefs below assume Dimensions/Measures (and
			// each field's Hidden/Name) are non-nil when Fields is set —
			// confirm against the Looker SDK response shape.
			fieldCount := 0
			if exploreDetail.Fields != nil {
				fieldCount = len(*exploreDetail.Fields.Dimensions) + len(*exploreDetail.Fields.Measures)
			}

			joinCount := 0
			if exploreDetail.Joins != nil {
				joinCount = len(*exploreDetail.Joins)
			}

			// Query history tells us which fields were actually used.
			usedFields, err := t.getUsedExploreFields(ctx, *m.Name, *e.Name)
			if err != nil {
				logger.ErrorContext(ctx, fmt.Sprintf("Error fetching used fields for explore %s.%s: %v", *m.Name, *e.Name, err))
				continue
			}

			// Collect all visible (non-hidden) dimension and measure names.
			allFields := []string{}
			if exploreDetail.Fields != nil {
				for _, d := range *exploreDetail.Fields.Dimensions {
					if !*d.Hidden {
						allFields = append(allFields, *d.Name)
					}
				}
				for _, ms := range *exploreDetail.Fields.Measures {
					if !*ms.Hidden {
						allFields = append(allFields, *ms.Name)
					}
				}
			}

			// A field is unused when it never appears in query history.
			unusedFieldsCount := 0
			for _, field := range allFields {
				if _, ok := usedFields[field]; !ok {
					unusedFieldsCount++
				}
			}

			// Aggregate field usage per join (the view prefix of "view.field"),
			// then seed every declared join with zero so unused ones count.
			joinStats := make(map[string]int)
			if exploreDetail.Joins != nil {
				for field, queryCount := range usedFields {
					join := strings.Split(field, ".")[0]
					joinStats[join] += queryCount
				}
				for _, join := range *exploreDetail.Joins {
					if _, ok := joinStats[*join.Name]; !ok {
						joinStats[*join.Name] = 0
					}
				}
			}

			unusedJoinsCount := 0
			for _, count := range joinStats {
				if count == 0 {
					unusedJoinsCount++
				}
			}

			// Use an inline query to get query count for the explore
			limit := "1"
			queryCountQueryBody := &v4.WriteQuery{
				Model:  "system__activity",
				View:   "history",
				Fields: &[]string{"history.query_run_count"},
				Filters: &map[string]any{
					"query.model":             *m.Name,
					"query.view":              *e.Name,
					"history.created_date":    fmt.Sprintf("%d days", t.timeframe),
					"history.query_run_count": fmt.Sprintf(">%d", t.minQueries-1),
					"user.dev_branch_name":    "NULL",
				},
				Limit: &limit,
			}

			rawQueryCount, err := lookercommon.RunInlineQuery(ctx, t.SdkClient, queryCountQueryBody, "json", nil)
			if err != nil {
				return nil, err
			}
			queryCount := 0
			var data []map[string]interface{}
			// Unmarshal errors are intentionally ignored: a bad payload just
			// leaves queryCount at zero.
			_ = json.Unmarshal([]byte(rawQueryCount), &data)
			if len(data) > 0 {
				if count, ok := data[0]["history.query_run_count"].(float64); ok {
					queryCount = int(count)
				}
			}

			results = append(results, map[string]interface{}{
				"Model":           *m.Name,
				"Explore":         *e.Name,
				"Is Hidden":       *e.Hidden,
				"Has Description": e.Description != nil && *e.Description != "",
				"# Joins":         joinCount,
				"# Unused Joins":  unusedJoinsCount,
				"# Unused Fields": unusedFieldsCount,
				"# Fields":        fieldCount,
				"Query Count":     queryCount,
			})
		}
	}
	return results, nil
}
552 | 
553 | // =================================================================================================================
554 | // END LOOKER HEALTH ANALYZE CORE LOGIC
555 | // =================================================================================================================
556 | 
```

--------------------------------------------------------------------------------
/internal/sources/bigquery/bigquery.go:
--------------------------------------------------------------------------------

```go
  1 | // Copyright 2025 Google LLC
  2 | //
  3 | // Licensed under the Apache License, Version 2.0 (the "License");
  4 | // you may not use this file except in compliance with the License.
  5 | // You may obtain a copy of the License at
  6 | //
  7 | //     http://www.apache.org/licenses/LICENSE-2.0
  8 | //
  9 | // Unless required by applicable law or agreed to in writing, software
 10 | // distributed under the License is distributed on an "AS IS" BASIS,
 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 12 | // See the License for the specific language governing permissions and
 13 | // limitations under the License.
 14 | 
 15 | package bigquery
 16 | 
 17 | import (
 18 | 	"context"
 19 | 	"fmt"
 20 | 	"net/http"
 21 | 	"strings"
 22 | 	"sync"
 23 | 	"time"
 24 | 
 25 | 	bigqueryapi "cloud.google.com/go/bigquery"
 26 | 	dataplexapi "cloud.google.com/go/dataplex/apiv1"
 27 | 	"github.com/goccy/go-yaml"
 28 | 	"github.com/googleapis/genai-toolbox/internal/sources"
 29 | 	"github.com/googleapis/genai-toolbox/internal/util"
 30 | 	"go.opentelemetry.io/otel/trace"
 31 | 	"golang.org/x/oauth2"
 32 | 	"golang.org/x/oauth2/google"
 33 | 	bigqueryrestapi "google.golang.org/api/bigquery/v2"
 34 | 	"google.golang.org/api/googleapi"
 35 | 	"google.golang.org/api/option"
 36 | )
 37 | 
// SourceKind identifies the BigQuery source type in tool configuration.
const SourceKind string = "bigquery"

// Supported values for the writeMode configuration field.
const (
	// No write operations are allowed.
	WriteModeBlocked string = "blocked"
	// Only protected write operations are allowed in a BigQuery session.
	WriteModeProtected string = "protected"
	// All write operations are allowed.
	WriteModeAllowed string = "allowed"
)

// Compile-time check that Config implements sources.SourceConfig.
var _ sources.SourceConfig = Config{}

// BigqueryClientCreator builds BigQuery clients from a caller-supplied OAuth
// token string (used when useClientOAuth is enabled).
type BigqueryClientCreator func(tokenString string, wantRestService bool) (*bigqueryapi.Client, *bigqueryrestapi.Service, error)

// BigQuerySessionProvider returns the source's BigQuery session; it yields
// nil when the source is not in protected write mode.
type BigQuerySessionProvider func(ctx context.Context) (*Session, error)

// DataplexClientCreator builds a Dataplex catalog client from a caller-supplied
// OAuth token string.
type DataplexClientCreator func(tokenString string) (*dataplexapi.CatalogClient, error)

func init() {
	if !sources.Register(SourceKind, newConfig) {
		panic(fmt.Sprintf("source kind %q already registered", SourceKind))
	}
}
 63 | 
 64 | func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources.SourceConfig, error) {
 65 | 	actual := Config{Name: name}
 66 | 	if err := decoder.DecodeContext(ctx, &actual); err != nil {
 67 | 		return nil, err
 68 | 	}
 69 | 	return actual, nil
 70 | }
 71 | 
// Config is the YAML configuration for a BigQuery source.
type Config struct {
	// BigQuery configs
	Name            string   `yaml:"name" validate:"required"`
	Kind            string   `yaml:"kind" validate:"required"`
	Project         string   `yaml:"project" validate:"required"`
	Location        string   `yaml:"location"`
	WriteMode       string   `yaml:"writeMode"`       // "allowed" (default), "protected", or "blocked"
	AllowedDatasets []string `yaml:"allowedDatasets"` // entries of the form "dataset" or "project.dataset"; verified at startup
	UseClientOAuth  bool     `yaml:"useClientOAuth"`  // build clients per request from the caller's OAuth token
}
 82 | 
// SourceConfigKind returns the source kind string ("bigquery").
func (r Config) SourceConfigKind() string {
	// Returns BigQuery source kind
	return SourceKind
}
 87 | 
 88 | func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
 89 | 	if r.WriteMode == "" {
 90 | 		r.WriteMode = WriteModeAllowed
 91 | 	}
 92 | 
 93 | 	if r.WriteMode == WriteModeProtected && r.UseClientOAuth {
 94 | 		return nil, fmt.Errorf("writeMode 'protected' cannot be used with useClientOAuth 'true'")
 95 | 	}
 96 | 
 97 | 	var client *bigqueryapi.Client
 98 | 	var restService *bigqueryrestapi.Service
 99 | 	var tokenSource oauth2.TokenSource
100 | 	var clientCreator BigqueryClientCreator
101 | 	var err error
102 | 
103 | 	if r.UseClientOAuth {
104 | 		clientCreator, err = newBigQueryClientCreator(ctx, tracer, r.Project, r.Location, r.Name)
105 | 		if err != nil {
106 | 			return nil, fmt.Errorf("error constructing client creator: %w", err)
107 | 		}
108 | 	} else {
109 | 		// Initializes a BigQuery Google SQL source
110 | 		client, restService, tokenSource, err = initBigQueryConnection(ctx, tracer, r.Name, r.Project, r.Location)
111 | 		if err != nil {
112 | 			return nil, fmt.Errorf("error creating client from ADC: %w", err)
113 | 		}
114 | 	}
115 | 
116 | 	allowedDatasets := make(map[string]struct{})
117 | 	// Get full id of allowed datasets and verify they exist.
118 | 	if len(r.AllowedDatasets) > 0 {
119 | 		for _, allowed := range r.AllowedDatasets {
120 | 			var projectID, datasetID, allowedFullID string
121 | 			if strings.Contains(allowed, ".") {
122 | 				parts := strings.Split(allowed, ".")
123 | 				if len(parts) != 2 {
124 | 					return nil, fmt.Errorf("invalid allowedDataset format: %q, expected 'project.dataset' or 'dataset'", allowed)
125 | 				}
126 | 				projectID = parts[0]
127 | 				datasetID = parts[1]
128 | 				allowedFullID = allowed
129 | 			} else {
130 | 				projectID = r.Project
131 | 				datasetID = allowed
132 | 				allowedFullID = fmt.Sprintf("%s.%s", projectID, datasetID)
133 | 			}
134 | 
135 | 			if client != nil {
136 | 				dataset := client.DatasetInProject(projectID, datasetID)
137 | 				_, err := dataset.Metadata(ctx)
138 | 				if err != nil {
139 | 					if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == http.StatusNotFound {
140 | 						return nil, fmt.Errorf("allowedDataset '%s' not found in project '%s'", datasetID, projectID)
141 | 					}
142 | 					return nil, fmt.Errorf("failed to verify allowedDataset '%s' in project '%s': %w", datasetID, projectID, err)
143 | 				}
144 | 			}
145 | 			allowedDatasets[allowedFullID] = struct{}{}
146 | 		}
147 | 	}
148 | 
149 | 	s := &Source{
150 | 		Name:               r.Name,
151 | 		Kind:               SourceKind,
152 | 		Project:            r.Project,
153 | 		Location:           r.Location,
154 | 		Client:             client,
155 | 		RestService:        restService,
156 | 		TokenSource:        tokenSource,
157 | 		MaxQueryResultRows: 50,
158 | 		ClientCreator:      clientCreator,
159 | 		WriteMode:          r.WriteMode,
160 | 		AllowedDatasets:    allowedDatasets,
161 | 		UseClientOAuth:     r.UseClientOAuth,
162 | 	}
163 | 	s.SessionProvider = s.newBigQuerySessionProvider()
164 | 
165 | 	if r.WriteMode != WriteModeAllowed && r.WriteMode != WriteModeBlocked && r.WriteMode != WriteModeProtected {
166 | 		return nil, fmt.Errorf("invalid writeMode %q: must be one of %q, %q, or %q", r.WriteMode, WriteModeAllowed, WriteModeProtected, WriteModeBlocked)
167 | 	}
168 | 	s.makeDataplexCatalogClient = s.lazyInitDataplexClient(ctx, tracer)
169 | 	return s, nil
170 | 
171 | }
172 | 
// Compile-time check that *Source implements sources.Source.
var _ sources.Source = &Source{}

// Source is an initialized BigQuery source: shared clients (or a per-token
// client creator), dataset access restrictions, and session state used in
// protected write mode.
type Source struct {
	// BigQuery Google SQL struct with client
	Name                      string `yaml:"name"`
	Kind                      string `yaml:"kind"`
	Project                   string
	Location                  string
	Client                    *bigqueryapi.Client
	RestService               *bigqueryrestapi.Service
	TokenSource               oauth2.TokenSource
	MaxQueryResultRows        int
	ClientCreator             BigqueryClientCreator
	AllowedDatasets           map[string]struct{} // full "project.dataset" IDs verified during Initialize
	UseClientOAuth            bool
	WriteMode                 string
	sessionMutex              sync.Mutex // guards Session inside the session provider
	makeDataplexCatalogClient func() (*dataplexapi.CatalogClient, DataplexClientCreator, error)
	SessionProvider           BigQuerySessionProvider
	Session                   *Session
}
194 | 
// Session holds the identifiers and timestamps of a BigQuery session used in
// protected write mode.
type Session struct {
	ID           string // session ID passed to jobs via the "session_id" connection property
	ProjectID    string
	DatasetID    string
	CreationTime time.Time // used to enforce the session's maximum lifetime
	LastUsed     time.Time // updated on each successful session validation
}
202 | 
// SourceKind returns the BigQuery source kind string.
func (s *Source) SourceKind() string {
	// Returns BigQuery Google SQL source kind
	return SourceKind
}

// BigQueryClient returns the shared BigQuery client (nil when useClientOAuth
// is enabled and clients are created per request instead).
func (s *Source) BigQueryClient() *bigqueryapi.Client {
	return s.Client
}

// BigQueryRestService returns the BigQuery REST API service handle.
func (s *Source) BigQueryRestService() *bigqueryrestapi.Service {
	return s.RestService
}

// BigQueryWriteMode returns the configured write mode ("allowed",
// "protected", or "blocked").
func (s *Source) BigQueryWriteMode() string {
	return s.WriteMode
}

// BigQuerySession returns the provider used to obtain (and lazily refresh)
// the source's BigQuery session.
func (s *Source) BigQuerySession() BigQuerySessionProvider {
	return s.SessionProvider
}
223 | 
224 | func (s *Source) newBigQuerySessionProvider() BigQuerySessionProvider {
225 | 	return func(ctx context.Context) (*Session, error) {
226 | 		if s.WriteMode != WriteModeProtected {
227 | 			return nil, nil
228 | 		}
229 | 
230 | 		s.sessionMutex.Lock()
231 | 		defer s.sessionMutex.Unlock()
232 | 
233 | 		logger, err := util.LoggerFromContext(ctx)
234 | 		if err != nil {
235 | 			return nil, fmt.Errorf("failed to get logger from context: %w", err)
236 | 		}
237 | 
238 | 		if s.Session != nil {
239 | 			// Absolute 7-day lifetime check.
240 | 			const sessionMaxLifetime = 7 * 24 * time.Hour
241 | 			// This assumes a single task will not exceed 30 minutes, preventing it from failing mid-execution.
242 | 			const refreshThreshold = 30 * time.Minute
243 | 			if time.Since(s.Session.CreationTime) > (sessionMaxLifetime - refreshThreshold) {
244 | 				logger.DebugContext(ctx, "Session is approaching its 7-day maximum lifetime. Creating a new one.")
245 | 			} else {
246 | 				job := &bigqueryrestapi.Job{
247 | 					Configuration: &bigqueryrestapi.JobConfiguration{
248 | 						DryRun: true,
249 | 						Query: &bigqueryrestapi.JobConfigurationQuery{
250 | 							Query:                "SELECT 1",
251 | 							UseLegacySql:         new(bool),
252 | 							ConnectionProperties: []*bigqueryrestapi.ConnectionProperty{{Key: "session_id", Value: s.Session.ID}},
253 | 						},
254 | 					},
255 | 				}
256 | 				_, err := s.RestService.Jobs.Insert(s.Project, job).Do()
257 | 				if err == nil {
258 | 					s.Session.LastUsed = time.Now()
259 | 					return s.Session, nil
260 | 				}
261 | 				logger.DebugContext(ctx, "Session validation failed (likely expired), creating a new one.", "error", err)
262 | 			}
263 | 		}
264 | 
265 | 		// Create a new session if one doesn't exist, it has passed its 7-day lifetime,
266 | 		// or it failed the validation dry run.
267 | 
268 | 		creationTime := time.Now()
269 | 		job := &bigqueryrestapi.Job{
270 | 			JobReference: &bigqueryrestapi.JobReference{
271 | 				ProjectId: s.Project,
272 | 				Location:  s.Location,
273 | 			},
274 | 			Configuration: &bigqueryrestapi.JobConfiguration{
275 | 				DryRun: true,
276 | 				Query: &bigqueryrestapi.JobConfigurationQuery{
277 | 					Query:         "SELECT 1",
278 | 					CreateSession: true,
279 | 				},
280 | 			},
281 | 		}
282 | 
283 | 		createdJob, err := s.RestService.Jobs.Insert(s.Project, job).Do()
284 | 		if err != nil {
285 | 			return nil, fmt.Errorf("failed to create new session: %w", err)
286 | 		}
287 | 
288 | 		var sessionID, sessionDatasetID, projectID string
289 | 		if createdJob.Status != nil && createdJob.Statistics.SessionInfo != nil {
290 | 			sessionID = createdJob.Statistics.SessionInfo.SessionId
291 | 		} else {
292 | 			return nil, fmt.Errorf("failed to get session ID from new session job")
293 | 		}
294 | 
295 | 		if createdJob.Configuration != nil && createdJob.Configuration.Query != nil && createdJob.Configuration.Query.DestinationTable != nil {
296 | 			sessionDatasetID = createdJob.Configuration.Query.DestinationTable.DatasetId
297 | 			projectID = createdJob.Configuration.Query.DestinationTable.ProjectId
298 | 		} else {
299 | 			return nil, fmt.Errorf("failed to get session dataset ID from new session job")
300 | 		}
301 | 
302 | 		s.Session = &Session{
303 | 			ID:           sessionID,
304 | 			ProjectID:    projectID,
305 | 			DatasetID:    sessionDatasetID,
306 | 			CreationTime: creationTime,
307 | 			LastUsed:     creationTime,
308 | 		}
309 | 		return s.Session, nil
310 | 	}
311 | }
312 | 
313 | func (s *Source) UseClientAuthorization() bool {
314 | 	return s.UseClientOAuth
315 | }
316 | 
317 | func (s *Source) BigQueryProject() string {
318 | 	return s.Project
319 | }
320 | 
321 | func (s *Source) BigQueryLocation() string {
322 | 	return s.Location
323 | }
324 | 
325 | func (s *Source) BigQueryTokenSource() oauth2.TokenSource {
326 | 	return s.TokenSource
327 | }
328 | 
329 | func (s *Source) BigQueryTokenSourceWithScope(ctx context.Context, scope string) (oauth2.TokenSource, error) {
330 | 	return google.DefaultTokenSource(ctx, scope)
331 | }
332 | 
333 | func (s *Source) GetMaxQueryResultRows() int {
334 | 	return s.MaxQueryResultRows
335 | }
336 | 
337 | func (s *Source) BigQueryClientCreator() BigqueryClientCreator {
338 | 	return s.ClientCreator
339 | }
340 | 
341 | func (s *Source) BigQueryAllowedDatasets() []string {
342 | 	if len(s.AllowedDatasets) == 0 {
343 | 		return nil
344 | 	}
345 | 	datasets := make([]string, 0, len(s.AllowedDatasets))
346 | 	for d := range s.AllowedDatasets {
347 | 		datasets = append(datasets, d)
348 | 	}
349 | 	return datasets
350 | }
351 | 
352 | // IsDatasetAllowed checks if a given dataset is accessible based on the source's configuration.
353 | func (s *Source) IsDatasetAllowed(projectID, datasetID string) bool {
354 | 	// If the normalized map is empty, it means no restrictions were configured.
355 | 	if len(s.AllowedDatasets) == 0 {
356 | 		return true
357 | 	}
358 | 
359 | 	targetDataset := fmt.Sprintf("%s.%s", projectID, datasetID)
360 | 	_, ok := s.AllowedDatasets[targetDataset]
361 | 	return ok
362 | }
363 | 
364 | func (s *Source) MakeDataplexCatalogClient() func() (*dataplexapi.CatalogClient, DataplexClientCreator, error) {
365 | 	return s.makeDataplexCatalogClient
366 | }
367 | 
368 | func (s *Source) lazyInitDataplexClient(ctx context.Context, tracer trace.Tracer) func() (*dataplexapi.CatalogClient, DataplexClientCreator, error) {
369 | 	var once sync.Once
370 | 	var client *dataplexapi.CatalogClient
371 | 	var clientCreator DataplexClientCreator
372 | 	var err error
373 | 
374 | 	return func() (*dataplexapi.CatalogClient, DataplexClientCreator, error) {
375 | 		once.Do(func() {
376 | 			c, cc, e := initDataplexConnection(ctx, tracer, s.Name, s.Project, s.UseClientOAuth)
377 | 			if e != nil {
378 | 				err = fmt.Errorf("failed to initialize dataplex client: %w", e)
379 | 				return
380 | 			}
381 | 			client = c
382 | 			clientCreator = cc
383 | 		})
384 | 		return client, clientCreator, err
385 | 	}
386 | }
387 | 
388 | func initBigQueryConnection(
389 | 	ctx context.Context,
390 | 	tracer trace.Tracer,
391 | 	name string,
392 | 	project string,
393 | 	location string,
394 | ) (*bigqueryapi.Client, *bigqueryrestapi.Service, oauth2.TokenSource, error) {
395 | 	ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
396 | 	defer span.End()
397 | 
398 | 	cred, err := google.FindDefaultCredentials(ctx, "https://www.googleapis.com/auth/cloud-platform")
399 | 	if err != nil {
400 | 		return nil, nil, nil, fmt.Errorf("failed to find default Google Cloud credentials with scope %q: %w", bigqueryapi.Scope, err)
401 | 	}
402 | 
403 | 	userAgent, err := util.UserAgentFromContext(ctx)
404 | 	if err != nil {
405 | 		return nil, nil, nil, err
406 | 	}
407 | 
408 | 	// Initialize the high-level BigQuery client
409 | 	client, err := bigqueryapi.NewClient(ctx, project, option.WithUserAgent(userAgent), option.WithCredentials(cred))
410 | 	if err != nil {
411 | 		return nil, nil, nil, fmt.Errorf("failed to create BigQuery client for project %q: %w", project, err)
412 | 	}
413 | 	client.Location = location
414 | 
415 | 	// Initialize the low-level BigQuery REST service using the same credentials
416 | 	restService, err := bigqueryrestapi.NewService(ctx, option.WithUserAgent(userAgent), option.WithCredentials(cred))
417 | 	if err != nil {
418 | 		return nil, nil, nil, fmt.Errorf("failed to create BigQuery v2 service: %w", err)
419 | 	}
420 | 
421 | 	return client, restService, cred.TokenSource, nil
422 | }
423 | 
424 | // initBigQueryConnectionWithOAuthToken initialize a BigQuery client with an
425 | // OAuth access token.
426 | func initBigQueryConnectionWithOAuthToken(
427 | 	ctx context.Context,
428 | 	tracer trace.Tracer,
429 | 	project string,
430 | 	location string,
431 | 	name string,
432 | 	userAgent string,
433 | 	tokenString string,
434 | 	wantRestService bool,
435 | ) (*bigqueryapi.Client, *bigqueryrestapi.Service, error) {
436 | 	ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
437 | 	defer span.End()
438 | 	// Construct token source
439 | 	token := &oauth2.Token{
440 | 		AccessToken: string(tokenString),
441 | 	}
442 | 	ts := oauth2.StaticTokenSource(token)
443 | 
444 | 	// Initialize the BigQuery client with tokenSource
445 | 	client, err := bigqueryapi.NewClient(ctx, project, option.WithUserAgent(userAgent), option.WithTokenSource(ts))
446 | 	if err != nil {
447 | 		return nil, nil, fmt.Errorf("failed to create BigQuery client for project %q: %w", project, err)
448 | 	}
449 | 	client.Location = location
450 | 
451 | 	if wantRestService {
452 | 		// Initialize the low-level BigQuery REST service using the same credentials
453 | 		restService, err := bigqueryrestapi.NewService(ctx, option.WithUserAgent(userAgent), option.WithTokenSource(ts))
454 | 		if err != nil {
455 | 			return nil, nil, fmt.Errorf("failed to create BigQuery v2 service: %w", err)
456 | 		}
457 | 		return client, restService, nil
458 | 	}
459 | 
460 | 	return client, nil, nil
461 | }
462 | 
463 | // newBigQueryClientCreator sets the project parameters for the init helper
464 | // function. The returned function takes in an OAuth access token and uses it to
465 | // create a BQ client.
466 | func newBigQueryClientCreator(
467 | 	ctx context.Context,
468 | 	tracer trace.Tracer,
469 | 	project string,
470 | 	location string,
471 | 	name string,
472 | ) (func(string, bool) (*bigqueryapi.Client, *bigqueryrestapi.Service, error), error) {
473 | 	userAgent, err := util.UserAgentFromContext(ctx)
474 | 	if err != nil {
475 | 		return nil, err
476 | 	}
477 | 
478 | 	return func(tokenString string, wantRestService bool) (*bigqueryapi.Client, *bigqueryrestapi.Service, error) {
479 | 		return initBigQueryConnectionWithOAuthToken(ctx, tracer, project, location, name, userAgent, tokenString, wantRestService)
480 | 	}, nil
481 | }
482 | 
483 | func initDataplexConnection(
484 | 	ctx context.Context,
485 | 	tracer trace.Tracer,
486 | 	name string,
487 | 	project string,
488 | 	useClientOAuth bool,
489 | ) (*dataplexapi.CatalogClient, DataplexClientCreator, error) {
490 | 	var client *dataplexapi.CatalogClient
491 | 	var clientCreator DataplexClientCreator
492 | 	var err error
493 | 
494 | 	ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
495 | 	defer span.End()
496 | 
497 | 	cred, err := google.FindDefaultCredentials(ctx)
498 | 	if err != nil {
499 | 		return nil, nil, fmt.Errorf("failed to find default Google Cloud credentials: %w", err)
500 | 	}
501 | 
502 | 	userAgent, err := util.UserAgentFromContext(ctx)
503 | 	if err != nil {
504 | 		return nil, nil, err
505 | 	}
506 | 
507 | 	if useClientOAuth {
508 | 		clientCreator = newDataplexClientCreator(ctx, project, userAgent)
509 | 	} else {
510 | 		client, err = dataplexapi.NewCatalogClient(ctx, option.WithUserAgent(userAgent), option.WithCredentials(cred))
511 | 		if err != nil {
512 | 			return nil, nil, fmt.Errorf("failed to create Dataplex client for project %q: %w", project, err)
513 | 		}
514 | 	}
515 | 
516 | 	return client, clientCreator, nil
517 | }
518 | 
519 | func initDataplexConnectionWithOAuthToken(
520 | 	ctx context.Context,
521 | 	project string,
522 | 	userAgent string,
523 | 	tokenString string,
524 | ) (*dataplexapi.CatalogClient, error) {
525 | 	// Construct token source
526 | 	token := &oauth2.Token{
527 | 		AccessToken: string(tokenString),
528 | 	}
529 | 	ts := oauth2.StaticTokenSource(token)
530 | 
531 | 	client, err := dataplexapi.NewCatalogClient(ctx, option.WithUserAgent(userAgent), option.WithTokenSource(ts))
532 | 	if err != nil {
533 | 		return nil, fmt.Errorf("failed to create Dataplex client for project %q: %w", project, err)
534 | 	}
535 | 	return client, nil
536 | }
537 | 
538 | func newDataplexClientCreator(
539 | 	ctx context.Context,
540 | 	project string,
541 | 	userAgent string,
542 | ) func(string) (*dataplexapi.CatalogClient, error) {
543 | 	return func(tokenString string) (*dataplexapi.CatalogClient, error) {
544 | 		return initDataplexConnectionWithOAuthToken(ctx, project, userAgent, tokenString)
545 | 	}
546 | }
547 | 
```

--------------------------------------------------------------------------------
/docs/en/getting-started/introduction/_index.md:
--------------------------------------------------------------------------------

```markdown
  1 | ---
  2 | title: "Introduction"
  3 | type: docs
  4 | weight: 1
  5 | description: >
  6 |   An introduction to MCP Toolbox for Databases.
  7 | ---
  8 | 
  9 | MCP Toolbox for Databases is an open source MCP server for databases. It enables
 10 | you to develop tools easier, faster, and more securely by handling the complexities
 11 | such as connection pooling, authentication, and more.
 12 | 
 13 | {{< notice note >}}
 14 | This solution was originally named “Gen AI Toolbox for
 15 | Databases” as its initial development predated MCP, but was renamed to align
 16 | with recently added MCP compatibility.
 17 | {{< /notice >}}
 18 | 
 19 | ## Why Toolbox?
 20 | 
 21 | Toolbox helps you build Gen AI tools that let your agents access data in your
 22 | database. Toolbox provides:
 23 | 
 24 | - **Simplified development**: Integrate tools to your agent in less than 10
 25 |   lines of code, reuse tools between multiple agents or frameworks, and deploy
 26 |   new versions of tools more easily.
 27 | - **Better performance**: Best practices such as connection pooling,
 28 |   authentication, and more.
 29 | - **Enhanced security**: Integrated auth for more secure access to your data
 30 | - **End-to-end observability**: Out of the box metrics and tracing with built-in
 31 |   support for OpenTelemetry.
 32 | 
 33 | **⚡ Supercharge Your Workflow with an AI Database Assistant ⚡**
 34 | 
 35 | Stop context-switching and let your AI assistant become a true co-developer. By
 36 | [connecting your IDE to your databases with MCP Toolbox][connect-ide], you can
 37 | delegate complex and time-consuming database tasks, allowing you to build faster
 38 | and focus on what matters. This isn't just about code completion; it's about
 39 | giving your AI the context it needs to handle the entire development lifecycle.
 40 | 
 41 | Here’s how it will save you time:
 42 | 
 43 | - **Query in Plain English**: Interact with your data using natural language
 44 |   right from your IDE. Ask complex questions like, *"How many orders were
 45 |   delivered in 2024, and what items were in them?"* without writing any SQL.
 46 | - **Automate Database Management**: Simply describe your data needs, and let the
 47 |   AI assistant manage your database for you. It can handle generating queries,
 48 |   creating tables, adding indexes, and more.
 49 | - **Generate Context-Aware Code**: Empower your AI assistant to generate
 50 |   application code and tests with a deep understanding of your real-time
 51 |   database schema.  This accelerates the development cycle by ensuring the
 52 |   generated code is directly usable.
 53 | - **Slash Development Overhead**: Radically reduce the time spent on manual
 54 |   setup and boilerplate. MCP Toolbox helps streamline lengthy database
 55 |   configurations, repetitive code, and error-prone schema migrations.
 56 | 
 57 | Learn [how to connect your AI tools (IDEs) to Toolbox using MCP][connect-ide].
 58 | 
 59 | [connect-ide]: ../../how-to/connect-ide/
 60 | 
 61 | ## General Architecture
 62 | 
 63 | Toolbox sits between your application's orchestration framework and your
 64 | database, providing a control plane that is used to modify, distribute, or
 65 | invoke tools. It simplifies the management of your tools by providing you with a
 66 | centralized location to store and update tools, allowing you to share tools
 67 | between agents and applications and update those tools without necessarily
 68 | redeploying your application.
 69 | 
 70 | ![architecture](./architecture.png)
 71 | 
 72 | ## Getting Started
 73 | 
 74 | ### Installing the server
 75 | 
 76 | For the latest version, check the [releases page][releases] and use the
 77 | following instructions for your OS and CPU architecture.
 78 | 
 79 | [releases]: https://github.com/googleapis/genai-toolbox/releases
 80 | 
 81 | <!-- {x-release-please-start-version} -->
 82 | {{< tabpane text=true >}}
 83 | {{% tab header="Binary" lang="en" %}}
 84 | {{< tabpane text=true >}}
 85 | {{% tab header="Linux (AMD64)" lang="en" %}}
 86 | To install Toolbox as a binary on Linux (AMD64):
 87 | ```sh
 88 | # see releases page for other versions
 89 | export VERSION=0.17.0
 90 | curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
 91 | chmod +x toolbox
 92 | ```
 93 | {{% /tab %}}
 94 | {{% tab header="macOS (Apple Silicon)" lang="en" %}}
 95 | To install Toolbox as a binary on macOS (Apple Silicon):
 96 | ```sh
 97 | # see releases page for other versions
 98 | export VERSION=0.17.0
 99 | curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
100 | chmod +x toolbox
101 | ```
102 | {{% /tab %}}
103 | {{% tab header="macOS (Intel)" lang="en" %}}
104 | To install Toolbox as a binary on macOS (Intel):
105 | ```sh
106 | # see releases page for other versions
107 | export VERSION=0.17.0
108 | curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
109 | chmod +x toolbox
110 | ```
111 | {{% /tab %}}
112 | {{% tab header="Windows (AMD64)" lang="en" %}}
113 | To install Toolbox as a binary on Windows (AMD64):
114 | ```powershell
115 | # see releases page for other versions
116 | $VERSION = "0.17.0"
117 | Invoke-WebRequest -Uri "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe" -OutFile "toolbox.exe"
118 | ```
119 | {{% /tab %}}
120 | {{< /tabpane >}}
121 | {{% /tab %}}
122 | {{% tab header="Container image" lang="en" %}}
123 | You can also install Toolbox as a container:
124 | 
125 | ```sh
126 | # see releases page for other versions
127 | export VERSION=0.17.0
128 | docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
129 | ```
130 | 
131 | {{% /tab %}}
132 | {{% tab header="Homebrew" lang="en" %}}
133 | To install Toolbox using Homebrew on macOS or Linux:
134 | 
135 | ```sh
136 | brew install mcp-toolbox
137 | ```
138 | 
139 | {{% /tab %}}
140 | {{% tab header="Compile from source" lang="en" %}}
141 | 
142 | To install from source, ensure you have the latest version of
143 | [Go installed](https://go.dev/doc/install), and then run the following command:
144 | 
145 | ```sh
146 | go install github.com/googleapis/[email protected]
147 | ```
148 | 
149 | {{% /tab %}}
150 | {{< /tabpane >}}
151 | <!-- {x-release-please-end} -->
152 | 
153 | ### Running the server
154 | 
155 | [Configure](../configure.md) a `tools.yaml` to define your tools, and then
156 | execute `toolbox` to start the server:
157 | 
158 | ```sh
159 | ./toolbox --tools-file "tools.yaml"
160 | ```
161 | 
162 | {{< notice note >}}
163 | Toolbox enables dynamic reloading by default. To disable, use the
164 | `--disable-reload` flag.
165 | {{< /notice >}}
166 | 
167 | #### Launching Toolbox UI
168 | 
169 | To launch Toolbox's interactive UI, use the `--ui` flag. This allows you to test
170 | tools and toolsets with features such as authorized parameters. To learn more,
171 | visit [Toolbox UI](../../how-to/toolbox-ui/index.md).
172 | 
173 | ```sh
174 | ./toolbox --ui
175 | ```
176 | 
177 | #### Homebrew Users
178 | 
179 | If you installed Toolbox using Homebrew, the `toolbox` binary is available in
180 | your system path. You can start the server with the same command:
181 | 
182 | ```sh
183 | toolbox --tools-file "tools.yaml"
184 | ```
185 | 
186 | You can use `toolbox help` for a full list of flags! To stop the server, send a
187 | terminate signal (`ctrl+c` on most platforms).
188 | 
189 | For more detailed documentation on deploying to different environments, check
out the resources in the [How-to section](../../how-to/).
191 | 
192 | ### Integrating your application
193 | 
194 | Once your server is up and running, you can load the tools into your
195 | application. See below the list of Client SDKs for using various frameworks:
196 | 
197 | #### Python
198 | 
199 | {{< tabpane text=true persist=header >}}
200 | {{% tab header="Core" lang="en" %}}
201 | 
202 | Once you've installed the [Toolbox Core
203 | SDK](https://pypi.org/project/toolbox-core/), you can load
204 | tools:
205 | 
206 | {{< highlight python >}}
207 | from toolbox_core import ToolboxClient
208 | 
209 | # update the url to point to your server
210 | 
211 | async with ToolboxClient("http://127.0.0.1:5000") as client:
212 | 
213 |     # these tools can be passed to your application!
214 |     tools = await client.load_toolset("toolset_name")
215 | {{< /highlight >}}
216 | 
217 | For more detailed instructions on using the Toolbox Core SDK, see the
218 | [project's
219 | README](https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/packages/toolbox-core/README.md).
220 | 
221 | {{% /tab %}}
222 | {{% tab header="LangChain" lang="en" %}}
223 | 
224 | Once you've installed the [Toolbox LangChain
225 | SDK](https://pypi.org/project/toolbox-langchain/), you can load
226 | tools:
227 | 
228 | {{< highlight python >}}
229 | from toolbox_langchain import ToolboxClient
230 | 
231 | # update the url to point to your server
232 | 
233 | async with ToolboxClient("http://127.0.0.1:5000") as client:
234 | 
235 |     # these tools can be passed to your application!
236 |     tools = client.load_toolset()
237 | {{< /highlight >}}
238 | 
239 | For more detailed instructions on using the Toolbox LangChain SDK, see the
240 | [project's
241 | README](https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/packages/toolbox-langchain/README.md).
242 | 
243 | {{% /tab %}}
244 | {{% tab header="Llamaindex" lang="en" %}}
245 | 
246 | Once you've installed the [Toolbox Llamaindex
247 | SDK](https://github.com/googleapis/genai-toolbox-llamaindex-python), you can load
248 | tools:
249 | 
250 | {{< highlight python >}}
251 | from toolbox_llamaindex import ToolboxClient
252 | 
253 | # update the url to point to your server
254 | 
255 | async with ToolboxClient("http://127.0.0.1:5000") as client:
256 | 
257 | # these tools can be passed to your application
258 | 
259 |   tools = client.load_toolset()
260 | {{< /highlight >}}
261 | 
262 | For more detailed instructions on using the Toolbox Llamaindex SDK, see the
263 | [project's
264 | README](https://github.com/googleapis/genai-toolbox-llamaindex-python/blob/main/README.md).
265 | 
266 | {{% /tab %}}
267 | {{< /tabpane >}}
268 | 
269 | #### Javascript/Typescript
270 | 
271 | Once you've installed the [Toolbox Core
272 | SDK](https://www.npmjs.com/package/@toolbox-sdk/core), you can load
273 | tools:
274 | 
275 | {{< tabpane text=true persist=header >}}
276 | {{% tab header="Core" lang="en" %}}
277 | 
278 | {{< highlight javascript >}}
279 | import { ToolboxClient } from '@toolbox-sdk/core';
280 | 
281 | // update the url to point to your server
282 | const URL = 'http://127.0.0.1:5000';
283 | let client = new ToolboxClient(URL);
284 | 
285 | // these tools can be passed to your application!
286 | const toolboxTools = await client.loadToolset('toolsetName');
287 | {{< /highlight >}}
288 | 
289 | {{% /tab %}}
290 | {{% tab header="LangChain/Langraph" lang="en" %}}
291 | 
292 | {{< highlight javascript >}}
293 | import { ToolboxClient } from '@toolbox-sdk/core';
294 | 
295 | // update the url to point to your server
296 | const URL = 'http://127.0.0.1:5000';
297 | let client = new ToolboxClient(URL);
298 | 
299 | // these tools can be passed to your application!
300 | const toolboxTools = await client.loadToolset('toolsetName');
301 | 
302 | // Define the basics of the tool: name, description, schema and core logic
303 | const getTool = (toolboxTool) => tool(currTool, {
304 |     name: toolboxTool.getName(),
305 |     description: toolboxTool.getDescription(),
306 |     schema: toolboxTool.getParamSchema()
307 | });
308 | 
309 | // Use these tools in your Langchain/Langraph applications
310 | const tools = toolboxTools.map(getTool);
311 | {{< /highlight >}}
312 | 
313 | {{% /tab %}}
314 | {{% tab header="Genkit" lang="en" %}}
315 | 
316 | {{< highlight javascript >}}
317 | import { ToolboxClient } from '@toolbox-sdk/core';
318 | import { genkit } from 'genkit';
319 | 
320 | // Initialise genkit
321 | const ai = genkit({
322 |     plugins: [
323 |         googleAI({
324 |             apiKey: process.env.GEMINI_API_KEY || process.env.GOOGLE_API_KEY
325 |         })
326 |     ],
327 |     model: googleAI.model('gemini-2.0-flash'),
328 | });
329 | 
330 | // update the url to point to your server
331 | const URL = 'http://127.0.0.1:5000';
332 | let client = new ToolboxClient(URL);
333 | 
334 | // these tools can be passed to your application!
335 | const toolboxTools = await client.loadToolset('toolsetName');
336 | 
337 | // Define the basics of the tool: name, description, schema and core logic
338 | const getTool = (toolboxTool) => ai.defineTool({
339 |     name: toolboxTool.getName(),
340 |     description: toolboxTool.getDescription(),
341 |     schema: toolboxTool.getParamSchema()
342 | }, toolboxTool)
343 | 
344 | // Use these tools in your Genkit applications
345 | const tools = toolboxTools.map(getTool);
346 | {{< /highlight >}}
347 | 
348 | {{% /tab %}}
349 | {{% tab header="LlamaIndex" lang="en" %}}
350 | 
351 | {{< highlight javascript >}}
352 | import { ToolboxClient } from '@toolbox-sdk/core';
353 | import { tool } from "llamaindex";
354 | 
355 | // update the url to point to your server
356 | const URL = 'http://127.0.0.1:5000';
357 | let client = new ToolboxClient(URL);
358 | 
359 | // these tools can be passed to your application!
360 | const toolboxTools = await client.loadToolset('toolsetName');
361 | 
362 | // Define the basics of the tool: name, description, schema and core logic
363 | const getTool = (toolboxTool) => tool({
364 |     name: toolboxTool.getName(),
365 |     description: toolboxTool.getDescription(),
366 |     parameters: toolboxTool.getParamSchema(),
367 |     execute: toolboxTool
});
369 | 
370 | // Use these tools in your LlamaIndex applications
371 | const tools = toolboxTools.map(getTool);
372 | 
373 | {{< /highlight >}}
374 | 
375 | {{% /tab %}}
376 | {{< /tabpane >}}
377 | 
378 | For more detailed instructions on using the Toolbox Core SDK, see the
379 | [project's
380 | README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
381 | 
382 | #### Go
383 | 
384 | Once you've installed the [Toolbox Go
385 | SDK](https://pkg.go.dev/github.com/googleapis/mcp-toolbox-sdk-go/core), you can load
386 | tools:
387 | 
388 | {{< tabpane text=true persist=header >}}
389 | {{% tab header="Core" lang="en" %}}
390 | 
391 | {{< highlight go >}}
392 | package main
393 | 
394 | import (
395 | 	"context"
396 | 	"log"
397 | 
398 | 	"github.com/googleapis/mcp-toolbox-sdk-go/core"
399 | )
400 | 
401 | func main() {
402 | 	// update the url to point to your server
403 | 	URL := "http://127.0.0.1:5000"
404 | 	ctx := context.Background()
405 | 
406 | 	client, err := core.NewToolboxClient(URL)
407 | 	if err != nil {
408 | 		log.Fatalf("Failed to create Toolbox client: %v", err)
409 | 	}
410 | 
411 | 	// Framework agnostic tools
412 | 	tools, err := client.LoadToolset("toolsetName", ctx)
413 | 	if err != nil {
414 | 		log.Fatalf("Failed to load tools: %v", err)
415 | 	}
416 | }
417 | {{< /highlight >}}
418 | 
419 | {{% /tab %}}
420 | {{% tab header="LangChain Go" lang="en" %}}
421 | 
422 | {{< highlight go >}}
423 | package main
424 | 
425 | import (
426 | 	"context"
427 | 	"encoding/json"
428 | 	"log"
429 | 
430 | 	"github.com/googleapis/mcp-toolbox-sdk-go/core"
431 | 	"github.com/tmc/langchaingo/llms"
432 | )
433 | 
434 | func main() {
435 | 	// Make sure to add the error checks
436 | 	// update the url to point to your server
437 | 	URL := "http://127.0.0.1:5000"
438 | 	ctx := context.Background()
439 | 
440 | 	client, err := core.NewToolboxClient(URL)
441 | 	if err != nil {
442 | 		log.Fatalf("Failed to create Toolbox client: %v", err)
443 | 	}
444 | 
445 | 	// Framework agnostic tool
446 | 	tool, err := client.LoadTool("toolName", ctx)
447 | 	if err != nil {
448 | 		log.Fatalf("Failed to load tools: %v", err)
449 | 	}
450 | 
451 | 	// Fetch the tool's input schema
452 | 	inputschema, err := tool.InputSchema()
453 | 	if err != nil {
454 | 		log.Fatalf("Failed to fetch inputSchema: %v", err)
455 | 	}
456 | 
457 | 	var paramsSchema map[string]any
458 | 	_ = json.Unmarshal(inputschema, &paramsSchema)
459 | 
460 | 	// Use this tool with LangChainGo
461 | 	langChainTool := llms.Tool{
462 | 		Type: "function",
463 | 		Function: &llms.FunctionDefinition{
464 | 			Name:        tool.Name(),
465 | 			Description: tool.Description(),
466 | 			Parameters:  paramsSchema,
467 | 		},
468 | 	}
469 | }
470 | {{< /highlight >}}
471 | 
472 | {{% /tab %}}
473 | {{% tab header="Genkit Go" lang="en" %}}
474 | 
475 | {{< highlight go >}}
476 | package main
477 | import (
478 | 	"context"
479 | 	"encoding/json"
480 | 	"log"
481 | 
482 | 	"github.com/firebase/genkit/go/ai"
483 | 	"github.com/firebase/genkit/go/genkit"
484 | 	"github.com/googleapis/mcp-toolbox-sdk-go/core"
485 | 	"github.com/googleapis/mcp-toolbox-sdk-go/tbgenkit"
486 | 	"github.com/invopop/jsonschema"
487 | )
488 | 
489 | func main() {
490 | 	// Make sure to add the error checks
491 | 	// Update the url to point to your server
492 | 	URL := "http://127.0.0.1:5000"
493 | 	ctx := context.Background()
494 | 	g, err := genkit.Init(ctx)
495 | 
496 | 	client, err := core.NewToolboxClient(URL)
497 | 	if err != nil {
498 | 		log.Fatalf("Failed to create Toolbox client: %v", err)
499 | 	}
500 | 
501 | 	// Framework agnostic tool
502 | 	tool, err := client.LoadTool("toolName", ctx)
503 | 	if err != nil {
504 | 		log.Fatalf("Failed to load tools: %v", err)
505 | 	}
506 | 
507 | 	// Convert the tool using the tbgenkit package
508 |  	// Use this tool with Genkit Go
509 | 	genkitTool, err := tbgenkit.ToGenkitTool(tool, g)
510 | 	if err != nil {
511 | 		log.Fatalf("Failed to convert tool: %v\n", err)
512 | 	}
513 | }
514 | {{< /highlight >}}
515 | 
516 | {{% /tab %}}
517 | {{% tab header="Go GenAI" lang="en" %}}
518 | 
519 | {{< highlight go >}}
520 | package main
521 | 
522 | import (
523 | 	"context"
524 | 	"encoding/json"
525 | 	"log"
526 | 
527 | 	"github.com/googleapis/mcp-toolbox-sdk-go/core"
528 | 	"google.golang.org/genai"
529 | )
530 | 
531 | func main() {
532 | 	// Make sure to add the error checks
533 | 	// Update the url to point to your server
534 | 	URL := "http://127.0.0.1:5000"
535 | 	ctx := context.Background()
536 | 
537 | 	client, err := core.NewToolboxClient(URL)
538 | 	if err != nil {
539 | 		log.Fatalf("Failed to create Toolbox client: %v", err)
540 | 	}
541 | 
542 | 	// Framework agnostic tool
543 | 	tool, err := client.LoadTool("toolName", ctx)
544 | 	if err != nil {
545 | 		log.Fatalf("Failed to load tools: %v", err)
546 | 	}
547 | 
548 | 	// Fetch the tool's input schema
549 | 	inputschema, err := tool.InputSchema()
550 | 	if err != nil {
551 | 		log.Fatalf("Failed to fetch inputSchema: %v", err)
552 | 	}
553 | 
554 | 	var schema *genai.Schema
555 | 	_ = json.Unmarshal(inputschema, &schema)
556 | 
557 | 	funcDeclaration := &genai.FunctionDeclaration{
558 | 		Name:        tool.Name(),
559 | 		Description: tool.Description(),
560 | 		Parameters:  schema,
561 | 	}
562 | 
563 | 	// Use this tool with Go GenAI
564 | 	genAITool := &genai.Tool{
565 | 		FunctionDeclarations: []*genai.FunctionDeclaration{funcDeclaration},
566 | 	}
567 | }
568 | {{< /highlight >}}
569 | 
570 | {{% /tab %}}
571 | 
572 | {{% tab header="OpenAI Go" lang="en" %}}
573 | 
574 | {{< highlight go >}}
575 | package main
576 | 
577 | import (
578 | 	"context"
579 | 	"encoding/json"
580 | 	"log"
581 | 
582 | 	"github.com/googleapis/mcp-toolbox-sdk-go/core"
583 | 	openai "github.com/openai/openai-go"
584 | )
585 | 
586 | func main() {
587 | 	// Make sure to add the error checks
588 | 	// Update the url to point to your server
589 | 	URL := "http://127.0.0.1:5000"
590 | 	ctx := context.Background()
591 | 
592 | 	client, err := core.NewToolboxClient(URL)
593 | 	if err != nil {
594 | 		log.Fatalf("Failed to create Toolbox client: %v", err)
595 | 	}
596 | 
597 | 	// Framework agnostic tool
598 | 	tool, err := client.LoadTool("toolName", ctx)
599 | 	if err != nil {
600 | 		log.Fatalf("Failed to load tools: %v", err)
601 | 	}
602 | 
603 | 	// Fetch the tool's input schema
604 | 	inputschema, err := tool.InputSchema()
605 | 	if err != nil {
606 | 		log.Fatalf("Failed to fetch inputSchema: %v", err)
607 | 	}
608 | 
609 | 	var paramsSchema openai.FunctionParameters
610 | 	_ = json.Unmarshal(inputschema, &paramsSchema)
611 | 
612 | 	// Use this tool with OpenAI Go
613 | 	openAITool := openai.ChatCompletionToolParam{
614 | 		Function: openai.FunctionDefinitionParam{
615 | 			Name:        tool.Name(),
616 | 			Description: openai.String(tool.Description()),
617 | 			Parameters:  paramsSchema,
618 | 		},
619 | 	}
620 | }
621 | {{< /highlight >}}
622 | 
623 | {{% /tab %}}
624 | {{< /tabpane >}}
625 | 
626 | For more detailed instructions on using the Toolbox Go SDK, see the
627 | [project's
628 | README](https://github.com/googleapis/mcp-toolbox-sdk-go/blob/main/core/README.md).
629 | 
For end-to-end samples on using the Toolbox Go SDK with orchestration
frameworks, see the [project's
samples](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main/core/samples).
633 | 
```
Page 31/45FirstPrevNextLast