This is page 22 of 35. Use http://codebase.md/googleapis/genai-toolbox?lines=false&page={x} to view the full context.

# Directory Structure

```
├── .ci
│   ├── continuous.release.cloudbuild.yaml
│   ├── generate_release_table.sh
│   ├── integration.cloudbuild.yaml
│   ├── quickstart_test
│   │   ├── go.integration.cloudbuild.yaml
│   │   ├── js.integration.cloudbuild.yaml
│   │   ├── py.integration.cloudbuild.yaml
│   │   ├── run_go_tests.sh
│   │   ├── run_js_tests.sh
│   │   ├── run_py_tests.sh
│   │   └── setup_hotels_sample.sql
│   ├── test_with_coverage.sh
│   └── versioned.release.cloudbuild.yaml
├── .github
│   ├── auto-label.yaml
│   ├── blunderbuss.yml
│   ├── CODEOWNERS
│   ├── header-checker-lint.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.yml
│   │   ├── config.yml
│   │   ├── feature_request.yml
│   │   └── question.yml
│   ├── label-sync.yml
│   ├── labels.yaml
│   ├── PULL_REQUEST_TEMPLATE.md
│   ├── release-please.yml
│   ├── renovate.json5
│   ├── sync-repo-settings.yaml
│   └── workflows
│       ├── cloud_build_failure_reporter.yml
│       ├── deploy_dev_docs.yaml
│       ├── deploy_previous_version_docs.yaml
│       ├── deploy_versioned_docs.yaml
│       ├── docs_deploy.yaml
│       ├── docs_preview_clean.yaml
│       ├── docs_preview_deploy.yaml
│       ├── lint.yaml
│       ├── schedule_reporter.yml
│       ├── sync-labels.yaml
│       └── tests.yaml
├── .gitignore
├── .gitmodules
├── .golangci.yaml
├── .hugo
│   ├── archetypes
│   │   └── default.md
│   ├── assets
│   │   ├── icons
│   │   │   └── logo.svg
│   │   └── scss
│   │       ├── _styles_project.scss
│   │       └── _variables_project.scss
│   ├── go.mod
│   ├── go.sum
│   ├── hugo.toml
│   ├── layouts
│   │   ├── _default
│   │   │   └── home.releases.releases
│   │   ├── index.llms-full.txt
│   │   ├── index.llms.txt
│   │   ├── partials
│   │   │   ├── hooks
│   │   │   │   └── head-end.html
│   │   │   ├── navbar-version-selector.html
│   │   │   ├── page-meta-links.html
│   │   │   └── td
│   │   │       └── render-heading.html
│   │   ├── robot.txt
│   │   └── shortcodes
│   │       ├── include.html
│   │       ├── ipynb.html
│   │       └── regionInclude.html
│   ├── package-lock.json
│   ├── package.json
│   └── static
│       ├── favicons
│       │   ├── android-chrome-192x192.png
│       │   ├── android-chrome-512x512.png
│       │   ├── apple-touch-icon.png
│       │   ├── favicon-16x16.png
│       │   ├── favicon-32x32.png
│       │   └── favicon.ico
│       └── js
│           └── w3.js
├── CHANGELOG.md
├── cmd
│   ├── options_test.go
│   ├── options.go
│   ├── root_test.go
│   ├── root.go
│   └── version.txt
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── DEVELOPER.md
├── Dockerfile
├── docs
│   └── en
│       ├── _index.md
│       ├── about
│       │   ├── _index.md
│       │   └── faq.md
│       ├── concepts
│       │   ├── _index.md
│       │   └── telemetry
│       │       ├── index.md
│       │       ├── telemetry_flow.png
│       │       └── telemetry_traces.png
│       ├── getting-started
│       │   ├── _index.md
│       │   ├── colab_quickstart.ipynb
│       │   ├── configure.md
│       │   ├── introduction
│       │   │   ├── _index.md
│       │   │   └── architecture.png
│       │   ├── local_quickstart_go.md
│       │   ├── local_quickstart_js.md
│       │   ├── local_quickstart.md
│       │   ├── mcp_quickstart
│       │   │   ├── _index.md
│       │   │   ├── inspector_tools.png
│       │   │   └── inspector.png
│       │   └── quickstart
│       │       ├── go
│       │       │   ├── genAI
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── genkit
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── langchain
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   ├── openAI
│       │       │   │   ├── go.mod
│       │       │   │   ├── go.sum
│       │       │   │   └── quickstart.go
│       │       │   └── quickstart_test.go
│       │       ├── golden.txt
│       │       ├── js
│       │       │   ├── genAI
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── genkit
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── langchain
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   ├── llamaindex
│       │       │   │   ├── package-lock.json
│       │       │   │   ├── package.json
│       │       │   │   └── quickstart.js
│       │       │   └── quickstart.test.js
│       │       ├── python
│       │       │   ├── __init__.py
│       │       │   ├── adk
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── core
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── langchain
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   ├── llamaindex
│       │       │   │   ├── quickstart.py
│       │       │   │   └── requirements.txt
│       │       │   └── quickstart_test.py
│       │       └── shared
│       │           ├── cloud_setup.md
│       │           ├── configure_toolbox.md
│       │           └── database_setup.md
│       ├── how-to
│       │   ├── _index.md
│       │   ├── connect_via_geminicli.md
│       │   ├── connect_via_mcp.md
│       │   ├── connect-ide
│       │   │   ├── _index.md
│       │   │   ├── alloydb_pg_admin_mcp.md
│       │   │   ├── alloydb_pg_mcp.md
│       │   │   ├── bigquery_mcp.md
│       │   │   ├── cloud_sql_mssql_admin_mcp.md
│       │   │   ├── cloud_sql_mssql_mcp.md
│       │   │   ├── cloud_sql_mysql_admin_mcp.md
│       │   │   ├── cloud_sql_mysql_mcp.md
│       │   │   ├── cloud_sql_pg_admin_mcp.md
│       │   │   ├── cloud_sql_pg_mcp.md
│       │   │   ├── firestore_mcp.md
│       │   │   ├── looker_mcp.md
│       │   │   ├── mssql_mcp.md
│       │   │   ├── mysql_mcp.md
│       │   │   ├── neo4j_mcp.md
│       │   │   ├── postgres_mcp.md
│       │   │   ├── spanner_mcp.md
│       │   │   └── sqlite_mcp.md
│       │   ├── deploy_docker.md
│       │   ├── deploy_gke.md
│       │   ├── deploy_toolbox.md
│       │   ├── export_telemetry.md
│       │   └── toolbox-ui
│       │       ├── edit-headers.gif
│       │       ├── edit-headers.png
│       │       ├── index.md
│       │       ├── optional-param-checked.png
│       │       ├── optional-param-unchecked.png
│       │       ├── run-tool.gif
│       │       ├── tools.png
│       │       └── toolsets.png
│       ├── reference
│       │   ├── _index.md
│       │   ├── cli.md
│       │   └── prebuilt-tools.md
│       ├── resources
│       │   ├── _index.md
│       │   ├── authServices
│       │   │   ├── _index.md
│       │   │   └── google.md
│       │   ├── sources
│       │   │   ├── _index.md
│       │   │   ├── alloydb-admin.md
│       │   │   ├── alloydb-pg.md
│       │   │   ├── bigquery.md
│       │   │   ├── bigtable.md
│       │   │   ├── cassandra.md
│       │   │   ├── clickhouse.md
│       │   │   ├── cloud-monitoring.md
│       │   │   ├── cloud-sql-admin.md
│       │   │   ├── cloud-sql-mssql.md
│       │   │   ├── cloud-sql-mysql.md
│       │   │   ├── cloud-sql-pg.md
│       │   │   ├── couchbase.md
│       │   │   ├── dataplex.md
│       │   │   ├── dgraph.md
│       │   │   ├── firebird.md
│       │   │   ├── firestore.md
│       │   │   ├── http.md
│       │   │   ├── looker.md
│       │   │   ├── mongodb.md
│       │   │   ├── mssql.md
│       │   │   ├── mysql.md
│       │   │   ├── neo4j.md
│       │   │   ├── oceanbase.md
│       │   │   ├── oracle.md
│       │   │   ├── postgres.md
│       │   │   ├── redis.md
│       │   │   ├── spanner.md
│       │   │   ├── sqlite.md
│       │   │   ├── tidb.md
│       │   │   ├── trino.md
│       │   │   ├── valkey.md
│       │   │   └── yugabytedb.md
│       │   └── tools
│       │       ├── _index.md
│       │       ├── alloydb
│       │       │   ├── _index.md
│       │       │   ├── alloydb-create-cluster.md
│       │       │   ├── alloydb-create-instance.md
│       │       │   ├── alloydb-create-user.md
│       │       │   ├── alloydb-get-cluster.md
│       │       │   ├── alloydb-get-instance.md
│       │       │   ├── alloydb-get-user.md
│       │       │   ├── alloydb-list-clusters.md
│       │       │   ├── alloydb-list-instances.md
│       │       │   ├── alloydb-list-users.md
│       │       │   └── alloydb-wait-for-operation.md
│       │       ├── alloydbainl
│       │       │   ├── _index.md
│       │       │   └── alloydb-ai-nl.md
│       │       ├── bigquery
│       │       │   ├── _index.md
│       │       │   ├── bigquery-analyze-contribution.md
│       │       │   ├── bigquery-conversational-analytics.md
│       │       │   ├── bigquery-execute-sql.md
│       │       │   ├── bigquery-forecast.md
│       │       │   ├── bigquery-get-dataset-info.md
│       │       │   ├── bigquery-get-table-info.md
│       │       │   ├── bigquery-list-dataset-ids.md
│       │       │   ├── bigquery-list-table-ids.md
│       │       │   ├── bigquery-search-catalog.md
│       │       │   └── bigquery-sql.md
│       │       ├── bigtable
│       │       │   ├── _index.md
│       │       │   └── bigtable-sql.md
│       │       ├── cassandra
│       │       │   ├── _index.md
│       │       │   └── cassandra-cql.md
│       │       ├── clickhouse
│       │       │   ├── _index.md
│       │       │   ├── clickhouse-execute-sql.md
│       │       │   ├── clickhouse-list-databases.md
│       │       │   ├── clickhouse-list-tables.md
│       │       │   └── clickhouse-sql.md
│       │       ├── cloudmonitoring
│       │       │   ├── _index.md
│       │       │   └── cloud-monitoring-query-prometheus.md
│       │       ├── cloudsql
│       │       │   ├── _index.md
│       │       │   ├── cloudsqlcreatedatabase.md
│       │       │   ├── cloudsqlcreateusers.md
│       │       │   ├── cloudsqlgetinstances.md
│       │       │   ├── cloudsqllistdatabases.md
│       │       │   ├── cloudsqllistinstances.md
│       │       │   ├── cloudsqlmssqlcreateinstance.md
│       │       │   ├── cloudsqlmysqlcreateinstance.md
│       │       │   ├── cloudsqlpgcreateinstances.md
│       │       │   └── cloudsqlwaitforoperation.md
│       │       ├── couchbase
│       │       │   ├── _index.md
│       │       │   └── couchbase-sql.md
│       │       ├── dataform
│       │       │   ├── _index.md
│       │       │   └── dataform-compile-local.md
│       │       ├── dataplex
│       │       │   ├── _index.md
│       │       │   ├── dataplex-lookup-entry.md
│       │       │   ├── dataplex-search-aspect-types.md
│       │       │   └── dataplex-search-entries.md
│       │       ├── dgraph
│       │       │   ├── _index.md
│       │       │   └── dgraph-dql.md
│       │       ├── firebird
│       │       │   ├── _index.md
│       │       │   ├── firebird-execute-sql.md
│       │       │   └── firebird-sql.md
│       │       ├── firestore
│       │       │   ├── _index.md
│       │       │   ├── firestore-add-documents.md
│       │       │   ├── firestore-delete-documents.md
│       │       │   ├── firestore-get-documents.md
│       │       │   ├── firestore-get-rules.md
│       │       │   ├── firestore-list-collections.md
│       │       │   ├── firestore-query-collection.md
│       │       │   ├── firestore-query.md
│       │       │   ├── firestore-update-document.md
│       │       │   └── firestore-validate-rules.md
│       │       ├── http
│       │       │   ├── _index.md
│       │       │   └── http.md
│       │       ├── looker
│       │       │   ├── _index.md
│       │       │   ├── looker-add-dashboard-element.md
│       │       │   ├── looker-conversational-analytics.md
│       │       │   ├── looker-create-project-file.md
│       │       │   ├── looker-delete-project-file.md
│       │       │   ├── looker-dev-mode.md
│       │       │   ├── looker-get-dashboards.md
│       │       │   ├── looker-get-dimensions.md
│       │       │   ├── looker-get-explores.md
│       │       │   ├── looker-get-filters.md
│       │       │   ├── looker-get-looks.md
│       │       │   ├── looker-get-measures.md
│       │       │   ├── looker-get-models.md
│       │       │   ├── looker-get-parameters.md
│       │       │   ├── looker-get-project-file.md
│       │       │   ├── looker-get-project-files.md
│       │       │   ├── looker-get-projects.md
│       │       │   ├── looker-health-analyze.md
│       │       │   ├── looker-health-pulse.md
│       │       │   ├── looker-health-vacuum.md
│       │       │   ├── looker-make-dashboard.md
│       │       │   ├── looker-make-look.md
│       │       │   ├── looker-query-sql.md
│       │       │   ├── looker-query-url.md
│       │       │   ├── looker-query.md
│       │       │   ├── looker-run-look.md
│       │       │   └── looker-update-project-file.md
│       │       ├── mongodb
│       │       │   ├── _index.md
│       │       │   ├── mongodb-aggregate.md
│       │       │   ├── mongodb-delete-many.md
│       │       │   ├── mongodb-delete-one.md
│       │       │   ├── mongodb-find-one.md
│       │       │   ├── mongodb-find.md
│       │       │   ├── mongodb-insert-many.md
│       │       │   ├── mongodb-insert-one.md
│       │       │   ├── mongodb-update-many.md
│       │       │   └── mongodb-update-one.md
│       │       ├── mssql
│       │       │   ├── _index.md
│       │       │   ├── mssql-execute-sql.md
│       │       │   ├── mssql-list-tables.md
│       │       │   └── mssql-sql.md
│       │       ├── mysql
│       │       │   ├── _index.md
│       │       │   ├── mysql-execute-sql.md
│       │       │   ├── mysql-list-active-queries.md
│       │       │   ├── mysql-list-table-fragmentation.md
│       │       │   ├── mysql-list-tables-missing-unique-indexes.md
│       │       │   ├── mysql-list-tables.md
│       │       │   └── mysql-sql.md
│       │       ├── neo4j
│       │       │   ├── _index.md
│       │       │   ├── neo4j-cypher.md
│       │       │   ├── neo4j-execute-cypher.md
│       │       │   └── neo4j-schema.md
│       │       ├── oceanbase
│       │       │   ├── _index.md
│       │       │   ├── oceanbase-execute-sql.md
│       │       │   └── oceanbase-sql.md
│       │       ├── oracle
│       │       │   ├── _index.md
│       │       │   ├── oracle-execute-sql.md
│       │       │   └── oracle-sql.md
│       │       ├── postgres
│       │       │   ├── _index.md
│       │       │   ├── postgres-execute-sql.md
│       │       │   ├── postgres-list-active-queries.md
│       │       │   ├── postgres-list-available-extensions.md
│       │       │   ├── postgres-list-installed-extensions.md
│       │       │   ├── postgres-list-tables.md
│       │       │   └── postgres-sql.md
│       │       ├── redis
│       │       │   ├── _index.md
│       │       │   └── redis.md
│       │       ├── spanner
│       │       │   ├── _index.md
│       │       │   ├── spanner-execute-sql.md
│       │       │   ├── spanner-list-tables.md
│       │       │   └── spanner-sql.md
│       │       ├── sqlite
│       │       │   ├── _index.md
│       │       │   ├── sqlite-execute-sql.md
│       │       │   └── sqlite-sql.md
│       │       ├── tidb
│       │       │   ├── _index.md
│       │       │   ├── tidb-execute-sql.md
│       │       │   └── tidb-sql.md
│       │       ├── trino
│       │       │   ├── _index.md
│       │       │   ├── trino-execute-sql.md
│       │       │   └── trino-sql.md
│       │       ├── utility
│       │       │   ├── _index.md
│       │       │   └── wait.md
│       │       ├── valkey
│       │       │   ├── _index.md
│       │       │   └── valkey.md
│       │       └── yuagbytedb
│       │           ├── _index.md
│       │           └── yugabytedb-sql.md
│       ├── samples
│       │   ├── _index.md
│       │   ├── alloydb
│       │   │   ├── _index.md
│       │   │   ├── ai-nl
│       │   │   │   ├── alloydb_ai_nl.ipynb
│       │   │   │   └── index.md
│       │   │   └── mcp_quickstart.md
│       │   ├── bigquery
│       │   │   ├── _index.md
│       │   │   ├── colab_quickstart_bigquery.ipynb
│       │   │   ├── local_quickstart.md
│       │   │   └── mcp_quickstart
│       │   │       ├── _index.md
│       │   │       ├── inspector_tools.png
│       │   │       └── inspector.png
│       │   └── looker
│       │       ├── _index.md
│       │       ├── looker_gemini_oauth
│       │       │   ├── _index.md
│       │       │   ├── authenticated.png
│       │       │   ├── authorize.png
│       │       │   └── registration.png
│       │       ├── looker_gemini.md
│       │       └── looker_mcp_inspector
│       │           ├── _index.md
│       │           ├── inspector_tools.png
│       │           └── inspector.png
│       └── sdks
│           ├── _index.md
│           ├── go-sdk.md
│           ├── js-sdk.md
│           └── python-sdk.md
├── gemini-extension.json
├── go.mod
├── go.sum
├── internal
│   ├── auth
│   │   ├── auth.go
│   │   └── google
│   │       └── google.go
│   ├── log
│   │   ├── handler.go
│   │   ├── log_test.go
│   │   ├── log.go
│   │   └── logger.go
│   ├── prebuiltconfigs
│   │   ├── prebuiltconfigs_test.go
│   │   ├── prebuiltconfigs.go
│   │   └── tools
│   │       ├── alloydb-postgres-admin.yaml
│   │       ├── alloydb-postgres-observability.yaml
│   │       ├── alloydb-postgres.yaml
│   │       ├── bigquery.yaml
│   │       ├── clickhouse.yaml
│   │       ├── cloud-sql-mssql-admin.yaml
│   │       ├── cloud-sql-mssql-observability.yaml
│   │       ├── cloud-sql-mssql.yaml
│   │       ├── cloud-sql-mysql-admin.yaml
│   │       ├── cloud-sql-mysql-observability.yaml
│   │       ├── cloud-sql-mysql.yaml
│   │       ├── cloud-sql-postgres-admin.yaml
│   │       ├── cloud-sql-postgres-observability.yaml
│   │       ├── cloud-sql-postgres.yaml
│   │       ├── dataplex.yaml
│   │       ├── firestore.yaml
│   │       ├── looker-conversational-analytics.yaml
│   │       ├── looker.yaml
│   │       ├── mssql.yaml
│   │       ├── mysql.yaml
│   │       ├── neo4j.yaml
│   │       ├── oceanbase.yaml
│   │       ├── postgres.yaml
│   │       ├── spanner-postgres.yaml
│   │       ├── spanner.yaml
│   │       └── sqlite.yaml
│   ├── server
│   │   ├── api_test.go
│   │   ├── api.go
│   │   ├── common_test.go
│   │   ├── config.go
│   │   ├── mcp
│   │   │   ├── jsonrpc
│   │   │   │   ├── jsonrpc_test.go
│   │   │   │   └── jsonrpc.go
│   │   │   ├── mcp.go
│   │   │   ├── util
│   │   │   │   └── lifecycle.go
│   │   │   ├── v20241105
│   │   │   │   ├── method.go
│   │   │   │   └── types.go
│   │   │   ├── v20250326
│   │   │   │   ├── method.go
│   │   │   │   └── types.go
│   │   │   └── v20250618
│   │   │       ├── method.go
│   │   │       └── types.go
│   │   ├── mcp_test.go
│   │   ├── mcp.go
│   │   ├── server_test.go
│   │   ├── server.go
│   │   ├── static
│   │   │   ├── assets
│   │   │   │   └── mcptoolboxlogo.png
│   │   │   ├── css
│   │   │   │   └── style.css
│   │   │   ├── index.html
│   │   │   ├── js
│   │   │   │   ├── auth.js
│   │   │   │   ├── loadTools.js
│   │   │   │   ├── mainContent.js
│   │   │   │   ├── navbar.js
│   │   │   │   ├── runTool.js
│   │   │   │   ├── toolDisplay.js
│   │   │   │   ├── tools.js
│   │   │   │   └── toolsets.js
│   │   │   ├── tools.html
│   │   │   └── toolsets.html
│   │   ├── web_test.go
│   │   └── web.go
│   ├── sources
│   │   ├── alloydbadmin
│   │   │   ├── alloydbadmin_test.go
│   │   │   └── alloydbadmin.go
│   │   ├── alloydbpg
│   │   │   ├── alloydb_pg_test.go
│   │   │   └── alloydb_pg.go
│   │   ├── bigquery
│   │   │   ├── bigquery_test.go
│   │   │   └── bigquery.go
│   │   ├── bigtable
│   │   │   ├── bigtable_test.go
│   │   │   └── bigtable.go
│   │   ├── cassandra
│   │   │   ├── cassandra_test.go
│   │   │   └── cassandra.go
│   │   ├── clickhouse
│   │   │   ├── clickhouse_test.go
│   │   │   └── clickhouse.go
│   │   ├── cloudmonitoring
│   │   │   ├── cloud_monitoring_test.go
│   │   │   └── cloud_monitoring.go
│   │   ├── cloudsqladmin
│   │   │   ├── cloud_sql_admin_test.go
│   │   │   └── cloud_sql_admin.go
│   │   ├── cloudsqlmssql
│   │   │   ├── cloud_sql_mssql_test.go
│   │   │   └── cloud_sql_mssql.go
│   │   ├── cloudsqlmysql
│   │   │   ├── cloud_sql_mysql_test.go
│   │   │   └── cloud_sql_mysql.go
│   │   ├── cloudsqlpg
│   │   │   ├── cloud_sql_pg_test.go
│   │   │   └── cloud_sql_pg.go
│   │   ├── couchbase
│   │   │   ├── couchbase_test.go
│   │   │   └── couchbase.go
│   │   ├── dataplex
│   │   │   ├── dataplex_test.go
│   │   │   └── dataplex.go
│   │   ├── dgraph
│   │   │   ├── dgraph_test.go
│   │   │   └── dgraph.go
│   │   ├── dialect.go
│   │   ├── firebird
│   │   │   ├── firebird_test.go
│   │   │   └── firebird.go
│   │   ├── firestore
│   │   │   ├── firestore_test.go
│   │   │   └── firestore.go
│   │   ├── http
│   │   │   ├── http_test.go
│   │   │   └── http.go
│   │   ├── ip_type.go
│   │   ├── looker
│   │   │   ├── looker_test.go
│   │   │   └── looker.go
│   │   ├── mongodb
│   │   │   ├── mongodb_test.go
│   │   │   └── mongodb.go
│   │   ├── mssql
│   │   │   ├── mssql_test.go
│   │   │   └── mssql.go
│   │   ├── mysql
│   │   │   ├── mysql_test.go
│   │   │   └── mysql.go
│   │   ├── neo4j
│   │   │   ├── neo4j_test.go
│   │   │   └── neo4j.go
│   │   ├── oceanbase
│   │   │   ├── oceanbase_test.go
│   │   │   └── oceanbase.go
│   │   ├── oracle
│   │   │   └── oracle.go
│   │   ├── postgres
│   │   │   ├── postgres_test.go
│   │   │   └── postgres.go
│   │   ├── redis
│   │   │   ├── redis_test.go
│   │   │   └── redis.go
│   │   ├── sources.go
│   │   ├── spanner
│   │   │   ├── spanner_test.go
│   │   │   └── spanner.go
│   │   ├── sqlite
│   │   │   ├── sqlite_test.go
│   │   │   └── sqlite.go
│   │   ├── tidb
│   │   │   ├── tidb_test.go
│   │   │   └── tidb.go
│   │   ├── trino
│   │   │   ├── trino_test.go
│   │   │   └── trino.go
│   │   ├── util.go
│   │   ├── valkey
│   │   │   ├── valkey_test.go
│   │   │   └── valkey.go
│   │   └── yugabytedb
│   │       ├── yugabytedb_test.go
│   │       └── yugabytedb.go
│   ├── telemetry
│   │   ├── instrumentation.go
│   │   └── telemetry.go
│   ├── testutils
│   │   └── testutils.go
│   ├── tools
│   │   ├── alloydb
│   │   │   ├── alloydbcreatecluster
│   │   │   │   ├── alloydbcreatecluster_test.go
│   │   │   │   └── alloydbcreatecluster.go
│   │   │   ├── alloydbcreateinstance
│   │   │   │   ├── alloydbcreateinstance_test.go
│   │   │   │   └── alloydbcreateinstance.go
│   │   │   ├── alloydbcreateuser
│   │   │   │   ├── alloydbcreateuser_test.go
│   │   │   │   └── alloydbcreateuser.go
│   │   │   ├── alloydbgetcluster
│   │   │   │   ├── alloydbgetcluster_test.go
│   │   │   │   └── alloydbgetcluster.go
│   │   │   ├── alloydbgetinstance
│   │   │   │   ├── alloydbgetinstance_test.go
│   │   │   │   └── alloydbgetinstance.go
│   │   │   ├── alloydbgetuser
│   │   │   │   ├── alloydbgetuser_test.go
│   │   │   │   └── alloydbgetuser.go
│   │   │   ├── alloydblistclusters
│   │   │   │   ├── alloydblistclusters_test.go
│   │   │   │   └── alloydblistclusters.go
│   │   │   ├── alloydblistinstances
│   │   │   │   ├── alloydblistinstances_test.go
│   │   │   │   └── alloydblistinstances.go
│   │   │   ├── alloydblistusers
│   │   │   │   ├── alloydblistusers_test.go
│   │   │   │   └── alloydblistusers.go
│   │   │   └── alloydbwaitforoperation
│   │   │       ├── alloydbwaitforoperation_test.go
│   │   │       └── alloydbwaitforoperation.go
│   │   ├── alloydbainl
│   │   │   ├── alloydbainl_test.go
│   │   │   └── alloydbainl.go
│   │   ├── bigquery
│   │   │   ├── bigqueryanalyzecontribution
│   │   │   │   ├── bigqueryanalyzecontribution_test.go
│   │   │   │   └── bigqueryanalyzecontribution.go
│   │   │   ├── bigquerycommon
│   │   │   │   ├── table_name_parser_test.go
│   │   │   │   ├── table_name_parser.go
│   │   │   │   └── util.go
│   │   │   ├── bigqueryconversationalanalytics
│   │   │   │   ├── bigqueryconversationalanalytics_test.go
│   │   │   │   └── bigqueryconversationalanalytics.go
│   │   │   ├── bigqueryexecutesql
│   │   │   │   ├── bigqueryexecutesql_test.go
│   │   │   │   └── bigqueryexecutesql.go
│   │   │   ├── bigqueryforecast
│   │   │   │   ├── bigqueryforecast_test.go
│   │   │   │   └── bigqueryforecast.go
│   │   │   ├── bigquerygetdatasetinfo
│   │   │   │   ├── bigquerygetdatasetinfo_test.go
│   │   │   │   └── bigquerygetdatasetinfo.go
│   │   │   ├── bigquerygettableinfo
│   │   │   │   ├── bigquerygettableinfo_test.go
│   │   │   │   └── bigquerygettableinfo.go
│   │   │   ├── bigquerylistdatasetids
│   │   │   │   ├── bigquerylistdatasetids_test.go
│   │   │   │   └── bigquerylistdatasetids.go
│   │   │   ├── bigquerylisttableids
│   │   │   │   ├── bigquerylisttableids_test.go
│   │   │   │   └── bigquerylisttableids.go
│   │   │   ├── bigquerysearchcatalog
│   │   │   │   ├── bigquerysearchcatalog_test.go
│   │   │   │   └── bigquerysearchcatalog.go
│   │   │   └── bigquerysql
│   │   │       ├── bigquerysql_test.go
│   │   │       └── bigquerysql.go
│   │   ├── bigtable
│   │   │   ├── bigtable_test.go
│   │   │   └── bigtable.go
│   │   ├── cassandra
│   │   │   └── cassandracql
│   │   │       ├── cassandracql_test.go
│   │   │       └── cassandracql.go
│   │   ├── clickhouse
│   │   │   ├── clickhouseexecutesql
│   │   │   │   ├── clickhouseexecutesql_test.go
│   │   │   │   └── clickhouseexecutesql.go
│   │   │   ├── clickhouselistdatabases
│   │   │   │   ├── clickhouselistdatabases_test.go
│   │   │   │   └── clickhouselistdatabases.go
│   │   │   ├── clickhouselisttables
│   │   │   │   ├── clickhouselisttables_test.go
│   │   │   │   └── clickhouselisttables.go
│   │   │   └── clickhousesql
│   │   │       ├── clickhousesql_test.go
│   │   │       └── clickhousesql.go
│   │   ├── cloudmonitoring
│   │   │   ├── cloudmonitoring_test.go
│   │   │   └── cloudmonitoring.go
│   │   ├── cloudsql
│   │   │   ├── cloudsqlcreatedatabase
│   │   │   │   ├── cloudsqlcreatedatabase_test.go
│   │   │   │   └── cloudsqlcreatedatabase.go
│   │   │   ├── cloudsqlcreateusers
│   │   │   │   ├── cloudsqlcreateusers_test.go
│   │   │   │   └── cloudsqlcreateusers.go
│   │   │   ├── cloudsqlgetinstances
│   │   │   │   ├── cloudsqlgetinstances_test.go
│   │   │   │   └── cloudsqlgetinstances.go
│   │   │   ├── cloudsqllistdatabases
│   │   │   │   ├── cloudsqllistdatabases_test.go
│   │   │   │   └── cloudsqllistdatabases.go
│   │   │   ├── cloudsqllistinstances
│   │   │   │   ├── cloudsqllistinstances_test.go
│   │   │   │   └── cloudsqllistinstances.go
│   │   │   └── cloudsqlwaitforoperation
│   │   │       ├── cloudsqlwaitforoperation_test.go
│   │   │       └── cloudsqlwaitforoperation.go
│   │   ├── cloudsqlmssql
│   │   │   └── cloudsqlmssqlcreateinstance
│   │   │       ├── cloudsqlmssqlcreateinstance_test.go
│   │   │       └── cloudsqlmssqlcreateinstance.go
│   │   ├── cloudsqlmysql
│   │   │   └── cloudsqlmysqlcreateinstance
│   │   │       ├── cloudsqlmysqlcreateinstance_test.go
│   │   │       └── cloudsqlmysqlcreateinstance.go
│   │   ├── cloudsqlpg
│   │   │   └── cloudsqlpgcreateinstances
│   │   │       ├── cloudsqlpgcreateinstances_test.go
│   │   │       └── cloudsqlpgcreateinstances.go
│   │   ├── common_test.go
│   │   ├── common.go
│   │   ├── couchbase
│   │   │   ├── couchbase_test.go
│   │   │   └── couchbase.go
│   │   ├── dataform
│   │   │   └── dataformcompilelocal
│   │   │       ├── dataformcompilelocal_test.go
│   │   │       └── dataformcompilelocal.go
│   │   ├── dataplex
│   │   │   ├── dataplexlookupentry
│   │   │   │   ├── dataplexlookupentry_test.go
│   │   │   │   └── dataplexlookupentry.go
│   │   │   ├── dataplexsearchaspecttypes
│   │   │   │   ├── dataplexsearchaspecttypes_test.go
│   │   │   │   └── dataplexsearchaspecttypes.go
│   │   │   └── dataplexsearchentries
│   │   │       ├── dataplexsearchentries_test.go
│   │   │       └── dataplexsearchentries.go
│   │   ├── dgraph
│   │   │   ├── dgraph_test.go
│   │   │   └── dgraph.go
│   │   ├── firebird
│   │   │   ├── firebirdexecutesql
│   │   │   │   ├── firebirdexecutesql_test.go
│   │   │   │   └── firebirdexecutesql.go
│   │   │   └── firebirdsql
│   │   │       ├── firebirdsql_test.go
│   │   │       └── firebirdsql.go
│   │   ├── firestore
│   │   │   ├── firestoreadddocuments
│   │   │   │   ├── firestoreadddocuments_test.go
│   │   │   │   └── firestoreadddocuments.go
│   │   │   ├── firestoredeletedocuments
│   │   │   │   ├── firestoredeletedocuments_test.go
│   │   │   │   └── firestoredeletedocuments.go
│   │   │   ├── firestoregetdocuments
│   │   │   │   ├── firestoregetdocuments_test.go
│   │   │   │   └── firestoregetdocuments.go
│   │   │   ├── firestoregetrules
│   │   │   │   ├── firestoregetrules_test.go
│   │   │   │   └── firestoregetrules.go
│   │   │   ├── firestorelistcollections
│   │   │   │   ├── firestorelistcollections_test.go
│   │   │   │   └── firestorelistcollections.go
│   │   │   ├── firestorequery
│   │   │   │   ├── firestorequery_test.go
│   │   │   │   └── firestorequery.go
│   │   │   ├── firestorequerycollection
│   │   │   │   ├── firestorequerycollection_test.go
│   │   │   │   └── firestorequerycollection.go
│   │   │   ├── firestoreupdatedocument
│   │   │   │   ├── firestoreupdatedocument_test.go
│   │   │   │   └── firestoreupdatedocument.go
│   │   │   ├── firestorevalidaterules
│   │   │   │   ├── firestorevalidaterules_test.go
│   │   │   │   └── firestorevalidaterules.go
│   │   │   └── util
│   │   │       ├── converter_test.go
│   │   │       ├── converter.go
│   │   │       ├── validator_test.go
│   │   │       └── validator.go
│   │   ├── http
│   │   │   ├── http_test.go
│   │   │   └── http.go
│   │   ├── http_method.go
│   │   ├── looker
│   │   │   ├── lookeradddashboardelement
│   │   │   │   ├── lookeradddashboardelement_test.go
│   │   │   │   └── lookeradddashboardelement.go
│   │   │   ├── lookercommon
│   │   │   │   ├── lookercommon_test.go
│   │   │   │   └── lookercommon.go
│   │   │   ├── lookerconversationalanalytics
│   │   │   │   ├── lookerconversationalanalytics_test.go
│   │   │   │   └── lookerconversationalanalytics.go
│   │   │   ├── lookercreateprojectfile
│   │   │   │   ├── lookercreateprojectfile_test.go
│   │   │   │   └── lookercreateprojectfile.go
│   │   │   ├── lookerdeleteprojectfile
│   │   │   │   ├── lookerdeleteprojectfile_test.go
│   │   │   │   └── lookerdeleteprojectfile.go
│   │   │   ├── lookerdevmode
│   │   │   │   ├── lookerdevmode_test.go
│   │   │   │   └── lookerdevmode.go
│   │   │   ├── lookergetdashboards
│   │   │   │   ├── lookergetdashboards_test.go
│   │   │   │   └── lookergetdashboards.go
│   │   │   ├── lookergetdimensions
│   │   │   │   ├── lookergetdimensions_test.go
│   │   │   │   └── lookergetdimensions.go
│   │   │   ├── lookergetexplores
│   │   │   │   ├── lookergetexplores_test.go
│   │   │   │   └── lookergetexplores.go
│   │   │   ├── lookergetfilters
│   │   │   │   ├── lookergetfilters_test.go
│   │   │   │   └── lookergetfilters.go
│   │   │   ├── lookergetlooks
│   │   │   │   ├── lookergetlooks_test.go
│   │   │   │   └── lookergetlooks.go
│   │   │   ├── lookergetmeasures
│   │   │   │   ├── lookergetmeasures_test.go
│   │   │   │   └── lookergetmeasures.go
│   │   │   ├── lookergetmodels
│   │   │   │   ├── lookergetmodels_test.go
│   │   │   │   └── lookergetmodels.go
│   │   │   ├── lookergetparameters
│   │   │   │   ├── lookergetparameters_test.go
│   │   │   │   └── lookergetparameters.go
│   │   │   ├── lookergetprojectfile
│   │   │   │   ├── lookergetprojectfile_test.go
│   │   │   │   └── lookergetprojectfile.go
│   │   │   ├── lookergetprojectfiles
│   │   │   │   ├── lookergetprojectfiles_test.go
│   │   │   │   └── lookergetprojectfiles.go
│   │   │   ├── lookergetprojects
│   │   │   │   ├── lookergetprojects_test.go
│   │   │   │   └── lookergetprojects.go
│   │   │   ├── lookerhealthanalyze
│   │   │   │   ├── lookerhealthanalyze_test.go
│   │   │   │   └── lookerhealthanalyze.go
│   │   │   ├── lookerhealthpulse
│   │   │   │   ├── lookerhealthpulse_test.go
│   │   │   │   └── lookerhealthpulse.go
│   │   │   ├── lookerhealthvacuum
│   │   │   │   ├── lookerhealthvacuum_test.go
│   │   │   │   └── lookerhealthvacuum.go
│   │   │   ├── lookermakedashboard
│   │   │   │   ├── lookermakedashboard_test.go
│   │   │   │   └── lookermakedashboard.go
│   │   │   ├── lookermakelook
│   │   │   │   ├── lookermakelook_test.go
│   │   │   │   └── lookermakelook.go
│   │   │   ├── lookerquery
│   │   │   │   ├── lookerquery_test.go
│   │   │   │   └── lookerquery.go
│   │   │   ├── lookerquerysql
│   │   │   │   ├── lookerquerysql_test.go
│   │   │   │   └── lookerquerysql.go
│   │   │   ├── lookerqueryurl
│   │   │   │   ├── lookerqueryurl_test.go
│   │   │   │   └── lookerqueryurl.go
│   │   │   ├── lookerrunlook
│   │   │   │   ├── lookerrunlook_test.go
│   │   │   │   └── lookerrunlook.go
│   │   │   └── lookerupdateprojectfile
│   │   │       ├── lookerupdateprojectfile_test.go
│   │   │       └── lookerupdateprojectfile.go
│   │   ├── mongodb
│   │   │   ├── mongodbaggregate
│   │   │   │   ├── mongodbaggregate_test.go
│   │   │   │   └── mongodbaggregate.go
│   │   │   ├── mongodbdeletemany
│   │   │   │   ├── mongodbdeletemany_test.go
│   │   │   │   └── mongodbdeletemany.go
│   │   │   ├── mongodbdeleteone
│   │   │   │   ├── mongodbdeleteone_test.go
│   │   │   │   └── mongodbdeleteone.go
│   │   │   ├── mongodbfind
│   │   │   │   ├── mongodbfind_test.go
│   │   │   │   └── mongodbfind.go
│   │   │   ├── mongodbfindone
│   │   │   │   ├── mongodbfindone_test.go
│   │   │   │   └── mongodbfindone.go
│   │   │   ├── mongodbinsertmany
│   │   │   │   ├── mongodbinsertmany_test.go
│   │   │   │   └── mongodbinsertmany.go
│   │   │   ├── mongodbinsertone
│   │   │   │   ├── mongodbinsertone_test.go
│   │   │   │   └── mongodbinsertone.go
│   │   │   ├── mongodbupdatemany
│   │   │   │   ├── mongodbupdatemany_test.go
│   │   │   │   └── mongodbupdatemany.go
│   │   │   └── mongodbupdateone
│   │   │       ├── mongodbupdateone_test.go
│   │   │       └── mongodbupdateone.go
│   │   ├── mssql
│   │   │   ├── mssqlexecutesql
│   │   │   │   ├── mssqlexecutesql_test.go
│   │   │   │   └── mssqlexecutesql.go
│   │   │   ├── mssqllisttables
│   │   │   │   ├── mssqllisttables_test.go
│   │   │   │   └── mssqllisttables.go
│   │   │   └── mssqlsql
│   │   │       ├── mssqlsql_test.go
│   │   │       └── mssqlsql.go
│   │   ├── mysql
│   │   │   ├── mysqlcommon
│   │   │   │   └── mysqlcommon.go
│   │   │   ├── mysqlexecutesql
│   │   │   │   ├── mysqlexecutesql_test.go
│   │   │   │   └── mysqlexecutesql.go
│   │   │   ├── mysqllistactivequeries
│   │   │   │   ├── mysqllistactivequeries_test.go
│   │   │   │   └── mysqllistactivequeries.go
│   │   │   ├── mysqllisttablefragmentation
│   │   │   │   ├── mysqllisttablefragmentation_test.go
│   │   │   │   └── mysqllisttablefragmentation.go
│   │   │   ├── mysqllisttables
│   │   │   │   ├── mysqllisttables_test.go
│   │   │   │   └── mysqllisttables.go
│   │   │   ├── mysqllisttablesmissinguniqueindexes
│   │   │   │   ├── mysqllisttablesmissinguniqueindexes_test.go
│   │   │   │   └── mysqllisttablesmissinguniqueindexes.go
│   │   │   └── mysqlsql
│   │   │       ├── mysqlsql_test.go
│   │   │       └── mysqlsql.go
│   │   ├── neo4j
│   │   │   ├── neo4jcypher
│   │   │   │   ├── neo4jcypher_test.go
│   │   │   │   └── neo4jcypher.go
│   │   │   ├── neo4jexecutecypher
│   │   │   │   ├── classifier
│   │   │   │   │   ├── classifier_test.go
│   │   │   │   │   └── classifier.go
│   │   │   │   ├── neo4jexecutecypher_test.go
│   │   │   │   └── neo4jexecutecypher.go
│   │   │   └── neo4jschema
│   │   │       ├── cache
│   │   │       │   ├── cache_test.go
│   │   │       │   └── cache.go
│   │   │       ├── helpers
│   │   │       │   ├── helpers_test.go
│   │   │       │   └── helpers.go
│   │   │       ├── neo4jschema_test.go
│   │   │       ├── neo4jschema.go
│   │   │       └── types
│   │   │           └── types.go
│   │   ├── oceanbase
│   │   │   ├── oceanbaseexecutesql
│   │   │   │   ├── oceanbaseexecutesql_test.go
│   │   │   │   └── oceanbaseexecutesql.go
│   │   │   └── oceanbasesql
│   │   │       ├── oceanbasesql_test.go
│   │   │       └── oceanbasesql.go
│   │   ├── oracle
│   │   │   ├── oracleexecutesql
│   │   │   │   └── oracleexecutesql.go
│   │   │   └── oraclesql
│   │   │       └── oraclesql.go
│   │   ├── parameters_test.go
│   │   ├── parameters.go
│   │   ├── postgres
│   │   │   ├── postgresexecutesql
│   │   │   │   ├── postgresexecutesql_test.go
│   │   │   │   └── postgresexecutesql.go
│   │   │   ├── postgreslistactivequeries
│   │   │   │   ├── postgreslistactivequeries_test.go
│   │   │   │   └── postgreslistactivequeries.go
│   │   │   ├── postgreslistavailableextensions
│   │   │   │   ├── postgreslistavailableextensions_test.go
│   │   │   │   └── postgreslistavailableextensions.go
│   │   │   ├── postgreslistinstalledextensions
│   │   │   │   ├── postgreslistinstalledextensions_test.go
│   │   │   │   └── postgreslistinstalledextensions.go
│   │   │   ├── postgreslisttables
│   │   │   │   ├── postgreslisttables_test.go
│   │   │   │   └── postgreslisttables.go
│   │   │   └── postgressql
│   │   │       ├── postgressql_test.go
│   │   │       └── postgressql.go
│   │   ├── redis
│   │   │   ├── redis_test.go
│   │   │   └── redis.go
│   │   ├── spanner
│   │   │   ├── spannerexecutesql
│   │   │   │   ├── spannerexecutesql_test.go
│   │   │   │   └── spannerexecutesql.go
│   │   │   ├── spannerlisttables
│   │   │   │   ├── spannerlisttables_test.go
│   │   │   │   └── spannerlisttables.go
│   │   │   └── spannersql
│   │   │       ├── spanner_test.go
│   │   │       └── spannersql.go
│   │   ├── sqlite
│   │   │   ├── sqliteexecutesql
│   │   │   │   ├── sqliteexecutesql_test.go
│   │   │   │   └── sqliteexecutesql.go
│   │   │   └── sqlitesql
│   │   │       ├── sqlitesql_test.go
│   │   │       └── sqlitesql.go
│   │   ├── tidb
│   │   │   ├── tidbexecutesql
│   │   │   │   ├── tidbexecutesql_test.go
│   │   │   │   └── tidbexecutesql.go
│   │   │   └── tidbsql
│   │   │       ├── tidbsql_test.go
│   │   │       └── tidbsql.go
│   │   ├── tools_test.go
│   │   ├── tools.go
│   │   ├── toolsets.go
│   │   ├── trino
│   │   │   ├── trinoexecutesql
│   │   │   │   ├── trinoexecutesql_test.go
│   │   │   │   └── trinoexecutesql.go
│   │   │   └── trinosql
│   │   │       ├── trinosql_test.go
│   │   │       └── trinosql.go
│   │   ├── utility
│   │   │   └── wait
│   │   │       ├── wait_test.go
│   │   │       └── wait.go
│   │   ├── valkey
│   │   │   ├── valkey_test.go
│   │   │   └── valkey.go
│   │   └── yugabytedbsql
│   │       ├── yugabytedbsql_test.go
│   │       └── yugabytedbsql.go
│   └── util
│       └── util.go
├── LICENSE
├── logo.png
├── main.go
├── MCP-TOOLBOX-EXTENSION.md
├── README.md
└── tests
    ├── alloydb
    │   ├── alloydb_integration_test.go
    │   └── alloydb_wait_for_operation_test.go
    ├── alloydbainl
    │   └── alloydb_ai_nl_integration_test.go
    ├── alloydbpg
    │   └── alloydb_pg_integration_test.go
    ├── auth.go
    ├── bigquery
    │   └── bigquery_integration_test.go
    ├── bigtable
    │   └── bigtable_integration_test.go
    ├── cassandra
    │   └── cassandra_integration_test.go
    ├── clickhouse
    │   └── clickhouse_integration_test.go
    ├── cloudmonitoring
    │   └── cloud_monitoring_integration_test.go
    ├── cloudsql
    │   ├── cloud_sql_create_database_test.go
    │   ├── cloud_sql_create_users_test.go
    │   ├── cloud_sql_get_instances_test.go
    │   ├── cloud_sql_list_databases_test.go
    │   ├── cloudsql_list_instances_test.go
    │   └── cloudsql_wait_for_operation_test.go
    ├── cloudsqlmssql
    │   ├── cloud_sql_mssql_create_instance_integration_test.go
    │   └── cloud_sql_mssql_integration_test.go
    ├── cloudsqlmysql
    │   ├── cloud_sql_mysql_create_instance_integration_test.go
    │   └── cloud_sql_mysql_integration_test.go
    ├── cloudsqlpg
    │   ├── cloud_sql_pg_create_instances_test.go
    │   └── cloud_sql_pg_integration_test.go
    ├── common.go
    ├── couchbase
    │   └── couchbase_integration_test.go
    ├── dataform
    │   └── dataform_integration_test.go
    ├── dataplex
    │   └── dataplex_integration_test.go
    ├── dgraph
    │   └── dgraph_integration_test.go
    ├── firebird
    │   └── firebird_integration_test.go
    ├── firestore
    │   └── firestore_integration_test.go
    ├── http
    │   └── http_integration_test.go
    ├── looker
    │   └── looker_integration_test.go
    ├── mongodb
    │   └── mongodb_integration_test.go
    ├── mssql
    │   └── mssql_integration_test.go
    ├── mysql
    │   └── mysql_integration_test.go
    ├── neo4j
    │   └── neo4j_integration_test.go
    ├── oceanbase
    │   └── oceanbase_integration_test.go
    ├── option.go
    ├── oracle
    │   └── oracle_integration_test.go
    ├── postgres
    │   └── postgres_integration_test.go
    ├── redis
    │   └── redis_test.go
    ├── server.go
    ├── source.go
    ├── spanner
    │   └── spanner_integration_test.go
    ├── sqlite
    │   └── sqlite_integration_test.go
    ├── tidb
    │   └── tidb_integration_test.go
    ├── tool.go
    ├── trino
    │   └── trino_integration_test.go
    ├── utility
    │   └── wait_integration_test.go
    ├── valkey
    │   └── valkey_test.go
    └── yugabytedb
        └── yugabytedb_integration_test.go
```

# Files

--------------------------------------------------------------------------------
/internal/tools/neo4j/neo4jschema/helpers/helpers.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Package helpers provides utility functions for transforming and processing Neo4j
// schema data. It includes functions for converting raw query results from both
// APOC and native Cypher queries into a standardized, structured format.
package helpers

import (
	"fmt"
	"sort"

	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema/types"
	"github.com/neo4j/neo4j-go-driver/v5/neo4j"
)

// ConvertToStringSlice converts a slice of any type to a slice of strings.
// It uses fmt.Sprintf to perform the conversion for each element.
// Example:
//
//	input: []any{"user", 123, true}
//	output: []string{"user", "123", "true"}
func ConvertToStringSlice(slice []any) []string {
	result := make([]string, len(slice))
	for i, v := range slice {
		result[i] = fmt.Sprintf("%v", v)
	}
	return result
}

// GetStringValue safely converts any value to its string representation.
// If the input value is nil, it returns an empty string.
func GetStringValue(val any) string {
	if val == nil {
		return ""
	}
	return fmt.Sprintf("%v", val)
}

// MapToAPOCSchema converts a raw map from a Cypher query into a structured
// APOCSchemaResult. This is a workaround for database drivers that may return
// complex nested structures as `map[string]any` instead of unmarshalling
// directly into a struct. It achieves this by marshalling the map to YAML and
// then unmarshalling into the target struct.
func MapToAPOCSchema(schemaMap map[string]any) (*types.APOCSchemaResult, error) {
	schemaBytes, err := yaml.Marshal(schemaMap)
	if err != nil {
		return nil, fmt.Errorf("failed to marshal schema map: %w", err)
	}

	var entities map[string]types.APOCEntity
	if err = yaml.Unmarshal(schemaBytes, &entities); err != nil {
		return nil, fmt.Errorf("failed to unmarshal schema map into entities: %w", err)
	}

	return &types.APOCSchemaResult{Value: entities}, nil
}

// ProcessAPOCSchema transforms the nested result from the `apoc.meta.schema()`
// procedure into flat lists of node labels and relationships, along with
// aggregated database statistics. It iterates through entities, processes nodes,
// and extracts outgoing relationship information nested within those nodes.
func ProcessAPOCSchema(apocSchema *types.APOCSchemaResult) ([]types.NodeLabel, []types.Relationship, *types.Statistics) {
	var nodeLabels []types.NodeLabel
	relMap := make(map[string]*types.Relationship)
	stats := &types.Statistics{
		NodesByLabel:        make(map[string]int64),
		RelationshipsByType: make(map[string]int64),
		PropertiesByLabel:   make(map[string]int64),
		PropertiesByRelType: make(map[string]int64),
	}

	for name, entity := range apocSchema.Value {
		// We only process top-level entities of type "node". Relationship info is
		// derived from the "relationships" field within each node entity.
		if entity.Type != "node" {
			continue
		}

		nodeLabel := types.NodeLabel{
			Name:       name,
			Count:      entity.Count,
			Properties: extractAPOCProperties(entity.Properties),
		}
		nodeLabels = append(nodeLabels, nodeLabel)

		// Aggregate statistics for the node.
		stats.NodesByLabel[name] = entity.Count
		stats.TotalNodes += entity.Count
		propCount := int64(len(nodeLabel.Properties))
		stats.PropertiesByLabel[name] = propCount
		stats.TotalProperties += propCount * entity.Count

		// Extract relationship information from the node.
		for relName, relInfo := range entity.Relationships {
			// Only process outgoing relationships to avoid double-counting.
			if relInfo.Direction != "out" {
				continue
			}

			rel, exists := relMap[relName]
			if !exists {
				rel = &types.Relationship{
					Type:       relName,
					Properties: extractAPOCProperties(relInfo.Properties),
				}
				if len(relInfo.Labels) > 0 {
					rel.EndNode = relInfo.Labels[0]
				}
				rel.StartNode = name
				relMap[relName] = rel
			}
			rel.Count += relInfo.Count
		}
	}

	// Consolidate the relationships from the map into a slice and update stats.
	relationships := make([]types.Relationship, 0, len(relMap))
	for _, rel := range relMap {
		relationships = append(relationships, *rel)
		stats.RelationshipsByType[rel.Type] = rel.Count
		stats.TotalRelationships += rel.Count
		propCount := int64(len(rel.Properties))
		stats.PropertiesByRelType[rel.Type] = propCount
		stats.TotalProperties += propCount * rel.Count
	}

	sortAndClean(nodeLabels, relationships, stats)

	// Set empty maps and lists to nil for cleaner output.
	if len(nodeLabels) == 0 {
		nodeLabels = nil
	}
	if len(relationships) == 0 {
		relationships = nil
	}

	return nodeLabels, relationships, stats
}

// ProcessNonAPOCSchema serves as an alternative to ProcessAPOCSchema for environments
// where APOC procedures are not available. It converts schema data gathered from
// multiple separate, native Cypher queries (providing node counts, property maps, etc.)
// into the same standardized, structured format.
func ProcessNonAPOCSchema(
	nodeCounts map[string]int64,
	nodePropsMap map[string]map[string]map[string]bool,
	relCounts map[string]int64,
	relPropsMap map[string]map[string]map[string]bool,
	relConnectivity map[string]types.RelConnectivityInfo,
) ([]types.NodeLabel, []types.Relationship, *types.Statistics) {
	stats := &types.Statistics{
		NodesByLabel:        make(map[string]int64),
		RelationshipsByType: make(map[string]int64),
		PropertiesByLabel:   make(map[string]int64),
		PropertiesByRelType: make(map[string]int64),
	}

	// Process node information.
	nodeLabels := make([]types.NodeLabel, 0, len(nodeCounts))
	for label, count := range nodeCounts {
		properties := make([]types.PropertyInfo, 0)
		if props, ok := nodePropsMap[label]; ok {
			for key, typeSet := range props {
				typeList := make([]string, 0, len(typeSet))
				for tp := range typeSet {
					typeList = append(typeList, tp)
				}
				sort.Strings(typeList)
				properties = append(properties, types.PropertyInfo{Name: key, Types: typeList})
			}
		}
		sort.Slice(properties, func(i, j int) bool { return properties[i].Name < properties[j].Name })
		nodeLabels = append(nodeLabels, types.NodeLabel{Name: label, Count: count, Properties: properties})

		// Aggregate node statistics.
		stats.NodesByLabel[label] = count
		stats.TotalNodes += count
		propCount := int64(len(properties))
		stats.PropertiesByLabel[label] = propCount
		stats.TotalProperties += propCount * count
	}

	// Process relationship information.
	relationships := make([]types.Relationship, 0, len(relCounts))
	for relType, count := range relCounts {
		properties := make([]types.PropertyInfo, 0)
		if props, ok := relPropsMap[relType]; ok {
			for key, typeSet := range props {
				typeList := make([]string, 0, len(typeSet))
				for tp := range typeSet {
					typeList = append(typeList, tp)
				}
				sort.Strings(typeList)
				properties = append(properties, types.PropertyInfo{Name: key, Types: typeList})
			}
		}
		sort.Slice(properties, func(i, j int) bool { return properties[i].Name < properties[j].Name })

		conn := relConnectivity[relType]
		relationships = append(relationships, types.Relationship{
			Type:       relType,
			Count:      count,
			StartNode:  conn.StartNode,
			EndNode:    conn.EndNode,
			Properties: properties,
		})

		// Aggregate relationship statistics.
		stats.RelationshipsByType[relType] = count
		stats.TotalRelationships += count
		propCount := int64(len(properties))
		stats.PropertiesByRelType[relType] = propCount
		stats.TotalProperties += propCount * count
	}

	sortAndClean(nodeLabels, relationships, stats)

	// Set empty maps and lists to nil for cleaner output.
	if len(nodeLabels) == 0 {
		nodeLabels = nil
	}
	if len(relationships) == 0 {
		relationships = nil
	}

	return nodeLabels, relationships, stats
}

// extractAPOCProperties is a helper that converts a map of APOC property
// information into a slice of standardized PropertyInfo structs. The resulting
// slice is sorted by property name for consistent ordering.
func extractAPOCProperties(props map[string]types.APOCProperty) []types.PropertyInfo {
	properties := make([]types.PropertyInfo, 0, len(props))
	for name, info := range props {
		properties = append(properties, types.PropertyInfo{
			Name:      name,
			Types:     []string{info.Type},
			Indexed:   info.Indexed,
			Unique:    info.Unique,
			Mandatory: info.Existence,
		})
	}
	sort.Slice(properties, func(i, j int) bool {
		return properties[i].Name < properties[j].Name
	})
	return properties
}

// sortAndClean performs final processing on the schema data. It sorts node and
// relationship slices for consistent output, primarily by count (descending) and
// secondarily by name/type. It also sets any empty maps in the statistics
// struct to nil, which can simplify downstream serialization (e.g., omitting
// empty fields in JSON).
func sortAndClean(nodeLabels []types.NodeLabel, relationships []types.Relationship, stats *types.Statistics) {
	// Sort nodes by count (desc) then name (asc).
	sort.Slice(nodeLabels, func(i, j int) bool {
		if nodeLabels[i].Count != nodeLabels[j].Count {
			return nodeLabels[i].Count > nodeLabels[j].Count
		}
		return nodeLabels[i].Name < nodeLabels[j].Name
	})

	// Sort relationships by count (desc) then type (asc).
	sort.Slice(relationships, func(i, j int) bool {
		if relationships[i].Count != relationships[j].Count {
			return relationships[i].Count > relationships[j].Count
		}
		return relationships[i].Type < relationships[j].Type
	})

	// Nil out empty maps for cleaner output.
	if len(stats.NodesByLabel) == 0 {
		stats.NodesByLabel = nil
	}
	if len(stats.RelationshipsByType) == 0 {
		stats.RelationshipsByType = nil
	}
	if len(stats.PropertiesByLabel) == 0 {
		stats.PropertiesByLabel = nil
	}
	if len(stats.PropertiesByRelType) == 0 {
		stats.PropertiesByRelType = nil
	}
}

// ConvertValue converts a Neo4j value to a JSON-compatible value.
func ConvertValue(value any) any {
	switch v := value.(type) {
	case nil, neo4j.InvalidValue:
		return nil
	case bool, string, int, int8, int16, int32, int64, float32, float64:
		return v
	case neo4j.Date, neo4j.LocalTime, neo4j.Time, neo4j.LocalDateTime, neo4j.Duration:
		if iv, ok := v.(types.ValueType); ok {
			return iv.String()
		}
	case neo4j.Node:
		return map[string]any{
			"elementId":  v.GetElementId(),
			"labels":     v.Labels,
			"properties": ConvertValue(v.GetProperties()),
		}
	case neo4j.Relationship:
		return map[string]any{
			"elementId":      v.GetElementId(),
			"type":           v.Type,
			"startElementId": v.StartElementId,
			"endElementId":   v.EndElementId,
			"properties":     ConvertValue(v.GetProperties()),
		}
	case neo4j.Entity:
		return map[string]any{
			"elementId":  v.GetElementId(),
			"properties": ConvertValue(v.GetProperties()),
		}
	case neo4j.Path:
		var nodes []any
		var relationships []any
		for _, r := range v.Relationships {
			relationships = append(relationships, ConvertValue(r))
		}
		for _, n := range v.Nodes {
			nodes = append(nodes, ConvertValue(n))
		}
		return map[string]any{
			"nodes":         nodes,
			"relationships": relationships,
		}
	case neo4j.Record:
		m := make(map[string]any)
		for i, key := range v.Keys {
			m[key] = ConvertValue(v.Values[i])
		}
		return m
	case neo4j.Point2D:
		return map[string]any{"x": v.X, "y": v.Y, "srid": v.SpatialRefId}
	case neo4j.Point3D:
		return map[string]any{"x": v.X, "y": v.Y, "z": v.Z, "srid": v.SpatialRefId}
	case []any:
		arr := make([]any, len(v))
		for i, elem := range v {
			arr[i] = ConvertValue(elem)
		}
		return arr
	case map[string]any:
		m := make(map[string]any)
		for key, val := range v {
			m[key] = ConvertValue(val)
		}
		return m
	}
	return fmt.Sprintf("%v", value)
}
```
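The YAML round trip in `MapToAPOCSchema` only works if the map keys the driver returns line up with the yaml tags on `types.APOCEntity`. Below is a minimal sketch of the intended pipeline; the key names ("type", "count", "properties", "relationships", "direction", "labels") are assumptions inferred from how `ProcessAPOCSchema` reads the structs, and the import only resolves inside this module, since `internal/...` packages are not importable from other modules.

```go
package main

import (
	"fmt"

	"github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema/helpers"
)

func main() {
	// Shape a driver might hand back for `CALL apoc.meta.schema()`:
	// one top-level entry per label, with nested property and
	// relationship maps. Key names are assumed to match the yaml
	// tags on types.APOCEntity.
	raw := map[string]any{
		"Person": map[string]any{
			"type":  "node",
			"count": 2,
			"properties": map[string]any{
				"name": map[string]any{"type": "STRING", "indexed": true},
			},
			"relationships": map[string]any{
				"KNOWS": map[string]any{
					"direction": "out",
					"count":     1,
					"labels":    []any{"Person"},
				},
			},
		},
	}

	schema, err := helpers.MapToAPOCSchema(raw)
	if err != nil {
		panic(err)
	}

	nodes, rels, stats := helpers.ProcessAPOCSchema(schema)
	// With one label and one outgoing relationship, this should report
	// 1 node label, 1 relationship type, and stats.TotalNodes == 2.
	fmt.Println(len(nodes), len(rels), stats.TotalNodes, stats.TotalRelationships)
}
```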
--------------------------------------------------------------------------------
/internal/prebuiltconfigs/tools/spanner-postgres.yaml:
--------------------------------------------------------------------------------

```yaml
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

sources:
  spanner-source:
    kind: "spanner"
    project: ${SPANNER_PROJECT}
    instance: ${SPANNER_INSTANCE}
    database: ${SPANNER_DATABASE}
    dialect: "postgresql"

tools:
  execute_sql:
    kind: spanner-execute-sql
    source: spanner-source
    description: Use this tool to execute DML SQL. Please use the PostgreSQL interface for Spanner.
  execute_sql_dql:
    kind: spanner-execute-sql
    source: spanner-source
    description: Use this tool to execute DQL SQL. Please use the PostgreSQL interface for Spanner.
    readOnly: true
  list_tables:
    kind: spanner-sql
    source: spanner-source
    readOnly: true
    description: "Lists detailed schema information (object type, columns, constraints, indexes, triggers, owner, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
    statement: |
      WITH table_info_cte AS (
        SELECT
          T.TABLE_SCHEMA,
          T.TABLE_NAME,
          T.TABLE_TYPE,
          T.PARENT_TABLE_NAME,
          T.ON_DELETE_ACTION
        FROM INFORMATION_SCHEMA.TABLES AS T
        WHERE
          T.TABLE_SCHEMA = 'public'
          AND T.TABLE_TYPE = 'BASE TABLE'
          AND (
            NULLIF(TRIM($1), '') IS NULL
            OR T.TABLE_NAME IN (
              SELECT table_name
              FROM UNNEST(regexp_split_to_array($1, '\s*,\s*')) AS table_name)
          )
      ),
      columns_info_cte AS (
        SELECT
          C.TABLE_SCHEMA,
          C.TABLE_NAME,
          ARRAY_AGG(
            CONCAT(
              '{',
              '"column_name":"', COALESCE(REPLACE(C.COLUMN_NAME, '"', '\"'), ''), '",',
              '"data_type":"', COALESCE(REPLACE(C.SPANNER_TYPE, '"', '\"'), ''), '",',
              '"ordinal_position":', C.ORDINAL_POSITION::TEXT, ',',
              '"is_not_nullable":', CASE WHEN C.IS_NULLABLE = 'NO' THEN 'true' ELSE 'false' END, ',',
              '"column_default":', CASE WHEN C.COLUMN_DEFAULT IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(C.COLUMN_DEFAULT::text, '"', '\"'), '"') END,
              '}'
            ) ORDER BY C.ORDINAL_POSITION
          ) AS columns_json_array_elements
        FROM INFORMATION_SCHEMA.COLUMNS AS C
        WHERE
          C.TABLE_SCHEMA = 'public'
          AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE C.TABLE_SCHEMA = TI.TABLE_SCHEMA AND C.TABLE_NAME = TI.TABLE_NAME)
        GROUP BY C.TABLE_SCHEMA, C.TABLE_NAME
      ),
      constraint_columns_agg_cte AS (
        SELECT
          CONSTRAINT_CATALOG,
          CONSTRAINT_SCHEMA,
          CONSTRAINT_NAME,
          ARRAY_AGG('"' || REPLACE(COLUMN_NAME, '"', '\"') || '"' ORDER BY ORDINAL_POSITION) AS column_names_json_list
        FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
        WHERE CONSTRAINT_SCHEMA = 'public'
        GROUP BY CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME
      ),
      constraints_info_cte AS (
        SELECT
          TC.TABLE_SCHEMA,
          TC.TABLE_NAME,
          ARRAY_AGG(
            CONCAT(
              '{',
              '"constraint_name":"', COALESCE(REPLACE(TC.CONSTRAINT_NAME, '"', '\"'), ''), '",',
              '"constraint_type":"', COALESCE(REPLACE(TC.CONSTRAINT_TYPE, '"', '\"'), ''), '",',
              '"constraint_definition":',
              CASE TC.CONSTRAINT_TYPE
                WHEN 'CHECK' THEN CASE WHEN CC.CHECK_CLAUSE IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(CC.CHECK_CLAUSE, '"', '\"'), '"') END
                WHEN 'PRIMARY KEY' THEN CONCAT('"', 'PRIMARY KEY (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
                WHEN 'UNIQUE' THEN CONCAT('"', 'UNIQUE (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
                WHEN 'FOREIGN KEY' THEN CONCAT('"', 'FOREIGN KEY (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ') REFERENCES ',
                  COALESCE(REPLACE(RefKeyTable.TABLE_NAME, '"', '\"'), ''),
                  ' (', array_to_string(COALESCE(RefKeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
                ELSE 'null'
              END, ',',
              '"constraint_columns":[', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ','), '],',
              '"foreign_key_referenced_table":', CASE WHEN RefKeyTable.TABLE_NAME IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(RefKeyTable.TABLE_NAME, '"', '\"'), '"') END, ',',
              '"foreign_key_referenced_columns":[', array_to_string(COALESCE(RefKeyCols.column_names_json_list, ARRAY[]::text[]), ','), ']',
              '}'
            ) ORDER BY TC.CONSTRAINT_NAME
          ) AS constraints_json_array_elements
        FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC
        LEFT JOIN INFORMATION_SCHEMA.CHECK_CONSTRAINTS AS CC
          ON TC.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
        LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS RC
          ON TC.CONSTRAINT_CATALOG = RC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = RC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = RC.CONSTRAINT_NAME
        LEFT JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS RefConstraint
          ON RC.UNIQUE_CONSTRAINT_CATALOG = RefConstraint.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefConstraint.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefConstraint.CONSTRAINT_NAME
        LEFT JOIN INFORMATION_SCHEMA.TABLES AS RefKeyTable
          ON RefConstraint.TABLE_CATALOG = RefKeyTable.TABLE_CATALOG AND RefConstraint.TABLE_SCHEMA = RefKeyTable.TABLE_SCHEMA AND RefConstraint.TABLE_NAME = RefKeyTable.TABLE_NAME
        LEFT JOIN constraint_columns_agg_cte AS KeyCols
          ON TC.CONSTRAINT_CATALOG = KeyCols.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = KeyCols.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = KeyCols.CONSTRAINT_NAME
        LEFT JOIN constraint_columns_agg_cte AS RefKeyCols
          ON RC.UNIQUE_CONSTRAINT_CATALOG = RefKeyCols.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefKeyCols.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefKeyCols.CONSTRAINT_NAME AND TC.CONSTRAINT_TYPE = 'FOREIGN KEY'
        WHERE
          TC.TABLE_SCHEMA = 'public'
          AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE TC.TABLE_SCHEMA = TI.TABLE_SCHEMA AND TC.TABLE_NAME = TI.TABLE_NAME)
        GROUP BY TC.TABLE_SCHEMA, TC.TABLE_NAME
      ),
      index_key_columns_agg_cte AS (
        SELECT
          TABLE_CATALOG,
          TABLE_SCHEMA,
          TABLE_NAME,
          INDEX_NAME,
          ARRAY_AGG(
            CONCAT(
              '{"column_name":"', COALESCE(REPLACE(COLUMN_NAME, '"', '\"'), ''), '",',
              '"ordering":"', COALESCE(REPLACE(COLUMN_ORDERING, '"', '\"'), ''), '"}'
            ) ORDER BY ORDINAL_POSITION
          ) AS key_column_json_details
        FROM INFORMATION_SCHEMA.INDEX_COLUMNS
        WHERE ORDINAL_POSITION IS NOT NULL AND TABLE_SCHEMA = 'public'
        GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
      ),
      index_storing_columns_agg_cte AS (
        SELECT
          TABLE_CATALOG,
          TABLE_SCHEMA,
          TABLE_NAME,
          INDEX_NAME,
          ARRAY_AGG(CONCAT('"', REPLACE(COLUMN_NAME, '"', '\"'), '"') ORDER BY COLUMN_NAME) AS storing_column_json_names
        FROM INFORMATION_SCHEMA.INDEX_COLUMNS
        WHERE ORDINAL_POSITION IS NULL AND TABLE_SCHEMA = 'public'
        GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
      ),
      indexes_info_cte AS (
        SELECT
          I.TABLE_SCHEMA,
          I.TABLE_NAME,
          ARRAY_AGG(
            CONCAT(
              '{',
              '"index_name":"', COALESCE(REPLACE(I.INDEX_NAME, '"', '\"'), ''), '",',
              '"index_type":"', COALESCE(REPLACE(I.INDEX_TYPE, '"', '\"'), ''), '",',
              '"is_unique":', CASE WHEN I.IS_UNIQUE = 'YES' THEN 'true' ELSE 'false' END, ',',
              '"is_null_filtered":', CASE WHEN I.IS_NULL_FILTERED = 'YES' THEN 'true' ELSE 'false' END, ',',
              '"interleaved_in_table":', CASE WHEN I.PARENT_TABLE_NAME IS NULL OR I.PARENT_TABLE_NAME = '' THEN 'null' ELSE CONCAT('"', REPLACE(I.PARENT_TABLE_NAME, '"', '\"'), '"') END, ',',
              '"index_key_columns":[', COALESCE(array_to_string(KeyIndexCols.key_column_json_details, ','), ''), '],',
              '"storing_columns":[', COALESCE(array_to_string(StoringIndexCols.storing_column_json_names, ','), ''), ']',
              '}'
            ) ORDER BY I.INDEX_NAME
          ) AS indexes_json_array_elements
        FROM INFORMATION_SCHEMA.INDEXES AS I
        LEFT JOIN index_key_columns_agg_cte AS KeyIndexCols
          ON I.TABLE_CATALOG = KeyIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = KeyIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = KeyIndexCols.TABLE_NAME AND I.INDEX_NAME = KeyIndexCols.INDEX_NAME
        LEFT JOIN index_storing_columns_agg_cte AS StoringIndexCols
          ON I.TABLE_CATALOG = StoringIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = StoringIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = StoringIndexCols.TABLE_NAME AND I.INDEX_NAME = StoringIndexCols.INDEX_NAME AND I.INDEX_TYPE IN ('LOCAL', 'GLOBAL')
        WHERE
          I.TABLE_SCHEMA = 'public'
          AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE I.TABLE_SCHEMA = TI.TABLE_SCHEMA AND I.TABLE_NAME = TI.TABLE_NAME)
        GROUP BY I.TABLE_SCHEMA, I.TABLE_NAME
      )
      SELECT
        TI.TABLE_SCHEMA AS schema_name,
        TI.TABLE_NAME AS object_name,
        CASE
          WHEN $2 = 'simple' THEN
            -- IF format is 'simple', return basic JSON
            CONCAT('{"name":"', COALESCE(REPLACE(TI.TABLE_NAME, '"', '\"'), ''), '"}')
          ELSE
            CONCAT(
              '{',
              '"schema_name":"', COALESCE(REPLACE(TI.TABLE_SCHEMA, '"', '\"'), ''), '",',
              '"object_name":"', COALESCE(REPLACE(TI.TABLE_NAME, '"', '\"'), ''), '",',
              '"object_type":"', COALESCE(REPLACE(TI.TABLE_TYPE, '"', '\"'), ''), '",',
              '"columns":[', COALESCE(array_to_string(CI.columns_json_array_elements, ','), ''), '],',
              '"constraints":[', COALESCE(array_to_string(CONSI.constraints_json_array_elements, ','), ''), '],',
              '"indexes":[', COALESCE(array_to_string(II.indexes_json_array_elements, ','), ''), ']',
              '}'
            )
        END AS object_details
      FROM table_info_cte AS TI
      LEFT JOIN columns_info_cte AS CI
        ON TI.TABLE_SCHEMA = CI.TABLE_SCHEMA AND TI.TABLE_NAME = CI.TABLE_NAME
      LEFT JOIN constraints_info_cte AS CONSI
        ON TI.TABLE_SCHEMA = CONSI.TABLE_SCHEMA AND TI.TABLE_NAME = CONSI.TABLE_NAME
      LEFT JOIN indexes_info_cte AS II
        ON TI.TABLE_SCHEMA = II.TABLE_SCHEMA AND TI.TABLE_NAME = II.TABLE_NAME
      ORDER BY TI.TABLE_SCHEMA, TI.TABLE_NAME;
    parameters:
      - name: table_names
        type: string
        description: "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed."
        default: ""
      - name: output_format
        type: string
        description: "Optional: Use 'simple' to return table names only or use 'detailed' to return the full information schema."
        default: "detailed"

toolsets:
  spanner_postgres_database_tools:
    - execute_sql
    - execute_sql_dql
    - list_tables
```
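With `SPANNER_PROJECT`, `SPANNER_INSTANCE`, and `SPANNER_DATABASE` exported and this prebuilt config loaded, the `list_tables` tool takes the two declared parameters as a JSON body. A minimal sketch of calling it is below; the port (5000) and the `/api/tool/<name>/invoke` endpoint are assumptions based on Toolbox defaults documented elsewhere in this repo, and `singers` is a hypothetical table name.

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// "singers" is a hypothetical table; "simple" returns names only,
	// while the default "detailed" returns the full schema JSON.
	payload := bytes.NewBufferString(`{"table_names": "singers", "output_format": "simple"}`)

	// Endpoint path and port are assumptions based on Toolbox defaults.
	resp, err := http.Post("http://127.0.0.1:5000/api/tool/list_tables/invoke", "application/json", payload)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}
```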
--------------------------------------------------------------------------------
/tests/bigtable/bigtable_integration_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package bigtable

import (
	"bytes"
	"context"
	"encoding/binary"
	"fmt"
	"log"
	"os"
	"regexp"
	"slices"
	"strings"
	"testing"
	"time"

	"cloud.google.com/go/bigtable"
	"github.com/google/uuid"
	"github.com/googleapis/genai-toolbox/internal/testutils"
	"github.com/googleapis/genai-toolbox/internal/tools"
	"github.com/googleapis/genai-toolbox/tests"
)

var (
	BigtableSourceKind = "bigtable"
	BigtableToolKind   = "bigtable-sql"
	BigtableProject    = os.Getenv("BIGTABLE_PROJECT")
	BigtableInstance   = os.Getenv("BIGTABLE_INSTANCE")
)

func getBigtableVars(t *testing.T) map[string]any {
	switch "" {
	case BigtableProject:
		t.Fatal("'BIGTABLE_PROJECT' not set")
	case BigtableInstance:
		t.Fatal("'BIGTABLE_INSTANCE' not set")
	}

	return map[string]any{
		"kind":     BigtableSourceKind,
		"project":  BigtableProject,
		"instance": BigtableInstance,
	}
}

type TestRow struct {
	RowKey     string
	ColumnName string
	Data       []byte
}

func TestBigtableToolEndpoints(t *testing.T) {
	sourceConfig := getBigtableVars(t)
	ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
	defer cancel()

	var args []string

	tableName := "param_table" + strings.ReplaceAll(uuid.New().String(), "-", "")
	tableNameAuth := "auth_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
	tableNameTemplateParam := "tmpl_param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
	columnFamilyName := "cf"

	muts, rowKeys := getTestData(columnFamilyName)
	// Do not change the shape of statement without checking tests/common_test.go.
	// The structure and value of seed data has to match https://github.com/googleapis/genai-toolbox/blob/4dba0df12dc438eca3cb476ef52aa17cdf232c12/tests/common_test.go#L200-L251
	paramTestStatement := fmt.Sprintf("SELECT TO_INT64(cf['id']) as id, CAST(cf['name'] AS string) as name, FROM %s WHERE TO_INT64(cf['id']) = @id OR CAST(cf['name'] AS string) = @name;", tableName)
	idParamTestStatement := fmt.Sprintf("SELECT TO_INT64(cf['id']) as id, CAST(cf['name'] AS string) as name, FROM %s WHERE TO_INT64(cf['id']) = @id;", tableName)
	nameParamTestStatement := fmt.Sprintf("SELECT TO_INT64(cf['id']) as id, CAST(cf['name'] AS string) as name, FROM %s WHERE CAST(cf['name'] AS string) = @name;", tableName)
	arrayTestStatement := fmt.Sprintf(
		"SELECT TO_INT64(cf['id']) AS id, CAST(cf['name'] AS string) AS name FROM %s WHERE TO_INT64(cf['id']) IN UNNEST(@idArray) AND CAST(cf['name'] AS string) IN UNNEST(@nameArray);",
		tableName,
	)
	teardownTable1 := setupBtTable(t, ctx, sourceConfig["project"].(string), sourceConfig["instance"].(string), tableName, columnFamilyName, muts, rowKeys)
	defer teardownTable1(t)

	// Do not change the shape of statement without checking tests/common_test.go.
	// The structure and value of seed data has to match https://github.com/googleapis/genai-toolbox/blob/4dba0df12dc438eca3cb476ef52aa17cdf232c12/tests/common_test.go#L200-L251
	authToolStatement := fmt.Sprintf("SELECT CAST(cf['name'] AS string) as name FROM %s WHERE CAST(cf['email'] AS string) = @email;", tableNameAuth)
	teardownTable2 := setupBtTable(t, ctx, sourceConfig["project"].(string), sourceConfig["instance"].(string), tableNameAuth, columnFamilyName, muts, rowKeys)
	defer teardownTable2(t)

	mutsTmpl, rowKeysTmpl := getTestDataTemplateParam(columnFamilyName)
	teardownTableTmpl := setupBtTable(t, ctx, sourceConfig["project"].(string), sourceConfig["instance"].(string), tableNameTemplateParam, columnFamilyName, mutsTmpl, rowKeysTmpl)
	defer teardownTableTmpl(t)

	// Write config into a file and pass it to command
	toolsFile := tests.GetToolsConfig(sourceConfig, BigtableToolKind, paramTestStatement, idParamTestStatement, nameParamTestStatement, arrayTestStatement, authToolStatement)
	toolsFile = addTemplateParamConfig(t, toolsFile)

	cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
	if err != nil {
		t.Fatalf("command initialization returned an error: %s", err)
	}
	defer cleanup()

	waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
	if err != nil {
		t.Logf("toolbox command logs: \n%s", out)
		t.Fatalf("toolbox didn't start successfully: %s", err)
	}

	// Get configs for tests
	// Actual test parameters are set in https://github.com/googleapis/genai-toolbox/blob/52b09a67cb40ac0c5f461598b4673136699a3089/tests/tool_test.go#L250
	select1Want := "[{\"$col1\":1}]"
	myToolById4Want := `[{"id":4,"name":""}]`
	mcpMyFailToolWant := `{"jsonrpc":"2.0","id":"invoke-fail-tool","result":{"content":[{"type":"text","text":"unable to prepare statement: rpc error: code = InvalidArgument desc = Syntax error: Unexpected identifier \"SELEC\" [at 1:1]"}],"isError":true}}`
	mcpSelect1Want := `{"jsonrpc":"2.0","id":"invoke my-auth-required-tool","result":{"content":[{"type":"text","text":"{\"$col1\":1}"}]}}`
	nameFieldArray := `["CAST(cf['name'] AS string) as name"]`
	nameColFilter := "CAST(cf['name'] AS string)"

	// Run tests
	tests.RunToolGetTest(t)
	tests.RunToolInvokeTest(t, select1Want,
		tests.WithMyToolById4Want(myToolById4Want),
	)
	tests.RunMCPToolCallMethod(t, mcpMyFailToolWant, mcpSelect1Want)
	tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam,
		tests.WithNameFieldArray(nameFieldArray),
		tests.WithNameColFilter(nameColFilter),
		tests.DisableDdlTest(),
		tests.DisableInsertTest(),
	)
}

func convertToBytes(v int) []byte {
	binary1 := new(bytes.Buffer)
	if err := binary.Write(binary1, binary.BigEndian, int64(v)); err != nil {
		log.Fatalf("Unable to encode id: %v", err)
	}
	return binary1.Bytes()
}

func getTestData(columnFamilyName string) ([]*bigtable.Mutation, []string) {
	muts := []*bigtable.Mutation{}
	rowKeys := []string{}

	var ids [4][]byte
	for i := range ids {
		ids[i] = convertToBytes(i + 1)
	}
	now := bigtable.Time(time.Now())

	for rowKey, mutData := range map[string]map[string][]byte{
		// Do not change the test data without checking tests/common_test.go.
		// The structure and value of seed data has to match https://github.com/googleapis/genai-toolbox/blob/4dba0df12dc438eca3cb476ef52aa17cdf232c12/tests/common_test.go#L200-L251
		// Expected values are defined in https://github.com/googleapis/genai-toolbox/blob/52b09a67cb40ac0c5f461598b4673136699a3089/tests/tool_test.go#L229-L310
		"row-01": {
			"name":  []byte("Alice"),
			"email": []byte(tests.ServiceAccountEmail),
			"id":    ids[0],
		},
		"row-02": {
			"name":  []byte("Jane"),
			"email": []byte("[email protected]"),
			"id":    ids[1],
		},
		"row-03": {
			"name": []byte("Sid"),
			"id":   ids[2],
		},
		"row-04": {
			"name": nil,
			"id":   ids[3],
		},
	} {
		mut := bigtable.NewMutation()
		for col, v := range mutData {
			mut.Set(columnFamilyName, col, now, v)
		}
		muts = append(muts, mut)
		rowKeys = append(rowKeys, rowKey)
	}
	return muts, rowKeys
}

func getTestDataTemplateParam(columnFamilyName string) ([]*bigtable.Mutation, []string) {
	muts := []*bigtable.Mutation{}
	rowKeys := []string{}

	var ids [2][]byte
	for i := range ids {
		ids[i] = convertToBytes(i + 1)
	}
	now := bigtable.Time(time.Now())

	for rowKey, mutData := range map[string]map[string][]byte{
		// Do not change the test data without checking tests/common_test.go.
		// The structure and value of seed data has to match https://github.com/googleapis/genai-toolbox/blob/4dba0df12dc438eca3cb476ef52aa17cdf232c12/tests/common_test.go#L200-L251
		// Expected values are defined in https://github.com/googleapis/genai-toolbox/blob/52b09a67cb40ac0c5f461598b4673136699a3089/tests/tool_test.go#L229-L310
		"row-01": {
			"name": []byte("Alex"),
			"age":  convertToBytes(21),
			"id":   ids[0],
		},
		"row-02": {
			"name": []byte("Alice"),
			"age":  convertToBytes(100),
			"id":   ids[1],
		},
	} {
		mut := bigtable.NewMutation()
		for col, v := range mutData {
			mut.Set(columnFamilyName, col, now, v)
		}
		muts = append(muts, mut)
		rowKeys = append(rowKeys, rowKey)
	}
	return muts, rowKeys
}

func setupBtTable(t *testing.T, ctx context.Context, projectId string, instance string, tableName string, columnFamilyName string, muts []*bigtable.Mutation, rowKeys []string) func(*testing.T) {
	// Creating clients
	adminClient, err := bigtable.NewAdminClient(ctx, projectId, instance)
	if err != nil {
		t.Fatalf("NewAdminClient: %v", err)
	}
	client, err := bigtable.NewClient(ctx, projectId, instance)
	if err != nil {
		log.Fatalf("Could not create data operations client: %v", err)
	}
	defer client.Close()

	// Creating tables
	tables, err := adminClient.Tables(ctx)
	if err != nil {
		log.Fatalf("Could not fetch table list: %v", err)
	}

	if !slices.Contains(tables, tableName) {
		log.Printf("Creating table %s", tableName)
		if err := adminClient.CreateTable(ctx, tableName); err != nil {
			log.Fatalf("Could not create table %s: %v", tableName, err)
		}
	}

	tblInfo, err := adminClient.TableInfo(ctx, tableName)
	if err != nil {
		log.Fatalf("Could not read info for table %s: %v", tableName, err)
	}

	// Creating column family
	if !slices.Contains(tblInfo.Families, columnFamilyName) {
		if err := adminClient.CreateColumnFamily(ctx, tableName, columnFamilyName); err != nil {
			log.Fatalf("Could not create column family %s: %v", columnFamilyName, err)
		}
	}

	tbl := client.Open(tableName)
	rowErrs, err := tbl.ApplyBulk(ctx, rowKeys, muts)
	if err != nil {
		log.Fatalf("Could not apply bulk row mutation: %v", err)
	}
	if rowErrs != nil {
		for _, rowErr := range rowErrs {
			log.Printf("Error writing row: %v", rowErr)
		}
		log.Fatalf("Could not write some rows")
	}

	// Writing data
	return func(t *testing.T) {
		// tear down test
		if err = adminClient.DeleteTable(ctx, tableName); err != nil {
			log.Fatalf("Teardown failed.
Could not delete table %s: %v", tableName, err)
		}
		defer adminClient.Close()
	}
}

func addTemplateParamConfig(t *testing.T, config map[string]any) map[string]any {
	toolsMap, ok := config["tools"].(map[string]any)
	if !ok {
		t.Fatalf("unable to get tools from config")
	}
	toolsMap["select-templateParams-tool"] = map[string]any{
		"kind":        "bigtable-sql",
		"source":      "my-instance",
		"description": "Create table tool with template parameters",
		"statement":   "SELECT TO_INT64(cf['age']) as age, TO_INT64(cf['id']) as id, CAST(cf['name'] AS string) as name, FROM {{.tableName}};",
		"templateParameters": []tools.Parameter{
			tools.NewStringParameter("tableName", "some description"),
		},
	}
	toolsMap["select-templateParams-combined-tool"] = map[string]any{
		"kind":        "bigtable-sql",
		"source":      "my-instance",
		"description": "Create table tool with template parameters",
		"statement":   "SELECT TO_INT64(cf['age']) as age, TO_INT64(cf['id']) as id, CAST(cf['name'] AS string) as name, FROM {{.tableName}} WHERE TO_INT64(cf['id']) = @id;",
		"parameters":  []tools.Parameter{tools.NewIntParameter("id", "the id of the user")},
		"templateParameters": []tools.Parameter{
			tools.NewStringParameter("tableName", "some description"),
		},
	}
	toolsMap["select-fields-templateParams-tool"] = map[string]any{
		"kind":        "bigtable-sql",
		"source":      "my-instance",
		"description": "Create table tool with template parameters",
		"statement":   "SELECT {{array .fields}}, FROM {{.tableName}};",
		"templateParameters": []tools.Parameter{
			tools.NewStringParameter("tableName", "some description"),
			tools.NewArrayParameter("fields", "The fields to select from", tools.NewStringParameter("field", "A field that will be returned from the query.")),
		},
	}
	toolsMap["select-filter-templateParams-combined-tool"] = map[string]any{
		"kind":        "bigtable-sql",
		"source":      "my-instance",
		"description": "Create table tool with template parameters",
		"statement":   "SELECT TO_INT64(cf['age']) as age, TO_INT64(cf['id']) as id, CAST(cf['name'] AS string) as name, FROM {{.tableName}} WHERE {{.columnFilter}} = @name;",
		"parameters":  []tools.Parameter{tools.NewStringParameter("name", "the name of the user")},
		"templateParameters": []tools.Parameter{
			tools.NewStringParameter("tableName", "some description"),
			tools.NewStringParameter("columnFilter", "some description"),
		},
	}
	config["tools"] = toolsMap
	return config
}
```

--------------------------------------------------------------------------------
/docs/en/resources/tools/firestore/firestore-query.md:
--------------------------------------------------------------------------------

```markdown
---
title: "firestore-query"
type: docs
weight: 1
description: >
  Query a Firestore collection with parameterizable filters and Firestore native JSON value types
aliases:
- /resources/tools/firestore-query
---

## Overview

The `firestore-query` tool allows you to query Firestore collections with dynamic, parameterizable filters that support Firestore's native JSON value types. This tool is designed for querying a single collection, which is the standard pattern in Firestore. The collection path itself can be parameterized, making it flexible for various use cases.

This tool is particularly useful when you need to create reusable query templates with parameters that can be substituted at runtime.

**Developer Note**: This tool serves as the general querying foundation that developers can use to create custom tools with specific query patterns.
## Key Features

- **Parameterizable Queries**: Use Go template syntax to create dynamic queries
- **Dynamic Collection Paths**: The collection path can be parameterized for flexibility
- **Native JSON Value Types**: Support for Firestore's typed values (stringValue, integerValue, doubleValue, etc.)
- **Complex Filter Logic**: Support for AND/OR logical operators in filters
- **Template Substitution**: Dynamic collection paths, filters, and ordering
- **Query Analysis**: Optional query performance analysis with explain metrics (non-parameterizable)

## Configuration

### Basic Configuration

```yaml
tools:
  query_countries:
    kind: firestore-query
    source: my-firestore-source
    description: Query countries with dynamic filters
    collectionPath: "countries"
    filters: |
      {
        "field": "continent",
        "op": "==",
        "value": {"stringValue": "{{.continent}}"}
      }
    parameters:
      - name: continent
        type: string
        description: Continent to filter by
        required: true
```

### Advanced Configuration with Complex Filters

```yaml
tools:
  advanced_query:
    kind: firestore-query
    source: my-firestore-source
    description: Advanced query with complex filters
    collectionPath: "{{.collection}}"
    filters: |
      {
        "or": [
          {"field": "status", "op": "==", "value": {"stringValue": "{{.status}}"}},
          {
            "and": [
              {"field": "priority", "op": ">", "value": {"integerValue": "{{.priority}}"}},
              {"field": "area", "op": "<", "value": {"doubleValue": {{.maxArea}}}},
              {"field": "active", "op": "==", "value": {"booleanValue": {{.isActive}}}}
            ]
          }
        ]
      }
    select:
      - name
      - status
      - priority
    orderBy:
      field: "{{.sortField}}"
      direction: "{{.sortDirection}}"
    limit: 100
    analyzeQuery: true
    parameters:
      - name: collection
        type: string
        description: Collection to query
        required: true
      - name: status
        type: string
        description: Status to filter by
        required: true
      - name: priority
        type: string
        description: Minimum priority value
        required: true
      - name: maxArea
        type: float
        description: Maximum area value
        required: true
      - name: isActive
        type: boolean
        description: Filter by active status
        required: true
      - name: sortField
        type: string
        description: Field to sort by
        required: false
        default: "createdAt"
      - name: sortDirection
        type: string
        description: Sort direction (ASCENDING or DESCENDING)
        required: false
        default: "DESCENDING"
```

## Parameters

### Configuration Parameters

| Parameter        | Type    | Required | Description                                                                                                    |
|------------------|---------|----------|----------------------------------------------------------------------------------------------------------------|
| `kind`           | string  | Yes      | Must be `firestore-query`                                                                                      |
| `source`         | string  | Yes      | Name of the Firestore source to use                                                                            |
| `description`    | string  | Yes      | Description of what this tool does                                                                             |
| `collectionPath` | string  | Yes      | Path to the collection to query (supports templates)                                                           |
| `filters`        | string  | No       | JSON string defining query filters (supports templates)                                                        |
| `select`         | array   | No       | Fields to select from documents (supports templates - string or array)                                         |
| `orderBy`        | object  | No       | Ordering configuration with `field` and `direction` (supports templates for the value of field or direction)   |
| `limit`          | integer | No       | Maximum number of documents to return (default: 100) (supports templates)                                      |
| `analyzeQuery`   | boolean | No       | Whether to analyze query performance (default: false)                                                          |
| `parameters`     | array   | Yes      | Parameter definitions for template substitution                                                                |

### Runtime Parameters

Runtime parameters are defined in the `parameters` array and can be used in templates throughout the configuration.
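For example, invoking the `query_countries` tool above with the request body below (a hypothetical invocation) substitutes `{{.continent}}` into the filter template before the query runs:

```json
{
  "continent": "Europe"
}
```

The rendered filter that is actually executed would then be:

```json
{
  "field": "continent",
  "op": "==",
  "value": {"stringValue": "Europe"}
}
```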
## Filter Format ### Simple Filter ```json { "field": "age", "op": ">", "value": {"integerValue": "25"} } ``` ### AND Filter ```json { "and": [ {"field": "status", "op": "==", "value": {"stringValue": "active"}}, {"field": "age", "op": ">=", "value": {"integerValue": "18"}} ] } ``` ### OR Filter ```json { "or": [ {"field": "role", "op": "==", "value": {"stringValue": "admin"}}, {"field": "role", "op": "==", "value": {"stringValue": "moderator"}} ] } ``` ### Nested Filters ```json { "or": [ {"field": "type", "op": "==", "value": {"stringValue": "premium"}}, { "and": [ {"field": "type", "op": "==", "value": {"stringValue": "standard"}}, {"field": "credits", "op": ">", "value": {"integerValue": "1000"}} ] } ] } ``` ## Firestore Native Value Types The tool supports all Firestore native JSON value types: | Type | Format | Example | |-----------|------------------------------------------------------|----------------------------------------------------------------| | String | `{"stringValue": "text"}` | `{"stringValue": "{{.name}}"}` | | Integer | `{"integerValue": "123"}` or `{"integerValue": 123}` | `{"integerValue": "{{.age}}"}` or `{"integerValue": {{.age}}}` | | Double | `{"doubleValue": 45.67}` | `{"doubleValue": {{.price}}}` | | Boolean | `{"booleanValue": true}` | `{"booleanValue": {{.active}}}` | | Null | `{"nullValue": null}` | `{"nullValue": null}` | | Timestamp | `{"timestampValue": "RFC3339"}` | `{"timestampValue": "{{.date}}"}` | | GeoPoint | `{"geoPointValue": {"latitude": 0, "longitude": 0}}` | See below | | Array | `{"arrayValue": {"values": [...]}}` | See below | | Map | `{"mapValue": {"fields": {...}}}` | See below | ### Complex Type Examples **GeoPoint:** ```json { "field": "location", "op": "==", "value": { "geoPointValue": { "latitude": 37.7749, "longitude": -122.4194 } } } ``` **Array:** ```json { "field": "tags", "op": "array-contains", "value": {"stringValue": "{{.tag}}"} } ``` ## Supported Operators - `<` - Less than - `<=` - Less than or equal - `>` - Greater than - `>=` - Greater than or equal - `==` - Equal - `!=` - Not equal - `array-contains` - Array contains value - `array-contains-any` - Array contains any of the values - `in` - Value is in array - `not-in` - Value is not in array ## Examples ### Example 1: Query with Dynamic Collection Path ```yaml tools: user_documents: kind: firestore-query source: my-firestore description: Query user-specific documents collectionPath: "users/{{.userId}}/documents" filters: | { "field": "type", "op": "==", "value": {"stringValue": "{{.docType}}"} } parameters: - name: userId type: string description: User ID required: true - name: docType type: string description: Document type to filter required: true ``` ### Example 2: Complex Geographic Query ```yaml tools: location_search: kind: firestore-query source: my-firestore description: Search locations by area and population collectionPath: "cities" filters: | { "and": [ {"field": "country", "op": "==", "value": {"stringValue": "{{.country}}"}}, {"field": "population", "op": ">", "value": {"integerValue": "{{.minPopulation}}"}}, {"field": "area", "op": "<", "value": {"doubleValue": {{.maxArea}}}} ] } orderBy: field: "population" direction: "DESCENDING" limit: 50 parameters: - name: country type: string description: Country code required: true - name: minPopulation type: string description: Minimum population (as string for large numbers) required: true - name: maxArea type: float description: Maximum area in square kilometers required: true ``` ### Example 3: Time-based Query with 
Analysis ```yaml tools: activity_log: kind: firestore-query source: my-firestore description: Query activity logs within time range collectionPath: "logs" filters: | { "and": [ {"field": "timestamp", "op": ">=", "value": {"timestampValue": "{{.startTime}}"}}, {"field": "timestamp", "op": "<=", "value": {"timestampValue": "{{.endTime}}"}}, {"field": "severity", "op": "in", "value": {"arrayValue": {"values": [ {"stringValue": "ERROR"}, {"stringValue": "CRITICAL"} ]}}} ] } select: - timestamp - message - severity - userId orderBy: field: "timestamp" direction: "DESCENDING" analyzeQuery: true parameters: - name: startTime type: string description: Start time in RFC3339 format required: true - name: endTime type: string description: End time in RFC3339 format required: true ``` ## Usage ### Invoking the Tool ```bash # Using curl curl -X POST http://localhost:5000/api/tool/your-tool-name/invoke \ -H "Content-Type: application/json" \ -d '{ "continent": "Europe", "minPopulation": "1000000", "maxArea": 500000.5, "isActive": true }' ``` ### Response Format **Without analyzeQuery:** ```json [ { "id": "doc1", "path": "countries/doc1", "data": { "name": "France", "continent": "Europe", "population": 67000000, "area": 551695 }, "createTime": "2024-01-01T00:00:00Z", "updateTime": "2024-01-15T10:30:00Z" } ] ``` **With analyzeQuery:** ```json { "documents": [...], "explainMetrics": { "planSummary": { "indexesUsed": [...] }, "executionStats": { "resultsReturned": 10, "executionDuration": "15ms", "readOperations": 10 } } } ``` ## Best Practices 1. **Use Typed Values**: Always use Firestore's native JSON value types for proper type handling 2. **String Numbers for Large Integers**: Use string representation for large integers to avoid precision loss 3. **Template Security**: Validate all template parameters to prevent injection attacks 4. **Index Optimization**: Use `analyzeQuery` to identify missing indexes 5. **Limit Results**: Always set a reasonable `limit` to prevent excessive data retrieval 6. **Field Selection**: Use `select` to retrieve only necessary fields ## Technical Notes - Queries operate on a single collection (the standard Firestore pattern) - Maximum of 100 filters per query (configurable) - Template parameters must be properly escaped in JSON contexts - Complex nested queries may require composite indexes ## See Also - [firestore-query-collection](firestore-query-collection.md) - Non-parameterizable query tool - [Firestore Source Configuration](../../sources/firestore.md) - [Firestore Query Documentation](https://firebase.google.com/docs/firestore/query-data/queries) ``` -------------------------------------------------------------------------------- /tests/alloydbainl/alloydb_ai_nl_integration_test.go: -------------------------------------------------------------------------------- ```go // Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
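
// NOTE: These integration tests require an AlloyDB for PostgreSQL instance
// with AI natural language support configured. Connection settings are read
// from the ALLOYDB_AI_NL_* environment variables (see getAlloyDBAINLVars
// below), and the tests fail fast if any of them is unset.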
package alloydbainl import ( "bytes" "context" "encoding/json" "io" "net/http" "os" "reflect" "regexp" "strings" "testing" "time" "github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc" "github.com/googleapis/genai-toolbox/internal/testutils" "github.com/googleapis/genai-toolbox/tests" ) var ( AlloyDBAINLSourceKind = "alloydb-postgres" AlloyDBAINLToolKind = "alloydb-ai-nl" AlloyDBAINLProject = os.Getenv("ALLOYDB_AI_NL_PROJECT") AlloyDBAINLRegion = os.Getenv("ALLOYDB_AI_NL_REGION") AlloyDBAINLCluster = os.Getenv("ALLOYDB_AI_NL_CLUSTER") AlloyDBAINLInstance = os.Getenv("ALLOYDB_AI_NL_INSTANCE") AlloyDBAINLDatabase = os.Getenv("ALLOYDB_AI_NL_DATABASE") AlloyDBAINLUser = os.Getenv("ALLOYDB_AI_NL_USER") AlloyDBAINLPass = os.Getenv("ALLOYDB_AI_NL_PASS") ) func getAlloyDBAINLVars(t *testing.T) map[string]any { switch "" { case AlloyDBAINLProject: t.Fatal("'ALLOYDB_AI_NL_PROJECT' not set") case AlloyDBAINLRegion: t.Fatal("'ALLOYDB_AI_NL_REGION' not set") case AlloyDBAINLCluster: t.Fatal("'ALLOYDB_AI_NL_CLUSTER' not set") case AlloyDBAINLInstance: t.Fatal("'ALLOYDB_AI_NL_INSTANCE' not set") case AlloyDBAINLDatabase: t.Fatal("'ALLOYDB_AI_NL_DATABASE' not set") case AlloyDBAINLUser: t.Fatal("'ALLOYDB_AI_NL_USER' not set") case AlloyDBAINLPass: t.Fatal("'ALLOYDB_AI_NL_PASS' not set") } return map[string]any{ "kind": AlloyDBAINLSourceKind, "project": AlloyDBAINLProject, "cluster": AlloyDBAINLCluster, "instance": AlloyDBAINLInstance, "region": AlloyDBAINLRegion, "database": AlloyDBAINLDatabase, "user": AlloyDBAINLUser, "password": AlloyDBAINLPass, } } func TestAlloyDBAINLToolEndpoints(t *testing.T) { sourceConfig := getAlloyDBAINLVars(t) ctx, cancel := context.WithTimeout(context.Background(), time.Minute) defer cancel() var args []string // Write config into a file and pass it to command toolsFile := getAINLToolsConfig(sourceConfig) cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...) 
if err != nil { t.Fatalf("command initialization returned an error: %s", err) } defer cleanup() waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second) defer cancel() out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out) if err != nil { t.Logf("toolbox command logs: \n%s", out) t.Fatalf("toolbox didn't start successfully: %s", err) } runAINLToolGetTest(t) runAINLToolInvokeTest(t) runAINLMCPToolCallMethod(t) } func runAINLToolGetTest(t *testing.T) { // Test tool get endpoint tcs := []struct { name string api string want map[string]any }{ { name: "get my-simple-tool", api: "http://127.0.0.1:5000/api/tool/my-simple-tool/", want: map[string]any{ "my-simple-tool": map[string]any{ "description": "Simple tool to test end to end functionality.", "parameters": []any{ map[string]any{ "name": "question", "type": "string", "required": true, "description": "The natural language question to ask.", "authSources": []any{}, }, }, "authRequired": []any{}, }, }, }, } for _, tc := range tcs { t.Run(tc.name, func(t *testing.T) { resp, err := http.Get(tc.api) if err != nil { t.Fatalf("error when sending a request: %s", err) } defer resp.Body.Close() if resp.StatusCode != 200 { t.Fatalf("response status code is not 200") } var body map[string]interface{} err = json.NewDecoder(resp.Body).Decode(&body) if err != nil { t.Fatalf("error parsing response body") } got, ok := body["tools"] if !ok { t.Fatalf("unable to find tools in response body") } if !reflect.DeepEqual(got, tc.want) { t.Fatalf("got %q, want %q", got, tc.want) } }) } } func runAINLToolInvokeTest(t *testing.T) { // Get ID token idToken, err := tests.GetGoogleIdToken(tests.ClientId) if err != nil { t.Fatalf("error getting Google ID token: %s", err) } // Test tool invoke endpoint invokeTcs := []struct { name string api string requestHeader map[string]string requestBody io.Reader want string isErr bool }{ { name: "invoke my-simple-tool", api: "http://127.0.0.1:5000/api/tool/my-simple-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(`{"question": "return 1"}`)), want: "[{\"execute_nl_query\":{\"?column?\":1}}]", isErr: false, }, { name: "Invoke my-tool without parameters", api: "http://127.0.0.1:5000/api/tool/my-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(`{}`)), isErr: true, }, { name: "Invoke my-auth-tool with auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": idToken}, requestBody: bytes.NewBuffer([]byte(`{"question": "can you show me the name of this user?"}`)), want: "[{\"execute_nl_query\":{\"name\":\"Alice\"}}]", isErr: false, }, { name: "Invoke my-auth-tool with invalid auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"}, requestBody: bytes.NewBuffer([]byte(`{"question": "return 1"}`)), isErr: true, }, { name: "Invoke my-auth-tool without auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(`{"question": "return 1"}`)), isErr: true, }, { name: "Invoke my-auth-required-tool with auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-required-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": idToken}, requestBody: bytes.NewBuffer([]byte(`{"question": "return 1"}`)), isErr: false, want: "[{\"execute_nl_query\":{\"?column?\":1}}]", }, { name: 
"Invoke my-auth-required-tool with invalid auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-required-tool/invoke", requestHeader: map[string]string{"my-google-auth_token": "INVALID_TOKEN"}, requestBody: bytes.NewBuffer([]byte(`{"question": "return 1"}`)), isErr: true, }, { name: "Invoke my-auth-required-tool without auth token", api: "http://127.0.0.1:5000/api/tool/my-auth-tool/invoke", requestHeader: map[string]string{}, requestBody: bytes.NewBuffer([]byte(`{"question": "return 1"}`)), isErr: true, }, } for _, tc := range invokeTcs { t.Run(tc.name, func(t *testing.T) { // Send Tool invocation request req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody) if err != nil { t.Fatalf("unable to create request: %s", err) } req.Header.Add("Content-type", "application/json") for k, v := range tc.requestHeader { req.Header.Add(k, v) } resp, err := http.DefaultClient.Do(req) if err != nil { t.Fatalf("unable to send request: %s", err) } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { if tc.isErr == true { return } bodyBytes, _ := io.ReadAll(resp.Body) t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes)) } // Check response body var body map[string]interface{} err = json.NewDecoder(resp.Body).Decode(&body) if err != nil { t.Fatalf("error parsing response body") } got, ok := body["result"].(string) if !ok { t.Fatalf("unable to find result in response body") } if got != tc.want { t.Fatalf("unexpected value: got %q, want %q", got, tc.want) } }) } } func getAINLToolsConfig(sourceConfig map[string]any) map[string]any { // Write config into a file and pass it to command toolsFile := map[string]any{ "sources": map[string]any{ "my-instance": sourceConfig, }, "authServices": map[string]any{ "my-google-auth": map[string]any{ "kind": "google", "clientId": tests.ClientId, }, }, "tools": map[string]any{ "my-simple-tool": map[string]any{ "kind": AlloyDBAINLToolKind, "source": "my-instance", "description": "Simple tool to test end to end functionality.", "nlConfig": "my_nl_config", }, "my-auth-tool": map[string]any{ "kind": AlloyDBAINLToolKind, "source": "my-instance", "description": "Tool to test authenticated parameters.", "nlConfig": "my_nl_config", "nlConfigParameters": []map[string]any{ { "name": "email", "type": "string", "description": "user email", "authServices": []map[string]string{ { "name": "my-google-auth", "field": "email", }, }, }, }, }, "my-auth-required-tool": map[string]any{ "kind": AlloyDBAINLToolKind, "source": "my-instance", "description": "Tool to test auth required invocation.", "nlConfig": "my_nl_config", "authRequired": []string{ "my-google-auth", }, }, }, } return toolsFile } func runAINLMCPToolCallMethod(t *testing.T) { sessionId := tests.RunInitialize(t, "2024-11-05") header := map[string]string{} if sessionId != "" { header["Mcp-Session-Id"] = sessionId } // Test tool invoke endpoint invokeTcs := []struct { name string api string requestBody jsonrpc.JSONRPCRequest requestHeader map[string]string want string }{ { name: "MCP Invoke my-simple-tool", api: "http://127.0.0.1:5000/mcp", requestHeader: map[string]string{}, requestBody: jsonrpc.JSONRPCRequest{ Jsonrpc: "2.0", Id: "my-simple-tool", Request: jsonrpc.Request{ Method: "tools/call", }, Params: map[string]any{ "name": "my-simple-tool", "arguments": map[string]any{ "question": "return 1", }, }, }, want: `{"jsonrpc":"2.0","id":"my-simple-tool","result":{"content":[{"type":"text","text":"{\"execute_nl_query\":{\"?column?\":1}}"}]}}`, }, { name: "MCP Invoke 
invalid tool", api: "http://127.0.0.1:5000/mcp", requestHeader: map[string]string{}, requestBody: jsonrpc.JSONRPCRequest{ Jsonrpc: "2.0", Id: "invalid-tool", Request: jsonrpc.Request{ Method: "tools/call", }, Params: map[string]any{ "name": "foo", "arguments": map[string]any{}, }, }, want: `{"jsonrpc":"2.0","id":"invalid-tool","error":{"code":-32602,"message":"invalid tool name: tool with name \"foo\" does not exist"}}`, }, { name: "MCP Invoke my-auth-tool without parameters", api: "http://127.0.0.1:5000/mcp", requestHeader: map[string]string{}, requestBody: jsonrpc.JSONRPCRequest{ Jsonrpc: "2.0", Id: "invoke-without-parameter", Request: jsonrpc.Request{ Method: "tools/call", }, Params: map[string]any{ "name": "my-auth-tool", "arguments": map[string]any{}, }, }, want: `{"jsonrpc":"2.0","id":"invoke-without-parameter","error":{"code":-32602,"message":"provided parameters were invalid: parameter question is required"}}`, }, } for _, tc := range invokeTcs { t.Run(tc.name, func(t *testing.T) { reqMarshal, err := json.Marshal(tc.requestBody) if err != nil { t.Fatalf("unexpected error during marshaling of request body") } // Send Tool invocation request req, err := http.NewRequest(http.MethodPost, tc.api, bytes.NewBuffer(reqMarshal)) if err != nil { t.Fatalf("unable to create request: %s", err) } req.Header.Add("Content-type", "application/json") for k, v := range header { req.Header.Add(k, v) } resp, err := http.DefaultClient.Do(req) if err != nil { t.Fatalf("unable to send request: %s", err) } respBody, err := io.ReadAll(resp.Body) if err != nil { t.Fatalf("unable to read request body: %s", err) } defer resp.Body.Close() got := string(bytes.TrimSpace(respBody)) // Remove `\` and `"` for string comparison got = strings.ReplaceAll(got, "\\", "") want := strings.ReplaceAll(tc.want, "\\", "") got = strings.ReplaceAll(got, "\"", "") want = strings.ReplaceAll(want, "\"", "") if !strings.Contains(got, want) { t.Fatalf("Expected substring not found:\ngot: %q\nwant: %q (to be contained within got)", got, want) } }) } } ``` -------------------------------------------------------------------------------- /internal/tools/bigquery/bigqueryexecutesql/bigqueryexecutesql.go: -------------------------------------------------------------------------------- ```go // Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
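
// A minimal, illustrative tools.yaml entry for this tool kind (the source
// name "my-bigquery-source" is a placeholder; Config below defines the
// required fields):
//
//	tools:
//	  execute_sql_tool:
//	    kind: bigquery-execute-sql
//	    source: my-bigquery-source
//	    description: Use this tool to execute a SQL statement against BigQuery.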
package bigqueryexecutesql import ( "context" "encoding/json" "fmt" "strings" bigqueryapi "cloud.google.com/go/bigquery" yaml "github.com/goccy/go-yaml" "github.com/googleapis/genai-toolbox/internal/sources" bigqueryds "github.com/googleapis/genai-toolbox/internal/sources/bigquery" "github.com/googleapis/genai-toolbox/internal/tools" bqutil "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerycommon" "github.com/googleapis/genai-toolbox/internal/util" bigqueryrestapi "google.golang.org/api/bigquery/v2" "google.golang.org/api/iterator" ) const kind string = "bigquery-execute-sql" func init() { if !tools.Register(kind, newConfig) { panic(fmt.Sprintf("tool kind %q already registered", kind)) } } func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) { actual := Config{Name: name} if err := decoder.DecodeContext(ctx, &actual); err != nil { return nil, err } return actual, nil } type compatibleSource interface { BigQueryClient() *bigqueryapi.Client BigQuerySession() bigqueryds.BigQuerySessionProvider BigQueryWriteMode() string BigQueryRestService() *bigqueryrestapi.Service BigQueryClientCreator() bigqueryds.BigqueryClientCreator UseClientAuthorization() bool IsDatasetAllowed(projectID, datasetID string) bool BigQueryAllowedDatasets() []string } // validate compatible sources are still compatible var _ compatibleSource = &bigqueryds.Source{} var compatibleSources = [...]string{bigqueryds.SourceKind} type Config struct { Name string `yaml:"name" validate:"required"` Kind string `yaml:"kind" validate:"required"` Source string `yaml:"source" validate:"required"` Description string `yaml:"description" validate:"required"` AuthRequired []string `yaml:"authRequired"` } // validate interface var _ tools.ToolConfig = Config{} func (cfg Config) ToolConfigKind() string { return kind } func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) { // verify source exists rawS, ok := srcs[cfg.Source] if !ok { return nil, fmt.Errorf("no source named %q configured", cfg.Source) } // verify the source is compatible s, ok := rawS.(compatibleSource) if !ok { return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources) } var sqlDescriptionBuilder strings.Builder switch s.BigQueryWriteMode() { case bigqueryds.WriteModeBlocked: sqlDescriptionBuilder.WriteString("The SQL to execute. In 'blocked' mode, only SELECT statements are allowed; other statement types will fail.") case bigqueryds.WriteModeProtected: sqlDescriptionBuilder.WriteString("The SQL to execute. Only SELECT statements and writes to the session's temporary dataset are allowed (e.g., `CREATE TEMP TABLE ...`).") default: // WriteModeAllowed sqlDescriptionBuilder.WriteString("The SQL to execute.") } allowedDatasets := s.BigQueryAllowedDatasets() if len(allowedDatasets) > 0 { if len(allowedDatasets) == 1 { datasetFQN := allowedDatasets[0] parts := strings.Split(datasetFQN, ".") if len(parts) < 2 { return nil, fmt.Errorf("expected allowedDataset to have at least 2 parts (project.dataset): %s", datasetFQN) } datasetID := parts[1] sqlDescriptionBuilder.WriteString(fmt.Sprintf(" The query must only access the `%s` dataset. 
"+ "To query a table within this dataset (e.g., `my_table`), "+ "qualify it with the dataset id (e.g., `%s.my_table`).", datasetFQN, datasetID)) } else { datasetIDs := []string{} for _, ds := range allowedDatasets { datasetIDs = append(datasetIDs, fmt.Sprintf("`%s`", ds)) } sqlDescriptionBuilder.WriteString(fmt.Sprintf(" The query must only access datasets from the following list: %s.", strings.Join(datasetIDs, ", "))) } } sqlParameter := tools.NewStringParameter("sql", sqlDescriptionBuilder.String()) dryRunParameter := tools.NewBooleanParameterWithDefault( "dry_run", false, "If set to true, the query will be validated and information about the execution will be returned "+ "without running the query. Defaults to false.", ) parameters := tools.Parameters{sqlParameter, dryRunParameter} mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, parameters) // finish tool setup t := Tool{ Name: cfg.Name, Kind: kind, Parameters: parameters, AuthRequired: cfg.AuthRequired, UseClientOAuth: s.UseClientAuthorization(), ClientCreator: s.BigQueryClientCreator(), Client: s.BigQueryClient(), RestService: s.BigQueryRestService(), WriteMode: s.BigQueryWriteMode(), SessionProvider: s.BigQuerySession(), IsDatasetAllowed: s.IsDatasetAllowed, AllowedDatasets: allowedDatasets, manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired}, mcpManifest: mcpManifest, } return t, nil } // validate interface var _ tools.Tool = Tool{} type Tool struct { Name string `yaml:"name"` Kind string `yaml:"kind"` AuthRequired []string `yaml:"authRequired"` UseClientOAuth bool `yaml:"useClientOAuth"` Parameters tools.Parameters `yaml:"parameters"` Client *bigqueryapi.Client RestService *bigqueryrestapi.Service WriteMode string SessionProvider bigqueryds.BigQuerySessionProvider ClientCreator bigqueryds.BigqueryClientCreator IsDatasetAllowed func(projectID, datasetID string) bool AllowedDatasets []string manifest tools.Manifest mcpManifest tools.McpManifest } func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) { paramsMap := params.AsMap() sql, ok := paramsMap["sql"].(string) if !ok { return nil, fmt.Errorf("unable to cast sql parameter %s", paramsMap["sql"]) } dryRun, ok := paramsMap["dry_run"].(bool) if !ok { return nil, fmt.Errorf("unable to cast dry_run parameter %s", paramsMap["dry_run"]) } bqClient := t.Client restService := t.RestService var err error // Initialize new client if using user OAuth token if t.UseClientOAuth { tokenStr, err := accessToken.ParseBearerToken() if err != nil { return nil, fmt.Errorf("error parsing access token: %w", err) } bqClient, restService, err = t.ClientCreator(tokenStr, true) if err != nil { return nil, fmt.Errorf("error creating client from OAuth access token: %w", err) } } var connProps []*bigqueryapi.ConnectionProperty var session *bigqueryds.Session if t.WriteMode == bigqueryds.WriteModeProtected { session, err = t.SessionProvider(ctx) if err != nil { return nil, fmt.Errorf("failed to get BigQuery session for protected mode: %w", err) } connProps = []*bigqueryapi.ConnectionProperty{ {Key: "session_id", Value: session.ID}, } } dryRunJob, err := bqutil.DryRunQuery(ctx, restService, bqClient.Project(), bqClient.Location, sql, nil, connProps) if err != nil { return nil, fmt.Errorf("query validation failed: %w", err) } statementType := dryRunJob.Statistics.Query.StatementType switch t.WriteMode { case bigqueryds.WriteModeBlocked: if statementType 
!= "SELECT" { return nil, fmt.Errorf("write mode is 'blocked', only SELECT statements are allowed") } case bigqueryds.WriteModeProtected: if dryRunJob.Configuration != nil && dryRunJob.Configuration.Query != nil { if dest := dryRunJob.Configuration.Query.DestinationTable; dest != nil && dest.DatasetId != session.DatasetID { return nil, fmt.Errorf("protected write mode only supports SELECT statements, or write operations in the anonymous "+ "dataset of a BigQuery session, but destination was %q", dest.DatasetId) } } } if len(t.AllowedDatasets) > 0 { switch statementType { case "CREATE_SCHEMA", "DROP_SCHEMA", "ALTER_SCHEMA": return nil, fmt.Errorf("dataset-level operations like '%s' are not allowed when dataset restrictions are in place", statementType) case "CREATE_FUNCTION", "CREATE_TABLE_FUNCTION", "CREATE_PROCEDURE": return nil, fmt.Errorf("creating stored routines ('%s') is not allowed when dataset restrictions are in place, as their contents cannot be safely analyzed", statementType) case "CALL": return nil, fmt.Errorf("calling stored procedures ('%s') is not allowed when dataset restrictions are in place, as their contents cannot be safely analyzed", statementType) } // Use a map to avoid duplicate table names. tableIDSet := make(map[string]struct{}) // Get all tables from the dry run result. This is the most reliable method. queryStats := dryRunJob.Statistics.Query if queryStats != nil { for _, tableRef := range queryStats.ReferencedTables { tableIDSet[fmt.Sprintf("%s.%s.%s", tableRef.ProjectId, tableRef.DatasetId, tableRef.TableId)] = struct{}{} } if tableRef := queryStats.DdlTargetTable; tableRef != nil { tableIDSet[fmt.Sprintf("%s.%s.%s", tableRef.ProjectId, tableRef.DatasetId, tableRef.TableId)] = struct{}{} } if tableRef := queryStats.DdlDestinationTable; tableRef != nil { tableIDSet[fmt.Sprintf("%s.%s.%s", tableRef.ProjectId, tableRef.DatasetId, tableRef.TableId)] = struct{}{} } } var tableNames []string if len(tableIDSet) > 0 { for tableID := range tableIDSet { tableNames = append(tableNames, tableID) } } else if statementType != "SELECT" { // If dry run yields no tables, fall back to the parser for non-SELECT statements // to catch unsafe operations like EXECUTE IMMEDIATE. parsedTables, parseErr := bqutil.TableParser(sql, t.Client.Project()) if parseErr != nil { // If parsing fails (e.g., EXECUTE IMMEDIATE), we cannot guarantee safety, so we must fail. return nil, fmt.Errorf("could not parse tables from query to validate against allowed datasets: %w", parseErr) } tableNames = parsedTables } for _, tableID := range tableNames { parts := strings.Split(tableID, ".") if len(parts) == 3 { projectID, datasetID := parts[0], parts[1] if !t.IsDatasetAllowed(projectID, datasetID) { return nil, fmt.Errorf("query accesses dataset '%s.%s', which is not in the allowed list", projectID, datasetID) } } } } if dryRun { if dryRunJob != nil { jobJSON, err := json.MarshalIndent(dryRunJob, "", " ") if err != nil { return nil, fmt.Errorf("failed to marshal dry run job to JSON: %w", err) } return string(jobJSON), nil } // This case should not be reached, but as a fallback, we return a message. return "Dry run was requested, but no job information was returned.", nil } query := bqClient.Query(sql) query.Location = bqClient.Location query.ConnectionProperties = connProps // Log the query executed for debugging. 
logger, err := util.LoggerFromContext(ctx) if err != nil { return nil, fmt.Errorf("error getting logger: %s", err) } logger.DebugContext(ctx, "executing `%s` tool query: %s", kind, sql) // This block handles SELECT statements, which return a row set. // We iterate through the results, convert each row into a map of // column names to values, and return the collection of rows. var out []any job, err := query.Run(ctx) if err != nil { return nil, fmt.Errorf("unable to execute query: %w", err) } it, err := job.Read(ctx) if err != nil { return nil, fmt.Errorf("unable to read query results: %w", err) } for { var row map[string]bigqueryapi.Value err = it.Next(&row) if err == iterator.Done { break } if err != nil { return nil, fmt.Errorf("unable to iterate through query results: %w", err) } vMap := make(map[string]any) for key, value := range row { vMap[key] = value } out = append(out, vMap) } // If the query returned any rows, return them directly. if len(out) > 0 { return out, nil } // This handles the standard case for a SELECT query that successfully // executes but returns zero rows. if statementType == "SELECT" { return "The query returned 0 rows.", nil } // This is the fallback for a successful query that doesn't return content. // In most cases, this will be for DML/DDL statements like INSERT, UPDATE, CREATE, etc. // However, it is also possible that this was a query that was expected to return rows // but returned none, a case that we cannot distinguish here. return "Query executed successfully and returned no content.", nil } func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) { return tools.ParseParams(t.Parameters, data, claims) } func (t Tool) Manifest() tools.Manifest { return t.manifest } func (t Tool) McpManifest() tools.McpManifest { return t.mcpManifest } func (t Tool) Authorized(verifiedAuthServices []string) bool { return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices) } func (t Tool) RequiresClientAuthorization() bool { return t.UseClientOAuth } ``` -------------------------------------------------------------------------------- /docs/en/resources/tools/_index.md: -------------------------------------------------------------------------------- ```markdown --- title: "Tools" type: docs weight: 2 description: > Tools define actions an agent can take -- such as reading and writing to a source. --- A tool represents an action your agent can take, such as running a SQL statement. You can define Tools as a map in the `tools` section of your `tools.yaml` file. Typically, a tool will require a source to act on: ```yaml tools: search_flights_by_number: kind: postgres-sql source: my-pg-instance statement: | SELECT * FROM flights WHERE airline = $1 AND flight_number = $2 LIMIT 10 description: | Use this tool to get information for a specific flight. Takes an airline code and flight number and returns info on the flight. Do NOT use this tool with a flight id. Do NOT guess an airline code or flight number. An airline code is a code for an airline service consisting of a two-character airline designator and followed by a flight number, which is a 1 to 4 digit number. For example, if given CY 0123, the airline is "CY", and flight_number is "123". Another example for this is DL 1234, the airline is "DL", and flight_number is "1234". If the tool returns more than one option choose the date closest to today. 
      Example:
      {{
        "airline": "CY",
        "flight_number": "888",
      }}
      Example:
      {{
        "airline": "DL",
        "flight_number": "1234",
      }}
    parameters:
      - name: airline
        type: string
        description: Airline unique 2 letter identifier
      - name: flight_number
        type: string
        description: 1 to 4 digit number
```

## Specifying Parameters

Parameters for each Tool will define what inputs the agent will need to provide to invoke them. Parameters should be passed as a list of Parameter objects:

```yaml
parameters:
  - name: airline
    type: string
    description: Airline unique 2 letter identifier
  - name: flight_number
    type: string
    description: 1 to 4 digit number
```

### Basic Parameters

Basic parameter types include `string`, `integer`, `float`, and `boolean`. In most cases, the description will be provided to the LLM as context on specifying the parameter.

```yaml
parameters:
  - name: airline
    type: string
    description: Airline unique 2 letter identifier
```

| **field**     | **type**       | **required** | **description**                                                                                                                                                                                                                          |
|---------------|:--------------:|:------------:|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| name          | string         | true         | Name of the parameter.                                                                                                                                                                                                                   |
| type          | string         | true         | Must be one of "string", "integer", "float", "boolean", "array"                                                                                                                                                                          |
| description   | string         | true         | Natural language description of the parameter to describe it to the agent.                                                                                                                                                              |
| default       | parameter type | false        | Default value of the parameter. If provided, `required` will be `false`.                                                                                                                                                                 |
| required      | bool           | false        | Indicates if the parameter is required. Defaults to `true`.                                                                                                                                                                              |
| allowedValues | []string       | false        | Input value will be checked against this field. Regex is also supported.                                                                                                                                                                 |
| escape        | string         | false        | Only available for type `string`. Indicates the escaping delimiters used for the parameter. This field is intended to be used with templateParameters. Must be one of "single-quotes", "double-quotes", "backticks", "square-brackets".  |
| minValue      | int or float   | false        | Only available for types `integer` and `float`. Indicates the minimum value allowed.                                                                                                                                                     |
| maxValue      | int or float   | false        | Only available for types `integer` and `float`. Indicates the maximum value allowed.                                                                                                                                                     |

### Array Parameters

The `array` type is a list of items passed in as a single parameter. To use the `array` type, you must also specify what kind of items are in the list using the `items` field:

```yaml
parameters:
  - name: preferred_airlines
    type: array
    description: A list of airlines, ordered by preference.
    items:
      name: name
      type: string
      description: Name of the airline.
statement: |
  SELECT * FROM airlines WHERE preferred_airlines = ANY($1);
```

| **field**     | **type**         | **required** | **description**                                                              |
|---------------|:----------------:|:------------:|------------------------------------------------------------------------------|
| name          | string           | true         | Name of the parameter.                                                       |
| type          | string           | true         | Must be "array"                                                              |
| description   | string           | true         | Natural language description of the parameter to describe it to the agent.   |
| default       | parameter type   | false        | Default value of the parameter. If provided, `required` will be `false`.     |
| required      | bool             | false        | Indicates if the parameter is required. Defaults to `true`.                  |
| allowedValues | []string         | false        | Input value will be checked against this field. Regex is also supported.     |
| items         | parameter object | true         | Specify a Parameter object for the type of the values in the array.          |

{{< notice note >}}
Items in an array should not have a `default` or `required` value. If provided, it will be ignored.
{{< /notice >}}

### Map Parameters

The map type is a collection of key-value pairs. It can be configured in two ways:

- Generic Map: By default, it accepts values of any primitive type (string, integer, float, boolean), allowing for mixed data.
- Typed Map: By setting the `valueType` field, you can enforce that all values within the map must be of the same specified type.

#### Generic Map (Mixed Value Types)

This is the default behavior when `valueType` is omitted. It's useful for passing a flexible group of settings.

```yaml
parameters:
  - name: execution_context
    type: map
    description: A flexible set of key-value pairs for the execution environment.
```

#### Typed Map

Specify `valueType` to ensure all values in the map are of the same type. An error will be thrown in case of a value type mismatch.

```yaml
parameters:
  - name: user_scores
    type: map
    description: A map of user IDs to their scores. All scores must be integers.
    valueType: integer # This enforces the value type for all entries.
```

### Authenticated Parameters

Authenticated parameters are automatically populated with user information decoded from [ID tokens](../authServices/#specifying-id-tokens-from-clients) that are passed in request headers. They do not take input values in request bodies like other parameters. To use authenticated parameters, you must configure the tool to map the required [authServices](../authServices/) to specific claims within the user's ID token.

```yaml
tools:
  search_flights_by_user_id:
    kind: postgres-sql
    source: my-pg-instance
    statement: |
      SELECT * FROM flights WHERE user_id = $1
    parameters:
      - name: user_id
        type: string
        description: Auto-populated from Google login
        authServices:
          # Refer to one of the `authServices` defined
          - name: my-google-auth
            # `sub` is the OIDC claim field for user ID
            field: sub
```

| **field** | **type** | **required** | **description**                                                                    |
|-----------|:--------:|:------------:|------------------------------------------------------------------------------------|
| name      | string   | true         | Name of the [authServices](../authServices/) used to verify the OIDC auth token.   |
| field     | string   | true         | Claim field decoded from the OIDC token used to auto-populate this parameter.      |

### Template Parameters

Template parameter types include `string`, `integer`, `float`, and `boolean`. In most cases, the description will be provided to the LLM as context on specifying the parameter. Template parameters will be inserted into the SQL statement before executing the prepared statement. They will be inserted without quotes, so to insert a string using template parameters, quotes must be explicitly added within the string.

Template parameter arrays can also be used similarly to basic parameters, and array items must be strings. Once inserted into the SQL statement, the outer layer of quotes will be removed, so to insert strings into the SQL statement, a set of quotes must be explicitly added within each string.

{{< notice warning >}}
Because template parameters can directly replace identifiers, column names, and table names, they are prone to SQL injection. Basic parameters are preferred for performance and safety reasons.
{{< /notice >}} {{< notice tip >}} To minimize SQL injection risk when using template parameters, always provide the `allowedValues` field within the parameter to restrict inputs. Alternatively, for `string` type parameters, you can use the `escape` field to add delimiters to the identifier. For `integer` or `float` type parameters, you can use `minValue` and `maxValue` to define the allowable range. {{< /notice >}} ```yaml tools: select_columns_from_table: kind: postgres-sql source: my-pg-instance statement: | SELECT {{array .columnNames}} FROM {{.tableName}} description: | Use this tool to list all information from a specific table. Example: {{ "tableName": "flights", "columnNames": ["id", "name"] }} templateParameters: - name: tableName type: string description: Table to select from - name: columnNames type: array description: The columns to select items: name: column type: string description: Name of a column to select escape: double-quotes # with this, the statement will resolve to `SELECT "id", "name" FROM flights` ``` | **field** | **type** | **required** | **description** | |---------------|:----------------:|:---------------:|-------------------------------------------------------------------------------------| | name | string | true | Name of the template parameter. | | type | string | true | Must be one of "string", "integer", "float", "boolean", "array" | | description | string | true | Natural language description of the template parameter to describe it to the agent. | | default | parameter type | false | Default value of the parameter. If provided, `required` will be `false`. | | required | bool | false | Indicate if the parameter is required. Default to `true`. | | allowedValues | []string | false | Input value will be checked against this field. Regex is also supported. | | items | parameter object | true (if array) | Specify a Parameter object for the type of the values in the array (string only). | ## Authorized Invocations You can require an authorization check for any Tool invocation request by specifying an `authRequired` field. Specify a list of [authServices](../authServices/) defined in the previous section. ```yaml tools: search_all_flight: kind: postgres-sql source: my-pg-instance statement: | SELECT * FROM flights # A list of `authServices` defined previously authRequired: - my-google-auth - other-auth-service ``` ## Kinds of tools ``` -------------------------------------------------------------------------------- /internal/tools/looker/lookerhealthvacuum/lookerhealthvacuum.go: -------------------------------------------------------------------------------- ```go // Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
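
// A minimal, illustrative tools.yaml entry for this tool kind (the source
// name "my-looker-source" is a placeholder; Config below defines the required
// fields, and Initialize registers the action, project, model, explore,
// timeframe, and min_queries parameters):
//
//	tools:
//	  health_vacuum:
//	    kind: looker-health-vacuum
//	    source: my-looker-source
//	    description: Find unused models, explores, fields, and joins in Looker.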
## Authorized Invocations

You can require an authorization check for any Tool invocation request by specifying an `authRequired` field. Specify a list of [authServices](../authServices/) defined in the previous section.

```yaml
tools:
  search_all_flight:
    kind: postgres-sql
    source: my-pg-instance
    statement: |
      SELECT * FROM flights
    # A list of `authServices` defined previously
    authRequired:
      - my-google-auth
      - other-auth-service
```

## Kinds of tools

--------------------------------------------------------------------------------
/internal/tools/looker/lookerhealthvacuum/lookerhealthvacuum.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package lookerhealthvacuum

import (
	"context"
	"encoding/json"
	"fmt"
	"regexp"
	"strings"

	yaml "github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	lookersrc "github.com/googleapis/genai-toolbox/internal/sources/looker"
	"github.com/googleapis/genai-toolbox/internal/tools"
	"github.com/googleapis/genai-toolbox/internal/tools/looker/lookercommon"
	"github.com/googleapis/genai-toolbox/internal/util"
	"github.com/looker-open-source/sdk-codegen/go/rtl"
	v4 "github.com/looker-open-source/sdk-codegen/go/sdk/v4"
)

// =================================================================================================================
// START MCP SERVER CORE LOGIC
// =================================================================================================================

const kind string = "looker-health-vacuum"

func init() {
	if !tools.Register(kind, newConfig) {
		panic(fmt.Sprintf("tool kind %q already registered", kind))
	}
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
	actual := Config{Name: name}
	if err := decoder.DecodeContext(ctx, &actual); err != nil {
		return nil, err
	}
	return actual, nil
}

type Config struct {
	Name         string         `yaml:"name" validate:"required"`
	Kind         string         `yaml:"kind" validate:"required"`
	Source       string         `yaml:"source" validate:"required"`
	Description  string         `yaml:"description" validate:"required"`
	AuthRequired []string       `yaml:"authRequired"`
	Parameters   map[string]any `yaml:"parameters"`
}

var _ tools.ToolConfig = Config{}

func (cfg Config) ToolConfigKind() string {
	return kind
}

func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
	rawS, ok := srcs[cfg.Source]
	if !ok {
		return nil, fmt.Errorf("no source named %q configured", cfg.Source)
	}

	s, ok := rawS.(*lookersrc.Source)
	if !ok {
		return nil, fmt.Errorf("invalid source for %q tool: source kind must be `looker`", kind)
	}

	actionParameter := tools.NewStringParameterWithRequired("action", "The vacuum action to run. Can be 'models', or 'explores'.", true)
	projectParameter := tools.NewStringParameterWithDefault("project", "", "The Looker project to vacuum (optional).")
	modelParameter := tools.NewStringParameterWithDefault("model", "", "The Looker model to vacuum (optional).")
	exploreParameter := tools.NewStringParameterWithDefault("explore", "", "The Looker explore to vacuum (optional).")
	timeframeParameter := tools.NewIntParameterWithDefault("timeframe", 90, "The timeframe in days to analyze.")
	minQueriesParameter := tools.NewIntParameterWithDefault("min_queries", 1, "The minimum number of queries for a model or explore to be considered used.")

	parameters := tools.Parameters{
		actionParameter,
		projectParameter,
		modelParameter,
		exploreParameter,
		timeframeParameter,
		minQueriesParameter,
	}

	mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, parameters)

	return Tool{
		Name:           cfg.Name,
		Kind:           kind,
		Parameters:     parameters,
		AuthRequired:   cfg.AuthRequired,
		UseClientOAuth: s.UseClientOAuth,
		Client:         s.Client,
		ApiSettings:    s.ApiSettings,
		manifest: tools.Manifest{
			Description:  cfg.Description,
			Parameters:   parameters.Manifest(),
			AuthRequired: cfg.AuthRequired,
		},
		mcpManifest: mcpManifest,
	}, nil
}

var _ tools.Tool = Tool{}

type Tool struct {
	Name           string `yaml:"name"`
	Kind           string `yaml:"kind"`
	UseClientOAuth bool
	Client         *v4.LookerSDK
	ApiSettings    *rtl.ApiSettings
	AuthRequired   []string `yaml:"authRequired"`
	Parameters     tools.Parameters
	manifest       tools.Manifest
	mcpManifest    tools.McpManifest
}

func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
	sdk, err := lookercommon.GetLookerSDK(t.UseClientOAuth, t.ApiSettings, t.Client, accessToken)
	if err != nil {
		return nil, fmt.Errorf("error getting sdk: %w", err)
	}

	paramsMap := params.AsMap()

	timeframe, _ := paramsMap["timeframe"].(int)
	if timeframe == 0 {
		timeframe = 90
	}
	minQueries, _ := paramsMap["min_queries"].(int)
	if minQueries == 0 {
		minQueries = 1
	}

	vacuumTool := &vacuumTool{
		SdkClient:  sdk,
		timeframe:  timeframe,
		minQueries: minQueries,
	}

	action, ok := paramsMap["action"].(string)
	if !ok {
		return nil, fmt.Errorf("action parameter not found")
	}

	switch action {
	case "models":
		project, _ := paramsMap["project"].(string)
		model, _ := paramsMap["model"].(string)
		return vacuumTool.models(ctx, project, model)
	case "explores":
		model, _ := paramsMap["model"].(string)
		explore, _ := paramsMap["explore"].(string)
		return vacuumTool.explores(ctx, model, explore)
	default:
		return nil, fmt.Errorf("unknown action: %s", action)
	}
}

func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
	return tools.ParseParams(t.Parameters, data, claims)
}

func (t Tool) Manifest() tools.Manifest {
	return t.manifest
}

func (t Tool) McpManifest() tools.McpManifest {
	return t.mcpManifest
}

func (t Tool) Authorized(verifiedAuthServices []string) bool {
	return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}

func (t Tool) RequiresClientAuthorization() bool {
	return t.UseClientOAuth
}

// =================================================================================================================
// END MCP SERVER CORE LOGIC
// =================================================================================================================

// =================================================================================================================
// START LOOKER HEALTH VACUUM CORE LOGIC
// =================================================================================================================

type vacuumTool struct {
	SdkClient  *v4.LookerSDK
	timeframe  int
	minQueries int
}

func (t *vacuumTool) models(ctx context.Context, project, model string) ([]map[string]interface{}, error) {
	logger, err := util.LoggerFromContext(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
	}
	logger.InfoContext(ctx, "Vacuuming models...")

	usedModels, err := t.getUsedModels(ctx)
	if err != nil {
		return nil, err
	}

	lookmlModels, err := t.SdkClient.AllLookmlModels(v4.RequestAllLookmlModels{}, nil)
	if err != nil {
		return nil, fmt.Errorf("error fetching LookML models: %w", err)
	}

	var results []map[string]interface{}
	for _, m := range lookmlModels {
		if (project == "" || (m.ProjectName != nil && *m.ProjectName == project)) && (model == "" || (m.Name != nil && *m.Name == model)) {
			queryCount := 0
			if qc, ok := usedModels[*m.Name]; ok {
				queryCount = qc
			}

			unusedExplores, err := t.getUnusedExplores(ctx, *m.Name)
			if err != nil {
				return nil, err
			}

			results = append(results, map[string]interface{}{
				"Model":             *m.Name,
				"Unused Explores":   unusedExplores,
				"Model Query Count": queryCount,
			})
		}
	}
	return results, nil
}

func (t *vacuumTool) explores(ctx context.Context, model, explore string) ([]map[string]interface{}, error) {
	logger, err := util.LoggerFromContext(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
	}
	logger.InfoContext(ctx, "Vacuuming explores...")

	lookmlModels, err := t.SdkClient.AllLookmlModels(v4.RequestAllLookmlModels{}, nil)
	if err != nil {
		return nil, fmt.Errorf("error fetching LookML models: %w", err)
	}

	var results []map[string]interface{}
	for _, m := range lookmlModels {
		if model != "" && (m.Name == nil || *m.Name != model) {
			continue
		}
		if m.Explores == nil {
			continue
		}
		for _, e := range *m.Explores {
			if explore != "" && (e.Name == nil || *e.Name != explore) {
				continue
			}
			if e.Name == nil {
				continue
			}

			exploreDetail, err := t.SdkClient.LookmlModelExplore(v4.RequestLookmlModelExplore{
				LookmlModelName: *m.Name,
				ExploreName:     *e.Name,
			}, nil)
			if err != nil {
				logger.ErrorContext(ctx, fmt.Sprintf("Error fetching detail for explore %s.%s: %v", *m.Name, *e.Name, err))
				continue
			}

			usedFields, err := t.getUsedExploreFields(ctx, *m.Name, *e.Name)
			if err != nil {
				logger.ErrorContext(ctx, fmt.Sprintf("Error fetching used fields for explore %s.%s: %v", *m.Name, *e.Name, err))
				continue
			}

			var allFields []string
			if exploreDetail.Fields != nil {
				for _, d := range *exploreDetail.Fields.Dimensions {
					if !*d.Hidden {
						allFields = append(allFields, *d.Name)
					}
				}
				for _, ms := range *exploreDetail.Fields.Measures {
					if !*ms.Hidden {
						allFields = append(allFields, *ms.Name)
					}
				}
			}

			var unusedFields []string
			for _, field := range allFields {
				if _, ok := usedFields[field]; !ok {
					unusedFields = append(unusedFields, field)
				}
			}

			joinStats := make(map[string]int)
			if exploreDetail.Joins != nil {
				for field, queryCount := range usedFields {
					join := strings.Split(field, ".")[0]
					joinStats[join] += queryCount
				}
				for _, join := range *exploreDetail.Joins {
					if _, ok := joinStats[*join.Name]; !ok {
						joinStats[*join.Name] = 0
					}
				}
			}

			var unusedJoins []string
			for join, count := range joinStats {
				if count == 0 {
					unusedJoins = append(unusedJoins, join)
				}
			}

			results = append(results, map[string]interface{}{
				"Model":         *m.Name,
				"Explore":       *e.Name,
				"Unused Joins":  unusedJoins,
				"Unused Fields": unusedFields,
			})
		}
	}
	return results, nil
}

func (t *vacuumTool) getUsedModels(ctx context.Context) (map[string]int, error) {
	limit := "5000"
	query := &v4.WriteQuery{
		Model:  "system__activity",
		View:   "history",
		Fields: &[]string{"history.query_run_count", "query.model"},
		Filters: &map[string]any{
			"history.created_date":    fmt.Sprintf("%d days", t.timeframe),
			"query.model":             "-system__activity, -i__looker",
			"history.query_run_count": fmt.Sprintf(">%d", t.minQueries-1),
			"user.dev_branch_name":    "NULL",
		},
		Limit: &limit,
	}

	raw, err := lookercommon.RunInlineQuery(ctx, t.SdkClient, query, "json", nil)
	if err != nil {
		return nil, err
	}

	var data []map[string]interface{}
	_ = json.Unmarshal([]byte(raw), &data)

	results := make(map[string]int)
	for _, row := range data {
		model, _ := row["query.model"].(string)
		count, _ := row["history.query_run_count"].(float64)
		results[model] = int(count)
	}
	return results, nil
}

func (t *vacuumTool) getUnusedExplores(ctx context.Context, modelName string) ([]string, error) {
	lookmlModel, err := t.SdkClient.LookmlModel(modelName, "", nil)
	if err != nil {
		return nil, fmt.Errorf("error fetching LookML model %s: %w", modelName, err)
	}

	var unusedExplores []string
	if lookmlModel.Explores != nil {
		for _, e := range *lookmlModel.Explores {
			limit := "1"
			queryCountQueryBody := &v4.WriteQuery{
				Model:  "system__activity",
				View:   "history",
				Fields: &[]string{"history.query_run_count"},
				Filters: &map[string]any{
					"query.model":             modelName,
					"query.view":              *e.Name,
					"history.created_date":    fmt.Sprintf("%d days", t.timeframe),
					"history.query_run_count": fmt.Sprintf(">%d", t.minQueries-1),
					"user.dev_branch_name":    "NULL",
				},
				Limit: &limit,
			}

			rawQueryCount, err := lookercommon.RunInlineQuery(ctx, t.SdkClient, queryCountQueryBody, "json", nil)
			if err != nil {
				// Log the error but continue
				continue
			}

			var data []map[string]interface{}
			_ = json.Unmarshal([]byte(rawQueryCount), &data)

			if len(data) == 0 {
				unusedExplores = append(unusedExplores, *e.Name)
			}
		}
	}
	return unusedExplores, nil
}

func (t *vacuumTool) getUsedExploreFields(ctx context.Context, model, explore string) (map[string]int, error) {
	limit := "5000"
	query := &v4.WriteQuery{
		Model:  "system__activity",
		View:   "history",
		Fields: &[]string{"query.formatted_fields", "query.filters", "history.query_run_count"},
		Filters: &map[string]any{
			"history.created_date":   fmt.Sprintf("%d days", t.timeframe),
			"query.model":            strings.ReplaceAll(model, "_", "^_"),
			"query.view":             strings.ReplaceAll(explore, "_", "^_"),
			"query.formatted_fields": "-NULL",
			"history.workspace_id":   "production",
		},
		Limit: &limit,
	}

	raw, err := lookercommon.RunInlineQuery(ctx, t.SdkClient, query, "json", nil)
	if err != nil {
		return nil, err
	}

	var data []map[string]interface{}
	_ = json.Unmarshal([]byte(raw), &data)

	results := make(map[string]int)
	fieldRegex := regexp.MustCompile(`(\w+\.\w+)`)
	for _, row := range data {
		count, _ := row["history.query_run_count"].(float64)
		formattedFields, _ := row["query.formatted_fields"].(string)
		filters, _ := row["query.filters"].(string)

		usedFields := make(map[string]bool)
		for _, field := range fieldRegex.FindAllString(formattedFields, -1) {
			results[field] += int(count)
			usedFields[field] = true
		}
		for _, field := range fieldRegex.FindAllString(filters, -1) {
			if _, ok := usedFields[field]; !ok {
				results[field] += int(count)
			}
		}
	}
	return results, nil
}

// =================================================================================================================
// END LOOKER HEALTH VACUUM CORE LOGIC
// =================================================================================================================
```
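For reference, a minimal sketch of how the `looker-health-vacuum` tool above might be declared in a `tools.yaml`, inferred from the `Config` struct; the tool and source names are hypothetical, and the tool's parameters (`action`, `project`, `model`, `explore`, `timeframe`, `min_queries`) are defined in code rather than in the config:

```yaml
tools:
  looker_vacuum:
    kind: looker-health-vacuum
    source: my-looker-source   # must be a `looker` kind source
    description: |
      Report unused Looker models, explores, fields, and joins.
      Invoke with action set to 'models' or 'explores'.
```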
--------------------------------------------------------------------------------
/internal/server/static/css/style.css:
--------------------------------------------------------------------------------

```css
:root {
  --toolbox-blue: #4285f4;
  --text-primary-gray: #444444;
  --text-secondary-gray: #6e6e6e;
  --button-primary: var(--toolbox-blue);
  --button-secondary: #616161;
  --section-border: #e0e0e0;
}

body {
  display: flex;
  height: 100vh;
  margin: 0;
  font-family: 'Trebuchet MS';
  background-color: #f8f9fa;
  box-sizing: border-box;
}

*,
*:before,
*:after {
  box-sizing: inherit;
}

#navbar-container {
  flex: 0 0 250px;
  height: 100%;
  position: relative;
  z-index: 10;
}

#main-content-container {
  flex: 1;
  display: flex;
  flex-direction: column;
  min-width: 0;
  overflow-x: hidden;
}

.left-nav {
  background-color: #fff;
  box-shadow: 4px 0px 12px rgba(0, 0, 0, 0.15);
  display: flex;
  flex-direction: column;
  padding: 15px;
  align-items: center;
  width: 100%;
  height: 100%;
  z-index: 3;

  ul {
    font-family: 'Verdana';
    list-style: none;
    padding: 0;
    margin: 0;
    width: 100%;

    li {
      margin-bottom: 5px;

      a {
        display: flex;
        align-items: center;
        padding: 12px;
        text-decoration: none;
        color: #333;
        border-radius: 0;

        &:hover {
          background-color: #e9e9e9;
          border-radius: 35px;
        }

        &.active {
          background-color: #d0d0d0;
          font-weight: bold;
          border-radius: 35px;
        }
      }
    }
  }
}

.second-nav {
  flex: 0 0 250px;
  background-color: #fff;
  box-shadow: 4px 0px 12px rgba(0, 0, 0, 0.15);
  z-index: 2;
  display: flex;
  flex-direction: column;
  padding: 15px;
  align-items: center;
  position: relative;
}

.nav-logo {
  width: 90%;
  margin-bottom: 40px;
  flex-shrink: 0;

  img {
    max-width: 100%;
    height: auto;
    display: block;
    cursor: pointer;
  }
}

.main-content-area {
  flex: 1;
  display: flex;
  flex-direction: column;
  min-width: 0;
  overflow-x: hidden;
}

.top-bar {
  background-color: #fff;
  padding: 30px 30px;
  display: flex;
  justify-content: flex-end;
  align-items: center;
  border-bottom: 1px solid #eee;
}

.content {
  padding: 20px;
  flex-grow: 1;
  overflow-y: auto;
  display: flex;
  flex-direction: column;
}

.btn {
  display: flex;
  align-items: center;
  justify-content: center;
  padding: 10px 20px;
  color: white;
  border: none;
  border-radius: 30px;
  font: inherit;
  font-size: 1em;
  font-weight: bolder;
  cursor: pointer;

  &:hover {
    opacity: 0.8;
  }
}

.btn--run {
  background-color: var(--button-primary);
}

.btn--editHeaders {
  background-color: var(--button-secondary)
}

.btn--saveHeaders {
  background-color: var(--button-primary)
}

.btn--closeHeaders {
  background-color: var(--button-secondary)
}

.btn--setup-gis {
  background-color: white;
  color: var(--text-primary-gray);
  border: 2px solid var(--text-primary-gray);
}

.btn--externalDocs {
  background-color: var(--button-secondary);
  text-decoration: none;
  display: inline-flex;
}

.tool-button {
  display: flex;
  align-items: center;
  padding: 12px;
  text-decoration: none;
  color: #333;
  background-color: transparent;
  border: none;
  border-radius: 0;
  width: 100%;
  text-align: left;
  cursor: pointer;
  font-family: inherit;
  font-size: inherit;
  transition: background-color 0.1s ease-in-out, border-radius 0.1s ease-in-out;

  &:hover {
    background-color: #e9e9e9;
    border-radius: 35px;
  }

  &:focus {
    outline: none;
    box-shadow: 0 0 0 2px rgba(208, 208, 208, 0.5);
  }

  &.active {
    background-color: #d0d0d0;
    font-weight: bold;
    border-radius: 35px;

    &:hover {
      background-color: #d0d0d0;
    }
  }
}

#secondary-panel-content {
  flex: 1;
  overflow-y: auto;
  width: 100%;
  min-height: 0;

  ul {
    list-style: none;
    padding: 0;
    margin: 0;
    width: 100%;
  }
}

.tool-details-grid {
  display: grid;
  grid-template-columns: 1fr 2fr;
  gap: 20px;
  margin: 0 0 20px 0;
  align-items: start;
  flex-shrink: 0;
}

.tool-info {
  display: flex;
  flex-direction: column;
  gap: 15px;
}

.tool-execution-area {
  display: flex;
  flex-direction: column;
  gap: 12px;
}

.tool-params {
  background-color: #ffffff;
  padding: 15px;
  border-radius: 4px;
  border: 1px solid #ddd;

  h5 {
    margin-bottom: 0;
  }
}

.tool-box {
  background-color: #ffffff;
  padding: 15px;
  border-radius: 4px;
  border: 1px solid #eee;

  h5 {
    color: var(--toolbox-blue);
    margin-top: 0;
    font-weight: bold;
  }
}

.params-header {
  display: flex;
  justify-content: flex-end;
  margin-bottom: 8px;
  padding-right: 6px;
  font-weight: bold;
  font-size: 0.9em;
  color: var(--text-secondary-gray);
}

.params-disclaimer {
  font-style: italic;
  color: var(--text-secondary-gray);
  font-size: 0.8em;
  margin-bottom: 10px;
  width: 100%;
  word-wrap: break-word;
}

.param-item {
  margin-bottom: 12px;

  label {
    display: block;
    margin-bottom: 4px;
    font-family: inherit;
  }

  &.disabled-param {
    > label {
      color: #888;
      text-decoration: line-through;
    }

    .param-input-element {
      background-color: #f5f5f5;
      opacity: 0.6;
    }
  }

  input[type="text"],
  input[type="number"],
  select,
  textarea {
    width: calc(100% - 12px);
    padding: 6px;
    border: 1px solid #ccc;
    border-radius: 4px;
    font-family: inherit;
  }

  input[type="checkbox"].param-input-element {
    width: auto;
    padding: 0;
    border: initial;
    border-radius: initial;
    vertical-align: middle;
    margin-right: 4px;
    accent-color: var(--toolbox-blue);
    flex-grow: 0;
  }
}

.input-checkbox-wrapper {
  display: flex;
  align-items: center;
  gap: 10px;
}

.param-input-element-container {
  flex-grow: 1;
}

.param-input-element {
  box-sizing: border-box;
}

.include-param-container {
  display: flex;
  align-items: center;
  white-space: nowrap;

  input[type="checkbox"] {
    width: auto;
    padding: 0;
    border: initial;
    border-radius: initial;
    vertical-align: middle;
    margin-right: 0;
    accent-color: var(--toolbox-blue);
  }
}

.include-param-container input[type="checkbox"] {
  width: auto;
  padding: 0;
  border: initial;
  border-radius: initial;
  vertical-align: middle;
  margin: 0;
  accent-color: var(--toolbox-blue);
}

.checkbox-bool-label {
  margin-left: 5px;
  font-style: italic;
  color: var(--text-primary-gray);
}

.checkbox-bool-label.disabled {
  color: #aaa;
  cursor: not-allowed;
}

.param-label-extras {
  font-style: italic;
  font-weight: lighter;
  color: var(--text-secondary-gray);
}

.auth-param-input {
  background-color: var(--section-border);
  cursor: not-allowed;
}

.run-button-container {
  display: flex;
  justify-content: flex-end;
  gap: 20px;
}

.header-modal {
  display: none;
  position: fixed;
  z-index: 1000;
  left: 0;
  top: 0;
  width: 100%;
  height: 100%;
  overflow: auto;
  background-color: rgba(0,0,0,0.4);

  li {
    margin-bottom: 10px;
  }

  .header-modal-content {
    background-color: #fefefe;
    margin: 10% auto;
    padding: 20px;
    border: 1px solid #888;
    width: 80%;
    max-width: 50%;
    border-radius: 8px;
    display: flex;
    flex-direction: column;
    gap: 15px;
    align-items: center;

    h5 {
      margin-top: 0;
      font-size: 1.2em;
    }

    .headers-textarea {
      width: calc(100% - 16px);
      padding: 8px;
      font-family: monospace;
      border: 1px solid #ccc;
      border-radius: 4px;
      min-height: 150px;
    }

    .header-modal-actions {
      display: flex;
      justify-content: center;
      gap: 30px;
      width: 100%;
    }

    .auth-token-details {
      width: 100%;
      max-width: calc(100% - 16px);
      margin-left: 8px;
      margin-right: 8px;

      summary {
        cursor: pointer;
        text-align: left;
        padding: 5px 0;
      }

      .auth-token-content {
        padding: 10px;
        border: 1px solid #eee;
        margin-top: 5px;
        background-color: #f9f9f9;
        text-align: left;
        max-width: 100%;
        overflow-wrap: break-word;

        .auth-tab-group {
          display: flex;
          border-bottom: 1px solid #ccc;
          margin-bottom: 10px;
        }

        .auth-tab-picker {
          padding: 8px 12px;
          cursor: pointer;
          border: 1px solid transparent;
          border-bottom: 1px solid transparent;
          margin-bottom: -1px;
          background-color: #f0f0f0;

          &.active {
            background-color: #fff;
            border-color: #ccc;
            border-bottom-color: #fff;
            font-weight: bold;
          }
        }

        .auth-tab-content {
          display: none;
          overflow-wrap: break-word;
          word-wrap: break-word;
          max-width: 100%;

          &.active {
            display: block;
          }

          pre {
            white-space: pre-wrap;
            word-wrap: break-word;
            overflow-x: auto;
            background-color: #f5f5f5;
            padding: 10px;
            border: 1px solid #ccc;
            border-radius: 4px;
            max-width: 100%;

            code {
              display: block;
              word-wrap: break-word;
              color: inherit;
            }
          }
        }
      }
    }
  }
}

.auth-method-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  padding: 8px 12px;
}

.auth-method-label {
  font-weight: 500;
  color: var(--text-primary-gray);
  word-break: break-word;
}

.auth-helper-section {
  border: 1px solid var(--section-border);
  background-color: transparent;
  padding: 16px;
  border-radius: 8px;
  margin-top: 20px;
  width: 80%;
}

.auth-method-list {
  display: flex;
  flex-direction: column;
  gap: 12px;
}

.auth-method-details {
  padding: 16px;
  border: 1px solid var(--section-border);
  border-radius: 4px;
  margin-bottom: 16px;
  background-color: #fff;
  overflow-x: auto;
}

.auth-controls {
  display: flex;
  flex-direction: column;
  gap: 16px;
  align-items: flex-start;
}

.auth-input-row {
  display: flex;
  flex-direction: column;
  gap: 6px;

  & label {
    font-size: 14px;
    color: var(--text-primary-gray);
    margin-bottom: 4px;
  }
}

.auth-input {
  padding: 8px 8px;
  border: 1px solid #bdc1c6;
  border-radius: 4px;
  font-size: 14px;
  width: 100%;
  box-sizing: border-box;

  &:focus {
    outline: none;
    border-color: var(--toolbox-blue);
    box-shadow: 0 0 0 1px #1a73e8;
  }
}

.auth-method-actions {
  display: flex;
  align-items: center;
  gap: 12px;
}

.auth-instructions {
  font-size: 0.8em;
  margin-top: 5px;
  color: var(--text-secondary-gray);
}

.tool-response {
  margin: 20px 0 0 0;

  textarea {
    width: 100%;
    min-height: 150px;
    padding: 12px;
    border: 1px solid #ddd;
    border-radius: 4px;
    font-family: monospace;
  }
}

.search-container {
  display: flex;
  width: 100%;
  margin-bottom: 15px;

  #toolset-search-input {
    flex-grow: 1;
    padding: 10px 12px;
    border: 1px solid #ccc;
    border-radius: 20px 0 0 20px;
    border-right: none;
    font-family: inherit;
    font-size: 0.9em;
    color: var(--text-primary-gray);

    &:focus {
      outline: none;
      border-color: var(--toolbox-blue);
      box-shadow: 0 0 0 2px rgba(66, 133, 244, 0.3);
    }

    &::placeholder {
      color: var(--text-secondary-gray);
    }
  }

  #toolset-search-button {
    padding: 10px 15px;
    border: 1px solid var(--button-primary);
    background-color: var(--button-primary);
    color: white;
    border-radius: 0 20px 20px 0;
    cursor: pointer;
    font-family: inherit;
    font-size: 0.9em;
    font-weight: bold;
    transition: opacity 0.2s ease-in-out;
    flex-shrink: 0;
    line-height: 1;

    &:hover {
      opacity: 0.8;
    }

    &:focus {
      outline: none;
      box-shadow: 0 0 0 2px rgba(66, 133, 244, 0.3);
    }
  }
}

.toggle-details-tab {
  background-color: transparent;
  color: var(--toolbox-blue);
  border: none;
  padding: 8px 12px;
  border-radius: 4px;
  cursor: pointer;
  font-size: 1em;
  font-weight: bold;

  &:hover {
    opacity: 0.8;
  }
}

.resource-instructions {
  font-family: inherit;
  color: var(--text-primary-gray);
  padding: 24px;
  background-color: #ffffff;
  border-radius: 4px;
  box-shadow: 0 1px 3px rgba(0,0,0,0.12), 0 1px 2px rgba(0,0,0,0.24);
  margin: 16px 0;

  li {
    margin-bottom: 10px;
    color: var(--text-secondary-gray);
  }
}

.resource-title {
  color: var(--toolbox-blue);
  font-size: 24px;
  font-weight: bold;
  margin-top: 0;
  margin-bottom: 16px;
}
.resource-subtitle {
  color: var(--text-primary-gray);
  font-size: 20px;
  font-weight: normal;
  margin-top: 24px;
  margin-bottom: 12px;
}

.resource-intro,
.resource-description {
  font-size: 16px;
  line-height: 1.6;
  color: var(--text-secondary-gray);
  margin-bottom: 16px;

  code {
    background-color: #e9e9e9;
    color: #c0392b;
    padding: 2px 4px;
    border-radius: 4px;
    font-family: monospace;
    font-size: 90%;
    vertical-align: baseline;
  }
}
```

--------------------------------------------------------------------------------
/internal/tools/neo4j/neo4jschema/helpers/helpers_test.go:
--------------------------------------------------------------------------------

```go
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package helpers

import (
	"testing"
	"time"

	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema/types"
	"github.com/neo4j/neo4j-go-driver/v5/neo4j"
)

func TestHelperFunctions(t *testing.T) {
	t.Run("ConvertToStringSlice", func(t *testing.T) {
		tests := []struct {
			name  string
			input []any
			want  []string
		}{
			{
				name:  "empty slice",
				input: []any{},
				want:  []string{},
			},
			{
				name:  "string values",
				input: []any{"a", "b", "c"},
				want:  []string{"a", "b", "c"},
			},
			{
				name:  "mixed types",
				input: []any{"string", 123, true, 45.67},
				want:  []string{"string", "123", "true", "45.67"},
			},
		}
		for _, tt := range tests {
			t.Run(tt.name, func(t *testing.T) {
				got := ConvertToStringSlice(tt.input)
				if diff := cmp.Diff(tt.want, got); diff != "" {
					t.Errorf("ConvertToStringSlice() mismatch (-want +got):\n%s", diff)
				}
			})
		}
	})

	t.Run("GetStringValue", func(t *testing.T) {
		tests := []struct {
			name  string
			input any
			want  string
		}{
			{
				name:  "nil value",
				input: nil,
				want:  "",
			},
			{
				name:  "string value",
				input: "test",
				want:  "test",
			},
			{
				name:  "int value",
				input: 42,
				want:  "42",
			},
			{
				name:  "bool value",
				input: true,
				want:  "true",
			},
		}
		for _, tt := range tests {
			t.Run(tt.name, func(t *testing.T) {
				got := GetStringValue(tt.input)
				if got != tt.want {
					t.Errorf("GetStringValue() got %q, want %q", got, tt.want)
				}
			})
		}
	})
}

func TestMapToAPOCSchema(t *testing.T) {
	tests := []struct {
		name    string
		input   map[string]any
		want    *types.APOCSchemaResult
		wantErr bool
	}{
		{
			name: "simple node schema",
			input: map[string]any{
				"Person": map[string]any{
					"type":  "node",
					"count": int64(150),
					"properties": map[string]any{
						"name": map[string]any{
							"type":      "STRING",
							"unique":    false,
							"indexed":   true,
							"existence": false,
						},
					},
				},
			},
			want: &types.APOCSchemaResult{
				Value: map[string]types.APOCEntity{
					"Person": {
						Type:  "node",
						Count: 150,
						Properties: map[string]types.APOCProperty{
							"name": {
								Type:      "STRING",
								Unique:    false,
								Indexed:   true,
								Existence: false,
							},
						},
					},
				},
			},
			wantErr: false,
		},
		{
			name:    "empty input",
			input:   map[string]any{},
			want:    &types.APOCSchemaResult{Value: map[string]types.APOCEntity{}},
			wantErr: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := MapToAPOCSchema(tt.input)
			if (err != nil) != tt.wantErr {
				t.Errorf("MapToAPOCSchema() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if diff := cmp.Diff(tt.want, got); diff != "" {
				t.Errorf("MapToAPOCSchema() mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

func TestProcessAPOCSchema(t *testing.T) {
	tests := []struct {
		name          string
		input         *types.APOCSchemaResult
		wantNodes     []types.NodeLabel
		wantRels      []types.Relationship
		wantStats     *types.Statistics
		statsAreEmpty bool
	}{
		{
			name: "empty schema",
			input: &types.APOCSchemaResult{
				Value: map[string]types.APOCEntity{},
			},
			wantNodes:     nil,
			wantRels:      nil,
			statsAreEmpty: true,
		},
		{
			name: "simple node only",
			input: &types.APOCSchemaResult{
				Value: map[string]types.APOCEntity{
					"Person": {
						Type:  "node",
						Count: 100,
						Properties: map[string]types.APOCProperty{
							"name": {Type: "STRING", Indexed: true},
							"age":  {Type: "INTEGER"},
						},
					},
				},
			},
			wantNodes: []types.NodeLabel{
				{
					Name:  "Person",
					Count: 100,
					Properties: []types.PropertyInfo{
						{Name: "age", Types: []string{"INTEGER"}},
						{Name: "name", Types: []string{"STRING"}, Indexed: true},
					},
				},
			},
			wantRels: nil,
			wantStats: &types.Statistics{
				NodesByLabel:      map[string]int64{"Person": 100},
				PropertiesByLabel: map[string]int64{"Person": 2},
				TotalNodes:        100,
				TotalProperties:   200,
			},
		},
		{
			name: "nodes and relationships",
			input: &types.APOCSchemaResult{
				Value: map[string]types.APOCEntity{
					"Person": {
						Type:  "node",
						Count: 100,
						Properties: map[string]types.APOCProperty{
							"name": {Type: "STRING", Unique: true, Indexed: true, Existence: true},
						},
						Relationships: map[string]types.APOCRelationshipInfo{
							"KNOWS": {
								Direction: "out",
								Count:     50,
								Labels:    []string{"Person"},
								Properties: map[string]types.APOCProperty{
									"since": {Type: "INTEGER"},
								},
							},
						},
					},
					"Post": {
						Type:       "node",
						Count:      200,
						Properties: map[string]types.APOCProperty{"content": {Type: "STRING"}},
					},
					"FOLLOWS": {Type: "relationship", Count: 80},
				},
			},
			wantNodes: []types.NodeLabel{
				{
					Name:  "Post",
					Count: 200,
					Properties: []types.PropertyInfo{
						{Name: "content", Types: []string{"STRING"}},
					},
				},
				{
					Name:  "Person",
					Count: 100,
					Properties: []types.PropertyInfo{
						{Name: "name", Types: []string{"STRING"}, Unique: true, Indexed: true, Mandatory: true},
					},
				},
			},
			wantRels: []types.Relationship{
				{
					Type:      "KNOWS",
					StartNode: "Person",
					EndNode:   "Person",
					Count:     50,
					Properties: []types.PropertyInfo{
						{Name: "since", Types: []string{"INTEGER"}},
					},
				},
			},
			wantStats: &types.Statistics{
				NodesByLabel:        map[string]int64{"Person": 100, "Post": 200},
				RelationshipsByType: map[string]int64{"KNOWS": 50},
				PropertiesByLabel:   map[string]int64{"Person": 1, "Post": 1},
				PropertiesByRelType: map[string]int64{"KNOWS": 1},
				TotalNodes:          300,
				TotalRelationships:  50,
				TotalProperties:     350, // (100*1 + 200*1) for nodes + (50*1) for rels
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			gotNodes, gotRels, gotStats := ProcessAPOCSchema(tt.input)
			if diff := cmp.Diff(tt.wantNodes, gotNodes); diff != "" {
				t.Errorf("ProcessAPOCSchema() node labels mismatch (-want +got):\n%s", diff)
			}
			if diff := cmp.Diff(tt.wantRels, gotRels); diff != "" {
				t.Errorf("ProcessAPOCSchema() relationships mismatch (-want +got):\n%s", diff)
			}
			if tt.statsAreEmpty {
				tt.wantStats = &types.Statistics{}
			}
			if diff := cmp.Diff(tt.wantStats, gotStats); diff != "" {
				t.Errorf("ProcessAPOCSchema() statistics mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

func TestProcessNonAPOCSchema(t *testing.T) {
	t.Run("full schema processing", func(t *testing.T) {
		nodeCounts := map[string]int64{"Person": 10, "City": 5}
		nodePropsMap := map[string]map[string]map[string]bool{
			"Person": {"name": {"STRING": true}, "age": {"INTEGER": true}},
			"City":   {"name": {"STRING": true, "TEXT": true}},
		}
		relCounts := map[string]int64{"LIVES_IN": 8}
		relPropsMap := map[string]map[string]map[string]bool{
			"LIVES_IN": {"since": {"DATE": true}},
		}
		relConnectivity := map[string]types.RelConnectivityInfo{
			"LIVES_IN": {StartNode: "Person", EndNode: "City", Count: 8},
		}

		wantNodes := []types.NodeLabel{
			{
				Name:  "Person",
				Count: 10,
				Properties: []types.PropertyInfo{
					{Name: "age", Types: []string{"INTEGER"}},
					{Name: "name", Types: []string{"STRING"}},
				},
			},
			{
				Name:  "City",
				Count: 5,
				Properties: []types.PropertyInfo{
					{Name: "name", Types: []string{"STRING", "TEXT"}},
				},
			},
		}
		wantRels := []types.Relationship{
			{
				Type:      "LIVES_IN",
				Count:     8,
				StartNode: "Person",
				EndNode:   "City",
				Properties: []types.PropertyInfo{
					{Name: "since", Types: []string{"DATE"}},
				},
			},
		}
		wantStats := &types.Statistics{
			TotalNodes:          15,
			TotalRelationships:  8,
			TotalProperties:     33, // (10*2 + 5*1) for nodes + (8*1) for rels
			NodesByLabel:        map[string]int64{"Person": 10, "City": 5},
			RelationshipsByType: map[string]int64{"LIVES_IN": 8},
			PropertiesByLabel:   map[string]int64{"Person": 2, "City": 1},
			PropertiesByRelType: map[string]int64{"LIVES_IN": 1},
		}

		gotNodes, gotRels, gotStats := ProcessNonAPOCSchema(nodeCounts, nodePropsMap, relCounts, relPropsMap, relConnectivity)
		if diff := cmp.Diff(wantNodes, gotNodes); diff != "" {
			t.Errorf("ProcessNonAPOCSchema() nodes mismatch (-want +got):\n%s", diff)
		}
		if diff := cmp.Diff(wantRels, gotRels); diff != "" {
			t.Errorf("ProcessNonAPOCSchema() relationships mismatch (-want +got):\n%s", diff)
		}
		if diff := cmp.Diff(wantStats, gotStats); diff != "" {
			t.Errorf("ProcessNonAPOCSchema() stats mismatch (-want +got):\n%s", diff)
		}
	})

	t.Run("empty schema", func(t *testing.T) {
		gotNodes, gotRels, gotStats := ProcessNonAPOCSchema(
			map[string]int64{},
			map[string]map[string]map[string]bool{},
			map[string]int64{},
			map[string]map[string]map[string]bool{},
			map[string]types.RelConnectivityInfo{},
		)
		if len(gotNodes) != 0 {
			t.Errorf("expected 0 nodes, got %d", len(gotNodes))
		}
		if len(gotRels) != 0 {
			t.Errorf("expected 0 relationships, got %d", len(gotRels))
		}
		if diff := cmp.Diff(&types.Statistics{}, gotStats); diff != "" {
			t.Errorf("ProcessNonAPOCSchema() stats mismatch (-want +got):\n%s", diff)
		}
	})
}

func TestConvertValue(t *testing.T) {
	tests := []struct {
		name  string
		input any
		want  any
	}{
		{
			name:  "nil value",
			input: nil,
			want:  nil,
		},
		{
			name:  "neo4j.InvalidValue",
			input: neo4j.InvalidValue{},
			want:  nil,
		},
		{
			name:  "primitive bool",
			input: true,
			want:  true,
		},
		{
			name:  "primitive int",
			input: int64(42),
			want:  int64(42),
		},
		{
			name:  "primitive float",
			input: 3.14,
			want:  3.14,
		},
		{
			name:  "primitive string",
			input: "hello",
			want:  "hello",
		},
		{
			name:  "neo4j.Date",
			input: neo4j.Date(time.Date(2024, 6, 1, 0, 0, 0, 0, time.UTC)),
			want:  "2024-06-01",
		},
		{
			name:  "neo4j.LocalTime",
			input: neo4j.LocalTime(time.Date(0, 0, 0, 12, 34, 56, 0, time.Local)),
			want:  "12:34:56",
		},
		{
			name:  "neo4j.Time",
			input: neo4j.Time(time.Date(0, 0, 0, 1, 2, 3, 0, time.UTC)),
			want:  "01:02:03Z",
		},
		{
			name:  "neo4j.LocalDateTime",
			input: neo4j.LocalDateTime(time.Date(2024, 6, 1, 10, 20, 30, 0, time.Local)),
			want:  "2024-06-01T10:20:30",
		},
		{
			name:  "neo4j.Duration",
			input: neo4j.Duration{Months: 1, Days: 2, Seconds: 3, Nanos: 4},
			want:  "P1M2DT3.000000004S",
		},
		{
			name:  "neo4j.Point2D",
			input: neo4j.Point2D{X: 1.1, Y: 2.2, SpatialRefId: 1234},
			want:  map[string]any{"x": 1.1, "y": 2.2, "srid": uint32(1234)},
		},
		{
			name:  "neo4j.Point3D",
			input: neo4j.Point3D{X: 1.1, Y: 2.2, Z: 3.3, SpatialRefId: 5467},
			want:  map[string]any{"x": 1.1, "y": 2.2, "z": 3.3, "srid": uint32(5467)},
		},
		{
			name: "neo4j.Node (handled by Entity case, losing labels)",
			input: neo4j.Node{
				ElementId: "element-1",
				Labels:    []string{"Person"},
				Props:     map[string]any{"name": "Alice"},
			},
			want: map[string]any{
				"elementId":  "element-1",
				"labels":     []string{"Person"},
				"properties": map[string]any{"name": "Alice"},
			},
		},
		{
			name: "neo4j.Relationship (handled by Entity case, losing type/endpoints)",
			input: neo4j.Relationship{
				ElementId:      "element-2",
				StartElementId: "start-1",
				EndElementId:   "end-1",
				Type:           "KNOWS",
				Props:          map[string]any{"since": 2024},
			},
			want: map[string]any{
				"elementId":      "element-2",
				"properties":     map[string]any{"since": 2024},
				"startElementId": "start-1",
				"endElementId":   "end-1",
				"type":           "KNOWS",
			},
		},
		{
			name: "neo4j.Path (elements handled by Entity case)",
			input: func() neo4j.Path {
				node1 := neo4j.Node{ElementId: "n10", Labels: []string{"A"}, Props: map[string]any{"p1": "v1"}}
				node2 := neo4j.Node{ElementId: "n11", Labels: []string{"B"}, Props: map[string]any{"p2": "v2"}}
				rel1 := neo4j.Relationship{ElementId: "r12", StartElementId: "n10", EndElementId: "n11", Type: "REL", Props: map[string]any{"p3": "v3"}}
				return neo4j.Path{
					Nodes:         []neo4j.Node{node1, node2},
					Relationships: []neo4j.Relationship{rel1},
				}
			}(),
			want: map[string]any{
				"nodes": []any{
					map[string]any{
						"elementId":  "n10",
						"properties": map[string]any{"p1": "v1"},
						"labels":     []string{"A"},
					},
					map[string]any{
						"elementId":  "n11",
						"properties": map[string]any{"p2": "v2"},
						"labels":     []string{"B"},
					},
				},
				"relationships": []any{
					map[string]any{
						"elementId":      "r12",
						"properties":     map[string]any{"p3": "v3"},
						"startElementId": "n10",
						"endElementId":   "n11",
						"type":           "REL",
					},
				},
			},
		},
		{
			name:  "slice of primitives",
			input: []any{"a", 1, true},
			want:  []any{"a", 1, true},
		},
		{
			name:  "slice of mixed types",
			input: []any{"a", neo4j.Date(time.Date(2024, 6, 1, 0, 0, 0, 0, time.UTC))},
			want:  []any{"a", "2024-06-01"},
		},
		{
			name:  "map of primitives",
			input: map[string]any{"foo": 1, "bar": "baz"},
			want:  map[string]any{"foo": 1, "bar": "baz"},
		},
		{
			name:  "map with nested neo4j type",
			input: map[string]any{"date": neo4j.Date(time.Date(2024, 6, 1, 0, 0, 0, 0, time.UTC))},
			want:  map[string]any{"date": "2024-06-01"},
		},
		{
			name:  "unhandled type",
			input: struct{ X int }{X: 5},
			want:  "{5}",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := ConvertValue(tt.input)
			if !cmp.Equal(got, tt.want) {
				t.Errorf("ConvertValue() mismatch (-want +got):\n%s", cmp.Diff(tt.want, got))
			}
		})
	}
}
```