This is page 33 of 45. Use http://codebase.md/googleapis/genai-toolbox?lines=true&page={x} to view the full context. # Directory Structure ``` ├── .ci │ ├── continuous.release.cloudbuild.yaml │ ├── generate_release_table.sh │ ├── integration.cloudbuild.yaml │ ├── quickstart_test │ │ ├── go.integration.cloudbuild.yaml │ │ ├── js.integration.cloudbuild.yaml │ │ ├── py.integration.cloudbuild.yaml │ │ ├── run_go_tests.sh │ │ ├── run_js_tests.sh │ │ ├── run_py_tests.sh │ │ └── setup_hotels_sample.sql │ ├── test_with_coverage.sh │ └── versioned.release.cloudbuild.yaml ├── .github │ ├── auto-label.yaml │ ├── blunderbuss.yml │ ├── CODEOWNERS │ ├── header-checker-lint.yml │ ├── ISSUE_TEMPLATE │ │ ├── bug_report.yml │ │ ├── config.yml │ │ ├── feature_request.yml │ │ └── question.yml │ ├── label-sync.yml │ ├── labels.yaml │ ├── PULL_REQUEST_TEMPLATE.md │ ├── release-please.yml │ ├── renovate.json5 │ ├── sync-repo-settings.yaml │ └── workflows │ ├── cloud_build_failure_reporter.yml │ ├── deploy_dev_docs.yaml │ ├── deploy_previous_version_docs.yaml │ ├── deploy_versioned_docs.yaml │ ├── docs_deploy.yaml │ ├── docs_preview_clean.yaml │ ├── docs_preview_deploy.yaml │ ├── lint.yaml │ ├── schedule_reporter.yml │ ├── sync-labels.yaml │ └── tests.yaml ├── .gitignore ├── .gitmodules ├── .golangci.yaml ├── .hugo │ ├── archetypes │ │ └── default.md │ ├── assets │ │ ├── icons │ │ │ └── logo.svg │ │ └── scss │ │ ├── _styles_project.scss │ │ └── _variables_project.scss │ ├── go.mod │ ├── go.sum │ ├── hugo.toml │ ├── layouts │ │ ├── _default │ │ │ └── home.releases.releases │ │ ├── index.llms-full.txt │ │ ├── index.llms.txt │ │ ├── partials │ │ │ ├── hooks │ │ │ │ └── head-end.html │ │ │ ├── navbar-version-selector.html │ │ │ ├── page-meta-links.html │ │ │ └── td │ │ │ └── render-heading.html │ │ ├── robot.txt │ │ └── shortcodes │ │ ├── include.html │ │ ├── ipynb.html │ │ └── regionInclude.html │ ├── package-lock.json │ ├── package.json │ └── static │ ├── favicons │ │ ├── android-chrome-192x192.png │ │ ├── android-chrome-512x512.png │ │ ├── apple-touch-icon.png │ │ ├── favicon-16x16.png │ │ ├── favicon-32x32.png │ │ └── favicon.ico │ └── js │ └── w3.js ├── CHANGELOG.md ├── cmd │ ├── options_test.go │ ├── options.go │ ├── root_test.go │ ├── root.go │ └── version.txt ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── DEVELOPER.md ├── Dockerfile ├── docs │ └── en │ ├── _index.md │ ├── about │ │ ├── _index.md │ │ └── faq.md │ ├── concepts │ │ ├── _index.md │ │ └── telemetry │ │ ├── index.md │ │ ├── telemetry_flow.png │ │ └── telemetry_traces.png │ ├── getting-started │ │ ├── _index.md │ │ ├── colab_quickstart.ipynb │ │ ├── configure.md │ │ ├── introduction │ │ │ ├── _index.md │ │ │ └── architecture.png │ │ ├── local_quickstart_go.md │ │ ├── local_quickstart_js.md │ │ ├── local_quickstart.md │ │ ├── mcp_quickstart │ │ │ ├── _index.md │ │ │ ├── inspector_tools.png │ │ │ └── inspector.png │ │ └── quickstart │ │ ├── go │ │ │ ├── genAI │ │ │ │ ├── go.mod │ │ │ │ ├── go.sum │ │ │ │ └── quickstart.go │ │ │ ├── genkit │ │ │ │ ├── go.mod │ │ │ │ ├── go.sum │ │ │ │ └── quickstart.go │ │ │ ├── langchain │ │ │ │ ├── go.mod │ │ │ │ ├── go.sum │ │ │ │ └── quickstart.go │ │ │ ├── openAI │ │ │ │ ├── go.mod │ │ │ │ ├── go.sum │ │ │ │ └── quickstart.go │ │ │ └── quickstart_test.go │ │ ├── golden.txt │ │ ├── js │ │ │ ├── genAI │ │ │ │ ├── package-lock.json │ │ │ │ ├── package.json │ │ │ │ └── quickstart.js │ │ │ ├── genkit │ │ │ │ ├── package-lock.json │ │ │ │ ├── package.json │ │ │ │ └── quickstart.js │ │ │ ├── langchain │ │ │ │ ├── 
package-lock.json │ │ │ │ ├── package.json │ │ │ │ └── quickstart.js │ │ │ ├── llamaindex │ │ │ │ ├── package-lock.json │ │ │ │ ├── package.json │ │ │ │ └── quickstart.js │ │ │ └── quickstart.test.js │ │ ├── python │ │ │ ├── __init__.py │ │ │ ├── adk │ │ │ │ ├── quickstart.py │ │ │ │ └── requirements.txt │ │ │ ├── core │ │ │ │ ├── quickstart.py │ │ │ │ └── requirements.txt │ │ │ ├── langchain │ │ │ │ ├── quickstart.py │ │ │ │ └── requirements.txt │ │ │ ├── llamaindex │ │ │ │ ├── quickstart.py │ │ │ │ └── requirements.txt │ │ │ └── quickstart_test.py │ │ └── shared │ │ ├── cloud_setup.md │ │ ├── configure_toolbox.md │ │ └── database_setup.md │ ├── how-to │ │ ├── _index.md │ │ ├── connect_via_geminicli.md │ │ ├── connect_via_mcp.md │ │ ├── connect-ide │ │ │ ├── _index.md │ │ │ ├── alloydb_pg_admin_mcp.md │ │ │ ├── alloydb_pg_mcp.md │ │ │ ├── bigquery_mcp.md │ │ │ ├── cloud_sql_mssql_admin_mcp.md │ │ │ ├── cloud_sql_mssql_mcp.md │ │ │ ├── cloud_sql_mysql_admin_mcp.md │ │ │ ├── cloud_sql_mysql_mcp.md │ │ │ ├── cloud_sql_pg_admin_mcp.md │ │ │ ├── cloud_sql_pg_mcp.md │ │ │ ├── firestore_mcp.md │ │ │ ├── looker_mcp.md │ │ │ ├── mssql_mcp.md │ │ │ ├── mysql_mcp.md │ │ │ ├── neo4j_mcp.md │ │ │ ├── postgres_mcp.md │ │ │ ├── spanner_mcp.md │ │ │ └── sqlite_mcp.md │ │ ├── deploy_docker.md │ │ ├── deploy_gke.md │ │ ├── deploy_toolbox.md │ │ ├── export_telemetry.md │ │ └── toolbox-ui │ │ ├── edit-headers.gif │ │ ├── edit-headers.png │ │ ├── index.md │ │ ├── optional-param-checked.png │ │ ├── optional-param-unchecked.png │ │ ├── run-tool.gif │ │ ├── tools.png │ │ └── toolsets.png │ ├── reference │ │ ├── _index.md │ │ ├── cli.md │ │ └── prebuilt-tools.md │ ├── resources │ │ ├── _index.md │ │ ├── authServices │ │ │ ├── _index.md │ │ │ └── google.md │ │ ├── sources │ │ │ ├── _index.md │ │ │ ├── alloydb-admin.md │ │ │ ├── alloydb-pg.md │ │ │ ├── bigquery.md │ │ │ ├── bigtable.md │ │ │ ├── cassandra.md │ │ │ ├── clickhouse.md │ │ │ ├── cloud-monitoring.md │ │ │ ├── cloud-sql-admin.md │ │ │ ├── cloud-sql-mssql.md │ │ │ ├── cloud-sql-mysql.md │ │ │ ├── cloud-sql-pg.md │ │ │ ├── couchbase.md │ │ │ ├── dataplex.md │ │ │ ├── dgraph.md │ │ │ ├── firebird.md │ │ │ ├── firestore.md │ │ │ ├── http.md │ │ │ ├── looker.md │ │ │ ├── mongodb.md │ │ │ ├── mssql.md │ │ │ ├── mysql.md │ │ │ ├── neo4j.md │ │ │ ├── oceanbase.md │ │ │ ├── oracle.md │ │ │ ├── postgres.md │ │ │ ├── redis.md │ │ │ ├── spanner.md │ │ │ ├── sqlite.md │ │ │ ├── tidb.md │ │ │ ├── trino.md │ │ │ ├── valkey.md │ │ │ └── yugabytedb.md │ │ └── tools │ │ ├── _index.md │ │ ├── alloydb │ │ │ ├── _index.md │ │ │ ├── alloydb-create-cluster.md │ │ │ ├── alloydb-create-instance.md │ │ │ ├── alloydb-create-user.md │ │ │ ├── alloydb-get-cluster.md │ │ │ ├── alloydb-get-instance.md │ │ │ ├── alloydb-get-user.md │ │ │ ├── alloydb-list-clusters.md │ │ │ ├── alloydb-list-instances.md │ │ │ ├── alloydb-list-users.md │ │ │ └── alloydb-wait-for-operation.md │ │ ├── alloydbainl │ │ │ ├── _index.md │ │ │ └── alloydb-ai-nl.md │ │ ├── bigquery │ │ │ ├── _index.md │ │ │ ├── bigquery-analyze-contribution.md │ │ │ ├── bigquery-conversational-analytics.md │ │ │ ├── bigquery-execute-sql.md │ │ │ ├── bigquery-forecast.md │ │ │ ├── bigquery-get-dataset-info.md │ │ │ ├── bigquery-get-table-info.md │ │ │ ├── bigquery-list-dataset-ids.md │ │ │ ├── bigquery-list-table-ids.md │ │ │ ├── bigquery-search-catalog.md │ │ │ └── bigquery-sql.md │ │ ├── bigtable │ │ │ ├── _index.md │ │ │ └── bigtable-sql.md │ │ ├── cassandra │ │ │ ├── _index.md │ │ │ └── cassandra-cql.md │ │ ├── clickhouse │ │ 
│ ├── _index.md │ │ │ ├── clickhouse-execute-sql.md │ │ │ ├── clickhouse-list-databases.md │ │ │ ├── clickhouse-list-tables.md │ │ │ └── clickhouse-sql.md │ │ ├── cloudmonitoring │ │ │ ├── _index.md │ │ │ └── cloud-monitoring-query-prometheus.md │ │ ├── cloudsql │ │ │ ├── _index.md │ │ │ ├── cloudsqlcreatedatabase.md │ │ │ ├── cloudsqlcreateusers.md │ │ │ ├── cloudsqlgetinstances.md │ │ │ ├── cloudsqllistdatabases.md │ │ │ ├── cloudsqllistinstances.md │ │ │ ├── cloudsqlmssqlcreateinstance.md │ │ │ ├── cloudsqlmysqlcreateinstance.md │ │ │ ├── cloudsqlpgcreateinstances.md │ │ │ └── cloudsqlwaitforoperation.md │ │ ├── couchbase │ │ │ ├── _index.md │ │ │ └── couchbase-sql.md │ │ ├── dataform │ │ │ ├── _index.md │ │ │ └── dataform-compile-local.md │ │ ├── dataplex │ │ │ ├── _index.md │ │ │ ├── dataplex-lookup-entry.md │ │ │ ├── dataplex-search-aspect-types.md │ │ │ └── dataplex-search-entries.md │ │ ├── dgraph │ │ │ ├── _index.md │ │ │ └── dgraph-dql.md │ │ ├── firebird │ │ │ ├── _index.md │ │ │ ├── firebird-execute-sql.md │ │ │ └── firebird-sql.md │ │ ├── firestore │ │ │ ├── _index.md │ │ │ ├── firestore-add-documents.md │ │ │ ├── firestore-delete-documents.md │ │ │ ├── firestore-get-documents.md │ │ │ ├── firestore-get-rules.md │ │ │ ├── firestore-list-collections.md │ │ │ ├── firestore-query-collection.md │ │ │ ├── firestore-query.md │ │ │ ├── firestore-update-document.md │ │ │ └── firestore-validate-rules.md │ │ ├── http │ │ │ ├── _index.md │ │ │ └── http.md │ │ ├── looker │ │ │ ├── _index.md │ │ │ ├── looker-add-dashboard-element.md │ │ │ ├── looker-conversational-analytics.md │ │ │ ├── looker-get-dashboards.md │ │ │ ├── looker-get-dimensions.md │ │ │ ├── looker-get-explores.md │ │ │ ├── looker-get-filters.md │ │ │ ├── looker-get-looks.md │ │ │ ├── looker-get-measures.md │ │ │ ├── looker-get-models.md │ │ │ ├── looker-get-parameters.md │ │ │ ├── looker-health-analyze.md │ │ │ ├── looker-health-pulse.md │ │ │ ├── looker-health-vacuum.md │ │ │ ├── looker-make-dashboard.md │ │ │ ├── looker-make-look.md │ │ │ ├── looker-query-sql.md │ │ │ ├── looker-query-url.md │ │ │ ├── looker-query.md │ │ │ └── looker-run-look.md │ │ ├── mongodb │ │ │ ├── _index.md │ │ │ ├── mongodb-aggregate.md │ │ │ ├── mongodb-delete-many.md │ │ │ ├── mongodb-delete-one.md │ │ │ ├── mongodb-find-one.md │ │ │ ├── mongodb-find.md │ │ │ ├── mongodb-insert-many.md │ │ │ ├── mongodb-insert-one.md │ │ │ ├── mongodb-update-many.md │ │ │ └── mongodb-update-one.md │ │ ├── mssql │ │ │ ├── _index.md │ │ │ ├── mssql-execute-sql.md │ │ │ ├── mssql-list-tables.md │ │ │ └── mssql-sql.md │ │ ├── mysql │ │ │ ├── _index.md │ │ │ ├── mysql-execute-sql.md │ │ │ ├── mysql-list-active-queries.md │ │ │ ├── mysql-list-table-fragmentation.md │ │ │ ├── mysql-list-tables-missing-unique-indexes.md │ │ │ ├── mysql-list-tables.md │ │ │ └── mysql-sql.md │ │ ├── neo4j │ │ │ ├── _index.md │ │ │ ├── neo4j-cypher.md │ │ │ ├── neo4j-execute-cypher.md │ │ │ └── neo4j-schema.md │ │ ├── oceanbase │ │ │ ├── _index.md │ │ │ ├── oceanbase-execute-sql.md │ │ │ └── oceanbase-sql.md │ │ ├── oracle │ │ │ ├── _index.md │ │ │ ├── oracle-execute-sql.md │ │ │ └── oracle-sql.md │ │ ├── postgres │ │ │ ├── _index.md │ │ │ ├── postgres-execute-sql.md │ │ │ ├── postgres-list-active-queries.md │ │ │ ├── postgres-list-available-extensions.md │ │ │ ├── postgres-list-installed-extensions.md │ │ │ ├── postgres-list-tables.md │ │ │ └── postgres-sql.md │ │ ├── redis │ │ │ ├── _index.md │ │ │ └── redis.md │ │ ├── spanner │ │ │ ├── _index.md │ │ │ ├── spanner-execute-sql.md │ │ │ ├── 
spanner-list-tables.md │ │ │ └── spanner-sql.md │ │ ├── sqlite │ │ │ ├── _index.md │ │ │ ├── sqlite-execute-sql.md │ │ │ └── sqlite-sql.md │ │ ├── tidb │ │ │ ├── _index.md │ │ │ ├── tidb-execute-sql.md │ │ │ └── tidb-sql.md │ │ ├── trino │ │ │ ├── _index.md │ │ │ ├── trino-execute-sql.md │ │ │ └── trino-sql.md │ │ ├── utility │ │ │ ├── _index.md │ │ │ └── wait.md │ │ ├── valkey │ │ │ ├── _index.md │ │ │ └── valkey.md │ │ └── yuagbytedb │ │ ├── _index.md │ │ └── yugabytedb-sql.md │ ├── samples │ │ ├── _index.md │ │ ├── alloydb │ │ │ ├── _index.md │ │ │ ├── ai-nl │ │ │ │ ├── alloydb_ai_nl.ipynb │ │ │ │ └── index.md │ │ │ └── mcp_quickstart.md │ │ ├── bigquery │ │ │ ├── _index.md │ │ │ ├── colab_quickstart_bigquery.ipynb │ │ │ ├── local_quickstart.md │ │ │ └── mcp_quickstart │ │ │ ├── _index.md │ │ │ ├── inspector_tools.png │ │ │ └── inspector.png │ │ └── looker │ │ ├── _index.md │ │ ├── looker_gemini_oauth │ │ │ ├── _index.md │ │ │ ├── authenticated.png │ │ │ ├── authorize.png │ │ │ └── registration.png │ │ ├── looker_gemini.md │ │ └── looker_mcp_inspector │ │ ├── _index.md │ │ ├── inspector_tools.png │ │ └── inspector.png │ └── sdks │ ├── _index.md │ ├── go-sdk.md │ ├── js-sdk.md │ └── python-sdk.md ├── go.mod ├── go.sum ├── internal │ ├── auth │ │ ├── auth.go │ │ └── google │ │ └── google.go │ ├── log │ │ ├── handler.go │ │ ├── log_test.go │ │ ├── log.go │ │ └── logger.go │ ├── prebuiltconfigs │ │ ├── prebuiltconfigs_test.go │ │ ├── prebuiltconfigs.go │ │ └── tools │ │ ├── alloydb-postgres-admin.yaml │ │ ├── alloydb-postgres-observability.yaml │ │ ├── alloydb-postgres.yaml │ │ ├── bigquery.yaml │ │ ├── clickhouse.yaml │ │ ├── cloud-sql-mssql-admin.yaml │ │ ├── cloud-sql-mssql-observability.yaml │ │ ├── cloud-sql-mssql.yaml │ │ ├── cloud-sql-mysql-admin.yaml │ │ ├── cloud-sql-mysql-observability.yaml │ │ ├── cloud-sql-mysql.yaml │ │ ├── cloud-sql-postgres-admin.yaml │ │ ├── cloud-sql-postgres-observability.yaml │ │ ├── cloud-sql-postgres.yaml │ │ ├── dataplex.yaml │ │ ├── firestore.yaml │ │ ├── looker-conversational-analytics.yaml │ │ ├── looker.yaml │ │ ├── mssql.yaml │ │ ├── mysql.yaml │ │ ├── neo4j.yaml │ │ ├── oceanbase.yaml │ │ ├── postgres.yaml │ │ ├── spanner-postgres.yaml │ │ ├── spanner.yaml │ │ └── sqlite.yaml │ ├── server │ │ ├── api_test.go │ │ ├── api.go │ │ ├── common_test.go │ │ ├── config.go │ │ ├── mcp │ │ │ ├── jsonrpc │ │ │ │ ├── jsonrpc_test.go │ │ │ │ └── jsonrpc.go │ │ │ ├── mcp.go │ │ │ ├── util │ │ │ │ └── lifecycle.go │ │ │ ├── v20241105 │ │ │ │ ├── method.go │ │ │ │ └── types.go │ │ │ ├── v20250326 │ │ │ │ ├── method.go │ │ │ │ └── types.go │ │ │ └── v20250618 │ │ │ ├── method.go │ │ │ └── types.go │ │ ├── mcp_test.go │ │ ├── mcp.go │ │ ├── server_test.go │ │ ├── server.go │ │ ├── static │ │ │ ├── assets │ │ │ │ └── mcptoolboxlogo.png │ │ │ ├── css │ │ │ │ └── style.css │ │ │ ├── index.html │ │ │ ├── js │ │ │ │ ├── auth.js │ │ │ │ ├── loadTools.js │ │ │ │ ├── mainContent.js │ │ │ │ ├── navbar.js │ │ │ │ ├── runTool.js │ │ │ │ ├── toolDisplay.js │ │ │ │ ├── tools.js │ │ │ │ └── toolsets.js │ │ │ ├── tools.html │ │ │ └── toolsets.html │ │ ├── web_test.go │ │ └── web.go │ ├── sources │ │ ├── alloydbadmin │ │ │ ├── alloydbadmin_test.go │ │ │ └── alloydbadmin.go │ │ ├── alloydbpg │ │ │ ├── alloydb_pg_test.go │ │ │ └── alloydb_pg.go │ │ ├── bigquery │ │ │ ├── bigquery_test.go │ │ │ └── bigquery.go │ │ ├── bigtable │ │ │ ├── bigtable_test.go │ │ │ └── bigtable.go │ │ ├── cassandra │ │ │ ├── cassandra_test.go │ │ │ └── cassandra.go │ │ ├── clickhouse │ │ │ ├── 
clickhouse_test.go │ │ │ └── clickhouse.go │ │ ├── cloudmonitoring │ │ │ ├── cloud_monitoring_test.go │ │ │ └── cloud_monitoring.go │ │ ├── cloudsqladmin │ │ │ ├── cloud_sql_admin_test.go │ │ │ └── cloud_sql_admin.go │ │ ├── cloudsqlmssql │ │ │ ├── cloud_sql_mssql_test.go │ │ │ └── cloud_sql_mssql.go │ │ ├── cloudsqlmysql │ │ │ ├── cloud_sql_mysql_test.go │ │ │ └── cloud_sql_mysql.go │ │ ├── cloudsqlpg │ │ │ ├── cloud_sql_pg_test.go │ │ │ └── cloud_sql_pg.go │ │ ├── couchbase │ │ │ ├── couchbase_test.go │ │ │ └── couchbase.go │ │ ├── dataplex │ │ │ ├── dataplex_test.go │ │ │ └── dataplex.go │ │ ├── dgraph │ │ │ ├── dgraph_test.go │ │ │ └── dgraph.go │ │ ├── dialect.go │ │ ├── firebird │ │ │ ├── firebird_test.go │ │ │ └── firebird.go │ │ ├── firestore │ │ │ ├── firestore_test.go │ │ │ └── firestore.go │ │ ├── http │ │ │ ├── http_test.go │ │ │ └── http.go │ │ ├── ip_type.go │ │ ├── looker │ │ │ ├── looker_test.go │ │ │ └── looker.go │ │ ├── mongodb │ │ │ ├── mongodb_test.go │ │ │ └── mongodb.go │ │ ├── mssql │ │ │ ├── mssql_test.go │ │ │ └── mssql.go │ │ ├── mysql │ │ │ ├── mysql_test.go │ │ │ └── mysql.go │ │ ├── neo4j │ │ │ ├── neo4j_test.go │ │ │ └── neo4j.go │ │ ├── oceanbase │ │ │ ├── oceanbase_test.go │ │ │ └── oceanbase.go │ │ ├── oracle │ │ │ └── oracle.go │ │ ├── postgres │ │ │ ├── postgres_test.go │ │ │ └── postgres.go │ │ ├── redis │ │ │ ├── redis_test.go │ │ │ └── redis.go │ │ ├── sources.go │ │ ├── spanner │ │ │ ├── spanner_test.go │ │ │ └── spanner.go │ │ ├── sqlite │ │ │ ├── sqlite_test.go │ │ │ └── sqlite.go │ │ ├── tidb │ │ │ ├── tidb_test.go │ │ │ └── tidb.go │ │ ├── trino │ │ │ ├── trino_test.go │ │ │ └── trino.go │ │ ├── util.go │ │ ├── valkey │ │ │ ├── valkey_test.go │ │ │ └── valkey.go │ │ └── yugabytedb │ │ ├── yugabytedb_test.go │ │ └── yugabytedb.go │ ├── telemetry │ │ ├── instrumentation.go │ │ └── telemetry.go │ ├── testutils │ │ └── testutils.go │ ├── tools │ │ ├── alloydb │ │ │ ├── alloydbcreatecluster │ │ │ │ ├── alloydbcreatecluster_test.go │ │ │ │ └── alloydbcreatecluster.go │ │ │ ├── alloydbcreateinstance │ │ │ │ ├── alloydbcreateinstance_test.go │ │ │ │ └── alloydbcreateinstance.go │ │ │ ├── alloydbcreateuser │ │ │ │ ├── alloydbcreateuser_test.go │ │ │ │ └── alloydbcreateuser.go │ │ │ ├── alloydbgetcluster │ │ │ │ ├── alloydbgetcluster_test.go │ │ │ │ └── alloydbgetcluster.go │ │ │ ├── alloydbgetinstance │ │ │ │ ├── alloydbgetinstance_test.go │ │ │ │ └── alloydbgetinstance.go │ │ │ ├── alloydbgetuser │ │ │ │ ├── alloydbgetuser_test.go │ │ │ │ └── alloydbgetuser.go │ │ │ ├── alloydblistclusters │ │ │ │ ├── alloydblistclusters_test.go │ │ │ │ └── alloydblistclusters.go │ │ │ ├── alloydblistinstances │ │ │ │ ├── alloydblistinstances_test.go │ │ │ │ └── alloydblistinstances.go │ │ │ ├── alloydblistusers │ │ │ │ ├── alloydblistusers_test.go │ │ │ │ └── alloydblistusers.go │ │ │ └── alloydbwaitforoperation │ │ │ ├── alloydbwaitforoperation_test.go │ │ │ └── alloydbwaitforoperation.go │ │ ├── alloydbainl │ │ │ ├── alloydbainl_test.go │ │ │ └── alloydbainl.go │ │ ├── bigquery │ │ │ ├── bigqueryanalyzecontribution │ │ │ │ ├── bigqueryanalyzecontribution_test.go │ │ │ │ └── bigqueryanalyzecontribution.go │ │ │ ├── bigquerycommon │ │ │ │ ├── table_name_parser_test.go │ │ │ │ ├── table_name_parser.go │ │ │ │ └── util.go │ │ │ ├── bigqueryconversationalanalytics │ │ │ │ ├── bigqueryconversationalanalytics_test.go │ │ │ │ └── bigqueryconversationalanalytics.go │ │ │ ├── bigqueryexecutesql │ │ │ │ ├── bigqueryexecutesql_test.go │ │ │ │ └── bigqueryexecutesql.go │ │ │ ├── 
bigqueryforecast │ │ │ │ ├── bigqueryforecast_test.go │ │ │ │ └── bigqueryforecast.go │ │ │ ├── bigquerygetdatasetinfo │ │ │ │ ├── bigquerygetdatasetinfo_test.go │ │ │ │ └── bigquerygetdatasetinfo.go │ │ │ ├── bigquerygettableinfo │ │ │ │ ├── bigquerygettableinfo_test.go │ │ │ │ └── bigquerygettableinfo.go │ │ │ ├── bigquerylistdatasetids │ │ │ │ ├── bigquerylistdatasetids_test.go │ │ │ │ └── bigquerylistdatasetids.go │ │ │ ├── bigquerylisttableids │ │ │ │ ├── bigquerylisttableids_test.go │ │ │ │ └── bigquerylisttableids.go │ │ │ ├── bigquerysearchcatalog │ │ │ │ ├── bigquerysearchcatalog_test.go │ │ │ │ └── bigquerysearchcatalog.go │ │ │ └── bigquerysql │ │ │ ├── bigquerysql_test.go │ │ │ └── bigquerysql.go │ │ ├── bigtable │ │ │ ├── bigtable_test.go │ │ │ └── bigtable.go │ │ ├── cassandra │ │ │ └── cassandracql │ │ │ ├── cassandracql_test.go │ │ │ └── cassandracql.go │ │ ├── clickhouse │ │ │ ├── clickhouseexecutesql │ │ │ │ ├── clickhouseexecutesql_test.go │ │ │ │ └── clickhouseexecutesql.go │ │ │ ├── clickhouselistdatabases │ │ │ │ ├── clickhouselistdatabases_test.go │ │ │ │ └── clickhouselistdatabases.go │ │ │ ├── clickhouselisttables │ │ │ │ ├── clickhouselisttables_test.go │ │ │ │ └── clickhouselisttables.go │ │ │ └── clickhousesql │ │ │ ├── clickhousesql_test.go │ │ │ └── clickhousesql.go │ │ ├── cloudmonitoring │ │ │ ├── cloudmonitoring_test.go │ │ │ └── cloudmonitoring.go │ │ ├── cloudsql │ │ │ ├── cloudsqlcreatedatabase │ │ │ │ ├── cloudsqlcreatedatabase_test.go │ │ │ │ └── cloudsqlcreatedatabase.go │ │ │ ├── cloudsqlcreateusers │ │ │ │ ├── cloudsqlcreateusers_test.go │ │ │ │ └── cloudsqlcreateusers.go │ │ │ ├── cloudsqlgetinstances │ │ │ │ ├── cloudsqlgetinstances_test.go │ │ │ │ └── cloudsqlgetinstances.go │ │ │ ├── cloudsqllistdatabases │ │ │ │ ├── cloudsqllistdatabases_test.go │ │ │ │ └── cloudsqllistdatabases.go │ │ │ ├── cloudsqllistinstances │ │ │ │ ├── cloudsqllistinstances_test.go │ │ │ │ └── cloudsqllistinstances.go │ │ │ └── cloudsqlwaitforoperation │ │ │ ├── cloudsqlwaitforoperation_test.go │ │ │ └── cloudsqlwaitforoperation.go │ │ ├── cloudsqlmssql │ │ │ └── cloudsqlmssqlcreateinstance │ │ │ ├── cloudsqlmssqlcreateinstance_test.go │ │ │ └── cloudsqlmssqlcreateinstance.go │ │ ├── cloudsqlmysql │ │ │ └── cloudsqlmysqlcreateinstance │ │ │ ├── cloudsqlmysqlcreateinstance_test.go │ │ │ └── cloudsqlmysqlcreateinstance.go │ │ ├── cloudsqlpg │ │ │ └── cloudsqlpgcreateinstances │ │ │ ├── cloudsqlpgcreateinstances_test.go │ │ │ └── cloudsqlpgcreateinstances.go │ │ ├── common_test.go │ │ ├── common.go │ │ ├── couchbase │ │ │ ├── couchbase_test.go │ │ │ └── couchbase.go │ │ ├── dataform │ │ │ └── dataformcompilelocal │ │ │ ├── dataformcompilelocal_test.go │ │ │ └── dataformcompilelocal.go │ │ ├── dataplex │ │ │ ├── dataplexlookupentry │ │ │ │ ├── dataplexlookupentry_test.go │ │ │ │ └── dataplexlookupentry.go │ │ │ ├── dataplexsearchaspecttypes │ │ │ │ ├── dataplexsearchaspecttypes_test.go │ │ │ │ └── dataplexsearchaspecttypes.go │ │ │ └── dataplexsearchentries │ │ │ ├── dataplexsearchentries_test.go │ │ │ └── dataplexsearchentries.go │ │ ├── dgraph │ │ │ ├── dgraph_test.go │ │ │ └── dgraph.go │ │ ├── firebird │ │ │ ├── firebirdexecutesql │ │ │ │ ├── firebirdexecutesql_test.go │ │ │ │ └── firebirdexecutesql.go │ │ │ └── firebirdsql │ │ │ ├── firebirdsql_test.go │ │ │ └── firebirdsql.go │ │ ├── firestore │ │ │ ├── firestoreadddocuments │ │ │ │ ├── firestoreadddocuments_test.go │ │ │ │ └── firestoreadddocuments.go │ │ │ ├── firestoredeletedocuments │ │ │ │ ├── 
firestoredeletedocuments_test.go │ │ │ │ └── firestoredeletedocuments.go │ │ │ ├── firestoregetdocuments │ │ │ │ ├── firestoregetdocuments_test.go │ │ │ │ └── firestoregetdocuments.go │ │ │ ├── firestoregetrules │ │ │ │ ├── firestoregetrules_test.go │ │ │ │ └── firestoregetrules.go │ │ │ ├── firestorelistcollections │ │ │ │ ├── firestorelistcollections_test.go │ │ │ │ └── firestorelistcollections.go │ │ │ ├── firestorequery │ │ │ │ ├── firestorequery_test.go │ │ │ │ └── firestorequery.go │ │ │ ├── firestorequerycollection │ │ │ │ ├── firestorequerycollection_test.go │ │ │ │ └── firestorequerycollection.go │ │ │ ├── firestoreupdatedocument │ │ │ │ ├── firestoreupdatedocument_test.go │ │ │ │ └── firestoreupdatedocument.go │ │ │ ├── firestorevalidaterules │ │ │ │ ├── firestorevalidaterules_test.go │ │ │ │ └── firestorevalidaterules.go │ │ │ └── util │ │ │ ├── converter_test.go │ │ │ ├── converter.go │ │ │ ├── validator_test.go │ │ │ └── validator.go │ │ ├── http │ │ │ ├── http_test.go │ │ │ └── http.go │ │ ├── http_method.go │ │ ├── looker │ │ │ ├── lookeradddashboardelement │ │ │ │ ├── lookeradddashboardelement_test.go │ │ │ │ └── lookeradddashboardelement.go │ │ │ ├── lookercommon │ │ │ │ ├── lookercommon_test.go │ │ │ │ └── lookercommon.go │ │ │ ├── lookerconversationalanalytics │ │ │ │ ├── lookerconversationalanalytics_test.go │ │ │ │ └── lookerconversationalanalytics.go │ │ │ ├── lookergetdashboards │ │ │ │ ├── lookergetdashboards_test.go │ │ │ │ └── lookergetdashboards.go │ │ │ ├── lookergetdimensions │ │ │ │ ├── lookergetdimensions_test.go │ │ │ │ └── lookergetdimensions.go │ │ │ ├── lookergetexplores │ │ │ │ ├── lookergetexplores_test.go │ │ │ │ └── lookergetexplores.go │ │ │ ├── lookergetfilters │ │ │ │ ├── lookergetfilters_test.go │ │ │ │ └── lookergetfilters.go │ │ │ ├── lookergetlooks │ │ │ │ ├── lookergetlooks_test.go │ │ │ │ └── lookergetlooks.go │ │ │ ├── lookergetmeasures │ │ │ │ ├── lookergetmeasures_test.go │ │ │ │ └── lookergetmeasures.go │ │ │ ├── lookergetmodels │ │ │ │ ├── lookergetmodels_test.go │ │ │ │ └── lookergetmodels.go │ │ │ ├── lookergetparameters │ │ │ │ ├── lookergetparameters_test.go │ │ │ │ └── lookergetparameters.go │ │ │ ├── lookerhealthanalyze │ │ │ │ ├── lookerhealthanalyze_test.go │ │ │ │ └── lookerhealthanalyze.go │ │ │ ├── lookerhealthpulse │ │ │ │ ├── lookerhealthpulse_test.go │ │ │ │ └── lookerhealthpulse.go │ │ │ ├── lookerhealthvacuum │ │ │ │ ├── lookerhealthvacuum_test.go │ │ │ │ └── lookerhealthvacuum.go │ │ │ ├── lookermakedashboard │ │ │ │ ├── lookermakedashboard_test.go │ │ │ │ └── lookermakedashboard.go │ │ │ ├── lookermakelook │ │ │ │ ├── lookermakelook_test.go │ │ │ │ └── lookermakelook.go │ │ │ ├── lookerquery │ │ │ │ ├── lookerquery_test.go │ │ │ │ └── lookerquery.go │ │ │ ├── lookerquerysql │ │ │ │ ├── lookerquerysql_test.go │ │ │ │ └── lookerquerysql.go │ │ │ ├── lookerqueryurl │ │ │ │ ├── lookerqueryurl_test.go │ │ │ │ └── lookerqueryurl.go │ │ │ └── lookerrunlook │ │ │ ├── lookerrunlook_test.go │ │ │ └── lookerrunlook.go │ │ ├── mongodb │ │ │ ├── mongodbaggregate │ │ │ │ ├── mongodbaggregate_test.go │ │ │ │ └── mongodbaggregate.go │ │ │ ├── mongodbdeletemany │ │ │ │ ├── mongodbdeletemany_test.go │ │ │ │ └── mongodbdeletemany.go │ │ │ ├── mongodbdeleteone │ │ │ │ ├── mongodbdeleteone_test.go │ │ │ │ └── mongodbdeleteone.go │ │ │ ├── mongodbfind │ │ │ │ ├── mongodbfind_test.go │ │ │ │ └── mongodbfind.go │ │ │ ├── mongodbfindone │ │ │ │ ├── mongodbfindone_test.go │ │ │ │ └── mongodbfindone.go │ │ │ ├── mongodbinsertmany │ │ │ │ ├── 
mongodbinsertmany_test.go │ │ │ │ └── mongodbinsertmany.go │ │ │ ├── mongodbinsertone │ │ │ │ ├── mongodbinsertone_test.go │ │ │ │ └── mongodbinsertone.go │ │ │ ├── mongodbupdatemany │ │ │ │ ├── mongodbupdatemany_test.go │ │ │ │ └── mongodbupdatemany.go │ │ │ └── mongodbupdateone │ │ │ ├── mongodbupdateone_test.go │ │ │ └── mongodbupdateone.go │ │ ├── mssql │ │ │ ├── mssqlexecutesql │ │ │ │ ├── mssqlexecutesql_test.go │ │ │ │ └── mssqlexecutesql.go │ │ │ ├── mssqllisttables │ │ │ │ ├── mssqllisttables_test.go │ │ │ │ └── mssqllisttables.go │ │ │ └── mssqlsql │ │ │ ├── mssqlsql_test.go │ │ │ └── mssqlsql.go │ │ ├── mysql │ │ │ ├── mysqlcommon │ │ │ │ └── mysqlcommon.go │ │ │ ├── mysqlexecutesql │ │ │ │ ├── mysqlexecutesql_test.go │ │ │ │ └── mysqlexecutesql.go │ │ │ ├── mysqllistactivequeries │ │ │ │ ├── mysqllistactivequeries_test.go │ │ │ │ └── mysqllistactivequeries.go │ │ │ ├── mysqllisttablefragmentation │ │ │ │ ├── mysqllisttablefragmentation_test.go │ │ │ │ └── mysqllisttablefragmentation.go │ │ │ ├── mysqllisttables │ │ │ │ ├── mysqllisttables_test.go │ │ │ │ └── mysqllisttables.go │ │ │ ├── mysqllisttablesmissinguniqueindexes │ │ │ │ ├── mysqllisttablesmissinguniqueindexes_test.go │ │ │ │ └── mysqllisttablesmissinguniqueindexes.go │ │ │ └── mysqlsql │ │ │ ├── mysqlsql_test.go │ │ │ └── mysqlsql.go │ │ ├── neo4j │ │ │ ├── neo4jcypher │ │ │ │ ├── neo4jcypher_test.go │ │ │ │ └── neo4jcypher.go │ │ │ ├── neo4jexecutecypher │ │ │ │ ├── classifier │ │ │ │ │ ├── classifier_test.go │ │ │ │ │ └── classifier.go │ │ │ │ ├── neo4jexecutecypher_test.go │ │ │ │ └── neo4jexecutecypher.go │ │ │ └── neo4jschema │ │ │ ├── cache │ │ │ │ ├── cache_test.go │ │ │ │ └── cache.go │ │ │ ├── helpers │ │ │ │ ├── helpers_test.go │ │ │ │ └── helpers.go │ │ │ ├── neo4jschema_test.go │ │ │ ├── neo4jschema.go │ │ │ └── types │ │ │ └── types.go │ │ ├── oceanbase │ │ │ ├── oceanbaseexecutesql │ │ │ │ ├── oceanbaseexecutesql_test.go │ │ │ │ └── oceanbaseexecutesql.go │ │ │ └── oceanbasesql │ │ │ ├── oceanbasesql_test.go │ │ │ └── oceanbasesql.go │ │ ├── oracle │ │ │ ├── oracleexecutesql │ │ │ │ └── oracleexecutesql.go │ │ │ └── oraclesql │ │ │ └── oraclesql.go │ │ ├── parameters_test.go │ │ ├── parameters.go │ │ ├── postgres │ │ │ ├── postgresexecutesql │ │ │ │ ├── postgresexecutesql_test.go │ │ │ │ └── postgresexecutesql.go │ │ │ ├── postgreslistactivequeries │ │ │ │ ├── postgreslistactivequeries_test.go │ │ │ │ └── postgreslistactivequeries.go │ │ │ ├── postgreslistavailableextensions │ │ │ │ ├── postgreslistavailableextensions_test.go │ │ │ │ └── postgreslistavailableextensions.go │ │ │ ├── postgreslistinstalledextensions │ │ │ │ ├── postgreslistinstalledextensions_test.go │ │ │ │ └── postgreslistinstalledextensions.go │ │ │ ├── postgreslisttables │ │ │ │ ├── postgreslisttables_test.go │ │ │ │ └── postgreslisttables.go │ │ │ └── postgressql │ │ │ ├── postgressql_test.go │ │ │ └── postgressql.go │ │ ├── redis │ │ │ ├── redis_test.go │ │ │ └── redis.go │ │ ├── spanner │ │ │ ├── spannerexecutesql │ │ │ │ ├── spannerexecutesql_test.go │ │ │ │ └── spannerexecutesql.go │ │ │ ├── spannerlisttables │ │ │ │ ├── spannerlisttables_test.go │ │ │ │ └── spannerlisttables.go │ │ │ └── spannersql │ │ │ ├── spanner_test.go │ │ │ └── spannersql.go │ │ ├── sqlite │ │ │ ├── sqliteexecutesql │ │ │ │ ├── sqliteexecutesql_test.go │ │ │ │ └── sqliteexecutesql.go │ │ │ └── sqlitesql │ │ │ ├── sqlitesql_test.go │ │ │ └── sqlitesql.go │ │ ├── tidb │ │ │ ├── tidbexecutesql │ │ │ │ ├── tidbexecutesql_test.go │ │ │ │ └── tidbexecutesql.go │ │ 
│ └── tidbsql │ │ │ ├── tidbsql_test.go │ │ │ └── tidbsql.go │ │ ├── tools_test.go │ │ ├── tools.go │ │ ├── toolsets.go │ │ ├── trino │ │ │ ├── trinoexecutesql │ │ │ │ ├── trinoexecutesql_test.go │ │ │ │ └── trinoexecutesql.go │ │ │ └── trinosql │ │ │ ├── trinosql_test.go │ │ │ └── trinosql.go │ │ ├── utility │ │ │ └── wait │ │ │ ├── wait_test.go │ │ │ └── wait.go │ │ ├── valkey │ │ │ ├── valkey_test.go │ │ │ └── valkey.go │ │ └── yugabytedbsql │ │ ├── yugabytedbsql_test.go │ │ └── yugabytedbsql.go │ └── util │ └── util.go ├── LICENSE ├── logo.png ├── main.go ├── README.md └── tests ├── alloydb │ ├── alloydb_integration_test.go │ └── alloydb_wait_for_operation_test.go ├── alloydbainl │ └── alloydb_ai_nl_integration_test.go ├── alloydbpg │ └── alloydb_pg_integration_test.go ├── auth.go ├── bigquery │ └── bigquery_integration_test.go ├── bigtable │ └── bigtable_integration_test.go ├── cassandra │ └── cassandra_integration_test.go ├── clickhouse │ └── clickhouse_integration_test.go ├── cloudmonitoring │ └── cloud_monitoring_integration_test.go ├── cloudsql │ ├── cloud_sql_create_database_test.go │ ├── cloud_sql_create_users_test.go │ ├── cloud_sql_get_instances_test.go │ ├── cloud_sql_list_databases_test.go │ ├── cloudsql_list_instances_test.go │ └── cloudsql_wait_for_operation_test.go ├── cloudsqlmssql │ ├── cloud_sql_mssql_create_instance_integration_test.go │ └── cloud_sql_mssql_integration_test.go ├── cloudsqlmysql │ ├── cloud_sql_mysql_create_instance_integration_test.go │ └── cloud_sql_mysql_integration_test.go ├── cloudsqlpg │ ├── cloud_sql_pg_create_instances_test.go │ └── cloud_sql_pg_integration_test.go ├── common.go ├── couchbase │ └── couchbase_integration_test.go ├── dataform │ └── dataform_integration_test.go ├── dataplex │ └── dataplex_integration_test.go ├── dgraph │ └── dgraph_integration_test.go ├── firebird │ └── firebird_integration_test.go ├── firestore │ └── firestore_integration_test.go ├── http │ └── http_integration_test.go ├── looker │ └── looker_integration_test.go ├── mongodb │ └── mongodb_integration_test.go ├── mssql │ └── mssql_integration_test.go ├── mysql │ └── mysql_integration_test.go ├── neo4j │ └── neo4j_integration_test.go ├── oceanbase │ └── oceanbase_integration_test.go ├── option.go ├── oracle │ └── oracle_integration_test.go ├── postgres │ └── postgres_integration_test.go ├── redis │ └── redis_test.go ├── server.go ├── source.go ├── spanner │ └── spanner_integration_test.go ├── sqlite │ └── sqlite_integration_test.go ├── tidb │ └── tidb_integration_test.go ├── tool.go ├── trino │ └── trino_integration_test.go ├── utility │ └── wait_integration_test.go ├── valkey │ └── valkey_test.go └── yugabytedb └── yugabytedb_integration_test.go ``` # Files -------------------------------------------------------------------------------- /internal/tools/bigquery/bigquerycommon/table_name_parser_test.go: -------------------------------------------------------------------------------- ```go 1 | // Copyright 2025 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | package bigquerycommon_test 16 | 17 | import ( 18 | "sort" 19 | "strings" 20 | "testing" 21 | 22 | "github.com/google/go-cmp/cmp" 23 | "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerycommon" 24 | ) 25 | 26 | func TestTableParser(t *testing.T) { 27 | testCases := []struct { 28 | name string 29 | sql string 30 | defaultProjectID string 31 | want []string 32 | wantErr bool 33 | wantErrMsg string 34 | }{ 35 | { 36 | name: "single fully qualified table", 37 | sql: "SELECT * FROM `my-project.my_dataset.my_table`", 38 | defaultProjectID: "default-proj", 39 | want: []string{"my-project.my_dataset.my_table"}, 40 | wantErr: false, 41 | }, 42 | { 43 | name: "multiple statements with same table", 44 | sql: "select * from proj1.data1.tbl1 limit 1; select A.b from proj1.data1.tbl1 as A limit 1;", 45 | defaultProjectID: "default-proj", 46 | want: []string{"proj1.data1.tbl1"}, 47 | wantErr: false, 48 | }, 49 | { 50 | name: "multiple fully qualified tables", 51 | sql: "SELECT * FROM `proj1.data1`.`tbl1` JOIN proj2.`data2.tbl2` ON id", 52 | defaultProjectID: "default-proj", 53 | want: []string{"proj1.data1.tbl1", "proj2.data2.tbl2"}, 54 | wantErr: false, 55 | }, 56 | { 57 | name: "duplicate tables", 58 | sql: "SELECT * FROM `proj1.data1.tbl1` JOIN proj1.data1.tbl1 ON id", 59 | defaultProjectID: "default-proj", 60 | want: []string{"proj1.data1.tbl1"}, 61 | wantErr: false, 62 | }, 63 | { 64 | name: "partial table with default project", 65 | sql: "SELECT * FROM `my_dataset`.my_table", 66 | defaultProjectID: "default-proj", 67 | want: []string{"default-proj.my_dataset.my_table"}, 68 | wantErr: false, 69 | }, 70 | { 71 | name: "partial table without default project", 72 | sql: "SELECT * FROM `my_dataset.my_table`", 73 | defaultProjectID: "", 74 | want: nil, 75 | wantErr: true, 76 | }, 77 | { 78 | name: "mixed fully qualified and partial tables", 79 | sql: "SELECT t1.*, t2.* FROM `proj1.data1.tbl1` AS t1 JOIN `data2.tbl2` AS t2 ON t1.id = t2.id", 80 | defaultProjectID: "default-proj", 81 | want: []string{"proj1.data1.tbl1", "default-proj.data2.tbl2"}, 82 | wantErr: false, 83 | }, 84 | { 85 | name: "no tables", 86 | sql: "SELECT 1+1", 87 | defaultProjectID: "default-proj", 88 | want: []string{}, 89 | wantErr: false, 90 | }, 91 | { 92 | name: "ignore single part identifiers (like CTEs)", 93 | sql: "WITH my_cte AS (SELECT 1) SELECT * FROM `my_cte`", 94 | defaultProjectID: "default-proj", 95 | want: []string{}, 96 | wantErr: false, 97 | }, 98 | { 99 | name: "complex CTE", 100 | sql: "WITH cte1 AS (SELECT * FROM `real.table.one`), cte2 AS (SELECT * FROM cte1) SELECT * FROM cte2 JOIN `real.table.two` ON true", 101 | defaultProjectID: "default-proj", 102 | want: []string{"real.table.one", "real.table.two"}, 103 | wantErr: false, 104 | }, 105 | { 106 | name: "nested subquery should be parsed", 107 | sql: "SELECT * FROM (SELECT a FROM (SELECT A.b FROM `real.table.nested` AS A))", 108 | defaultProjectID: "default-proj", 109 | want: []string{"real.table.nested"}, 110 | wantErr: false, 111 | }, 112 | { 113 | name: "from clause with unnest", 114 | sql: "SELECT event.name FROM `my-project.my_dataset.my_table` AS A, UNNEST(A.events) AS event", 115 | defaultProjectID: "default-proj", 116 | want: []string{"my-project.my_dataset.my_table"}, 117 | wantErr: false, 118 | }, 119 | { 120 | name: "ignore more than 3 parts", 121 | sql: "SELECT * FROM `proj.data.tbl.col`", 122 | 
defaultProjectID: "default-proj", 123 | want: []string{}, 124 | wantErr: false, 125 | }, 126 | { 127 | name: "complex query", 128 | sql: "SELECT name FROM (SELECT name FROM `proj1.data1.tbl1`) UNION ALL SELECT name FROM `data2.tbl2`", 129 | defaultProjectID: "default-proj", 130 | want: []string{"proj1.data1.tbl1", "default-proj.data2.tbl2"}, 131 | wantErr: false, 132 | }, 133 | { 134 | name: "empty sql", 135 | sql: "", 136 | defaultProjectID: "default-proj", 137 | want: []string{}, 138 | wantErr: false, 139 | }, 140 | { 141 | name: "with comments", 142 | sql: "SELECT * FROM `proj1.data1.tbl1`; -- comment `fake.table.one` \n SELECT * FROM `proj2.data2.tbl2`; # comment `fake.table.two`", 143 | defaultProjectID: "default-proj", 144 | want: []string{"proj1.data1.tbl1", "proj2.data2.tbl2"}, 145 | wantErr: false, 146 | }, 147 | { 148 | name: "multi-statement with semicolon", 149 | sql: "SELECT * FROM `proj1.data1.tbl1`; SELECT * FROM `proj2.data2.tbl2`", 150 | defaultProjectID: "default-proj", 151 | want: []string{"proj1.data1.tbl1", "proj2.data2.tbl2"}, 152 | wantErr: false, 153 | }, 154 | { 155 | name: "simple execute immediate", 156 | sql: "EXECUTE IMMEDIATE 'SELECT * FROM `exec.proj.tbl`'", 157 | defaultProjectID: "default-proj", 158 | want: nil, 159 | wantErr: true, 160 | wantErrMsg: "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place", 161 | }, 162 | { 163 | name: "execute immediate with multiple spaces", 164 | sql: "EXECUTE IMMEDIATE 'SELECT 1'", 165 | defaultProjectID: "default-proj", 166 | want: nil, 167 | wantErr: true, 168 | wantErrMsg: "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place", 169 | }, 170 | { 171 | name: "execute immediate with newline", 172 | sql: "EXECUTE\nIMMEDIATE 'SELECT 1'", 173 | defaultProjectID: "default-proj", 174 | want: nil, 175 | wantErr: true, 176 | wantErrMsg: "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place", 177 | }, 178 | { 179 | name: "execute immediate with comment", 180 | sql: "EXECUTE -- some comment\n IMMEDIATE 'SELECT * FROM `exec.proj.tbl`'", 181 | defaultProjectID: "default-proj", 182 | want: nil, 183 | wantErr: true, 184 | wantErrMsg: "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place", 185 | }, 186 | { 187 | name: "nested execute immediate", 188 | sql: "EXECUTE IMMEDIATE \"EXECUTE IMMEDIATE '''SELECT * FROM `nested.exec.tbl`'''\"", 189 | defaultProjectID: "default-proj", 190 | want: nil, 191 | wantErr: true, 192 | wantErrMsg: "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place", 193 | }, 194 | { 195 | name: "begin execute immediate", 196 | sql: "BEGIN EXECUTE IMMEDIATE 'SELECT * FROM `exec.proj.tbl`'; END;", 197 | defaultProjectID: "default-proj", 198 | want: nil, 199 | wantErr: true, 200 | wantErrMsg: "EXECUTE IMMEDIATE is not allowed when dataset restrictions are in place", 201 | }, 202 | { 203 | name: "table inside string literal should be ignored", 204 | sql: "SELECT * FROM `real.table.one` WHERE name = 'select * from `fake.table.two`'", 205 | defaultProjectID: "default-proj", 206 | want: []string{"real.table.one"}, 207 | wantErr: false, 208 | }, 209 | { 210 | name: "string with escaped single quote", 211 | sql: "SELECT 'this is a string with an escaped quote \\' and a fake table `fake.table.one`' FROM `real.table.two`", 212 | defaultProjectID: "default-proj", 213 | want: []string{"real.table.two"}, 214 | wantErr: false, 215 | }, 216 | { 217 | name: "string with escaped double quote", 218 | sql: `SELECT "this is a string with an 
escaped quote \" and a fake table ` + "`fake.table.one`" + `" FROM ` + "`real.table.two`", 219 | defaultProjectID: "default-proj", 220 | want: []string{"real.table.two"}, 221 | wantErr: false, 222 | }, 223 | { 224 | name: "multi-line comment", 225 | sql: "/* `fake.table.1` */ SELECT * FROM `real.table.2`", 226 | defaultProjectID: "default-proj", 227 | want: []string{"real.table.2"}, 228 | wantErr: false, 229 | }, 230 | { 231 | name: "raw string with backslash should be ignored", 232 | sql: "SELECT * FROM `real.table.one` WHERE name = r'a raw string with a \\ and a fake table `fake.table.two`'", 233 | defaultProjectID: "default-proj", 234 | want: []string{"real.table.one"}, 235 | wantErr: false, 236 | }, 237 | { 238 | name: "capital R raw string with quotes inside should be ignored", 239 | sql: `SELECT * FROM ` + "`real.table.one`" + ` WHERE name = R"""a raw string with a ' and a " and a \ and a fake table ` + "`fake.table.two`" + `"""`, 240 | defaultProjectID: "default-proj", 241 | want: []string{"real.table.one"}, 242 | wantErr: false, 243 | }, 244 | { 245 | name: "triple quoted raw string should be ignored", 246 | sql: "SELECT * FROM `real.table.one` WHERE name = r'''a raw string with a ' and a \" and a \\ and a fake table `fake.table.two`'''", 247 | defaultProjectID: "default-proj", 248 | want: []string{"real.table.one"}, 249 | wantErr: false, 250 | }, 251 | { 252 | name: "triple quoted capital R raw string should be ignored", 253 | sql: `SELECT * FROM ` + "`real.table.one`" + ` WHERE name = R"""a raw string with a ' and a " and a \ and a fake table ` + "`fake.table.two`" + `"""`, 254 | defaultProjectID: "default-proj", 255 | want: []string{"real.table.one"}, 256 | wantErr: false, 257 | }, 258 | { 259 | name: "unquoted fully qualified table", 260 | sql: "SELECT * FROM my-project.my_dataset.my_table", 261 | defaultProjectID: "default-proj", 262 | want: []string{"my-project.my_dataset.my_table"}, 263 | wantErr: false, 264 | }, 265 | { 266 | name: "unquoted partial table with default project", 267 | sql: "SELECT * FROM my_dataset.my_table", 268 | defaultProjectID: "default-proj", 269 | want: []string{"default-proj.my_dataset.my_table"}, 270 | wantErr: false, 271 | }, 272 | { 273 | name: "unquoted partial table without default project", 274 | sql: "SELECT * FROM my_dataset.my_table", 275 | defaultProjectID: "", 276 | want: nil, 277 | wantErr: true, 278 | }, 279 | { 280 | name: "mixed quoting style 1", 281 | sql: "SELECT * FROM `my-project`.my_dataset.my_table", 282 | defaultProjectID: "default-proj", 283 | want: []string{"my-project.my_dataset.my_table"}, 284 | wantErr: false, 285 | }, 286 | { 287 | name: "mixed quoting style 2", 288 | sql: "SELECT * FROM `my-project`.`my_dataset`.my_table", 289 | defaultProjectID: "default-proj", 290 | want: []string{"my-project.my_dataset.my_table"}, 291 | wantErr: false, 292 | }, 293 | { 294 | name: "mixed quoting style 3", 295 | sql: "SELECT * FROM `my-project`.`my_dataset`.`my_table`", 296 | defaultProjectID: "default-proj", 297 | want: []string{"my-project.my_dataset.my_table"}, 298 | wantErr: false, 299 | }, 300 | { 301 | name: "mixed quoted and unquoted tables", 302 | sql: "SELECT * FROM `proj1.data1.tbl1` JOIN proj2.data2.tbl2 ON id", 303 | defaultProjectID: "default-proj", 304 | want: []string{"proj1.data1.tbl1", "proj2.data2.tbl2"}, 305 | wantErr: false, 306 | }, 307 | { 308 | name: "create table statement", 309 | sql: "CREATE TABLE `my-project.my_dataset.my_table` (x INT64)", 310 | defaultProjectID: "default-proj", 311 | want: 
[]string{"my-project.my_dataset.my_table"}, 312 | wantErr: false, 313 | }, 314 | { 315 | name: "insert into statement", 316 | sql: "INSERT INTO `my-project.my_dataset.my_table` (x) VALUES (1)", 317 | defaultProjectID: "default-proj", 318 | want: []string{"my-project.my_dataset.my_table"}, 319 | wantErr: false, 320 | }, 321 | { 322 | name: "update statement", 323 | sql: "UPDATE `my-project.my_dataset.my_table` SET x = 2 WHERE true", 324 | defaultProjectID: "default-proj", 325 | want: []string{"my-project.my_dataset.my_table"}, 326 | wantErr: false, 327 | }, 328 | { 329 | name: "delete from statement", 330 | sql: "DELETE FROM `my-project.my_dataset.my_table` WHERE true", 331 | defaultProjectID: "default-proj", 332 | want: []string{"my-project.my_dataset.my_table"}, 333 | wantErr: false, 334 | }, 335 | { 336 | name: "merge into statement", 337 | sql: "MERGE `proj.data.target` T USING `proj.data.source` S ON T.id = S.id WHEN NOT MATCHED THEN INSERT ROW", 338 | defaultProjectID: "default-proj", 339 | want: []string{"proj.data.source", "proj.data.target"}, 340 | wantErr: false, 341 | }, 342 | { 343 | name: "create schema statement", 344 | sql: "CREATE SCHEMA `my-project.my_dataset`", 345 | defaultProjectID: "default-proj", 346 | want: nil, 347 | wantErr: true, 348 | wantErrMsg: "dataset-level operations like 'CREATE SCHEMA' are not allowed", 349 | }, 350 | { 351 | name: "create dataset statement", 352 | sql: "CREATE DATASET `my-project.my_dataset`", 353 | defaultProjectID: "default-proj", 354 | want: nil, 355 | wantErr: true, 356 | wantErrMsg: "dataset-level operations like 'CREATE DATASET' are not allowed", 357 | }, 358 | { 359 | name: "drop schema statement", 360 | sql: "DROP SCHEMA `my-project.my_dataset`", 361 | defaultProjectID: "default-proj", 362 | want: nil, 363 | wantErr: true, 364 | wantErrMsg: "dataset-level operations like 'DROP SCHEMA' are not allowed", 365 | }, 366 | { 367 | name: "drop dataset statement", 368 | sql: "DROP DATASET `my-project.my_dataset`", 369 | defaultProjectID: "default-proj", 370 | want: nil, 371 | wantErr: true, 372 | wantErrMsg: "dataset-level operations like 'DROP DATASET' are not allowed", 373 | }, 374 | { 375 | name: "alter schema statement", 376 | sql: "ALTER SCHEMA my_dataset SET OPTIONS(description='new description')", 377 | defaultProjectID: "default-proj", 378 | want: nil, 379 | wantErr: true, 380 | wantErrMsg: "dataset-level operations like 'ALTER SCHEMA' are not allowed", 381 | }, 382 | { 383 | name: "alter dataset statement", 384 | sql: "ALTER DATASET my_dataset SET OPTIONS(description='new description')", 385 | defaultProjectID: "default-proj", 386 | want: nil, 387 | wantErr: true, 388 | wantErrMsg: "dataset-level operations like 'ALTER DATASET' are not allowed", 389 | }, 390 | { 391 | name: "begin...end block", 392 | sql: "BEGIN CREATE TABLE `proj.data.tbl1` (x INT64); INSERT `proj.data.tbl2` (y) VALUES (1); END;", 393 | defaultProjectID: "default-proj", 394 | want: []string{"proj.data.tbl1", "proj.data.tbl2"}, 395 | wantErr: false, 396 | }, 397 | { 398 | name: "complex begin...end block with comments and different quoting", 399 | sql: ` 400 | BEGIN 401 | -- Create a new table 402 | CREATE TABLE proj.data.tbl1 (x INT64); 403 | /* Insert some data from another table */ 404 | INSERT INTO ` + "`proj.data.tbl2`" + ` (y) SELECT y FROM proj.data.source; 405 | END;`, 406 | defaultProjectID: "default-proj", 407 | want: []string{"proj.data.source", "proj.data.tbl1", "proj.data.tbl2"}, 408 | wantErr: false, 409 | }, 410 | { 411 | name: "call fully qualified 
procedure", 412 | sql: "CALL my-project.my_dataset.my_procedure()", 413 | defaultProjectID: "default-proj", 414 | want: nil, 415 | wantErr: true, 416 | wantErrMsg: "CALL is not allowed when dataset restrictions are in place", 417 | }, 418 | { 419 | name: "call partially qualified procedure", 420 | sql: "CALL my_dataset.my_procedure()", 421 | defaultProjectID: "default-proj", 422 | want: nil, 423 | wantErr: true, 424 | wantErrMsg: "CALL is not allowed when dataset restrictions are in place", 425 | }, 426 | { 427 | name: "call procedure in begin...end block", 428 | sql: "BEGIN CALL proj.data.proc1(); SELECT * FROM proj.data.tbl1; END;", 429 | defaultProjectID: "default-proj", 430 | want: nil, 431 | wantErr: true, 432 | wantErrMsg: "CALL is not allowed when dataset restrictions are in place", 433 | }, 434 | { 435 | name: "call procedure with newline", 436 | sql: "CALL\nmy_dataset.my_procedure()", 437 | defaultProjectID: "default-proj", 438 | want: nil, 439 | wantErr: true, 440 | wantErrMsg: "CALL is not allowed when dataset restrictions are in place", 441 | }, 442 | { 443 | name: "call procedure without default project should fail", 444 | sql: "CALL my_dataset.my_procedure()", 445 | defaultProjectID: "", 446 | want: nil, 447 | wantErr: true, 448 | wantErrMsg: "CALL is not allowed when dataset restrictions are in place", 449 | }, 450 | { 451 | name: "create procedure statement", 452 | sql: "CREATE PROCEDURE my_dataset.my_procedure() BEGIN SELECT 1; END;", 453 | defaultProjectID: "default-proj", 454 | want: nil, 455 | wantErr: true, 456 | wantErrMsg: "unanalyzable statements like 'CREATE PROCEDURE' are not allowed", 457 | }, 458 | { 459 | name: "create or replace procedure statement", 460 | sql: "CREATE\n OR \nREPLACE \nPROCEDURE my_dataset.my_procedure() BEGIN SELECT 1; END;", 461 | defaultProjectID: "default-proj", 462 | want: nil, 463 | wantErr: true, 464 | wantErrMsg: "unanalyzable statements like 'CREATE OR REPLACE PROCEDURE' are not allowed", 465 | }, 466 | { 467 | name: "create function statement", 468 | sql: "CREATE FUNCTION my_dataset.my_function() RETURNS INT64 AS (1);", 469 | defaultProjectID: "default-proj", 470 | want: nil, 471 | wantErr: true, 472 | wantErrMsg: "unanalyzable statements like 'CREATE FUNCTION' are not allowed", 473 | }, 474 | } 475 | 476 | for _, tc := range testCases { 477 | t.Run(tc.name, func(t *testing.T) { 478 | got, err := bigquerycommon.TableParser(tc.sql, tc.defaultProjectID) 479 | if (err != nil) != tc.wantErr { 480 | t.Errorf("TableParser() error = %v, wantErr %v", err, tc.wantErr) 481 | return 482 | } 483 | if tc.wantErr && tc.wantErrMsg != "" { 484 | if err == nil || !strings.Contains(err.Error(), tc.wantErrMsg) { 485 | t.Errorf("TableParser() error = %v, want err containing %q", err, tc.wantErrMsg) 486 | } 487 | } 488 | // Sort slices to ensure comparison is order-independent. 489 | sort.Strings(got) 490 | sort.Strings(tc.want) 491 | if diff := cmp.Diff(tc.want, got); diff != "" { 492 | t.Errorf("TableParser() mismatch (-want +got):\n%s", diff) 493 | } 494 | }) 495 | } 496 | } 497 | ``` -------------------------------------------------------------------------------- /docs/en/resources/tools/looker/looker-query-url.md: -------------------------------------------------------------------------------- ```markdown 1 | --- 2 | title: "looker-query-url" 3 | type: docs 4 | weight: 1 5 | description: > 6 | "looker-query-url" generates a url link to a Looker explore. 
7 | aliases: 8 | - /resources/tools/looker-query-url 9 | --- 10 | 11 | ## About 12 | 13 | The `looker-query-url` generates a url link to an explore in 14 | Looker so the query can be investigated further. 15 | 16 | It's compatible with the following sources: 17 | 18 | - [looker](../../sources/looker.md) 19 | 20 | `looker-query-url` takes nine parameters: 21 | 22 | 1. the `model` 23 | 2. the `explore` 24 | 3. the `fields` list 25 | 4. an optional set of `filters` 26 | 5. an optional set of `pivots` 27 | 6. an optional set of `sorts` 28 | 7. an optional `limit` 29 | 8. an optional `tz` 30 | 9. an optional `vis_config` 31 | 32 | ## Example 33 | 34 | ```yaml 35 | tools: 36 | query_url: 37 | kind: looker-query-url 38 | source: looker-source 39 | description: | 40 | Query URL Tool 41 | 42 | This tool is used to generate the URL of a query in Looker. 43 | The user can then explore the query further inside Looker. 44 | The tool also returns the query_id and slug. The parameters 45 | are the same as the query tool with an additional vis_config 46 | parameter. 47 | 48 | The vis_config is optional. If provided, it will be used to 49 | control the default visualization for the query. Here are 50 | some notes on making visualizations. 51 | 52 | ### Cartesian Charts (Area, Bar, Column, Line, Scatter) 53 | 54 | These chart types share a large number of configuration options. 55 | 56 | **General** 57 | * `type`: The type of visualization (`looker_area`, `looker_bar`, `looker_column`, `looker_line`, `looker_scatter`). 58 | * `series_types`: Override the chart type for individual series. 59 | * `show_view_names`: Display view names in labels and tooltips (`true`/`false`). 60 | * `series_labels`: Provide custom names for series. 61 | 62 | **Styling & Colors** 63 | * `colors`: An array of color values to be used for the chart series. 64 | * `series_colors`: A mapping of series names to specific color values. 65 | * `color_application`: Advanced controls for color palette application (collection, palette, reverse, etc.). 66 | * `font_size`: Font size for labels (e.g., '12px'). 67 | 68 | **Legend** 69 | * `hide_legend`: Show or hide the chart legend (`true`/`false`). 70 | * `legend_position`: Placement of the legend (`'center'`, `'left'`, `'right'`). 71 | 72 | **Axes** 73 | * `swap_axes`: Swap the X and Y axes (`true`/`false`). 74 | * `x_axis_scale`: Scale of the x-axis (`'auto'`, `'ordinal'`, `'linear'`, `'time'`). 75 | * `x_axis_reversed`, `y_axis_reversed`: Reverse the direction of an axis (`true`/`false`). 76 | * `x_axis_gridlines`, `y_axis_gridlines`: Display gridlines for an axis (`true`/`false`). 77 | * `show_x_axis_label`, `show_y_axis_label`: Show or hide the axis title (`true`/`false`). 78 | * `show_x_axis_ticks`, `show_y_axis_ticks`: Show or hide axis tick marks (`true`/`false`). 79 | * `x_axis_label`, `y_axis_label`: Set a custom title for an axis. 80 | * `x_axis_datetime_label`: A format string for datetime labels on the x-axis (e.g., `'%Y-%m'`). 81 | * `x_padding_left`, `x_padding_right`: Adjust padding on the ends of the x-axis. 82 | * `x_axis_label_rotation`, `x_axis_label_rotation_bar`: Set rotation for x-axis labels. 83 | * `x_axis_zoom`, `y_axis_zoom`: Enable zooming on an axis (`true`/`false`). 84 | * `y_axes`: An array of configuration objects for multiple y-axes. 85 | 86 | **Data & Series** 87 | * `stacking`: How to stack series (`''` for none, `'normal'`, `'percent'`). 88 | * `ordering`: Order of series in a stack (`'none'`, etc.). 
89 | * `limit_displayed_rows`: Enable or disable limiting the number of rows displayed (`true`/`false`). 90 | * `limit_displayed_rows_values`: Configuration for the row limit (e.g., `{ "first_last": "first", "show_hide": "show", "num_rows": 10 }`). 91 | * `discontinuous_nulls`: How to render null values in line charts (`true`/`false`). 92 | * `point_style`: Style for points on line and area charts (`'none'`, `'circle'`, `'circle_outline'`). 93 | * `series_point_styles`: Override point styles for individual series. 94 | * `interpolation`: Line interpolation style (`'linear'`, `'monotone'`, `'step'`, etc.). 95 | * `show_value_labels`: Display values on data points (`true`/`false`). 96 | * `label_value_format`: A format string for value labels. 97 | * `show_totals_labels`: Display total labels on stacked charts (`true`/`false`). 98 | * `totals_color`: Color for total labels. 99 | * `show_silhouette`: Display a "silhouette" of hidden series in stacked charts (`true`/`false`). 100 | * `hidden_series`: An array of series names to hide from the visualization. 101 | 102 | **Scatter/Bubble Specific** 103 | * `size_by_field`: The field used to determine the size of bubbles. 104 | * `color_by_field`: The field used to determine the color of bubbles. 105 | * `plot_size_by_field`: Whether to display the size-by field in the legend. 106 | * `cluster_points`: Group nearby points into clusters (`true`/`false`). 107 | * `quadrants_enabled`: Display quadrants on the chart (`true`/`false`). 108 | * `quadrant_properties`: Configuration for quadrant labels and colors. 109 | * `custom_quadrant_value_x`, `custom_quadrant_value_y`: Set quadrant boundaries as a percentage. 110 | * `custom_quadrant_point_x`, `custom_quadrant_point_y`: Set quadrant boundaries to a specific value. 111 | 112 | **Miscellaneous** 113 | * `reference_lines`: Configuration for displaying reference lines. 114 | * `trend_lines`: Configuration for displaying trend lines. 115 | * `trellis`: Configuration for creating trellis (small multiple) charts. 116 | * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering interactions. 117 | 118 | ### Boxplot 119 | 120 | * Inherits most of the Cartesian chart options. 121 | * `type`: Must be `looker_boxplot`. 122 | 123 | ### Funnel 124 | 125 | * `type`: Must be `looker_funnel`. 126 | * `orientation`: How data is read (`'automatic'`, `'dataInRows'`, `'dataInColumns'`). 127 | * `percentType`: How percentages are calculated (`'percentOfMaxValue'`, `'percentOfPriorRow'`). 128 | * `labelPosition`, `valuePosition`, `percentPosition`: Placement of labels (`'left'`, `'right'`, `'inline'`, `'hidden'`). 129 | * `labelColor`, `labelColorEnabled`: Set a custom color for labels. 130 | * `labelOverlap`: Allow labels to overlap (`true`/`false`). 131 | * `barColors`: An array of colors for the funnel steps. 132 | * `color_application`: Advanced color palette controls. 133 | * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering. 134 | 135 | ### Pie / Donut 136 | 137 | * `type`: Must be `looker_pie`. 138 | * `value_labels`: Where to display values (`'legend'`, `'labels'`). 139 | * `label_type`: The format of data labels (`'labPer'`, `'labVal'`, `'lab'`, `'val'`, `'per'`). 140 | * `start_angle`, `end_angle`: The start and end angles of the pie chart. 141 | * `inner_radius`: The inner radius, used to create a donut chart. 142 | * `series_colors`, `series_labels`: Override colors and labels for specific slices. 143 | * `color_application`: Advanced color palette controls. 
144 | * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering. 145 | * `advanced_vis_config`: A string containing JSON for advanced Highcharts configuration. 146 | 147 | ### Waterfall 148 | 149 | * Inherits most of the Cartesian chart options. 150 | * `type`: Must be `looker_waterfall`. 151 | * `up_color`: Color for positive (increasing) values. 152 | * `down_color`: Color for negative (decreasing) values. 153 | * `total_color`: Color for the total bar. 154 | 155 | ### Word Cloud 156 | 157 | * `type`: Must be `looker_wordcloud`. 158 | * `rotation`: Enable random word rotation (`true`/`false`). 159 | * `colors`: An array of colors for the words. 160 | * `color_application`: Advanced color palette controls. 161 | * `crossfilterEnabled`, `crossfilters`: Configuration for cross-filtering. 162 | 163 | These are some sample vis_config settings. 164 | 165 | A bar chart - 166 | {{ 167 | "defaults_version": 1, 168 | "label_density": 25, 169 | "legend_position": "center", 170 | "limit_displayed_rows": false, 171 | "ordering": "none", 172 | "plot_size_by_field": false, 173 | "point_style": "none", 174 | "show_null_labels": false, 175 | "show_silhouette": false, 176 | "show_totals_labels": false, 177 | "show_value_labels": false, 178 | "show_view_names": false, 179 | "show_x_axis_label": true, 180 | "show_x_axis_ticks": true, 181 | "show_y_axis_labels": true, 182 | "show_y_axis_ticks": true, 183 | "stacking": "normal", 184 | "totals_color": "#808080", 185 | "trellis": "", 186 | "type": "looker_bar", 187 | "x_axis_gridlines": false, 188 | "x_axis_reversed": false, 189 | "x_axis_scale": "auto", 190 | "x_axis_zoom": true, 191 | "y_axis_combined": true, 192 | "y_axis_gridlines": true, 193 | "y_axis_reversed": false, 194 | "y_axis_scale_mode": "linear", 195 | "y_axis_tick_density": "default", 196 | "y_axis_tick_density_custom": 5, 197 | "y_axis_zoom": true 198 | }} 199 | 200 | A column chart with an option advanced_vis_config - 201 | {{ 202 | "advanced_vis_config": "{ chart: { type: 'pie', spacingBottom: 50, spacingLeft: 50, spacingRight: 50, spacingTop: 50, }, legend: { enabled: false, }, plotOptions: { pie: { dataLabels: { enabled: true, format: '\u003cb\u003e{key}\u003c/b\u003e\u003cspan style=\"font-weight: normal\"\u003e - {percentage:.2f}%\u003c/span\u003e', }, showInLegend: false, }, }, series: [], }", 203 | "colors": [ 204 | "grey" 205 | ], 206 | "defaults_version": 1, 207 | "hidden_fields": [], 208 | "label_density": 25, 209 | "legend_position": "center", 210 | "limit_displayed_rows": false, 211 | "note_display": "below", 212 | "note_state": "collapsed", 213 | "note_text": "Unsold inventory only", 214 | "ordering": "none", 215 | "plot_size_by_field": false, 216 | "point_style": "none", 217 | "series_colors": {}, 218 | "show_null_labels": false, 219 | "show_silhouette": false, 220 | "show_totals_labels": false, 221 | "show_value_labels": true, 222 | "show_view_names": false, 223 | "show_x_axis_label": true, 224 | "show_x_axis_ticks": true, 225 | "show_y_axis_labels": true, 226 | "show_y_axis_ticks": true, 227 | "stacking": "normal", 228 | "totals_color": "#808080", 229 | "trellis": "", 230 | "type": "looker_column", 231 | "x_axis_gridlines": false, 232 | "x_axis_reversed": false, 233 | "x_axis_scale": "auto", 234 | "x_axis_zoom": true, 235 | "y_axes": [], 236 | "y_axis_combined": true, 237 | "y_axis_gridlines": true, 238 | "y_axis_reversed": false, 239 | "y_axis_scale_mode": "linear", 240 | "y_axis_tick_density": "default", 241 | "y_axis_tick_density_custom": 5, 242 | 
"y_axis_zoom": true 243 | }} 244 | 245 | A line chart - 246 | {{ 247 | "defaults_version": 1, 248 | "hidden_pivots": {}, 249 | "hidden_series": [], 250 | "interpolation": "linear", 251 | "label_density": 25, 252 | "legend_position": "center", 253 | "limit_displayed_rows": false, 254 | "plot_size_by_field": false, 255 | "point_style": "none", 256 | "series_types": {}, 257 | "show_null_points": true, 258 | "show_value_labels": false, 259 | "show_view_names": false, 260 | "show_x_axis_label": true, 261 | "show_x_axis_ticks": true, 262 | "show_y_axis_labels": true, 263 | "show_y_axis_ticks": true, 264 | "stacking": "", 265 | "trellis": "", 266 | "type": "looker_line", 267 | "x_axis_gridlines": false, 268 | "x_axis_reversed": false, 269 | "x_axis_scale": "auto", 270 | "y_axis_combined": true, 271 | "y_axis_gridlines": true, 272 | "y_axis_reversed": false, 273 | "y_axis_scale_mode": "linear", 274 | "y_axis_tick_density": "default", 275 | "y_axis_tick_density_custom": 5 276 | }} 277 | 278 | An area chart - 279 | {{ 280 | "defaults_version": 1, 281 | "interpolation": "linear", 282 | "label_density": 25, 283 | "legend_position": "center", 284 | "limit_displayed_rows": false, 285 | "plot_size_by_field": false, 286 | "point_style": "none", 287 | "series_types": {}, 288 | "show_null_points": true, 289 | "show_silhouette": false, 290 | "show_totals_labels": false, 291 | "show_value_labels": false, 292 | "show_view_names": false, 293 | "show_x_axis_label": true, 294 | "show_x_axis_ticks": true, 295 | "show_y_axis_labels": true, 296 | "show_y_axis_ticks": true, 297 | "stacking": "normal", 298 | "totals_color": "#808080", 299 | "trellis": "", 300 | "type": "looker_area", 301 | "x_axis_gridlines": false, 302 | "x_axis_reversed": false, 303 | "x_axis_scale": "auto", 304 | "x_axis_zoom": true, 305 | "y_axis_combined": true, 306 | "y_axis_gridlines": true, 307 | "y_axis_reversed": false, 308 | "y_axis_scale_mode": "linear", 309 | "y_axis_tick_density": "default", 310 | "y_axis_tick_density_custom": 5, 311 | "y_axis_zoom": true 312 | }} 313 | 314 | A scatter plot - 315 | {{ 316 | "cluster_points": false, 317 | "custom_quadrant_point_x": 5, 318 | "custom_quadrant_point_y": 5, 319 | "custom_value_label_column": "", 320 | "custom_x_column": "", 321 | "custom_y_column": "", 322 | "defaults_version": 1, 323 | "hidden_fields": [], 324 | "hidden_pivots": {}, 325 | "hidden_points_if_no": [], 326 | "hidden_series": [], 327 | "interpolation": "linear", 328 | "label_density": 25, 329 | "legend_position": "center", 330 | "limit_displayed_rows": false, 331 | "limit_displayed_rows_values": { 332 | "first_last": "first", 333 | "num_rows": 0, 334 | "show_hide": "hide" 335 | }, 336 | "plot_size_by_field": false, 337 | "point_style": "circle", 338 | "quadrant_properties": { 339 | "0": { 340 | "color": "", 341 | "label": "Quadrant 1" 342 | }, 343 | "1": { 344 | "color": "", 345 | "label": "Quadrant 2" 346 | }, 347 | "2": { 348 | "color": "", 349 | "label": "Quadrant 3" 350 | }, 351 | "3": { 352 | "color": "", 353 | "label": "Quadrant 4" 354 | } 355 | }, 356 | "quadrants_enabled": false, 357 | "series_labels": {}, 358 | "series_types": {}, 359 | "show_null_points": false, 360 | "show_value_labels": false, 361 | "show_view_names": true, 362 | "show_x_axis_label": true, 363 | "show_x_axis_ticks": true, 364 | "show_y_axis_labels": true, 365 | "show_y_axis_ticks": true, 366 | "size_by_field": "roi", 367 | "stacking": "normal", 368 | "swap_axes": true, 369 | "trellis": "", 370 | "type": "looker_scatter", 371 | "x_axis_gridlines": 
false, 372 | "x_axis_reversed": false, 373 | "x_axis_scale": "auto", 374 | "x_axis_zoom": true, 375 | "y_axes": [ 376 | { 377 | "label": "", 378 | "orientation": "bottom", 379 | "series": [ 380 | { 381 | "axisId": "Channel_0 - average_of_roi_first", 382 | "id": "Channel_0 - average_of_roi_first", 383 | "name": "Channel_0" 384 | }, 385 | { 386 | "axisId": "Channel_1 - average_of_roi_first", 387 | "id": "Channel_1 - average_of_roi_first", 388 | "name": "Channel_1" 389 | }, 390 | { 391 | "axisId": "Channel_2 - average_of_roi_first", 392 | "id": "Channel_2 - average_of_roi_first", 393 | "name": "Channel_2" 394 | }, 395 | { 396 | "axisId": "Channel_3 - average_of_roi_first", 397 | "id": "Channel_3 - average_of_roi_first", 398 | "name": "Channel_3" 399 | }, 400 | { 401 | "axisId": "Channel_4 - average_of_roi_first", 402 | "id": "Channel_4 - average_of_roi_first", 403 | "name": "Channel_4" 404 | } 405 | ], 406 | "showLabels": true, 407 | "showValues": true, 408 | "tickDensity": "custom", 409 | "tickDensityCustom": 100, 410 | "type": "linear", 411 | "unpinAxis": false 412 | } 413 | ], 414 | "y_axis_combined": true, 415 | "y_axis_gridlines": true, 416 | "y_axis_reversed": false, 417 | "y_axis_scale_mode": "linear", 418 | "y_axis_tick_density": "default", 419 | "y_axis_tick_density_custom": 5, 420 | "y_axis_zoom": true 421 | }} 422 | 423 | A single record visualization - 424 | {{ 425 | "defaults_version": 1, 426 | "show_view_names": false, 427 | "type": "looker_single_record" 428 | }} 429 | 430 | A single value visualization - 431 | {{ 432 | "comparison_reverse_colors": false, 433 | "comparison_type": "value", "conditional_formatting_include_nulls": false, "conditional_formatting_include_totals": false, 434 | "custom_color": "#1A73E8", 435 | "custom_color_enabled": true, 436 | "defaults_version": 1, 437 | "enable_conditional_formatting": false, 438 | "series_types": {}, 439 | "show_comparison": false, 440 | "show_comparison_label": true, 441 | "show_single_value_title": true, 442 | "single_value_title": "Total Clicks", 443 | "type": "single_value" 444 | }} 445 | 446 | A Pie chart - 447 | {{ 448 | "defaults_version": 1, 449 | "label_density": 25, 450 | "label_type": "labPer", 451 | "legend_position": "center", 452 | "limit_displayed_rows": false, 453 | "ordering": "none", 454 | "plot_size_by_field": false, 455 | "point_style": "none", 456 | "series_types": {}, 457 | "show_null_labels": false, 458 | "show_silhouette": false, 459 | "show_totals_labels": false, 460 | "show_value_labels": false, 461 | "show_view_names": false, 462 | "show_x_axis_label": true, 463 | "show_x_axis_ticks": true, 464 | "show_y_axis_labels": true, 465 | "show_y_axis_ticks": true, 466 | "stacking": "", 467 | "totals_color": "#808080", 468 | "trellis": "", 469 | "type": "looker_pie", 470 | "value_labels": "legend", 471 | "x_axis_gridlines": false, 472 | "x_axis_reversed": false, 473 | "x_axis_scale": "auto", 474 | "y_axis_combined": true, 475 | "y_axis_gridlines": true, 476 | "y_axis_reversed": false, 477 | "y_axis_scale_mode": "linear", 478 | "y_axis_tick_density": "default", 479 | "y_axis_tick_density_custom": 5 480 | }} 481 | 482 | The result is a JSON object with the id, slug, the url, and 483 | the long_url. 
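For illustration only, the returned object has this general shape (the values below are placeholders, not actual output):
{{
  "id": "<query id>",
  "slug": "<query slug>",
  "url": "<query url>",
  "long_url": "<expanded query url>"
}}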
484 | ``` 485 | 486 | ## Reference 487 | 488 | | **field** | **type** | **required** | **description** | 489 | |-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------| 490 | | kind | string | true | Must be "looker-query-url" | 491 | | source | string | true | Name of the source the SQL should execute on. | 492 | | description | string | true | Description of the tool that is passed to the LLM. | 493 | ``` -------------------------------------------------------------------------------- /internal/server/static/js/toolDisplay.js: -------------------------------------------------------------------------------- ```javascript 1 | // Copyright 2025 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | import { handleRunTool, displayResults } from './runTool.js'; 16 | import { createGoogleAuthMethodItem } from './auth.js' 17 | 18 | /** 19 | * Helper function to create form inputs for parameters. 20 | */ 21 | function createParamInput(param, toolId) { 22 | const paramItem = document.createElement('div'); 23 | paramItem.className = 'param-item'; 24 | 25 | const label = document.createElement('label'); 26 | const INPUT_ID = `param-${toolId}-${param.name}`; 27 | const NAME_TEXT = document.createTextNode(param.name); 28 | label.setAttribute('for', INPUT_ID); 29 | label.appendChild(NAME_TEXT); 30 | 31 | const IS_AUTH_PARAM = param.authServices && param.authServices.length > 0; 32 | let additionalLabelText = ''; 33 | if (IS_AUTH_PARAM) { 34 | additionalLabelText += ' (auth)'; 35 | } 36 | if (!param.required) { 37 | additionalLabelText += ' (optional)'; 38 | } 39 | 40 | if (additionalLabelText) { 41 | const additionalSpan = document.createElement('span'); 42 | additionalSpan.textContent = additionalLabelText; 43 | additionalSpan.classList.add('param-label-extras'); 44 | label.appendChild(additionalSpan); 45 | } 46 | paramItem.appendChild(label); 47 | 48 | const inputCheckboxWrapper = document.createElement('div'); 49 | const inputContainer = document.createElement('div'); 50 | inputCheckboxWrapper.className = 'input-checkbox-wrapper'; 51 | inputContainer.className = 'param-input-element-container'; 52 | 53 | // Build parameter's value input box. 
54 | const PLACEHOLDER_LABEL = param.label; 55 | let inputElement; 56 | let boolValueLabel = null; 57 | 58 | if (param.type === 'textarea') { 59 | inputElement = document.createElement('textarea'); 60 | inputElement.rows = 3; 61 | inputContainer.appendChild(inputElement); 62 | } else if(param.type === 'checkbox') { 63 | inputElement = document.createElement('input'); 64 | inputElement.type = 'checkbox'; 65 | inputElement.title = PLACEHOLDER_LABEL; 66 | inputElement.checked = false; 67 | 68 | // handle true/false label for boolean params 69 | boolValueLabel = document.createElement('span'); 70 | boolValueLabel.className = 'checkbox-bool-label'; 71 | boolValueLabel.textContent = inputElement.checked ? ' true' : ' false'; 72 | 73 | inputContainer.appendChild(inputElement); 74 | inputContainer.appendChild(boolValueLabel); 75 | 76 | inputElement.addEventListener('change', () => { 77 | boolValueLabel.textContent = inputElement.checked ? ' true' : ' false'; 78 | }); 79 | } else { 80 | inputElement = document.createElement('input'); 81 | inputElement.type = param.type; 82 | inputContainer.appendChild(inputElement); 83 | } 84 | 85 | inputElement.id = INPUT_ID; 86 | inputElement.name = param.name; 87 | inputElement.classList.add('param-input-element'); 88 | 89 | if (IS_AUTH_PARAM) { 90 | inputElement.disabled = true; 91 | inputElement.classList.add('auth-param-input'); 92 | if (param.type !== 'checkbox') { 93 | inputElement.placeholder = param.authServices; 94 | } 95 | } else if (param.type !== 'checkbox') { 96 | inputElement.placeholder = PLACEHOLDER_LABEL ? PLACEHOLDER_LABEL.trim() : ''; 97 | } 98 | inputCheckboxWrapper.appendChild(inputContainer); 99 | 100 | // create the "Include Param" checkbox 101 | const INCLUDE_CHECKBOX_ID = `include-${INPUT_ID}`; 102 | const includeContainer = document.createElement('div'); 103 | const includeCheckbox = document.createElement('input'); 104 | 105 | includeContainer.className = 'include-param-container'; 106 | includeCheckbox.type = 'checkbox'; 107 | includeCheckbox.id = INCLUDE_CHECKBOX_ID; 108 | includeCheckbox.name = `include-${param.name}`; 109 | includeCheckbox.title = 'Include this parameter'; // Add a tooltip 110 | 111 | // default to checked, unless it's an optional parameter 112 | includeCheckbox.checked = param.required; 113 | 114 | includeContainer.appendChild(includeCheckbox); 115 | inputCheckboxWrapper.appendChild(includeContainer); 116 | 117 | paramItem.appendChild(inputCheckboxWrapper); 118 | 119 | // function to update UI based on checkbox state 120 | const updateParamIncludedState = () => { 121 | const isIncluded = includeCheckbox.checked; 122 | if (isIncluded) { 123 | paramItem.classList.remove('disabled-param'); 124 | if (!IS_AUTH_PARAM) { 125 | inputElement.disabled = false; 126 | } 127 | if (boolValueLabel) { 128 | boolValueLabel.classList.remove('disabled'); 129 | } 130 | } else { 131 | paramItem.classList.add('disabled-param'); 132 | inputElement.disabled = true; 133 | if (boolValueLabel) { 134 | boolValueLabel.classList.add('disabled'); 135 | } 136 | } 137 | }; 138 | 139 | // add event listener to the include checkbox 140 | includeCheckbox.addEventListener('change', updateParamIncludedState); 141 | updateParamIncludedState(); 142 | 143 | return paramItem; 144 | } 145 | 146 | /** 147 | * Function to create the header editor popup modal. 148 | * @param {string} toolId The unique identifier for the tool. 149 | * @param {!Object<string, string>} currentHeaders The current headers. 
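 * @param {!Array<!Object>} toolParameters The tool's parameter definitions, used to
 *     surface auth-backed parameters in the header editor's auth helper section.
 * @param {!Array<string>} authRequired Names of auth services required to invoke the tool.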
150 | * @param {function(!Object<string, string>): void} saveCallback A function to be 151 | * called when the "Save" button is clicked and the headers are successfully 152 | * parsed. The function receives the updated headers object as its argument. 153 | * @return {!HTMLDivElement} The outermost div element of the created modal. 154 | */ 155 | function createHeaderEditorModal(toolId, currentHeaders, toolParameters, authRequired, saveCallback) { 156 | const MODAL_ID = `header-modal-${toolId}`; 157 | let modal = document.getElementById(MODAL_ID); 158 | 159 | if (modal) { 160 | modal.remove(); 161 | } 162 | 163 | modal = document.createElement('div'); 164 | modal.id = MODAL_ID; 165 | modal.className = 'header-modal'; 166 | 167 | const modalContent = document.createElement('div'); 168 | const modalHeader = document.createElement('h5'); 169 | const headersTextarea = document.createElement('textarea'); 170 | 171 | modalContent.className = 'header-modal-content'; 172 | modalHeader.textContent = 'Edit Request Headers'; 173 | headersTextarea.id = `headers-textarea-${toolId}`; 174 | headersTextarea.className = 'headers-textarea'; 175 | headersTextarea.rows = 10; 176 | headersTextarea.value = JSON.stringify(currentHeaders, null, 2); 177 | 178 | // handle authenticated params 179 | const authProfileNames = new Set(); 180 | toolParameters.forEach(param => { 181 | const isAuthParam = param.authServices && param.authServices.length > 0; 182 | if (isAuthParam && param.authServices) { 183 | param.authServices.forEach(name => authProfileNames.add(name)); 184 | } 185 | }); 186 | 187 | // handle authorized invocations 188 | if (authRequired && authRequired.length > 0) { 189 | authRequired.forEach(name => authProfileNames.add(name)); 190 | } 191 | 192 | modalContent.appendChild(modalHeader); 193 | modalContent.appendChild(headersTextarea); 194 | 195 | if (authProfileNames.size > 0 || authRequired.length > 0) { 196 | const authHelperSection = document.createElement('div'); 197 | authHelperSection.className = 'auth-helper-section'; 198 | const authList = document.createElement('div'); 199 | authList.className = 'auth-method-list'; 200 | 201 | authProfileNames.forEach(profileName => { 202 | const authItem = createGoogleAuthMethodItem(toolId, profileName); 203 | authList.appendChild(authItem); 204 | }); 205 | authHelperSection.appendChild(authList); 206 | modalContent.appendChild(authHelperSection); 207 | } 208 | 209 | const modalActions = document.createElement('div'); 210 | const closeButton = document.createElement('button'); 211 | const saveButton = document.createElement('button'); 212 | const authTokenDropdown = createAuthTokenInfoDropdown(); 213 | 214 | modalActions.className = 'header-modal-actions'; 215 | closeButton.textContent = 'Close'; 216 | closeButton.className = 'btn btn--closeHeaders'; 217 | closeButton.addEventListener('click', () => closeHeaderEditor(toolId)); 218 | saveButton.textContent = 'Save'; 219 | saveButton.className = 'btn btn--saveHeaders'; 220 | saveButton.addEventListener('click', () => { 221 | try { 222 | const updatedHeaders = JSON.parse(headersTextarea.value); 223 | saveCallback(updatedHeaders); 224 | closeHeaderEditor(toolId); 225 | } catch (e) { 226 | alert('Invalid JSON format for headers.'); 227 | console.error("Header JSON parse error:", e); 228 | } 229 | }); 230 | 231 | modalActions.appendChild(closeButton); 232 | modalActions.appendChild(saveButton); 233 | modalContent.appendChild(modalActions); 234 | modalContent.appendChild(authTokenDropdown); 235 | 
modal.appendChild(modalContent); 236 | 237 | return modal; 238 | } 239 | 240 | /** 241 | * Function to open the header popup. 242 | */ 243 | function openHeaderEditor(toolId) { 244 | const modal = document.getElementById(`header-modal-${toolId}`); 245 | if (modal) { 246 | modal.style.display = 'block'; 247 | } 248 | } 249 | 250 | /** 251 | * Function to close the header popup. 252 | */ 253 | function closeHeaderEditor(toolId) { 254 | const modal = document.getElementById(`header-modal-${toolId}`); 255 | if (modal) { 256 | modal.style.display = 'none'; 257 | } 258 | } 259 | 260 | /** 261 | * Creates a dropdown element showing information on how to extract Google auth tokens. 262 | * @return {HTMLDetailsElement} The details element representing the dropdown. 263 | */ 264 | function createAuthTokenInfoDropdown() { 265 | const details = document.createElement('details'); 266 | const summary = document.createElement('summary'); 267 | const content = document.createElement('div'); 268 | 269 | details.className = 'auth-token-details'; 270 | details.appendChild(summary); 271 | summary.textContent = 'How to extract Google OAuth ID Token manually'; 272 | content.className = 'auth-token-content'; 273 | 274 | // auth instruction dropdown 275 | const tabButtons = document.createElement('div'); 276 | const leftTab = document.createElement('button'); 277 | const rightTab = document.createElement('button'); 278 | 279 | tabButtons.className = 'auth-tab-group'; 280 | leftTab.className = 'auth-tab-picker active'; 281 | leftTab.textContent = 'With Standard Account'; 282 | leftTab.setAttribute('data-tab', 'standard'); 283 | rightTab.className = 'auth-tab-picker'; 284 | rightTab.textContent = 'With Service Account'; 285 | rightTab.setAttribute('data-tab', 'service'); 286 | 287 | tabButtons.appendChild(leftTab); 288 | tabButtons.appendChild(rightTab); 289 | content.appendChild(tabButtons); 290 | 291 | const tabContentContainer = document.createElement('div'); 292 | const standardAccInstructions = document.createElement('div'); 293 | const serviceAccInstructions = document.createElement('div'); 294 | 295 | standardAccInstructions.id = 'auth-tab-standard'; 296 | standardAccInstructions.className = 'auth-tab-content active'; 297 | standardAccInstructions.innerHTML = AUTH_TOKEN_INSTRUCTIONS_STANDARD; 298 | serviceAccInstructions.id = 'auth-tab-service'; 299 | serviceAccInstructions.className = 'auth-tab-content'; 300 | serviceAccInstructions.innerHTML = AUTH_TOKEN_INSTRUCTIONS_SERVICE_ACCOUNT; 301 | 302 | tabContentContainer.appendChild(standardAccInstructions); 303 | tabContentContainer.appendChild(serviceAccInstructions); 304 | content.appendChild(tabContentContainer); 305 | 306 | // switching tabs logic 307 | const tabBtns = [leftTab, rightTab]; 308 | const tabContents = [standardAccInstructions, serviceAccInstructions]; 309 | 310 | tabBtns.forEach(btn => { 311 | btn.addEventListener('click', () => { 312 | // deactivate all buttons and contents 313 | tabBtns.forEach(b => b.classList.remove('active')); 314 | tabContents.forEach(c => c.classList.remove('active')); 315 | 316 | btn.classList.add('active'); 317 | 318 | const tabId = btn.getAttribute('data-tab'); 319 | const activeContent = content.querySelector(`#auth-tab-${tabId}`); 320 | if (activeContent) { 321 | activeContent.classList.add('active'); 322 | } 323 | }); 324 | }); 325 | 326 | details.appendChild(content); 327 | return details; 328 | } 329 | 330 | /** 331 | * Renders the tool display area. 
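 * @param {!Object} tool The tool manifest object (id, name, description, parameters, authRequired).
 * @param {!HTMLElement} containerElement The DOM element the tool interface is rendered into.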
332 | */ 333 | export function renderToolInterface(tool, containerElement) { 334 | const TOOL_ID = tool.id; 335 | containerElement.innerHTML = ''; 336 | 337 | let lastResults = null; 338 | let currentHeaders = { 339 | "Content-Type": "application/json" 340 | }; 341 | 342 | // function to update lastResults so we can toggle json 343 | const updateLastResults = (newResults) => { 344 | lastResults = newResults; 345 | }; 346 | const updateCurrentHeaders = (newHeaders) => { 347 | currentHeaders = newHeaders; 348 | const newModal = createHeaderEditorModal(TOOL_ID, currentHeaders, tool.parameters, tool.authRequired, updateCurrentHeaders); 349 | containerElement.appendChild(newModal); 350 | }; 351 | 352 | const gridContainer = document.createElement('div'); 353 | gridContainer.className = 'tool-details-grid'; 354 | 355 | const toolInfoContainer = document.createElement('div'); 356 | const nameBox = document.createElement('div'); 357 | const descBox = document.createElement('div'); 358 | 359 | nameBox.className = 'tool-box tool-name'; 360 | nameBox.innerHTML = `<h5>Name:</h5><p>${tool.name}</p>`; 361 | descBox.className = 'tool-box tool-description'; 362 | descBox.innerHTML = `<h5>Description:</h5><p>${tool.description}</p>`; 363 | 364 | toolInfoContainer.className = 'tool-info'; 365 | toolInfoContainer.appendChild(nameBox); 366 | toolInfoContainer.appendChild(descBox); 367 | gridContainer.appendChild(toolInfoContainer); 368 | 369 | const DISLCAIMER_INFO = "*Checked parameters are sent with the value from their text field. Empty fields will be sent as an empty string. To exclude a parameter, uncheck it." 370 | const paramsContainer = document.createElement('div'); 371 | const form = document.createElement('form'); 372 | const paramsHeader = document.createElement('div'); 373 | const disclaimerText = document.createElement('div'); 374 | 375 | paramsContainer.className = 'tool-params tool-box'; 376 | paramsContainer.innerHTML = '<h5>Parameters:</h5>'; 377 | paramsHeader.className = 'params-header'; 378 | paramsContainer.appendChild(paramsHeader); 379 | disclaimerText.textContent = DISLCAIMER_INFO; 380 | disclaimerText.className = 'params-disclaimer'; 381 | paramsContainer.appendChild(disclaimerText); 382 | 383 | form.id = `tool-params-form-${TOOL_ID}`; 384 | 385 | tool.parameters.forEach(param => { 386 | form.appendChild(createParamInput(param, TOOL_ID)); 387 | }); 388 | paramsContainer.appendChild(form); 389 | gridContainer.appendChild(paramsContainer); 390 | 391 | containerElement.appendChild(gridContainer); 392 | 393 | const RESPONSE_AREA_ID = `tool-response-area-${TOOL_ID}`; 394 | const runButtonContainer = document.createElement('div'); 395 | const editHeadersButton = document.createElement('button'); 396 | const runButton = document.createElement('button'); 397 | 398 | editHeadersButton.className = 'btn btn--editHeaders'; 399 | editHeadersButton.textContent = 'Edit Headers'; 400 | editHeadersButton.addEventListener('click', () => openHeaderEditor(TOOL_ID)); 401 | runButtonContainer.className = 'run-button-container'; 402 | runButtonContainer.appendChild(editHeadersButton); 403 | 404 | runButton.className = 'btn btn--run'; 405 | runButton.textContent = 'Run Tool'; 406 | runButtonContainer.appendChild(runButton); 407 | containerElement.appendChild(runButtonContainer); 408 | 409 | // response Area (bottom) 410 | const responseContainer = document.createElement('div'); 411 | const responseHeaderControls = document.createElement('div'); 412 | const responseHeader = document.createElement('h5'); 413 
| const responseArea = document.createElement('textarea'); 414 | 415 | responseContainer.className = 'tool-response tool-box'; 416 | responseHeaderControls.className = 'response-header-controls'; 417 | responseHeader.textContent = 'Response:'; 418 | responseHeaderControls.appendChild(responseHeader); 419 | 420 | // prettify box 421 | const PRETTIFY_ID = `prettify-${TOOL_ID}`; 422 | const prettifyDiv = document.createElement('div'); 423 | const prettifyLabel = document.createElement('label'); 424 | const prettifyCheckbox = document.createElement('input'); 425 | 426 | prettifyDiv.className = 'prettify-container'; 427 | prettifyLabel.setAttribute('for', PRETTIFY_ID); 428 | prettifyLabel.textContent = 'Prettify JSON'; 429 | prettifyLabel.className = 'prettify-label'; 430 | 431 | prettifyCheckbox.type = 'checkbox'; 432 | prettifyCheckbox.id = PRETTIFY_ID; 433 | prettifyCheckbox.checked = true; 434 | prettifyCheckbox.className = 'prettify-checkbox'; 435 | 436 | prettifyDiv.appendChild(prettifyLabel); 437 | prettifyDiv.appendChild(prettifyCheckbox); 438 | 439 | responseHeaderControls.appendChild(prettifyDiv); 440 | responseContainer.appendChild(responseHeaderControls); 441 | 442 | responseArea.id = RESPONSE_AREA_ID; 443 | responseArea.readOnly = true; 444 | responseArea.placeholder = 'Results will appear here...'; 445 | responseArea.className = 'tool-response-area'; 446 | responseArea.rows = 10; 447 | responseContainer.appendChild(responseArea); 448 | 449 | containerElement.appendChild(responseContainer); 450 | 451 | // create and append the header editor modal 452 | const headerModal = createHeaderEditorModal(TOOL_ID, currentHeaders, tool.parameters, tool.authRequired, updateCurrentHeaders); 453 | containerElement.appendChild(headerModal); 454 | 455 | prettifyCheckbox.addEventListener('change', () => { 456 | if (lastResults) { 457 | displayResults(lastResults, responseArea, prettifyCheckbox.checked); 458 | } 459 | }); 460 | 461 | runButton.addEventListener('click', (event) => { 462 | event.preventDefault(); 463 | handleRunTool(TOOL_ID, form, responseArea, tool.parameters, prettifyCheckbox, updateLastResults, currentHeaders); 464 | }); 465 | } 466 | 467 | /** 468 | * Checks if a specific parameter is marked as included for a given tool. 469 | * @param {string} toolId The ID of the tool. 470 | * @param {string} paramName The name of the parameter. 471 | * @return {boolean|null} True if the parameter's include checkbox is checked, 472 | * False if unchecked, Null if the checkbox element is not found. 473 | */ 474 | export function isParamIncluded(toolId, paramName) { 475 | const inputId = `param-${toolId}-${paramName}`; 476 | const includeCheckboxId = `include-${inputId}`; 477 | const includeCheckbox = document.getElementById(includeCheckboxId); 478 | 479 | if (includeCheckbox && includeCheckbox.type === 'checkbox') { 480 | return includeCheckbox.checked; 481 | } 482 | 483 | console.warn(`Include checkbox not found for ID: ${includeCheckboxId}`); 484 | return null; 485 | } 486 | 487 | // Templates for inserting token retrieval instructions into edit header modal 488 | const AUTH_TOKEN_INSTRUCTIONS_SERVICE_ACCOUNT = ` 489 | <p>To obtain a Google OAuth ID token using a service account:</p> 490 | <ol> 491 | <li>Make sure you are on the intended SERVICE account (typically contain iam.gserviceaccount.com). Verify by running the command below. 
492 | <pre><code>gcloud auth list</code></pre> 493 | </li> 494 | <li>Print an id token with the audience set to your clientID defined in tools file: 495 | <pre><code>gcloud auth print-identity-token --audiences=YOUR_CLIENT_ID_HERE</code></pre> 496 | </li> 497 | <li>Copy the output token.</li> 498 | <li>Paste this token into the header in JSON editor. The key should be the name of your auth service followed by <code>_token</code> 499 | <pre><code>{ 500 | "Content-Type": "application/json", 501 | "my-google-auth_token": "YOUR_ID_TOKEN_HERE" 502 | } </code></pre> 503 | </li> 504 | </ol> 505 | <p>This token is typically short-lived.</p>`; 506 | 507 | const AUTH_TOKEN_INSTRUCTIONS_STANDARD = ` 508 | <p>To obtain a Google OAuth ID token using a standard account:</p> 509 | <ol> 510 | <li>Make sure you are on your intended standard account. Verify by running the command below. 511 | <pre><code>gcloud auth list</code></pre> 512 | </li> 513 | <li>Within your Cloud Console, add the following link to the "Authorized Redirect URIs".</li> 514 | <pre><code>https://developers.google.com/oauthplayground</code></pre> 515 | <li>Go to the Google OAuth Playground site: <a href="https://developers.google.com/oauthplayground/" target="_blank">https://developers.google.com/oauthplayground/</a></li> 516 | <li>In the top right settings menu, select "Use your own OAuth Credentials".</li> 517 | <li>Input your clientID (from tools file), along with the client secret from Cloud Console.</li> 518 | <li>Inside the Google OAuth Playground, select "Google OAuth2 API v2.</li> 519 | <ul> 520 | <li>Select "Authorize APIs".</li> 521 | <li>Select "Exchange Authorization codes for tokens"</li> 522 | <li>Copy the id_token field provided in the response.</li> 523 | </ul> 524 | <li>Paste this token into the header in JSON editor. The key should be the name of your auth service followed by <code>_token</code> 525 | <pre><code>{ 526 | "Content-Type": "application/json", 527 | "my-google-auth_token": "YOUR_ID_TOKEN_HERE" 528 | } </code></pre> 529 | </li> 530 | </ol> 531 | <p>This token is typically short-lived.</p>`; ``` -------------------------------------------------------------------------------- /tests/postgres/postgres_integration_test.go: -------------------------------------------------------------------------------- ```go 1 | // Copyright 2024 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | package postgres 16 | 17 | import ( 18 | "bytes" 19 | "context" 20 | "encoding/json" 21 | "fmt" 22 | "io" 23 | "net/http" 24 | "net/url" 25 | "os" 26 | "reflect" 27 | "regexp" 28 | "sort" 29 | "strings" 30 | "sync" 31 | "testing" 32 | "time" 33 | 34 | "github.com/google/go-cmp/cmp" 35 | "github.com/google/uuid" 36 | "github.com/googleapis/genai-toolbox/internal/testutils" 37 | "github.com/googleapis/genai-toolbox/tests" 38 | "github.com/jackc/pgx/v5/pgxpool" 39 | ) 40 | 41 | var ( 42 | PostgresSourceKind = "postgres" 43 | PostgresToolKind = "postgres-sql" 44 | PostgresListTablesToolKind = "postgres-list-tables" 45 | PostgresListActiveQueriesToolKind = "postgres-list-active-queries" 46 | PostgresListInstalledExtensionsToolKind = "postgres-list-installed-extensions" 47 | PostgresListAvailableExtensionsToolKind = "postgres-list-available-extensions" 48 | PostgresDatabase = os.Getenv("POSTGRES_DATABASE") 49 | PostgresHost = os.Getenv("POSTGRES_HOST") 50 | PostgresPort = os.Getenv("POSTGRES_PORT") 51 | PostgresUser = os.Getenv("POSTGRES_USER") 52 | PostgresPass = os.Getenv("POSTGRES_PASS") 53 | ) 54 | 55 | func getPostgresVars(t *testing.T) map[string]any { 56 | switch "" { 57 | case PostgresDatabase: 58 | t.Fatal("'POSTGRES_DATABASE' not set") 59 | case PostgresHost: 60 | t.Fatal("'POSTGRES_HOST' not set") 61 | case PostgresPort: 62 | t.Fatal("'POSTGRES_PORT' not set") 63 | case PostgresUser: 64 | t.Fatal("'POSTGRES_USER' not set") 65 | case PostgresPass: 66 | t.Fatal("'POSTGRES_PASS' not set") 67 | } 68 | 69 | return map[string]any{ 70 | "kind": PostgresSourceKind, 71 | "host": PostgresHost, 72 | "port": PostgresPort, 73 | "database": PostgresDatabase, 74 | "user": PostgresUser, 75 | "password": PostgresPass, 76 | } 77 | } 78 | 79 | func addPrebuiltToolConfig(t *testing.T, config map[string]any) map[string]any { 80 | tools, ok := config["tools"].(map[string]any) 81 | if !ok { 82 | t.Fatalf("unable to get tools from config") 83 | } 84 | tools["list_tables"] = map[string]any{ 85 | "kind": PostgresListTablesToolKind, 86 | "source": "my-instance", 87 | "description": "Lists tables in the database.", 88 | } 89 | tools["list_active_queries"] = map[string]any{ 90 | "kind": PostgresListActiveQueriesToolKind, 91 | "source": "my-instance", 92 | "description": "Lists active queries in the database.", 93 | } 94 | 95 | tools["list_installed_extensions"] = map[string]any{ 96 | "kind": PostgresListInstalledExtensionsToolKind, 97 | "source": "my-instance", 98 | "description": "Lists installed extensions in the database.", 99 | } 100 | 101 | tools["list_available_extensions"] = map[string]any{ 102 | "kind": PostgresListAvailableExtensionsToolKind, 103 | "source": "my-instance", 104 | "description": "Lists available extensions in the database.", 105 | } 106 | 107 | config["tools"] = tools 108 | return config 109 | } 110 | 111 | // Copied over from postgres.go 112 | func initPostgresConnectionPool(host, port, user, pass, dbname string) (*pgxpool.Pool, error) { 113 | // urlExample := "postgres:dd//username:password@localhost:5432/database_name" 114 | url := &url.URL{ 115 | Scheme: "postgres", 116 | User: url.UserPassword(user, pass), 117 | Host: fmt.Sprintf("%s:%s", host, port), 118 | Path: dbname, 119 | } 120 | pool, err := pgxpool.New(context.Background(), url.String()) 121 | if err != nil { 122 | return nil, fmt.Errorf("Unable to create connection pool: %w", err) 123 | } 124 | 125 | return pool, nil 126 | } 127 | 128 | func TestPostgres(t *testing.T) { 129 | sourceConfig := getPostgresVars(t) 130 | 
ctx, cancel := context.WithTimeout(context.Background(), time.Minute) 131 | defer cancel() 132 | 133 | var args []string 134 | 135 | pool, err := initPostgresConnectionPool(PostgresHost, PostgresPort, PostgresUser, PostgresPass, PostgresDatabase) 136 | if err != nil { 137 | t.Fatalf("unable to create postgres connection pool: %s", err) 138 | } 139 | 140 | // cleanup test environment 141 | tests.CleanupPostgresTables(t, ctx, pool) 142 | 143 | // create table name with UUID 144 | tableNameParam := "param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "") 145 | tableNameAuth := "auth_table_" + strings.ReplaceAll(uuid.New().String(), "-", "") 146 | tableNameTemplateParam := "template_param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "") 147 | 148 | // set up data for param tool 149 | createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := tests.GetPostgresSQLParamToolInfo(tableNameParam) 150 | teardownTable1 := tests.SetupPostgresSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams) 151 | defer teardownTable1(t) 152 | 153 | // set up data for auth tool 154 | createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := tests.GetPostgresSQLAuthToolInfo(tableNameAuth) 155 | teardownTable2 := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams) 156 | defer teardownTable2(t) 157 | 158 | // Write config into a file and pass it to command 159 | toolsFile := tests.GetToolsConfig(sourceConfig, PostgresToolKind, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, authToolStmt) 160 | toolsFile = tests.AddExecuteSqlConfig(t, toolsFile, "postgres-execute-sql") 161 | tmplSelectCombined, tmplSelectFilterCombined := tests.GetPostgresSQLTmplToolStatement() 162 | toolsFile = tests.AddTemplateParamConfig(t, toolsFile, PostgresToolKind, tmplSelectCombined, tmplSelectFilterCombined, "") 163 | 164 | toolsFile = addPrebuiltToolConfig(t, toolsFile) 165 | 166 | cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...) 
167 | if err != nil { 168 | t.Fatalf("command initialization returned an error: %s", err) 169 | } 170 | defer cleanup() 171 | 172 | waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second) 173 | defer cancel() 174 | out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out) 175 | if err != nil { 176 | t.Logf("toolbox command logs: \n%s", out) 177 | t.Fatalf("toolbox didn't start successfully: %s", err) 178 | } 179 | 180 | // Get configs for tests 181 | select1Want, mcpMyFailToolWant, createTableStatement, mcpSelect1Want := tests.GetPostgresWants() 182 | 183 | // Run tests 184 | tests.RunToolGetTest(t) 185 | tests.RunToolInvokeTest(t, select1Want) 186 | tests.RunMCPToolCallMethod(t, mcpMyFailToolWant, mcpSelect1Want) 187 | tests.RunExecuteSqlToolInvokeTest(t, createTableStatement, select1Want) 188 | tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam) 189 | 190 | // Run specific Postgres tool tests 191 | runPostgresListTablesTest(t, tableNameParam, tableNameAuth) 192 | runPostgresListActiveQueriesTest(t, ctx, pool) 193 | runPostgresListAvailableExtensionsTest(t) 194 | runPostgresListInstalledExtensionsTest(t) 195 | } 196 | 197 | func runPostgresListTablesTest(t *testing.T, tableNameParam, tableNameAuth string) { 198 | // TableNameParam columns to construct want 199 | paramTableColumns := fmt.Sprintf(`[ 200 | {"data_type": "integer", "column_name": "id", "column_default": "nextval('%s_id_seq'::regclass)", "is_not_nullable": true, "ordinal_position": 1, "column_comment": null}, 201 | {"data_type": "text", "column_name": "name", "column_default": null, "is_not_nullable": false, "ordinal_position": 2, "column_comment": null} 202 | ]`, tableNameParam) 203 | 204 | // TableNameAuth columns to construct want 205 | authTableColumns := fmt.Sprintf(`[ 206 | {"data_type": "integer", "column_name": "id", "column_default": "nextval('%s_id_seq'::regclass)", "is_not_nullable": true, "ordinal_position": 1, "column_comment": null}, 207 | {"data_type": "text", "column_name": "name", "column_default": null, "is_not_nullable": false, "ordinal_position": 2, "column_comment": null}, 208 | {"data_type": "text", "column_name": "email", "column_default": null, "is_not_nullable": false, "ordinal_position": 3, "column_comment": null} 209 | ]`, tableNameAuth) 210 | 211 | const ( 212 | // Template to construct detailed output want 213 | detailedObjectTemplate = `{ 214 | "object_name": "%[1]s", "schema_name": "public", 215 | "object_details": { 216 | "owner": "%[3]s", "comment": null, 217 | "indexes": [{"is_primary": true, "is_unique": true, "index_name": "%[1]s_pkey", "index_method": "btree", "index_columns": ["id"], "index_definition": "CREATE UNIQUE INDEX %[1]s_pkey ON public.%[1]s USING btree (id)"}], 218 | "triggers": [], "columns": %[2]s, "object_name": "%[1]s", "object_type": "TABLE", "schema_name": "public", 219 | "constraints": [{"constraint_name": "%[1]s_pkey", "constraint_type": "PRIMARY KEY", "constraint_columns": ["id"], "constraint_definition": "PRIMARY KEY (id)", "foreign_key_referenced_table": null, "foreign_key_referenced_columns": null}] 220 | } 221 | }` 222 | 223 | // Template to construct simple output want 224 | simpleObjectTemplate = `{"object_name":"%s", "schema_name":"public", "object_details":{"name":"%s"}}` 225 | ) 226 | 227 | // Helper to build json for detailed want 228 | getDetailedWant := func(tableName, columnJSON string) string { 229 | return fmt.Sprintf(detailedObjectTemplate, tableName, columnJSON, PostgresUser) 230 | } 231 | 232 
| // Helper to build template for simple want 233 | getSimpleWant := func(tableName string) string { 234 | return fmt.Sprintf(simpleObjectTemplate, tableName, tableName) 235 | } 236 | 237 | invokeTcs := []struct { 238 | name string 239 | api string 240 | requestBody io.Reader 241 | wantStatusCode int 242 | want string 243 | isAllTables bool 244 | }{ 245 | { 246 | name: "invoke list_tables all tables detailed output", 247 | api: "http://127.0.0.1:5000/api/tool/list_tables/invoke", 248 | requestBody: bytes.NewBuffer([]byte(`{"table_names": ""}`)), 249 | wantStatusCode: http.StatusOK, 250 | want: fmt.Sprintf("[%s,%s]", getDetailedWant(tableNameAuth, authTableColumns), getDetailedWant(tableNameParam, paramTableColumns)), 251 | isAllTables: true, 252 | }, 253 | { 254 | name: "invoke list_tables all tables simple output", 255 | api: "http://127.0.0.1:5000/api/tool/list_tables/invoke", 256 | requestBody: bytes.NewBuffer([]byte(`{"table_names": "", "output_format": "simple"}`)), 257 | wantStatusCode: http.StatusOK, 258 | want: fmt.Sprintf("[%s,%s]", getSimpleWant(tableNameAuth), getSimpleWant(tableNameParam)), 259 | isAllTables: true, 260 | }, 261 | { 262 | name: "invoke list_tables detailed output", 263 | api: "http://127.0.0.1:5000/api/tool/list_tables/invoke", 264 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"table_names": "%s"}`, tableNameAuth))), 265 | wantStatusCode: http.StatusOK, 266 | want: fmt.Sprintf("[%s]", getDetailedWant(tableNameAuth, authTableColumns)), 267 | }, 268 | { 269 | name: "invoke list_tables simple output", 270 | api: "http://127.0.0.1:5000/api/tool/list_tables/invoke", 271 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"table_names": "%s", "output_format": "simple"}`, tableNameAuth))), 272 | wantStatusCode: http.StatusOK, 273 | want: fmt.Sprintf("[%s]", getSimpleWant(tableNameAuth)), 274 | }, 275 | { 276 | name: "invoke list_tables with invalid output format", 277 | api: "http://127.0.0.1:5000/api/tool/list_tables/invoke", 278 | requestBody: bytes.NewBuffer([]byte(`{"table_names": "", "output_format": "abcd"}`)), 279 | wantStatusCode: http.StatusBadRequest, 280 | }, 281 | { 282 | name: "invoke list_tables with malformed table_names parameter", 283 | api: "http://127.0.0.1:5000/api/tool/list_tables/invoke", 284 | requestBody: bytes.NewBuffer([]byte(`{"table_names": 12345, "output_format": "detailed"}`)), 285 | wantStatusCode: http.StatusBadRequest, 286 | }, 287 | { 288 | name: "invoke list_tables with multiple table names", 289 | api: "http://127.0.0.1:5000/api/tool/list_tables/invoke", 290 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"table_names": "%s,%s"}`, tableNameParam, tableNameAuth))), 291 | wantStatusCode: http.StatusOK, 292 | want: fmt.Sprintf("[%s,%s]", getDetailedWant(tableNameAuth, authTableColumns), getDetailedWant(tableNameParam, paramTableColumns)), 293 | }, 294 | { 295 | name: "invoke list_tables with non-existent table", 296 | api: "http://127.0.0.1:5000/api/tool/list_tables/invoke", 297 | requestBody: bytes.NewBuffer([]byte(`{"table_names": "non_existent_table"}`)), 298 | wantStatusCode: http.StatusOK, 299 | want: `null`, 300 | }, 301 | { 302 | name: "invoke list_tables with one existing and one non-existent table", 303 | api: "http://127.0.0.1:5000/api/tool/list_tables/invoke", 304 | requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"table_names": "%s,non_existent_table"}`, tableNameParam))), 305 | wantStatusCode: http.StatusOK, 306 | want: fmt.Sprintf("[%s]", getDetailedWant(tableNameParam, paramTableColumns)), 307 | }, 308 | } 309 
| for _, tc := range invokeTcs { 310 | t.Run(tc.name, func(t *testing.T) { 311 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody) 312 | if err != nil { 313 | t.Fatalf("unable to create request: %s", err) 314 | } 315 | req.Header.Add("Content-type", "application/json") 316 | resp, err := http.DefaultClient.Do(req) 317 | if err != nil { 318 | t.Fatalf("unable to send request: %s", err) 319 | } 320 | defer resp.Body.Close() 321 | 322 | if resp.StatusCode != tc.wantStatusCode { 323 | bodyBytes, _ := io.ReadAll(resp.Body) 324 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes)) 325 | } 326 | 327 | if tc.wantStatusCode == http.StatusOK { 328 | var bodyWrapper map[string]json.RawMessage 329 | respBytes, err := io.ReadAll(resp.Body) 330 | if err != nil { 331 | t.Fatalf("error reading response body: %s", err) 332 | } 333 | 334 | if err := json.Unmarshal(respBytes, &bodyWrapper); err != nil { 335 | t.Fatalf("error parsing response wrapper: %s, body: %s", err, string(respBytes)) 336 | } 337 | 338 | resultJSON, ok := bodyWrapper["result"] 339 | if !ok { 340 | t.Fatal("unable to find 'result' in response body") 341 | } 342 | 343 | var resultString string 344 | if err := json.Unmarshal(resultJSON, &resultString); err != nil { 345 | t.Fatalf("'result' is not a JSON-encoded string: %s", err) 346 | } 347 | 348 | var got, want []any 349 | 350 | if err := json.Unmarshal([]byte(resultString), &got); err != nil { 351 | t.Fatalf("failed to unmarshal actual result string: %v", err) 352 | } 353 | if err := json.Unmarshal([]byte(tc.want), &want); err != nil { 354 | t.Fatalf("failed to unmarshal expected want string: %v", err) 355 | } 356 | 357 | // Checking only the default public schema where the test tables are created to avoid brittle tests. 
358 | if tc.isAllTables { 359 | var filteredGot []any 360 | for _, item := range got { 361 | if tableMap, ok := item.(map[string]interface{}); ok { 362 | if schema, ok := tableMap["schema_name"]; ok && schema == "public" { 363 | filteredGot = append(filteredGot, item) 364 | } 365 | } 366 | } 367 | got = filteredGot 368 | } 369 | 370 | sort.SliceStable(got, func(i, j int) bool { 371 | return fmt.Sprintf("%v", got[i]) < fmt.Sprintf("%v", got[j]) 372 | }) 373 | sort.SliceStable(want, func(i, j int) bool { 374 | return fmt.Sprintf("%v", want[i]) < fmt.Sprintf("%v", want[j]) 375 | }) 376 | 377 | if !reflect.DeepEqual(got, want) { 378 | t.Errorf("Unexpected result: got %#v, want: %#v", got, want) 379 | } 380 | } 381 | }) 382 | } 383 | } 384 | 385 | func runPostgresListActiveQueriesTest(t *testing.T, ctx context.Context, pool *pgxpool.Pool) { 386 | type queryListDetails struct { 387 | ProcessId any `json:"pid"` 388 | User string `json:"user"` 389 | Datname string `json:"datname"` 390 | ApplicationName string `json:"application_name"` 391 | ClientAddress string `json:"client_addr"` 392 | State string `json:"state"` 393 | WaitEventType string `json:"wait_event_type"` 394 | WaitEvent string `json:"wait_event"` 395 | BackendStart any `json:"backend_start"` 396 | TransactionStart any `json:"xact_start"` 397 | QueryStart any `json:"query_start"` 398 | QueryDuration any `json:"query_duration"` 399 | Query string `json:"query"` 400 | } 401 | 402 | singleQueryWanted := queryListDetails{ 403 | ProcessId: any(nil), 404 | User: "", 405 | Datname: "", 406 | ApplicationName: "", 407 | ClientAddress: "", 408 | State: "", 409 | WaitEventType: "", 410 | WaitEvent: "", 411 | BackendStart: any(nil), 412 | TransactionStart: any(nil), 413 | QueryStart: any(nil), 414 | QueryDuration: any(nil), 415 | Query: "SELECT pg_sleep(10);", 416 | } 417 | 418 | invokeTcs := []struct { 419 | name string 420 | requestBody io.Reader 421 | clientSleepSecs int 422 | waitSecsBeforeCheck int 423 | wantStatusCode int 424 | want any 425 | }{ 426 | { 427 | name: "invoke list_active_queries when the system is idle", 428 | requestBody: bytes.NewBufferString(`{}`), 429 | clientSleepSecs: 0, 430 | waitSecsBeforeCheck: 0, 431 | wantStatusCode: http.StatusOK, 432 | want: []queryListDetails(nil), 433 | }, 434 | { 435 | name: "invoke list_active_queries when there is 1 ongoing but lower than the threshold", 436 | requestBody: bytes.NewBufferString(`{"min_duration": "100 seconds"}`), 437 | clientSleepSecs: 1, 438 | waitSecsBeforeCheck: 1, 439 | wantStatusCode: http.StatusOK, 440 | want: []queryListDetails(nil), 441 | }, 442 | { 443 | name: "invoke list_active_queries when 1 ongoing query should show up", 444 | requestBody: bytes.NewBufferString(`{"min_duration": "1 seconds"}`), 445 | clientSleepSecs: 10, 446 | waitSecsBeforeCheck: 5, 447 | wantStatusCode: http.StatusOK, 448 | want: []queryListDetails{singleQueryWanted}, 449 | }, 450 | } 451 | 452 | var wg sync.WaitGroup 453 | for _, tc := range invokeTcs { 454 | t.Run(tc.name, func(t *testing.T) { 455 | if tc.clientSleepSecs > 0 { 456 | wg.Add(1) 457 | 458 | go func() { 459 | defer wg.Done() 460 | 461 | err := pool.Ping(ctx) 462 | if err != nil { 463 | t.Errorf("unable to connect to test database: %s", err) 464 | return 465 | } 466 | _, err = pool.Exec(ctx, fmt.Sprintf("SELECT pg_sleep(%d);", tc.clientSleepSecs)) 467 | if err != nil { 468 | t.Errorf("Executing 'SELECT pg_sleep' failed: %s", err) 469 | } 470 | }() 471 | } 472 | 473 | if tc.waitSecsBeforeCheck > 0 { 474 | 
time.Sleep(time.Duration(tc.waitSecsBeforeCheck) * time.Second) 475 | } 476 | 477 | const api = "http://127.0.0.1:5000/api/tool/list_active_queries/invoke" 478 | req, err := http.NewRequest(http.MethodPost, api, tc.requestBody) 479 | if err != nil { 480 | t.Fatalf("unable to create request: %v", err) 481 | } 482 | req.Header.Add("Content-type", "application/json") 483 | 484 | resp, err := http.DefaultClient.Do(req) 485 | if err != nil { 486 | t.Fatalf("unable to send request: %v", err) 487 | } 488 | defer resp.Body.Close() 489 | 490 | if resp.StatusCode != tc.wantStatusCode { 491 | body, _ := io.ReadAll(resp.Body) 492 | t.Fatalf("wrong status code: got %d, want %d, body: %s", resp.StatusCode, tc.wantStatusCode, string(body)) 493 | } 494 | if tc.wantStatusCode != http.StatusOK { 495 | return 496 | } 497 | 498 | var bodyWrapper struct { 499 | Result json.RawMessage `json:"result"` 500 | } 501 | if err := json.NewDecoder(resp.Body).Decode(&bodyWrapper); err != nil { 502 | t.Fatalf("error decoding response wrapper: %v", err) 503 | } 504 | 505 | var resultString string 506 | if err := json.Unmarshal(bodyWrapper.Result, &resultString); err != nil { 507 | resultString = string(bodyWrapper.Result) 508 | } 509 | 510 | var got any 511 | var details []queryListDetails 512 | if err := json.Unmarshal([]byte(resultString), &details); err != nil { 513 | t.Fatalf("failed to unmarshal nested ObjectDetails string: %v", err) 514 | } 515 | got = details 516 | 517 | if diff := cmp.Diff(tc.want, got, cmp.Comparer(func(a, b queryListDetails) bool { 518 | return a.Query == b.Query 519 | })); diff != "" { 520 | t.Errorf("Unexpected result: got %#v, want: %#v", got, tc.want) 521 | } 522 | }) 523 | } 524 | wg.Wait() 525 | } 526 | 527 | func runPostgresListAvailableExtensionsTest(t *testing.T) { 528 | invokeTcs := []struct { 529 | name string 530 | api string 531 | requestBody io.Reader 532 | wantStatusCode int 533 | }{ 534 | { 535 | name: "invoke list_available_extensions output", 536 | api: "http://127.0.0.1:5000/api/tool/list_available_extensions/invoke", 537 | wantStatusCode: http.StatusOK, 538 | requestBody: bytes.NewBuffer([]byte(`{}`)), 539 | }, 540 | } 541 | for _, tc := range invokeTcs { 542 | t.Run(tc.name, func(t *testing.T) { 543 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody) 544 | if err != nil { 545 | t.Fatalf("unable to create request: %s", err) 546 | } 547 | req.Header.Add("Content-type", "application/json") 548 | resp, err := http.DefaultClient.Do(req) 549 | if err != nil { 550 | t.Fatalf("unable to send request: %s", err) 551 | } 552 | defer resp.Body.Close() 553 | 554 | if resp.StatusCode != tc.wantStatusCode { 555 | bodyBytes, _ := io.ReadAll(resp.Body) 556 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes)) 557 | } 558 | 559 | // Intentionally not adding the output check as output depends on the postgres instance used where the the functional test runs. 560 | // Adding the check will make the test flaky. 
561 | }) 562 | } 563 | } 564 | 565 | func runPostgresListInstalledExtensionsTest(t *testing.T) { 566 | invokeTcs := []struct { 567 | name string 568 | api string 569 | requestBody io.Reader 570 | wantStatusCode int 571 | }{ 572 | { 573 | name: "invoke list_installed_extensions output", 574 | api: "http://127.0.0.1:5000/api/tool/list_installed_extensions/invoke", 575 | wantStatusCode: http.StatusOK, 576 | requestBody: bytes.NewBuffer([]byte(`{}`)), 577 | }, 578 | } 579 | for _, tc := range invokeTcs { 580 | t.Run(tc.name, func(t *testing.T) { 581 | req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody) 582 | if err != nil { 583 | t.Fatalf("unable to create request: %s", err) 584 | } 585 | req.Header.Add("Content-type", "application/json") 586 | resp, err := http.DefaultClient.Do(req) 587 | if err != nil { 588 | t.Fatalf("unable to send request: %s", err) 589 | } 590 | defer resp.Body.Close() 591 | 592 | if resp.StatusCode != tc.wantStatusCode { 593 | bodyBytes, _ := io.ReadAll(resp.Body) 594 | t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes)) 595 | } 596 | 597 | // Intentionally not adding the output check as output depends on the postgres instance used where the the functional test runs. 598 | // Adding the check will make the test flaky. 599 | }) 600 | } 601 | } 602 | ``` -------------------------------------------------------------------------------- /internal/tools/neo4j/neo4jschema/neo4jschema.go: -------------------------------------------------------------------------------- ```go 1 | // Copyright 2025 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | package neo4jschema 16 | 17 | import ( 18 | "context" 19 | "fmt" 20 | "sync" 21 | "time" 22 | 23 | "github.com/goccy/go-yaml" 24 | "github.com/googleapis/genai-toolbox/internal/sources" 25 | neo4jsc "github.com/googleapis/genai-toolbox/internal/sources/neo4j" 26 | "github.com/googleapis/genai-toolbox/internal/tools" 27 | "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema/cache" 28 | "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema/helpers" 29 | "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema/types" 30 | "github.com/neo4j/neo4j-go-driver/v5/neo4j" 31 | ) 32 | 33 | // kind defines the unique identifier for this tool. 34 | const kind string = "neo4j-schema" 35 | 36 | // init registers the tool with the application's tool registry when the package is initialized. 37 | func init() { 38 | if !tools.Register(kind, newConfig) { 39 | panic(fmt.Sprintf("tool kind %q already registered", kind)) 40 | } 41 | } 42 | 43 | // newConfig decodes a YAML configuration into a Config struct. 44 | // This function is called by the tool registry to create a new configuration object. 
45 | func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) { 46 | actual := Config{Name: name} 47 | if err := decoder.DecodeContext(ctx, &actual); err != nil { 48 | return nil, err 49 | } 50 | return actual, nil 51 | } 52 | 53 | // compatibleSource defines the interface a data source must implement to be used by this tool. 54 | // It ensures that the source can provide a Neo4j driver and database name. 55 | type compatibleSource interface { 56 | Neo4jDriver() neo4j.DriverWithContext 57 | Neo4jDatabase() string 58 | } 59 | 60 | // Statically verify that our compatible source implementation is valid. 61 | var _ compatibleSource = &neo4jsc.Source{} 62 | 63 | // compatibleSources lists the kinds of sources that are compatible with this tool. 64 | var compatibleSources = [...]string{neo4jsc.SourceKind} 65 | 66 | // Config holds the configuration settings for the Neo4j schema tool. 67 | // These settings are typically read from a YAML file. 68 | type Config struct { 69 | Name string `yaml:"name" validate:"required"` 70 | Kind string `yaml:"kind" validate:"required"` 71 | Source string `yaml:"source" validate:"required"` 72 | Description string `yaml:"description" validate:"required"` 73 | AuthRequired []string `yaml:"authRequired"` 74 | CacheExpireMinutes *int `yaml:"cacheExpireMinutes,omitempty"` // Cache expiration time in minutes. 75 | } 76 | 77 | // Statically verify that Config implements the tools.ToolConfig interface. 78 | var _ tools.ToolConfig = Config{} 79 | 80 | // ToolConfigKind returns the kind of this tool configuration. 81 | func (cfg Config) ToolConfigKind() string { 82 | return kind 83 | } 84 | 85 | // Initialize sets up the tool with its dependencies and returns a ready-to-use Tool instance. 86 | func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) { 87 | // Verify that the specified source exists. 88 | rawS, ok := srcs[cfg.Source] 89 | if !ok { 90 | return nil, fmt.Errorf("no source named %q configured", cfg.Source) 91 | } 92 | 93 | // Verify the source is of a compatible kind. 94 | s, ok := rawS.(compatibleSource) 95 | if !ok { 96 | return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources) 97 | } 98 | 99 | parameters := tools.Parameters{} 100 | mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, parameters) 101 | 102 | // Set a default cache expiration if not provided in the configuration. 103 | if cfg.CacheExpireMinutes == nil { 104 | defaultExpiration := cache.DefaultExpiration // Default to 60 minutes 105 | cfg.CacheExpireMinutes = &defaultExpiration 106 | } 107 | 108 | // Finish tool setup by creating the Tool instance. 109 | t := Tool{ 110 | Name: cfg.Name, 111 | Kind: kind, 112 | AuthRequired: cfg.AuthRequired, 113 | Driver: s.Neo4jDriver(), 114 | Database: s.Neo4jDatabase(), 115 | cache: cache.NewCache(), 116 | cacheExpireMinutes: cfg.CacheExpireMinutes, 117 | manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired}, 118 | mcpManifest: mcpManifest, 119 | } 120 | return t, nil 121 | } 122 | 123 | // Statically verify that Tool implements the tools.Tool interface. 124 | var _ tools.Tool = Tool{} 125 | 126 | // Tool represents the Neo4j schema extraction tool. 127 | // It holds the Neo4j driver, database information, and a cache for the schema. 
128 | type Tool struct {
129 | 	Name         string   `yaml:"name"`
130 | 	Kind         string   `yaml:"kind"`
131 | 	AuthRequired []string `yaml:"authRequired"`
132 | 	Driver       neo4j.DriverWithContext
133 | 	Database     string
134 | 	cache              *cache.Cache
135 | 	cacheExpireMinutes *int
136 | 	manifest           tools.Manifest
137 | 	mcpManifest        tools.McpManifest
138 | }
139 | 
140 | // Invoke executes the tool's main logic: fetching the Neo4j schema.
141 | // It first checks the cache for a valid schema before extracting it from the database.
142 | func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
143 | 	// Check if a valid schema is already in the cache.
144 | 	if cachedSchema, ok := t.cache.Get("schema"); ok {
145 | 		if schema, ok := cachedSchema.(*types.SchemaInfo); ok {
146 | 			return schema, nil
147 | 		}
148 | 	}
149 | 
150 | 	// If not cached, extract the schema from the database.
151 | 	schema, err := t.extractSchema(ctx)
152 | 	if err != nil {
153 | 		return nil, fmt.Errorf("failed to extract database schema: %w", err)
154 | 	}
155 | 
156 | 	// Cache the newly extracted schema for future use.
157 | 	expiration := time.Duration(*t.cacheExpireMinutes) * time.Minute
158 | 	t.cache.Set("schema", schema, expiration)
159 | 
160 | 	return schema, nil
161 | }
162 | 
163 | // ParseParams is a placeholder as this tool does not require input parameters.
164 | func (t Tool) ParseParams(data map[string]any, claimsMap map[string]map[string]any) (tools.ParamValues, error) {
165 | 	return tools.ParamValues{}, nil
166 | }
167 | 
168 | // Manifest returns the tool's manifest, which describes its purpose and parameters.
169 | func (t Tool) Manifest() tools.Manifest {
170 | 	return t.manifest
171 | }
172 | 
173 | // McpManifest returns the machine-consumable manifest for the tool.
174 | func (t Tool) McpManifest() tools.McpManifest {
175 | 	return t.mcpManifest
176 | }
177 | 
178 | // Authorized checks if the tool is authorized to run based on the provided authentication services.
179 | func (t Tool) Authorized(verifiedAuthServices []string) bool {
180 | 	return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
181 | }
182 | 
183 | func (t Tool) RequiresClientAuthorization() bool {
184 | 	return false
185 | }
186 | 
187 | // checkAPOCProcedures verifies if essential APOC procedures are available in the database.
188 | // It returns true only if all required procedures are found.
189 | func (t Tool) checkAPOCProcedures(ctx context.Context) (bool, error) {
190 | 	proceduresToCheck := []string{"apoc.meta.schema", "apoc.meta.cypher.types"}
191 | 
192 | 	session := t.Driver.NewSession(ctx, neo4j.SessionConfig{DatabaseName: t.Database})
193 | 	defer session.Close(ctx)
194 | 
195 | 	// This query efficiently counts how many of the specified procedures exist.
196 | 	query := "SHOW PROCEDURES YIELD name WHERE name IN $procs RETURN count(name) AS procCount"
197 | 	params := map[string]any{"procs": proceduresToCheck}
198 | 
199 | 	result, err := session.Run(ctx, query, params)
200 | 	if err != nil {
201 | 		return false, fmt.Errorf("failed to execute procedure check query: %w", err)
202 | 	}
203 | 
204 | 	record, err := result.Single(ctx)
205 | 	if err != nil {
206 | 		return false, fmt.Errorf("failed to retrieve single result for procedure check: %w", err)
207 | 	}
208 | 
209 | 	rawCount, found := record.Get("procCount")
210 | 	if !found {
211 | 		return false, fmt.Errorf("field 'procCount' not found in result record")
212 | 	}
213 | 
214 | 	procCount, ok := rawCount.(int64)
215 | 	if !ok {
216 | 		return false, fmt.Errorf("expected 'procCount' to be of type int64, but got %T", rawCount)
217 | 	}
218 | 
219 | 	// Return true only if the number of found procedures matches the number we were looking for.
220 | 	return procCount == int64(len(proceduresToCheck)), nil
221 | }
222 | 
223 | // extractSchema orchestrates the concurrent extraction of different parts of the database schema.
224 | // It runs several extraction tasks in parallel for efficiency.
225 | func (t Tool) extractSchema(ctx context.Context) (*types.SchemaInfo, error) {
226 | 	schema := &types.SchemaInfo{}
227 | 	var mu sync.Mutex
228 | 
229 | 	// Define the different schema extraction tasks.
230 | 	tasks := []struct {
231 | 		name string
232 | 		fn   func() error
233 | 	}{
234 | 		{
235 | 			name: "database-info",
236 | 			fn: func() error {
237 | 				dbInfo, err := t.extractDatabaseInfo(ctx)
238 | 				if err != nil {
239 | 					return fmt.Errorf("failed to extract database info: %w", err)
240 | 				}
241 | 				mu.Lock()
242 | 				defer mu.Unlock()
243 | 				schema.DatabaseInfo = *dbInfo
244 | 				return nil
245 | 			},
246 | 		},
247 | 		{
248 | 			name: "schema-extraction",
249 | 			fn: func() error {
250 | 				// Check if APOC procedures are available.
251 | 				hasAPOC, err := t.checkAPOCProcedures(ctx)
252 | 				if err != nil {
253 | 					return fmt.Errorf("failed to check APOC procedures: %w", err)
254 | 				}
255 | 
256 | 				var nodeLabels []types.NodeLabel
257 | 				var relationships []types.Relationship
258 | 				var stats *types.Statistics
259 | 
260 | 				// Use APOC if available for a more detailed schema; otherwise, use native queries.
261 | 				if hasAPOC {
262 | 					nodeLabels, relationships, stats, err = t.GetAPOCSchema(ctx)
263 | 				} else {
264 | 					nodeLabels, relationships, stats, err = t.GetSchemaWithoutAPOC(ctx, 100)
265 | 				}
266 | 				if err != nil {
267 | 					return fmt.Errorf("failed to get schema: %w", err)
268 | 				}
269 | 
270 | 				mu.Lock()
271 | 				defer mu.Unlock()
272 | 				schema.NodeLabels = nodeLabels
273 | 				schema.Relationships = relationships
274 | 				schema.Statistics = *stats
275 | 				return nil
276 | 			},
277 | 		},
278 | 		{
279 | 			name: "constraints",
280 | 			fn: func() error {
281 | 				constraints, err := t.extractConstraints(ctx)
282 | 				if err != nil {
283 | 					return fmt.Errorf("failed to extract constraints: %w", err)
284 | 				}
285 | 				mu.Lock()
286 | 				defer mu.Unlock()
287 | 				schema.Constraints = constraints
288 | 				return nil
289 | 			},
290 | 		},
291 | 		{
292 | 			name: "indexes",
293 | 			fn: func() error {
294 | 				indexes, err := t.extractIndexes(ctx)
295 | 				if err != nil {
296 | 					return fmt.Errorf("failed to extract indexes: %w", err)
297 | 				}
298 | 				mu.Lock()
299 | 				defer mu.Unlock()
300 | 				schema.Indexes = indexes
301 | 				return nil
302 | 			},
303 | 		},
304 | 	}
305 | 
306 | 	var wg sync.WaitGroup
307 | 	errCh := make(chan error, len(tasks))
308 | 
309 | 	// Execute all tasks concurrently.
310 | 	for _, task := range tasks {
311 | 		wg.Add(1)
312 | 		go func(task struct {
313 | 			name string
314 | 			fn   func() error
315 | 		}) {
316 | 			defer wg.Done()
317 | 			if err := task.fn(); err != nil {
318 | 				errCh <- err
319 | 			}
320 | 		}(task)
321 | 	}
322 | 
323 | 	wg.Wait()
324 | 	close(errCh)
325 | 
326 | 	// Collect any errors that occurred during the concurrent tasks.
327 | 	for err := range errCh {
328 | 		if err != nil {
329 | 			schema.Errors = append(schema.Errors, err.Error())
330 | 		}
331 | 	}
332 | 	return schema, nil
333 | }
334 | 
335 | // GetAPOCSchema extracts schema information using the APOC library, which provides detailed metadata.
336 | func (t Tool) GetAPOCSchema(ctx context.Context) ([]types.NodeLabel, []types.Relationship, *types.Statistics, error) {
337 | 	var nodeLabels []types.NodeLabel
338 | 	var relationships []types.Relationship
339 | 	stats := &types.Statistics{
340 | 		NodesByLabel:        make(map[string]int64),
341 | 		RelationshipsByType: make(map[string]int64),
342 | 		PropertiesByLabel:   make(map[string]int64),
343 | 		PropertiesByRelType: make(map[string]int64),
344 | 	}
345 | 
346 | 	var mu sync.Mutex
347 | 	var firstErr error
348 | 	ctx, cancel := context.WithCancel(ctx)
349 | 	defer cancel()
350 | 
351 | 	handleError := func(err error) {
352 | 		mu.Lock()
353 | 		defer mu.Unlock()
354 | 		if firstErr == nil {
355 | 			firstErr = err
356 | 			cancel() // Cancel other operations on the first error.
357 | 		}
358 | 	}
359 | 
360 | 	tasks := []struct {
361 | 		name string
362 | 		fn   func(session neo4j.SessionWithContext) error
363 | 	}{
364 | 		{
365 | 			name: "apoc-schema",
366 | 			fn: func(session neo4j.SessionWithContext) error {
367 | 				result, err := session.Run(ctx, "CALL apoc.meta.schema({sample: 10}) YIELD value RETURN value", nil)
368 | 				if err != nil {
369 | 					return fmt.Errorf("failed to run APOC schema query: %w", err)
370 | 				}
371 | 				if !result.Next(ctx) {
372 | 					return fmt.Errorf("no results from APOC schema query")
373 | 				}
374 | 				schemaMap, ok := result.Record().Values[0].(map[string]any)
375 | 				if !ok {
376 | 					return fmt.Errorf("unexpected result format from APOC schema query: %T", result.Record().Values[0])
377 | 				}
378 | 				apocSchema, err := helpers.MapToAPOCSchema(schemaMap)
379 | 				if err != nil {
380 | 					return fmt.Errorf("failed to convert schema map to APOCSchemaResult: %w", err)
381 | 				}
382 | 				nodes, _, apocStats := helpers.ProcessAPOCSchema(apocSchema)
383 | 				mu.Lock()
384 | 				defer mu.Unlock()
385 | 				nodeLabels = nodes
386 | 				stats.TotalNodes = apocStats.TotalNodes
387 | 				stats.TotalProperties += apocStats.TotalProperties
388 | 				stats.NodesByLabel = apocStats.NodesByLabel
389 | 				stats.PropertiesByLabel = apocStats.PropertiesByLabel
390 | 				return nil
391 | 			},
392 | 		},
393 | 		{
394 | 			name: "apoc-relationships",
395 | 			fn: func(session neo4j.SessionWithContext) error {
396 | 				query := `
397 | 					MATCH (startNode)-[rel]->(endNode)
398 | 					WITH
399 | 						labels(startNode)[0] AS startNode,
400 | 						type(rel) AS relType,
401 | 						apoc.meta.cypher.types(rel) AS relProperties,
402 | 						labels(endNode)[0] AS endNode,
403 | 						count(*) AS count
404 | 					RETURN relType, startNode, endNode, relProperties, count`
405 | 				result, err := session.Run(ctx, query, nil)
406 | 				if err != nil {
407 | 					return fmt.Errorf("failed to extract relationships: %w", err)
408 | 				}
409 | 				for result.Next(ctx) {
410 | 					record := result.Record()
411 | 					relType, startNode, endNode := record.Values[0].(string), record.Values[1].(string), record.Values[2].(string)
412 | 					properties, count := record.Values[3].(map[string]any), record.Values[4].(int64)
413 | 
414 | 					if relType == "" || count == 0 {
415 | 						continue
416 | 					}
417 | 					relationship := types.Relationship{Type: relType, StartNode: startNode, EndNode: endNode, Count: count, Properties: []types.PropertyInfo{}}
418 | 					for prop, propType := range properties {
419 | 						relationship.Properties = append(relationship.Properties, types.PropertyInfo{Name: prop, Types: []string{propType.(string)}})
420 | 					}
421 | 					mu.Lock()
422 | 					relationships = append(relationships, relationship)
423 | 					stats.RelationshipsByType[relType] += count
424 | 					stats.TotalRelationships += count
425 | 					propCount := int64(len(relationship.Properties))
426 | 					stats.TotalProperties += propCount
427 | 					stats.PropertiesByRelType[relType] += propCount
428 | 					mu.Unlock()
429 | 				}
430 | 				mu.Lock()
431 | 				defer mu.Unlock()
432 | 				if len(stats.RelationshipsByType) == 0 {
433 | 					stats.RelationshipsByType = nil
434 | 				}
435 | 				if len(stats.PropertiesByRelType) == 0 {
436 | 					stats.PropertiesByRelType = nil
437 | 				}
438 | 				return nil
439 | 			},
440 | 		},
441 | 	}
442 | 
443 | 	var wg sync.WaitGroup
444 | 	wg.Add(len(tasks))
445 | 	for _, task := range tasks {
446 | 		go func(task struct {
447 | 			name string
448 | 			fn   func(session neo4j.SessionWithContext) error
449 | 		}) {
450 | 			defer wg.Done()
451 | 			session := t.Driver.NewSession(ctx, neo4j.SessionConfig{DatabaseName: t.Database})
452 | 			defer session.Close(ctx)
453 | 			if err := task.fn(session); err != nil {
454 | 				handleError(fmt.Errorf("task %s failed: %w", task.name, err))
455 | 			}
456 | 		}(task)
457 | 	}
458 | 	wg.Wait()
459 | 
460 | 	if firstErr != nil {
461 | 		return nil, nil, nil, firstErr
462 | 	}
463 | 	return nodeLabels, relationships, stats, nil
464 | }
465 | 
466 | // GetSchemaWithoutAPOC extracts schema information using native Cypher queries.
467 | // This serves as a fallback for databases without APOC installed.
468 | func (t Tool) GetSchemaWithoutAPOC(ctx context.Context, sampleSize int) ([]types.NodeLabel, []types.Relationship, *types.Statistics, error) {
469 | 	nodePropsMap := make(map[string]map[string]map[string]bool)
470 | 	relPropsMap := make(map[string]map[string]map[string]bool)
471 | 	nodeCounts := make(map[string]int64)
472 | 	relCounts := make(map[string]int64)
473 | 	relConnectivity := make(map[string]types.RelConnectivityInfo)
474 | 
475 | 	var mu sync.Mutex
476 | 	var firstErr error
477 | 	ctx, cancel := context.WithCancel(ctx)
478 | 	defer cancel()
479 | 
480 | 	handleError := func(err error) {
481 | 		mu.Lock()
482 | 		defer mu.Unlock()
483 | 		if firstErr == nil {
484 | 			firstErr = err
485 | 			cancel()
486 | 		}
487 | 	}
488 | 
489 | 	tasks := []struct {
490 | 		name string
491 | 		fn   func(session neo4j.SessionWithContext) error
492 | 	}{
493 | 		{
494 | 			name: "node-schema",
495 | 			fn: func(session neo4j.SessionWithContext) error {
496 | 				countResult, err := session.Run(ctx, `MATCH (n) UNWIND labels(n) AS label RETURN label, count(*) AS count ORDER BY count DESC`, nil)
497 | 				if err != nil {
498 | 					return fmt.Errorf("node count query failed: %w", err)
499 | 				}
500 | 				var labelsList []string
501 | 				mu.Lock()
502 | 				for countResult.Next(ctx) {
503 | 					record := countResult.Record()
504 | 					label, count := record.Values[0].(string), record.Values[1].(int64)
505 | 					nodeCounts[label] = count
506 | 					labelsList = append(labelsList, label)
507 | 				}
508 | 				mu.Unlock()
509 | 				if err = countResult.Err(); err != nil {
510 | 					return fmt.Errorf("node count result error: %w", err)
511 | 				}
512 | 
513 | 				for _, label := range labelsList {
514 | 					propQuery := fmt.Sprintf(`MATCH (n:%s) WITH n LIMIT $sampleSize UNWIND keys(n) AS key WITH key, n[key] AS value WHERE value IS NOT NULL RETURN key, COLLECT(DISTINCT valueType(value)) AS types`, label)
515 | 					propResult, err := session.Run(ctx, propQuery, map[string]any{"sampleSize": sampleSize})
516 | 					if err != nil {
517 | 						return fmt.Errorf("node properties query for label %s failed: %w", label, err)
518 | 					}
519 | 					mu.Lock()
520 | 					if nodePropsMap[label] == nil {
521 | 						nodePropsMap[label] = make(map[string]map[string]bool)
522 | 					}
523 | 					for propResult.Next(ctx) {
524 | 						record := propResult.Record()
525 | 						key, types := record.Values[0].(string), record.Values[1].([]any)
526 | 						if nodePropsMap[label][key] == nil {
527 | 							nodePropsMap[label][key] = make(map[string]bool)
528 | 						}
529 | 						for _, tp := range types {
530 | 							nodePropsMap[label][key][tp.(string)] = true
531 | 						}
532 | 					}
533 | 					mu.Unlock()
534 | 					if err = propResult.Err(); err != nil {
535 | 						return fmt.Errorf("node properties result error for label %s: %w", label, err)
536 | 					}
537 | 				}
538 | 				return nil
539 | 			},
540 | 		},
541 | 		{
542 | 			name: "relationship-schema",
543 | 			fn: func(session neo4j.SessionWithContext) error {
544 | 				relQuery := `
545 | 					MATCH (start)-[r]->(end)
546 | 					WITH type(r) AS relType, labels(start) AS startLabels, labels(end) AS endLabels, count(*) AS count
547 | 					RETURN relType, CASE WHEN size(startLabels) > 0 THEN startLabels[0] ELSE null END AS startLabel, CASE WHEN size(endLabels) > 0 THEN endLabels[0] ELSE null END AS endLabel, sum(count) AS totalCount
548 | 					ORDER BY totalCount DESC`
549 | 				relResult, err := session.Run(ctx, relQuery, nil)
550 | 				if err != nil {
551 | 					return fmt.Errorf("relationship count query failed: %w", err)
552 | 				}
553 | 				var relTypesList []string
554 | 				mu.Lock()
555 | 				for relResult.Next(ctx) {
556 | 					record := relResult.Record()
557 | 					relType := record.Values[0].(string)
558 | 					startLabel := ""
559 | 					if record.Values[1] != nil {
560 | 						startLabel = record.Values[1].(string)
561 | 					}
562 | 					endLabel := ""
563 | 					if record.Values[2] != nil {
564 | 						endLabel = record.Values[2].(string)
565 | 					}
566 | 					count := record.Values[3].(int64)
567 | 					relCounts[relType] = count
568 | 					relTypesList = append(relTypesList, relType)
569 | 					if existing, ok := relConnectivity[relType]; !ok || count > existing.Count {
570 | 						relConnectivity[relType] = types.RelConnectivityInfo{StartNode: startLabel, EndNode: endLabel, Count: count}
571 | 					}
572 | 				}
573 | 				mu.Unlock()
574 | 				if err = relResult.Err(); err != nil {
575 | 					return fmt.Errorf("relationship count result error: %w", err)
576 | 				}
577 | 
578 | 				for _, relType := range relTypesList {
579 | 					propQuery := fmt.Sprintf(`MATCH ()-[r:%s]->() WITH r LIMIT $sampleSize WHERE size(keys(r)) > 0 UNWIND keys(r) AS key WITH key, r[key] AS value WHERE value IS NOT NULL RETURN key, COLLECT(DISTINCT valueType(value)) AS types`, relType)
580 | 					propResult, err := session.Run(ctx, propQuery, map[string]any{"sampleSize": sampleSize})
581 | 					if err != nil {
582 | 						return fmt.Errorf("relationship properties query for type %s failed: %w", relType, err)
583 | 					}
584 | 					mu.Lock()
585 | 					if relPropsMap[relType] == nil {
586 | 						relPropsMap[relType] = make(map[string]map[string]bool)
587 | 					}
588 | 					for propResult.Next(ctx) {
589 | 						record := propResult.Record()
590 | 						key, propTypes := record.Values[0].(string), record.Values[1].([]any)
591 | 						if relPropsMap[relType][key] == nil {
592 | 							relPropsMap[relType][key] = make(map[string]bool)
593 | 						}
594 | 						for _, t := range propTypes {
595 | 							relPropsMap[relType][key][t.(string)] = true
596 | 						}
597 | 					}
598 | 					mu.Unlock()
599 | 					if err = propResult.Err(); err != nil {
600 | 						return fmt.Errorf("relationship properties result error for type %s: %w", relType, err)
601 | 					}
602 | 				}
603 | 				return nil
604 | 			},
605 | 		},
606 | 	}
607 | 
608 | 	var wg sync.WaitGroup
609 | 	wg.Add(len(tasks))
610 | 	for _, task := range tasks {
611 | 		go func(task struct {
612 | 			name string
613 | 			fn   func(session neo4j.SessionWithContext) error
614 | 		}) {
615 | 			defer wg.Done()
616 | 			session := t.Driver.NewSession(ctx, neo4j.SessionConfig{DatabaseName: t.Database})
617 | 			defer session.Close(ctx)
618 | 			if err := task.fn(session); err != nil {
619 | 				handleError(fmt.Errorf("task %s failed: %w", task.name, err))
620 | 			}
621 | 		}(task)
622 | 	}
623 | 	wg.Wait()
624 | 
625 | 	if firstErr != nil {
626 | 		return nil, nil, nil, firstErr
627 | 	}
628 | 
629 | 	nodeLabels, relationships, stats := helpers.ProcessNonAPOCSchema(nodeCounts, nodePropsMap, relCounts, relPropsMap, relConnectivity)
630 | 	return nodeLabels, relationships, stats, nil
631 | }
632 | 
633 | // extractDatabaseInfo retrieves general information about the Neo4j database instance.
634 | func (t Tool) extractDatabaseInfo(ctx context.Context) (*types.DatabaseInfo, error) {
635 | 	session := t.Driver.NewSession(ctx, neo4j.SessionConfig{DatabaseName: t.Database})
636 | 	defer session.Close(ctx)
637 | 
638 | 	result, err := session.Run(ctx, "CALL dbms.components() YIELD name, versions, edition", nil)
639 | 	if err != nil {
640 | 		return nil, err
641 | 	}
642 | 
643 | 	dbInfo := &types.DatabaseInfo{}
644 | 	if result.Next(ctx) {
645 | 		record := result.Record()
646 | 		dbInfo.Name = record.Values[0].(string)
647 | 		if versions, ok := record.Values[1].([]any); ok && len(versions) > 0 {
648 | 			dbInfo.Version = versions[0].(string)
649 | 		}
650 | 		dbInfo.Edition = record.Values[2].(string)
651 | 	}
652 | 	return dbInfo, result.Err()
653 | }
654 | 
655 | // extractConstraints fetches all schema constraints from the database.
656 | func (t Tool) extractConstraints(ctx context.Context) ([]types.Constraint, error) {
657 | 	session := t.Driver.NewSession(ctx, neo4j.SessionConfig{DatabaseName: t.Database})
658 | 	defer session.Close(ctx)
659 | 
660 | 	result, err := session.Run(ctx, "SHOW CONSTRAINTS", nil)
661 | 	if err != nil {
662 | 		return nil, err
663 | 	}
664 | 
665 | 	var constraints []types.Constraint
666 | 	for result.Next(ctx) {
667 | 		record := result.Record().AsMap()
668 | 		constraint := types.Constraint{
669 | 			Name:       helpers.GetStringValue(record["name"]),
670 | 			Type:       helpers.GetStringValue(record["type"]),
671 | 			EntityType: helpers.GetStringValue(record["entityType"]),
672 | 		}
673 | 		if labels, ok := record["labelsOrTypes"].([]any); ok && len(labels) > 0 {
674 | 			constraint.Label = labels[0].(string)
675 | 		}
676 | 		if props, ok := record["properties"].([]any); ok {
677 | 			constraint.Properties = helpers.ConvertToStringSlice(props)
678 | 		}
679 | 		constraints = append(constraints, constraint)
680 | 	}
681 | 	return constraints, result.Err()
682 | }
683 | 
684 | // extractIndexes fetches all schema indexes from the database.
685 | func (t Tool) extractIndexes(ctx context.Context) ([]types.Index, error) {
686 | 	session := t.Driver.NewSession(ctx, neo4j.SessionConfig{DatabaseName: t.Database})
687 | 	defer session.Close(ctx)
688 | 
689 | 	result, err := session.Run(ctx, "SHOW INDEXES", nil)
690 | 	if err != nil {
691 | 		return nil, err
692 | 	}
693 | 
694 | 	var indexes []types.Index
695 | 	for result.Next(ctx) {
696 | 		record := result.Record().AsMap()
697 | 		index := types.Index{
698 | 			Name:       helpers.GetStringValue(record["name"]),
699 | 			State:      helpers.GetStringValue(record["state"]),
700 | 			Type:       helpers.GetStringValue(record["type"]),
701 | 			EntityType: helpers.GetStringValue(record["entityType"]),
702 | 		}
703 | 		if labels, ok := record["labelsOrTypes"].([]any); ok && len(labels) > 0 {
704 | 			index.Label = labels[0].(string)
705 | 		}
706 | 		if props, ok := record["properties"].([]any); ok {
707 | 			index.Properties = helpers.ConvertToStringSlice(props)
708 | 		}
709 | 		indexes = append(indexes, index)
710 | 	}
711 | 	return indexes, result.Err()
712 | }
713 | 
```
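The `Config` struct above maps one-to-one onto a `tools.yaml` entry. The sketch below is illustrative rather than copied from the repo's docs: the tool block uses only the fields declared in `Config` (`kind`, `source`, `description`, `authRequired`, `cacheExpireMinutes`), while the connection fields on the `neo4j` source (`uri`, `user`, `password`, `database`) are assumptions that should be checked against `docs/en/resources/sources/neo4j.md`.

```yaml
sources:
  my-neo4j:
    kind: neo4j
    # Connection fields below are assumptions; confirm them in the neo4j source reference.
    uri: "neo4j+s://<host>:7687"
    user: "neo4j"
    password: "<password>"
    database: "neo4j"

tools:
  get_neo4j_schema:
    kind: neo4j-schema
    source: my-neo4j
    description: Returns node labels, relationships, constraints, and indexes for the connected Neo4j database.
    # Optional; Initialize falls back to cache.DefaultExpiration (60 minutes) when omitted.
    cacheExpireMinutes: 30
```

Because `ParseParams` returns an empty `tools.ParamValues`, a tool configured this way would be invoked with an empty JSON body, following the same pattern as the Postgres test shown earlier, e.g. `POST http://127.0.0.1:5000/api/tool/get_neo4j_schema/invoke` with `{}`.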