Mirror of https://github.com/googleapis/genai-toolbox.git, synced 2026-01-11 08:28:11 -05:00.
Compare commits: pgtriggers...new-image- (58 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 2959576afa |  |
|  | 7daa4111f4 |  |
|  | 18885f6433 |  |
|  | 21d676ed58 |  |
|  | 1c353a3c8e |  |
|  | a02ca45ba3 |  |
|  | 8217d1424d |  |
|  | f520b4ed8a |  |
|  | 80315a0ebd |  |
|  | 5788605818 |  |
|  | 0641da0353 |  |
|  | c9b775d38e |  |
|  | 8ea39ec32f |  |
|  | aa270b2630 |  |
|  | e1bd98ef5b |  |
|  | fa148c60a7 |  |
|  | 6e87349431 |  |
|  | 3fe4e2b671 |  |
|  | 271f39d4b9 |  |
|  | 97b0e7d3ac |  |
|  | 914b3eefda |  |
|  | 776a5ca438 |  |
|  | d08dd144ad |  |
|  | fbd92c68ba |  |
|  | af3d3c5204 |  |
|  | 466aef024f |  |
|  | a6830744fc |  |
|  | 615b5f0130 |  |
|  | 2b45266598 |  |
|  | 26ead2ed78 |  |
|  | 1f31c2c9b2 |  |
|  | 78e015d7df |  |
|  | c6ccf4bd87 |  |
|  | 5605eabd69 |  |
|  | e29c0616d6 |  |
|  | 285aa46b88 |  |
|  | c5a6daa768 |  |
|  | 78b02f08c3 |  |
|  | 18d0440f4e |  |
|  | 7a135ce078 |  |
|  | ea9e2d12bd |  |
|  | bea9705450 |  |
|  | 489117d747 |  |
|  | 32367a472f |  |
|  | 3b40fea25e |  |
|  | f6b6a9fb5d |  |
|  | 1dd971b8d5 |  |
|  | cb4529cbaa |  |
|  | ac375114fd |  |
|  | 8a0eba9d62 |  |
|  | 5ad7c6127b |  |
|  | f4b1f0a680 |  |
|  | 17a979207d |  |
|  | 3bf3fe8fa7 |  |
|  | 1bf0b51f03 |  |
|  | 744214e04c |  |
|  | 155bff80c1 |  |
|  | e84252feb4 |  |
.ci/integration.cloudbuild.yaml

````diff
@@ -305,4 +305,4 @@ substitutions:
   _AR_HOSTNAME: ${_REGION}-docker.pkg.dev
   _AR_REPO_NAME: toolbox-dev
   _BUCKET_NAME: genai-toolbox-dev
-  _DOCKER_URI: ${_AR_HOSTNAME}/${PROJECT_ID}/${_AR_REPO_NAME}/toolbox
+  _DOCKER_URI: ${_AR_HOSTNAME}/${PROJECT_ID}/${_AR_REPO_NAME}/toolbox
@@ -212,6 +212,26 @@ steps:
           bigquery \
           bigquery
 
+  - id: "cloud-gda"
+    name: golang:1
+    waitFor: ["compile-test-binary"]
+    entrypoint: /bin/bash
+    env:
+      - "GOPATH=/gopath"
+      - "CLOUD_GDA_PROJECT=$PROJECT_ID"
+      - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
+    secretEnv: ["CLIENT_ID"]
+    volumes:
+      - name: "go"
+        path: "/gopath"
+    args:
+      - -c
+      - |
+        .ci/test_with_coverage.sh \
+          "Cloud Gemini Data Analytics" \
+          cloudgda \
+          cloudgda
+
   - id: "dataplex"
     name: golang:1
     waitFor: ["compile-test-binary"]
@@ -318,7 +338,7 @@ steps:
         .ci/test_with_coverage.sh \
           "Spanner" \
           spanner \
-          spanner
+          spanner || echo "Integration tests failed." # ignore test failures
 
   - id: "neo4j"
     name: golang:1
@@ -589,6 +609,26 @@ steps:
           firestore \
           firestore
 
+  - id: "mongodb"
+    name: golang:1
+    waitFor: ["compile-test-binary"]
+    entrypoint: /bin/bash
+    env:
+      - "GOPATH=/gopath"
+      - "MONGODB_DATABASE=$_DATABASE_NAME"
+      - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
+    secretEnv: ["MONGODB_URI", "CLIENT_ID"]
+    volumes:
+      - name: "go"
+        path: "/gopath"
+    args:
+      - -c
+      - |
+        .ci/test_with_coverage.sh \
+          "MongoDB" \
+          mongodb \
+          mongodb
+
   - id: "looker"
     name: golang:1
     waitFor: ["compile-test-binary"]
@@ -806,8 +846,8 @@ steps:
           cassandra
 
   - id: "oracle"
-    name: golang:1
-    waitFor: ["compile-test-binary"]
+    name: ghcr.io/oracle/oraclelinux9-instantclient:23
+    waitFor: ["install-dependencies"]
     entrypoint: /bin/bash
     env:
       - "GOPATH=/gopath"
@@ -820,10 +860,25 @@ steps:
     args:
      - -c
      - |
-        .ci/test_with_coverage.sh \
-          "Oracle" \
-          oracle \
-          oracle
+        # Install the C compiler and Oracle SDK headers needed for cgo
+        dnf install -y gcc oracle-instantclient-devel
+        # Install Go
+        curl -L -o go.tar.gz "https://go.dev/dl/go1.25.1.linux-amd64.tar.gz"
+        tar -C /usr/local -xzf go.tar.gz
+        export PATH="/usr/local/go/bin:$$PATH"
+
+        go test -v ./internal/sources/oracle/... \
+          -coverprofile=oracle_coverage.out \
+          -coverpkg=./internal/sources/oracle/...,./internal/tools/oracle/...
+
+        # Coverage check
+        total_coverage=$(go tool cover -func=oracle_coverage.out | grep "total:" | awk '{print $3}')
+        echo "Oracle total coverage: $total_coverage"
+        coverage_numeric=$(echo "$total_coverage" | sed 's/%//')
+        if awk -v cov="$coverage_numeric" 'BEGIN {exit !(cov < 30)}'; then
+          echo "Coverage failure: $total_coverage is below 30%."
+          exit 1
+        fi
 
   - id: "serverless-spark"
     name: golang:1
@@ -867,6 +922,26 @@ steps:
           singlestore \
           singlestore
 
+  - id: "mariadb"
+    name: golang:1
+    waitFor: ["compile-test-binary"]
+    entrypoint: /bin/bash
+    env:
+      - "GOPATH=/gopath"
+      - "MARIADB_DATABASE=$_MARIADB_DATABASE"
+      - "MARIADB_PORT=$_MARIADB_PORT"
+      - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
+    secretEnv: ["MARIADB_USER", "MARIADB_PASS", "MARIADB_HOST", "CLIENT_ID"]
+    volumes:
+      - name: "go"
+        path: "/gopath"
+    args:
+      - -c
+      - |
+        # skip coverage check as it re-uses current MySQL implementation
+        go test ./tests/mariadb
+
 
 availableSecrets:
   secretManager:
     - versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_user/versions/latest
@@ -979,6 +1054,14 @@ availableSecrets:
       env: SINGLESTORE_PASSWORD
     - versionName: projects/$PROJECT_ID/secrets/singlestore_host/versions/latest
      env: SINGLESTORE_HOST
+    - versionName: projects/$PROJECT_ID/secrets/mariadb_user/versions/latest
+      env: MARIADB_USER
+    - versionName: projects/$PROJECT_ID/secrets/mariadb_pass/versions/latest
+      env: MARIADB_PASS
+    - versionName: projects/$PROJECT_ID/secrets/mariadb_host/versions/latest
+      env: MARIADB_HOST
+    - versionName: projects/$PROJECT_ID/secrets/mongodb_uri/versions/latest
+      env: MONGODB_URI
 
 options:
   logging: CLOUD_LOGGING_ONLY
@@ -1039,3 +1122,6 @@ substitutions:
   _SINGLESTORE_PORT: "3308"
   _SINGLESTORE_DATABASE: "singlestore"
   _SINGLESTORE_USER: "root"
+  _MARIADB_PORT: "3307"
+  _MARIADB_DATABASE: test_database
````
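The oracle step above replaces the shared `test_with_coverage.sh` helper with an inline script, and its awk expression `'BEGIN {exit !(cov < 30)}'` is the actual gate: it exits 0 (taking the `if` branch and failing the build) exactly when coverage is below 30%. A minimal Go sketch of the same logic, offered only as an illustration and not part of the repository:

```go
// coveragegate reads `go tool cover -func=...` output on stdin, finds the
// "total:" line, and exits non-zero when total coverage is below 30%.
package main

import (
	"bufio"
	"fmt"
	"os"
	"strconv"
	"strings"
)

func main() {
	const threshold = 30.0 // same bound the Cloud Build step enforces
	sc := bufio.NewScanner(os.Stdin)
	for sc.Scan() {
		line := sc.Text()
		if !strings.HasPrefix(line, "total:") {
			continue
		}
		// A total line looks like: "total:\t(statements)\t45.2%"
		fields := strings.Fields(line)
		pct, err := strconv.ParseFloat(strings.TrimSuffix(fields[len(fields)-1], "%"), 64)
		if err != nil {
			fmt.Fprintln(os.Stderr, "could not parse coverage:", err)
			os.Exit(1)
		}
		if pct < threshold {
			fmt.Printf("Coverage failure: %.1f%% is below %.0f%%.\n", pct, threshold)
			os.Exit(1)
		}
		fmt.Printf("total coverage: %.1f%%\n", pct)
		return
	}
	fmt.Fprintln(os.Stderr, "no total: line found")
	os.Exit(1)
}
```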
.github/renovate.json5 (18 changes, vendored)

````diff
@@ -24,5 +24,23 @@
     ],
     pinDigests: true,
   },
+  {
+    groupName: 'Go',
+    matchManagers: [
+      'gomod',
+    ],
+  },
+  {
+    groupName: 'Node',
+    matchManagers: [
+      'npm',
+    ],
+  },
+  {
+    groupName: 'Pip',
+    matchManagers: [
+      'pip_requirements',
+    ],
+  },
 ],
 }
````
````diff
@@ -1 +1,9 @@
 @import 'td/code-dark';
+
+// Make tabs scrollable horizontally instead of wrapping
+.nav-tabs {
+  flex-wrap: nowrap;
+  white-space: nowrap;
+  overflow-x: auto;
+  overflow-y: hidden;
+}
````
````diff
@@ -51,6 +51,18 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick
 # Add a new version block here before every release
 # The order of versions in this file is mirrored into the dropdown
 
+[[params.versions]]
+version = "v0.24.0"
+url = "https://googleapis.github.io/genai-toolbox/v0.24.0/"
+
+[[params.versions]]
+version = "v0.23.0"
+url = "https://googleapis.github.io/genai-toolbox/v0.23.0/"
+
+[[params.versions]]
+version = "v0.22.0"
+url = "https://googleapis.github.io/genai-toolbox/v0.22.0/"
+
 [[params.versions]]
 version = "v0.21.0"
 url = "https://googleapis.github.io/genai-toolbox/v0.21.0/"
````
CHANGELOG.md (79 changes)

````diff
@@ -1,5 +1,84 @@
 # Changelog
 
+## [0.24.0](https://github.com/googleapis/genai-toolbox/compare/v0.23.0...v0.24.0) (2025-12-19)
+
+
+### Features
+
+* **sources/cloud-gemini-data-analytics:** Add the Gemini Data Analytics (GDA) integration for DB NL2SQL conversion to Toolbox ([#2181](https://github.com/googleapis/genai-toolbox/issues/2181)) ([aa270b2](https://github.com/googleapis/genai-toolbox/commit/aa270b2630da2e3d618db804ca95550445367dbc))
+* **source/cloudsqlmysql:** Add support for IAM authentication in Cloud SQL MySQL source ([#2050](https://github.com/googleapis/genai-toolbox/issues/2050)) ([af3d3c5](https://github.com/googleapis/genai-toolbox/commit/af3d3c52044bea17781b89ce4ab71ff0f874ac20))
+* **sources/oracle:** Add Oracle OCI and Wallet support ([#1945](https://github.com/googleapis/genai-toolbox/issues/1945)) ([8ea39ec](https://github.com/googleapis/genai-toolbox/commit/8ea39ec32fbbaa97939c626fec8c5d86040ed464))
+* Support combining prebuilt and custom tool configurations ([#2188](https://github.com/googleapis/genai-toolbox/issues/2188)) ([5788605](https://github.com/googleapis/genai-toolbox/commit/57886058188aa5d2a51d5846a98bc6d8a650edd1))
+* **tools/mysql-get-query-plan:** Add new `mysql-get-query-plan` tool for MySQL source ([#2123](https://github.com/googleapis/genai-toolbox/issues/2123)) ([0641da0](https://github.com/googleapis/genai-toolbox/commit/0641da0353857317113b2169e547ca69603ddfde))
+
+
+### Bug Fixes
+
+* **spanner:** Move list graphs validation to runtime ([#2154](https://github.com/googleapis/genai-toolbox/issues/2154)) ([914b3ee](https://github.com/googleapis/genai-toolbox/commit/914b3eefda40a650efe552d245369e007277dab5))
+
+
+## [0.23.0](https://github.com/googleapis/genai-toolbox/compare/v0.22.0...v0.23.0) (2025-12-11)
+
+
+### ⚠ BREAKING CHANGES
+
+* **serverless-spark:** add URLs to create batch tool outputs
+* **serverless-spark:** add URLs to list_batches output
+* **serverless-spark:** add Cloud Console and Logging URLs to get_batch
+* **tools/postgres:** Add additional filter params for existing postgres tools ([#2033](https://github.com/googleapis/genai-toolbox/issues/2033))
+
+### Features
+
+* **tools/postgres:** Add list-table-stats-tool to list table statistics. ([#2055](https://github.com/googleapis/genai-toolbox/issues/2055)) ([78b02f0](https://github.com/googleapis/genai-toolbox/commit/78b02f08c3cc3062943bb2f91cf60d5149c8d28d))
+* **looker/tools:** Enhance dashboard creation with dashboard filters ([#2133](https://github.com/googleapis/genai-toolbox/issues/2133)) ([285aa46](https://github.com/googleapis/genai-toolbox/commit/285aa46b887d9acb2da8766e107bbf1ab75b8812))
+* **serverless-spark:** Add Cloud Console and Logging URLs to get_batch ([e29c061](https://github.com/googleapis/genai-toolbox/commit/e29c0616d6b9ecda2badcaf7b69614e511ac031b))
+* **serverless-spark:** Add URLs to create batch tool outputs ([c6ccf4b](https://github.com/googleapis/genai-toolbox/commit/c6ccf4bd87026484143a2d0f5527b2edab03b54a))
+* **serverless-spark:** Add URLs to list_batches output ([5605eab](https://github.com/googleapis/genai-toolbox/commit/5605eabd696696ade07f52431a28ef65c0fb1f77))
+* **sources/mariadb:** Add MariaDB source and MySQL tools integration ([#1908](https://github.com/googleapis/genai-toolbox/issues/1908)) ([3b40fea](https://github.com/googleapis/genai-toolbox/commit/3b40fea25edae607e02c1e8fc2b0c957fa2c8e9a))
+* **tools/postgres:** Add additional filter params for existing postgres tools ([#2033](https://github.com/googleapis/genai-toolbox/issues/2033)) ([489117d](https://github.com/googleapis/genai-toolbox/commit/489117d74711ac9260e7547163ca463eb45eeaa2))
+* **tools/postgres:** Add list_pg_settings, list_database_stats tools for postgres ([#2030](https://github.com/googleapis/genai-toolbox/issues/2030)) ([32367a4](https://github.com/googleapis/genai-toolbox/commit/32367a472fae9653fed7f126428eba0252978bd5))
+* **tools/postgres:** Add new postgres-list-roles tool ([#2038](https://github.com/googleapis/genai-toolbox/issues/2038)) ([bea9705](https://github.com/googleapis/genai-toolbox/commit/bea97054502cfa236aa10e2ebc8ff58eb00ad035))
+
+
+### Bug Fixes
+
+* List tables tools null fix ([#2107](https://github.com/googleapis/genai-toolbox/issues/2107)) ([2b45266](https://github.com/googleapis/genai-toolbox/commit/2b452665983154041d4cd0ed7d82532e4af682eb))
+* **tools/mongodb:** Removed sortPayload and sortParams ([#1238](https://github.com/googleapis/genai-toolbox/issues/1238)) ([c5a6daa](https://github.com/googleapis/genai-toolbox/commit/c5a6daa7683d2f9be654300d977692c368e55e31))
+
+
+### Miscellaneous Chores
+
+* **looker:** Upgrade to latest go sdk ([#2159](https://github.com/googleapis/genai-toolbox/issues/2159)) ([78e015d](https://github.com/googleapis/genai-toolbox/commit/78e015d7dfd9cce7e2b444ed934da17eb355bc86))
+
 ## [0.22.0](https://github.com/googleapis/genai-toolbox/compare/v0.21.0...v0.22.0) (2025-12-04)
 
 
 ### Features
 
 * **tools/postgres:** Add allowed-origins flag ([#1984](https://github.com/googleapis/genai-toolbox/issues/1984)) ([862868f](https://github.com/googleapis/genai-toolbox/commit/862868f28476ea981575ce412faa7d6a03138f31))
 * **tools/postgres:** Add list-query-stats and get-column-cardinality functions ([#1976](https://github.com/googleapis/genai-toolbox/issues/1976)) ([9f76026](https://github.com/googleapis/genai-toolbox/commit/9f760269253a8cc92a357e995c6993ccc4a0fb7b))
 * **tools/spanner:** Add spanner list graphs to prebuiltconfigs ([#2056](https://github.com/googleapis/genai-toolbox/issues/2056)) ([0e7fbf4](https://github.com/googleapis/genai-toolbox/commit/0e7fbf465c488397aa9d8cab2e55165fff4eb53c))
 * **prebuilt/cloud-sql:** Add clone instance tool for cloud sql ([#1845](https://github.com/googleapis/genai-toolbox/issues/1845)) ([5e43630](https://github.com/googleapis/genai-toolbox/commit/5e43630907aa2d7bc6818142483a33272eab060b))
 * **serverless-spark:** Add create_pyspark_batch tool ([1bf0b51](https://github.com/googleapis/genai-toolbox/commit/1bf0b51f033c956790be1577bf5310d0b17e9c12))
 * **serverless-spark:** Add create_spark_batch tool ([17a9792](https://github.com/googleapis/genai-toolbox/commit/17a979207dbc4fe70acd0ebda164d1a8d34c1ed3))
 * Support alternate accessToken header name ([#1968](https://github.com/googleapis/genai-toolbox/issues/1968)) ([18017d6](https://github.com/googleapis/genai-toolbox/commit/18017d6545335a6fc1c472617101c35254d9a597))
 * Support for annotations ([#2007](https://github.com/googleapis/genai-toolbox/issues/2007)) ([ac21335](https://github.com/googleapis/genai-toolbox/commit/ac21335f4e88ca52d954d7f8143a551a35661b94))
 * **tool/mssql:** Set default host and port for MSSQL source ([#1943](https://github.com/googleapis/genai-toolbox/issues/1943)) ([7a9cc63](https://github.com/googleapis/genai-toolbox/commit/7a9cc633768d9ae9a7ff8230002da69d6a36ca86))
 * **tools/cloudsqlpg:** Add CloudSQL PostgreSQL pre-check tool ([#1722](https://github.com/googleapis/genai-toolbox/issues/1722)) ([8752e05](https://github.com/googleapis/genai-toolbox/commit/8752e05ab6e98812d95673a6f1ff67e9a6ae48d2))
 * **tools/postgres-list-publication-tables:** Add new postgres-list-publication-tables tool ([#1919](https://github.com/googleapis/genai-toolbox/issues/1919)) ([f4b1f0a](https://github.com/googleapis/genai-toolbox/commit/f4b1f0a68000ca2fc0325f55a1905705417c38a2))
 * **tools/postgres-list-tablespaces:** Add new postgres-list-tablespaces tool ([#1934](https://github.com/googleapis/genai-toolbox/issues/1934)) ([5ad7c61](https://github.com/googleapis/genai-toolbox/commit/5ad7c6127b3e47504fc4afda0b7f3de1dff78b8b))
 * **tools/spanner-list-graph:** Tool impl + docs + tests ([#1923](https://github.com/googleapis/genai-toolbox/issues/1923)) ([a0f44d3](https://github.com/googleapis/genai-toolbox/commit/a0f44d34ea3f044dd08501be616f70ddfd63ab45))
 
 
 ### Bug Fixes
 
 * Add import for firebirdsql ([#2045](https://github.com/googleapis/genai-toolbox/issues/2045)) ([fb7aae9](https://github.com/googleapis/genai-toolbox/commit/fb7aae9d35b760d3471d8379642f835a0d84ec41))
 * Correct FAQ to mention HTTP tools ([#2036](https://github.com/googleapis/genai-toolbox/issues/2036)) ([7b44237](https://github.com/googleapis/genai-toolbox/commit/7b44237d4a21bfbf8d3cebe4d32a15affa29584d))
 * Format BigQuery numeric output as decimal strings ([#2084](https://github.com/googleapis/genai-toolbox/issues/2084)) ([155bff8](https://github.com/googleapis/genai-toolbox/commit/155bff80c1da4fae1e169e425fd82e1dc3373041))
 * Set default annotations for tools in code if annotation not provided in yaml ([#2049](https://github.com/googleapis/genai-toolbox/issues/2049)) ([565460c](https://github.com/googleapis/genai-toolbox/commit/565460c4ea8953dbe80070a8e469f957c0f7a70c))
 * **tools/alloydb-postgres-list-tables:** Exclude google_ml schema from list_tables ([#2046](https://github.com/googleapis/genai-toolbox/issues/2046)) ([a03984c](https://github.com/googleapis/genai-toolbox/commit/a03984cc15254c928f30085f8fa509ded6a79a0c))
 * **tools/alloydbcreateuser:** Remove duplication of project praram ([#2028](https://github.com/googleapis/genai-toolbox/issues/2028)) ([730ac6d](https://github.com/googleapis/genai-toolbox/commit/730ac6d22805fd50b4a675b74c1865f4e7689e7c))
 * **tools/mongodb:** Remove `required` tag from the `canonical` field ([#2099](https://github.com/googleapis/genai-toolbox/issues/2099)) ([744214e](https://github.com/googleapis/genai-toolbox/commit/744214e04cd12b11d166e6eb7da8ce4714904abc))
 
 ## [0.21.0](https://github.com/googleapis/genai-toolbox/compare/v0.20.0...v0.21.0) (2025-11-19)
````
````diff
@@ -167,15 +167,15 @@ tools.
 [integration.cloudbuild.yaml](.ci/integration.cloudbuild.yaml).
 
 [tool-get]:
-https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L31
+https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L41
 [tool-call]:
-<https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L79>
+https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L229
 [mcp-call]:
-https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L554
+https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L789
 [execute-sql]:
-<https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L431>
+https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L609
 [temp-param]:
-<https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L297>
+https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L454
 [temp-param-doc]:
 https://googleapis.github.io/genai-toolbox/resources/tools/#template-parameters
````
````diff
@@ -109,7 +109,7 @@ golangci-lint run --fix
 Execute unit tests locally:
 
 ```bash
-go test -race -v ./...
+go test -race -v ./cmd/... ./internal/...
 ```
 
 ### Integration Tests
````
README.md (57 changes)

````diff
@@ -105,6 +105,21 @@ redeploying your application.
 
 ## Getting Started
 
+### (Non-production) Running Toolbox
+
+You can run Toolbox directly with a [configuration file](#configuration):
+
+```sh
+npx @toolbox-sdk/server --tools-file tools.yaml
+```
+
+This runs the latest version of the toolbox server with your configuration file.
+
+> [!NOTE]
+> This method should only be used for non-production use cases such as
+> experimentation. For any production use-cases, please consider [Installing the
+> server](#installing-the-server) and then [running it](#running-the-server).
+
 ### Installing the server
 
 For the latest version, check the [releases page][releases] and use the
@@ -125,7 +140,7 @@ To install Toolbox as a binary:
 >
 > ```sh
 > # see releases page for other versions
-> export VERSION=0.21.0
+> export VERSION=0.24.0
 > curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
 > chmod +x toolbox
 > ```
@@ -138,7 +153,7 @@ To install Toolbox as a binary:
 >
 > ```sh
 > # see releases page for other versions
-> export VERSION=0.21.0
+> export VERSION=0.24.0
 > curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
 > chmod +x toolbox
 > ```
@@ -151,21 +166,33 @@ To install Toolbox as a binary:
 >
 > ```sh
 > # see releases page for other versions
-> export VERSION=0.21.0
+> export VERSION=0.24.0
 > curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
 > chmod +x toolbox
 > ```
 >
 > </details>
 > <details>
-> <summary>Windows (AMD64)</summary>
+> <summary>Windows (Command Prompt)</summary>
 >
-> To install Toolbox as a binary on Windows (AMD64):
+> To install Toolbox as a binary on Windows (Command Prompt):
 >
+> ```cmd
+> :: see releases page for other versions
+> set VERSION=0.24.0
+> curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
+> ```
+>
+> </details>
+> <details>
+> <summary>Windows (PowerShell)</summary>
+>
+> To install Toolbox as a binary on Windows (PowerShell):
+>
 > ```powershell
-> :: see releases page for other versions
-> set VERSION=0.21.0
-> curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
+> # see releases page for other versions
+> $VERSION = "0.24.0"
+> curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
 > ```
 >
 > </details>
@@ -177,7 +204,7 @@ You can also install Toolbox as a container:
 
 ```sh
 # see releases page for other versions
-export VERSION=0.21.0
+export VERSION=0.24.0
 docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
 ```
 
@@ -201,7 +228,7 @@ To install from source, ensure you have the latest version of
 [Go installed](https://go.dev/doc/install), and then run the following command:
 
 ```sh
-go install github.com/googleapis/genai-toolbox@v0.21.0
+go install github.com/googleapis/genai-toolbox@v0.24.0
 ```
 <!-- {x-release-please-end} -->
 
@@ -291,6 +318,16 @@ toolbox --tools-file "tools.yaml"
 
 </details>
 
+<details>
+<summary>NPM</summary>
+
+To run Toolbox directly without manually downloading the binary (requires Node.js):
+
+```sh
+npx @toolbox-sdk/server --tools-file tools.yaml
+```
+
+</details>
+
 <details>
 
 <summary>Gemini CLI</summary>
````
cmd/root.go (179 changes)

````diff
@@ -73,6 +73,7 @@ import (
 	_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselistdatabases"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselisttables"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhousesql"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudgda"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirfetchpage"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatienteverything"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatientsearch"
@@ -120,6 +121,7 @@ import (
 	_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/http"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardelement"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardfilter"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerconversationalanalytics"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookercreateprojectfile"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdeleteprojectfile"
@@ -150,6 +152,9 @@ import (
 	_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquerysql"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerqueryurl"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrundashboard"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrenderdashboard"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrenderlook"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrenderquery"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrunlook"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerupdateprojectfile"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/mindsdb/mindsdbexecutesql"
@@ -167,6 +172,7 @@ import (
 	_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqllisttables"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlsql"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlexecutesql"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlgetqueryplan"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllistactivequeries"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttablefragmentation"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttables"
@@ -184,13 +190,19 @@ import (
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresgetcolumncardinality"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistactivequeries"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistavailableextensions"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistdatabasestats"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistindexes"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistinstalledextensions"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistlocks"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpgsettings"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpublicationtables"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistquerystats"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistroles"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistschemas"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistsequences"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablespaces"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablestats"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttriggers"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistviews"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslongrunningtransactions"
@@ -198,6 +210,8 @@ import (
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/redis"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcancelbatch"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatepysparkbatch"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatesparkbatch"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkgetbatch"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparklistbatches"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoreexecutesql"
@@ -224,6 +238,7 @@ import (
 	_ "github.com/googleapis/genai-toolbox/internal/sources/bigtable"
 	_ "github.com/googleapis/genai-toolbox/internal/sources/cassandra"
 	_ "github.com/googleapis/genai-toolbox/internal/sources/clickhouse"
+	_ "github.com/googleapis/genai-toolbox/internal/sources/cloudgda"
 	_ "github.com/googleapis/genai-toolbox/internal/sources/cloudhealthcare"
 	_ "github.com/googleapis/genai-toolbox/internal/sources/cloudmonitoring"
 	_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqladmin"
@@ -344,12 +359,12 @@ func NewCommand(opts ...Option) *Command {
 	flags.StringVarP(&cmd.cfg.Address, "address", "a", "127.0.0.1", "Address of the interface the server will listen on.")
 	flags.IntVarP(&cmd.cfg.Port, "port", "p", 5000, "Port the server will listen on.")
 
-	flags.StringVar(&cmd.tools_file, "tools_file", "", "File path specifying the tool configuration. Cannot be used with --prebuilt.")
+	flags.StringVar(&cmd.tools_file, "tools_file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.")
 	// deprecate tools_file
 	_ = flags.MarkDeprecated("tools_file", "please use --tools-file instead")
-	flags.StringVar(&cmd.tools_file, "tools-file", "", "File path specifying the tool configuration. Cannot be used with --prebuilt, --tools-files, or --tools-folder.")
-	flags.StringSliceVar(&cmd.tools_files, "tools-files", []string{}, "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --prebuilt, --tools-file, or --tools-folder.")
-	flags.StringVar(&cmd.tools_folder, "tools-folder", "", "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --prebuilt, --tools-file, or --tools-files.")
+	flags.StringVar(&cmd.tools_file, "tools-file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.")
+	flags.StringSliceVar(&cmd.tools_files, "tools-files", []string{}, "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file, or --tools-folder.")
+	flags.StringVar(&cmd.tools_folder, "tools-folder", "", "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file, or --tools-files.")
 	flags.Var(&cmd.cfg.LogLevel, "log-level", "Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'.")
 	flags.Var(&cmd.cfg.LoggingFormat, "logging-format", "Specify logging format to use. Allowed: 'standard' or 'JSON'.")
 	flags.BoolVar(&cmd.cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.")
@@ -357,7 +372,7 @@ func NewCommand(opts ...Option) *Command {
 	flags.StringVar(&cmd.cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
 	// Fetch prebuilt tools sources to customize the help description
 	prebuiltHelp := fmt.Sprintf(
-		"Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. Allowed: '%s'.",
+		"Use a prebuilt tool configuration by source type. Allowed: '%s'.",
 		strings.Join(prebuiltconfigs.GetPrebuiltSources(), "', '"),
 	)
 	flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", prebuiltHelp)
@@ -451,6 +466,9 @@ func mergeToolsFiles(files ...ToolsFile) (ToolsFile, error) {
 		if _, exists := merged.AuthSources[name]; exists {
 			conflicts = append(conflicts, fmt.Sprintf("authSource '%s' (file #%d)", name, fileIndex+1))
 		} else {
+			if merged.AuthSources == nil {
+				merged.AuthSources = make(server.AuthServiceConfigs)
+			}
 			merged.AuthSources[name] = authSource
 		}
 	}
@@ -827,16 +845,10 @@ func run(cmd *Command) error {
 		}
 	}()
 
-	var toolsFile ToolsFile
+	var allToolsFiles []ToolsFile
 
 	// Load Prebuilt Configuration
 	if cmd.prebuiltConfig != "" {
-		// Make sure --prebuilt and --tools-file/--tools-files/--tools-folder flags are mutually exclusive
-		if cmd.tools_file != "" || len(cmd.tools_files) > 0 || cmd.tools_folder != "" {
-			errMsg := fmt.Errorf("--prebuilt and --tools-file/--tools-files/--tools-folder flags cannot be used simultaneously")
-			cmd.logger.ErrorContext(ctx, errMsg.Error())
-			return errMsg
-		}
 		// Use prebuilt tools
 		buf, err := prebuiltconfigs.Get(cmd.prebuiltConfig)
 		if err != nil {
 			cmd.logger.ErrorContext(ctx, err.Error())
@@ -847,72 +859,96 @@ func run(cmd *Command) error {
 		// Append prebuilt.source to Version string for the User Agent
 		cmd.cfg.Version += "+prebuilt." + cmd.prebuiltConfig
 
-		toolsFile, err = parseToolsFile(ctx, buf)
+		parsed, err := parseToolsFile(ctx, buf)
 		if err != nil {
 			errMsg := fmt.Errorf("unable to parse prebuilt tool configuration: %w", err)
 			cmd.logger.ErrorContext(ctx, errMsg.Error())
 			return errMsg
 		}
-	} else if len(cmd.tools_files) > 0 {
-		// Make sure --tools-file, --tools-files, and --tools-folder flags are mutually exclusive
-		if cmd.tools_file != "" || cmd.tools_folder != "" {
-			errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously")
-			cmd.logger.ErrorContext(ctx, errMsg.Error())
-			return errMsg
-		}
-
-		// Use multiple tools files
-		cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging %d tool configuration files", len(cmd.tools_files)))
-		var err error
-		toolsFile, err = loadAndMergeToolsFiles(ctx, cmd.tools_files)
-		if err != nil {
-			cmd.logger.ErrorContext(ctx, err.Error())
-			return err
-		}
-	} else if cmd.tools_folder != "" {
-		// Make sure --tools-folder and other flags are mutually exclusive
-		if cmd.tools_file != "" || len(cmd.tools_files) > 0 {
-			errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously")
-			cmd.logger.ErrorContext(ctx, errMsg.Error())
-			return errMsg
-		}
-
-		// Use tools folder
-		cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging all YAML files from directory: %s", cmd.tools_folder))
-		var err error
-		toolsFile, err = loadAndMergeToolsFolder(ctx, cmd.tools_folder)
-		if err != nil {
-			cmd.logger.ErrorContext(ctx, err.Error())
-			return err
-		}
-	} else {
-		// Set default value of tools-file flag to tools.yaml
-		if cmd.tools_file == "" {
-			cmd.tools_file = "tools.yaml"
-		}
-
-		// Read single tool file contents
-		buf, err := os.ReadFile(cmd.tools_file)
-		if err != nil {
-			errMsg := fmt.Errorf("unable to read tool file at %q: %w", cmd.tools_file, err)
-			cmd.logger.ErrorContext(ctx, errMsg.Error())
-			return errMsg
-		}
-
-		toolsFile, err = parseToolsFile(ctx, buf)
-		if err != nil {
-			errMsg := fmt.Errorf("unable to parse tool file at %q: %w", cmd.tools_file, err)
-			cmd.logger.ErrorContext(ctx, errMsg.Error())
-			return errMsg
-		}
+		allToolsFiles = append(allToolsFiles, parsed)
 	}
 
-	cmd.cfg.SourceConfigs, cmd.cfg.AuthServiceConfigs, cmd.cfg.ToolConfigs, cmd.cfg.ToolsetConfigs, cmd.cfg.PromptConfigs = toolsFile.Sources, toolsFile.AuthServices, toolsFile.Tools, toolsFile.Toolsets, toolsFile.Prompts
-
-	authSourceConfigs := toolsFile.AuthSources
+	// Determine if Custom Files should be loaded
+	// Check for explicit custom flags
+	isCustomConfigured := cmd.tools_file != "" || len(cmd.tools_files) > 0 || cmd.tools_folder != ""
+
+	// Determine if default 'tools.yaml' should be used (No prebuilt AND No custom flags)
+	useDefaultToolsFile := cmd.prebuiltConfig == "" && !isCustomConfigured
+
+	if useDefaultToolsFile {
+		cmd.tools_file = "tools.yaml"
+		isCustomConfigured = true
+	}
+
+	// Load Custom Configurations
+	if isCustomConfigured {
+		// Enforce exclusivity among custom flags (tools-file vs tools-files vs tools-folder)
+		if (cmd.tools_file != "" && len(cmd.tools_files) > 0) ||
+			(cmd.tools_file != "" && cmd.tools_folder != "") ||
+			(len(cmd.tools_files) > 0 && cmd.tools_folder != "") {
+			errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously")
+			cmd.logger.ErrorContext(ctx, errMsg.Error())
+			return errMsg
+		}
+
+		var customTools ToolsFile
+		var err error
+
+		if len(cmd.tools_files) > 0 {
+			// Use tools-files
+			cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging %d tool configuration files", len(cmd.tools_files)))
+			customTools, err = loadAndMergeToolsFiles(ctx, cmd.tools_files)
+		} else if cmd.tools_folder != "" {
+			// Use tools-folder
+			cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging all YAML files from directory: %s", cmd.tools_folder))
+			customTools, err = loadAndMergeToolsFolder(ctx, cmd.tools_folder)
+		} else {
+			// Use single file (tools-file or default `tools.yaml`)
+			buf, readFileErr := os.ReadFile(cmd.tools_file)
+			if readFileErr != nil {
+				errMsg := fmt.Errorf("unable to read tool file at %q: %w", cmd.tools_file, readFileErr)
+				cmd.logger.ErrorContext(ctx, errMsg.Error())
+				return errMsg
+			}
+			customTools, err = parseToolsFile(ctx, buf)
+			if err != nil {
+				err = fmt.Errorf("unable to parse tool file at %q: %w", cmd.tools_file, err)
+			}
+		}
+
+		if err != nil {
+			cmd.logger.ErrorContext(ctx, err.Error())
+			return err
+		}
+		allToolsFiles = append(allToolsFiles, customTools)
+	}
+
+	// Merge Everything
+	// This will error if custom tools collide with prebuilt tools
+	finalToolsFile, err := mergeToolsFiles(allToolsFiles...)
+	if err != nil {
+		cmd.logger.ErrorContext(ctx, err.Error())
+		return err
+	}
+
+	cmd.cfg.SourceConfigs = finalToolsFile.Sources
+	cmd.cfg.AuthServiceConfigs = finalToolsFile.AuthServices
+	cmd.cfg.ToolConfigs = finalToolsFile.Tools
+	cmd.cfg.ToolsetConfigs = finalToolsFile.Toolsets
+	cmd.cfg.PromptConfigs = finalToolsFile.Prompts
+
+	authSourceConfigs := finalToolsFile.AuthSources
 	if authSourceConfigs != nil {
 		cmd.logger.WarnContext(ctx, "`authSources` is deprecated, use `authServices` instead")
-		cmd.cfg.AuthServiceConfigs = authSourceConfigs
+
+		for k, v := range authSourceConfigs {
+			if _, exists := cmd.cfg.AuthServiceConfigs[k]; exists {
+				errMsg := fmt.Errorf("resource conflict detected: authSource '%s' has the same name as an existing authService. Please rename your authSource", k)
+				cmd.logger.ErrorContext(ctx, errMsg.Error())
+				return errMsg
+			}
+			cmd.cfg.AuthServiceConfigs[k] = v
+		}
 	}
 
 	instrumentation, err := telemetry.CreateTelemetryInstrumentation(versionString)
@@ -963,9 +999,8 @@ func run(cmd *Command) error {
 		}()
 	}
 
-	watchDirs, watchedFiles := resolveWatcherInputs(cmd.tools_file, cmd.tools_files, cmd.tools_folder)
-
-	if !cmd.cfg.DisableReload {
+	if isCustomConfigured && !cmd.cfg.DisableReload {
+		watchDirs, watchedFiles := resolveWatcherInputs(cmd.tools_file, cmd.tools_files, cmd.tools_folder)
 		// start watching the file(s) or folder for changes to trigger dynamic reloading
 		go watchChanges(ctx, watchDirs, watchedFiles, s)
 	}
````
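The rewritten `run` path collects the prebuilt configuration and any custom files into `allToolsFiles` and funnels everything through `mergeToolsFiles`, which reports collisions instead of silently overwriting earlier entries. A minimal, self-contained sketch of that merge pattern (the names below are illustrative stand-ins; the real function merges sources, tools, toolsets, prompts, and authSources over concrete config types):

```go
package main

import (
	"fmt"
	"strings"
)

// mergeNamed merges name->config maps in order. Later files never overwrite
// earlier names; every collision is collected and reported in one error,
// mirroring the "resource conflicts detected" behavior described above.
func mergeNamed(files []map[string]string) (map[string]string, error) {
	merged := make(map[string]string)
	var conflicts []string
	for i, f := range files {
		for name, cfg := range f {
			if _, exists := merged[name]; exists {
				conflicts = append(conflicts, fmt.Sprintf("%q (file #%d)", name, i+1))
				continue
			}
			merged[name] = cfg
		}
	}
	if len(conflicts) > 0 {
		return nil, fmt.Errorf("resource conflicts detected: %s", strings.Join(conflicts, ", "))
	}
	return merged, nil
}

func main() {
	prebuilt := map[string]string{"list_tables": "sqlite"} // e.g. loaded via --prebuilt sqlite
	custom := map[string]string{"custom_tool": "http", "list_tables": "http"}
	if _, err := mergeNamed([]map[string]string{prebuilt, custom}); err != nil {
		fmt.Println(err) // resource conflicts detected: "list_tables" (file #2)
	}
}
```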
cmd/root_test.go (255 changes)

````diff
@@ -92,6 +92,21 @@ func invokeCommand(args []string) (*Command, string, error) {
 	return c, buf.String(), err
 }
 
+// invokeCommandWithContext executes the command with a context and returns the captured output.
+func invokeCommandWithContext(ctx context.Context, args []string) (*Command, string, error) {
+	// Capture output using a buffer
+	buf := new(bytes.Buffer)
+	c := NewCommand(WithStreams(buf, buf))
+
+	c.SetArgs(args)
+	c.SilenceUsage = true
+	c.SilenceErrors = true
+	c.SetContext(ctx)
+
+	err := c.Execute()
+	return c, buf.String(), err
+}
+
 func TestVersion(t *testing.T) {
 	data, err := os.ReadFile("version.txt")
 	if err != nil {
@@ -1488,7 +1503,7 @@ func TestPrebuiltTools(t *testing.T) {
 			wantToolset: server.ToolsetConfigs{
 				"alloydb_postgres_database_tools": tools.ToolsetConfig{
 					Name: "alloydb_postgres_database_tools",
-					ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality"},
+					ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats"},
 				},
 			},
 		},
@@ -1518,7 +1533,7 @@ func TestPrebuiltTools(t *testing.T) {
 			wantToolset: server.ToolsetConfigs{
 				"cloud_sql_postgres_database_tools": tools.ToolsetConfig{
 					Name: "cloud_sql_postgres_database_tools",
-					ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality"},
+					ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats"},
 				},
 			},
 		},
@@ -1558,7 +1573,7 @@ func TestPrebuiltTools(t *testing.T) {
 			wantToolset: server.ToolsetConfigs{
 				"serverless_spark_tools": tools.ToolsetConfig{
 					Name: "serverless_spark_tools",
-					ToolNames: []string{"list_batches", "get_batch", "cancel_batch"},
+					ToolNames: []string{"list_batches", "get_batch", "cancel_batch", "create_pyspark_batch", "create_spark_batch"},
 				},
 			},
 		},
@@ -1598,7 +1613,7 @@ func TestPrebuiltTools(t *testing.T) {
 			wantToolset: server.ToolsetConfigs{
 				"looker_tools": tools.ToolsetConfig{
 					Name: "looker_tools",
-					ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look", "make_look", "get_dashboards", "run_dashboard", "make_dashboard", "add_dashboard_element", "health_pulse", "health_analyze", "health_vacuum", "dev_mode", "get_projects", "get_project_files", "get_project_file", "create_project_file", "update_project_file", "delete_project_file", "get_connections", "get_connection_schemas", "get_connection_databases", "get_connection_tables", "get_connection_table_columns"},
+					ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look", "make_look", "get_dashboards", "run_dashboard", "render_query", "render_look", "render_dashboard", "make_dashboard", "add_dashboard_element", "add_dashboard_filter", "generate_embed_url", "health_pulse", "health_analyze", "health_vacuum", "dev_mode", "get_projects", "get_project_files", "get_project_file", "create_project_file", "update_project_file", "delete_project_file", "get_connections", "get_connection_schemas", "get_connection_databases", "get_connection_tables", "get_connection_table_columns"},
 				},
 			},
 		},
@@ -1618,7 +1633,7 @@ func TestPrebuiltTools(t *testing.T) {
 			wantToolset: server.ToolsetConfigs{
 				"postgres_database_tools": tools.ToolsetConfig{
 					Name: "postgres_database_tools",
-					ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality"},
+					ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats"},
 				},
 			},
 		},
@@ -1755,11 +1770,6 @@ func TestMutuallyExclusiveFlags(t *testing.T) {
 		args      []string
 		errString string
 	}{
-		{
-			desc:      "--prebuilt and --tools-file",
-			args:      []string{"--prebuilt", "alloydb", "--tools-file", "my.yaml"},
-			errString: "--prebuilt and --tools-file/--tools-files/--tools-folder flags cannot be used simultaneously",
-		},
 		{
 			desc: "--tools-file and --tools-files",
 			args: []string{"--tools-file", "my.yaml", "--tools-files", "a.yaml,b.yaml"},
@@ -1902,3 +1912,228 @@ func TestMergeToolsFiles(t *testing.T) {
 		})
 	}
 }
+
+func TestPrebuiltAndCustomTools(t *testing.T) {
+	t.Setenv("SQLITE_DATABASE", "test.db")
+	// Setup custom tools file
+	customContent := `
+tools:
+  custom_tool:
+    kind: http
+    source: my-http
+    method: GET
+    path: /
+    description: "A custom tool for testing"
+sources:
+  my-http:
+    kind: http
+    baseUrl: http://example.com
+`
+	customFile := filepath.Join(t.TempDir(), "custom.yaml")
+	if err := os.WriteFile(customFile, []byte(customContent), 0644); err != nil {
+		t.Fatal(err)
+	}
+
+	// Tool Conflict File
+	// SQLite prebuilt has a tool named 'list_tables'
+	toolConflictContent := `
+tools:
+  list_tables:
+    kind: http
+    source: my-http
+    method: GET
+    path: /
+    description: "Conflicting tool"
+sources:
+  my-http:
+    kind: http
+    baseUrl: http://example.com
+`
+	toolConflictFile := filepath.Join(t.TempDir(), "tool_conflict.yaml")
+	if err := os.WriteFile(toolConflictFile, []byte(toolConflictContent), 0644); err != nil {
+		t.Fatal(err)
+	}
+
+	// Source Conflict File
+	// SQLite prebuilt has a source named 'sqlite-source'
+	sourceConflictContent := `
+sources:
+  sqlite-source:
+    kind: http
+    baseUrl: http://example.com
+tools:
+  dummy_tool:
+    kind: http
+    source: sqlite-source
+    method: GET
+    path: /
+    description: "Dummy"
+`
+	sourceConflictFile := filepath.Join(t.TempDir(), "source_conflict.yaml")
+	if err := os.WriteFile(sourceConflictFile, []byte(sourceConflictContent), 0644); err != nil {
+		t.Fatal(err)
+	}
+
+	// Toolset Conflict File
+	// SQLite prebuilt has a toolset named 'sqlite_database_tools'
+	toolsetConflictContent := `
+sources:
+  dummy-src:
+    kind: http
+    baseUrl: http://example.com
+tools:
+  dummy_tool:
+    kind: http
+    source: dummy-src
+    method: GET
+    path: /
+    description: "Dummy"
+toolsets:
+  sqlite_database_tools:
+    - dummy_tool
+`
+	toolsetConflictFile := filepath.Join(t.TempDir(), "toolset_conflict.yaml")
+	if err := os.WriteFile(toolsetConflictFile, []byte(toolsetConflictContent), 0644); err != nil {
+		t.Fatal(err)
+	}
+
+	//Legacy Auth File
+	authContent := `
+authSources:
+  legacy-auth:
+    kind: google
+    clientId: "test-client-id"
+`
+	authFile := filepath.Join(t.TempDir(), "auth.yaml")
+	if err := os.WriteFile(authFile, []byte(authContent), 0644); err != nil {
+		t.Fatal(err)
+	}
+
+	testCases := []struct {
+		desc      string
+		args      []string
+		wantErr   bool
+		errString string
+		cfgCheck  func(server.ServerConfig) error
+	}{
+		{
+			desc:    "success mixed",
+			args:    []string{"--prebuilt", "sqlite", "--tools-file", customFile},
+			wantErr: false,
+			cfgCheck: func(cfg server.ServerConfig) error {
+				if _, ok := cfg.ToolConfigs["custom_tool"]; !ok {
+					return fmt.Errorf("custom tool not found")
+				}
+				if _, ok := cfg.ToolConfigs["list_tables"]; !ok {
+					return fmt.Errorf("prebuilt tool 'list_tables' not found")
+				}
+				return nil
+			},
+		},
+		{
+			desc:      "tool conflict error",
+			args:      []string{"--prebuilt", "sqlite", "--tools-file", toolConflictFile},
+			wantErr:   true,
+			errString: "resource conflicts detected",
+		},
+		{
+			desc:      "source conflict error",
+			args:      []string{"--prebuilt", "sqlite", "--tools-file", sourceConflictFile},
+			wantErr:   true,
+			errString: "resource conflicts detected",
+		},
+		{
+			desc:      "toolset conflict error",
+			args:      []string{"--prebuilt", "sqlite", "--tools-file", toolsetConflictFile},
+			wantErr:   true,
+			errString: "resource conflicts detected",
+		},
+		{
+			desc:    "legacy auth additive",
+			args:    []string{"--prebuilt", "sqlite", "--tools-file", authFile},
+			wantErr: false,
+			cfgCheck: func(cfg server.ServerConfig) error {
+				if _, ok := cfg.AuthServiceConfigs["legacy-auth"]; !ok {
+					return fmt.Errorf("legacy auth source not merged into auth services")
+				}
+				return nil
+			},
+		},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.desc, func(t *testing.T) {
+			ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond)
+			defer cancel()
+
+			cmd, output, err := invokeCommandWithContext(ctx, tc.args)
+
+			if tc.wantErr {
+				if err == nil {
+					t.Fatalf("expected an error but got none")
+				}
+				if !strings.Contains(err.Error(), tc.errString) {
+					t.Errorf("expected error message to contain %q, but got %q", tc.errString, err.Error())
+				}
+			} else {
+				if err != nil && err != context.DeadlineExceeded && err != context.Canceled {
+					t.Fatalf("unexpected error: %v", err)
+				}
+				if !strings.Contains(output, "Server ready to serve!") {
+					t.Errorf("server did not start successfully (no ready message found). Output:\n%s", output)
+				}
+				if tc.cfgCheck != nil {
+					if err := tc.cfgCheck(cmd.cfg); err != nil {
+						t.Errorf("config check failed: %v", err)
+					}
+				}
+			}
+		})
+	}
+}
+
+func TestDefaultToolsFileBehavior(t *testing.T) {
+	t.Setenv("SQLITE_DATABASE", "test.db")
+	testCases := []struct {
+		desc      string
+		args      []string
+		expectRun bool
+		errString string
+	}{
+		{
+			desc:      "no flags (defaults to tools.yaml)",
+			args:      []string{},
+			expectRun: false,
+			errString: "tools.yaml", // Expect error because tools.yaml doesn't exist in test env
+		},
+		{
+			desc:      "prebuilt only (skips tools.yaml)",
+			args:      []string{"--prebuilt", "sqlite"},
+			expectRun: true,
+		},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.desc, func(t *testing.T) {
+			ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond)
+			defer cancel()
+			_, output, err := invokeCommandWithContext(ctx, tc.args)
+
+			if tc.expectRun {
+				if err != nil && err != context.DeadlineExceeded && err != context.Canceled {
+					t.Fatalf("expected server start, got error: %v", err)
+				}
+				// Verify it actually started
+				if !strings.Contains(output, "Server ready to serve!") {
+					t.Errorf("server did not start successfully (no ready message found). Output:\n%s", output)
+				}
+			} else {
+				if err == nil {
+					t.Fatalf("expected error reading default file, got nil")
+				}
+				if !strings.Contains(err.Error(), tc.errString) {
+					t.Errorf("expected error message to contain %q, but got %q", tc.errString, err.Error())
+				}
+			}
+		})
+	}
+}
````
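Both new tests start a real server and rely on a context deadline to stop it: a 500 ms timeout cancels the blocking command, and `context.DeadlineExceeded` is then treated as a successful run provided the ready message was printed. A minimal sketch of that pattern, with a hypothetical `run` standing in for the actual command execution:

```go
package main

import (
	"context"
	"errors"
	"fmt"
	"time"
)

// run imitates a server command: it reports readiness, then blocks until
// the caller's context expires.
func run(ctx context.Context) error {
	fmt.Println("Server ready to serve!")
	<-ctx.Done()
	return ctx.Err()
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond)
	defer cancel()
	if err := run(ctx); err != nil && !errors.Is(err, context.DeadlineExceeded) {
		fmt.Println("unexpected error:", err)
		return
	}
	fmt.Println("server started and shut down as expected")
}
```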
````diff
@@ -1 +1 @@
-0.21.0
+0.24.0
````
````diff
@@ -11,11 +11,11 @@ The MCP Toolbox for Databases Server gives AI-powered development tools the abil
 
 ## Install & Configuration
 
-1. In the Antigravity MCP Store, click the "Install" button.
+1. In the Antigravity MCP Store, click the **Install** button. A configuration window will appear.
 
 2. Create your [`tools.yaml` configuration file](https://googleapis.github.io/genai-toolbox/getting-started/configure/).
 
-3. Click "View raw config" and update the `tools.yaml` path with the full absolute path to your file.
+3. In the configuration window, enter the full absolute path to your `tools.yaml` file and click **Save**.
 
 > [!NOTE]
 > If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
````
@@ -183,11 +183,11 @@ Protocol (OTLP). If you would like to use a collector, please refer to this

The following flags are used to determine Toolbox's telemetry configuration:

| **flag** | **type** | **description** |
|----------------------------|----------|-----------------|
| `--telemetry-gcp` | bool | Enable exporting directly to Google Cloud Monitoring. Default is `false`. |
| `--telemetry-otlp` | string | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. "<http://127.0.0.1:4318>"). |
| `--telemetry-service-name` | string | Sets the value of the `service.name` resource attribute. Default is `toolbox`. |
| **flag** | **type** | **description** |
|----------------------------|----------|-----------------|
| `--telemetry-gcp` | bool | Enable exporting directly to Google Cloud Monitoring. Default is `false`. |
| `--telemetry-otlp` | string | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. "127.0.0.1:4318"). To pass an insecure endpoint here, set environment variable `OTEL_EXPORTER_OTLP_INSECURE=true`. |
| `--telemetry-service-name` | string | Sets the value of the `service.name` resource attribute. Default is `toolbox`. |

In addition to the flags noted above, you can apply further configuration
for OpenTelemetry via the [General SDK Configuration][sdk-configuration] through
@@ -207,5 +207,5 @@ To enable Google Cloud Exporter:

To enable the OTLP Exporter, provide the Collector endpoint:

```bash
./toolbox --telemetry-otlp="http://127.0.0.1:4553"
./toolbox --telemetry-otlp="127.0.0.1:4553"
```
@@ -234,7 +234,7 @@
   },
   "outputs": [],
   "source": [
    "version = \"0.21.0\" # x-release-please-version\n",
    "version = \"0.24.0\" # x-release-please-version\n",
    "! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
    "\n",
    "# Make the binary executable\n",
@@ -71,6 +71,22 @@ redeploying your application.

## Getting Started

### (Non-production) Running Toolbox

You can run Toolbox directly with a [configuration file](../configure.md):

```sh
npx @toolbox-sdk/server --tools-file tools.yaml
```

This runs the latest version of the Toolbox server with your configuration file.

{{< notice note >}}
This method should only be used for non-production use cases such as
experimentation. For any production use cases, please consider [Installing the
server](#installing-the-server) and then [running it](#running-the-server).
{{< /notice >}}
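If you want reproducible behavior rather than whatever `npx` resolves as the latest release, you can pin the package version; this is a sketch assuming `@toolbox-sdk/server` follows standard npm version tagging:

```bash
# Pin a specific release (version tag assumed to match the Toolbox release)
npx @toolbox-sdk/server@0.24.0 --tools-file tools.yaml
```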
### Installing the server

For the latest version, check the [releases page][releases] and use the

@@ -87,7 +103,7 @@ To install Toolbox as a binary on Linux (AMD64):

```sh
# see releases page for other versions
export VERSION=0.21.0
export VERSION=0.24.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```

@@ -98,7 +114,7 @@ To install Toolbox as a binary on macOS (Apple Silicon):

```sh
# see releases page for other versions
export VERSION=0.21.0
export VERSION=0.24.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
chmod +x toolbox
```

@@ -109,19 +125,29 @@ To install Toolbox as a binary on macOS (Intel):

```sh
# see releases page for other versions
export VERSION=0.21.0
export VERSION=0.24.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
chmod +x toolbox
```

{{% /tab %}}
{{% tab header="Windows (AMD64)" lang="en" %}}
To install Toolbox as a binary on Windows (AMD64):
{{% tab header="Windows (Command Prompt)" lang="en" %}}
To install Toolbox as a binary on Windows (Command Prompt):

```cmd
:: see releases page for other versions
set VERSION=0.24.0
curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
```

{{% /tab %}}
{{% tab header="Windows (PowerShell)" lang="en" %}}
To install Toolbox as a binary on Windows (PowerShell):

```powershell
:: see releases page for other versions
set VERSION=0.21.0
curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
# see releases page for other versions
$VERSION = "0.24.0"
curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
```

{{% /tab %}}

@@ -132,7 +158,7 @@ You can also install Toolbox as a container:

```sh
# see releases page for other versions
export VERSION=0.21.0
export VERSION=0.24.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```

@@ -151,7 +177,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:

```sh
go install github.com/googleapis/genai-toolbox@v0.21.0
go install github.com/googleapis/genai-toolbox@v0.24.0
```

{{% /tab %}}

@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
245 docs/en/getting-started/prompts_quickstart_gemini_cli.md Normal file
@@ -0,0 +1,245 @@
---
title: "Prompts using Gemini CLI"
type: docs
weight: 5
description: >
  How to get started using Toolbox prompts locally with PostgreSQL and [Gemini CLI](https://github.com/google-gemini/gemini-cli).
---

## Before you begin

This guide assumes you have already done the following:

1. Installed [PostgreSQL 16+ and the `psql` client][install-postgres].

[install-postgres]: https://www.postgresql.org/download/

## Step 1: Set up your database

In this section, we will create a database, insert some data that needs to be
accessed by our agent, and create a database user for Toolbox to connect with.

1. Connect to postgres using the `psql` command:

    ```bash
    psql -h 127.0.0.1 -U postgres
    ```

    Here, `postgres` denotes the default postgres superuser.

    {{< notice info >}}

    #### **Having trouble connecting?**

    * **Password Prompt:** If you are prompted for a password for the `postgres`
      user and do not know it (or a blank password doesn't work), your PostgreSQL
      installation might require a password or a different authentication method.
    * **`FATAL: role "postgres" does not exist`:** This error means the default
      `postgres` superuser role isn't available under that name on your system.
    * **`Connection refused`:** Ensure your PostgreSQL server is actually running.
      You can typically check with `sudo systemctl status postgresql` and start it
      with `sudo systemctl start postgresql` on Linux systems.

    <br/>

    #### **Common Solution**

    For password issues or if the `postgres` role seems inaccessible directly, try
    switching to the `postgres` operating system user first. This user often has
    permission to connect without a password for local connections (this is called
    peer authentication).

    ```bash
    sudo -i -u postgres
    psql -h 127.0.0.1
    ```

    Once you are in the `psql` shell using this method, you can proceed with the
    database creation steps below. Afterwards, type `\q` to exit `psql`, and then
    `exit` to return to your normal user shell.

    If desired, once connected to `psql` as the `postgres` OS user, you can set a
    password for the `postgres` *database* user using: `ALTER USER postgres WITH
    PASSWORD 'your_chosen_password';`. This would allow direct connection with `-U
    postgres` and a password next time.
    {{< /notice >}}

1. Create a new database and a new user:

    {{< notice tip >}}
    For a real application, it's best to follow the principle of least permission
    and only grant the privileges your application needs.
    {{< /notice >}}

    ```sql
    CREATE USER toolbox_user WITH PASSWORD 'my-password';

    CREATE DATABASE toolbox_db;
    GRANT ALL PRIVILEGES ON DATABASE toolbox_db TO toolbox_user;

    ALTER DATABASE toolbox_db OWNER TO toolbox_user;
    ```

1. End the database session:

    ```bash
    \q
    ```

    (If you used `sudo -i -u postgres` and then `psql`, remember you might also
    need to type `exit` after `\q` to leave the `postgres` user's shell
    session.)

1. Connect to your database with your new user:

    ```bash
    psql -h 127.0.0.1 -U toolbox_user -d toolbox_db
    ```

1. Create the required tables using the following commands:

    ```sql
    CREATE TABLE users (
      id SERIAL PRIMARY KEY,
      username VARCHAR(50) NOT NULL,
      email VARCHAR(100) UNIQUE NOT NULL,
      created_at TIMESTAMPTZ DEFAULT NOW()
    );

    CREATE TABLE restaurants (
      id SERIAL PRIMARY KEY,
      name VARCHAR(100) NOT NULL,
      location VARCHAR(100)
    );

    CREATE TABLE reviews (
      id SERIAL PRIMARY KEY,
      user_id INT REFERENCES users(id),
      restaurant_id INT REFERENCES restaurants(id),
      rating INT CHECK (rating >= 1 AND rating <= 5),
      review_text TEXT,
      is_published BOOLEAN DEFAULT false,
      moderation_status VARCHAR(50) DEFAULT 'pending_manual_review',
      created_at TIMESTAMPTZ DEFAULT NOW()
    );
    ```

1. Insert dummy data into the tables:

    ```sql
    INSERT INTO users (id, username, email) VALUES
    (123, 'jane_d', 'jane.d@example.com'),
    (124, 'john_s', 'john.s@example.com'),
    (125, 'sam_b', 'sam.b@example.com');

    INSERT INTO restaurants (id, name, location) VALUES
    (455, 'Pizza Palace', '123 Main St'),
    (456, 'The Corner Bistro', '456 Oak Ave'),
    (457, 'Sushi Spot', '789 Pine Ln');

    INSERT INTO reviews (user_id, restaurant_id, rating, review_text, is_published, moderation_status) VALUES
    (124, 455, 5, 'Best pizza in town! The crust was perfect.', true, 'approved'),
    (125, 457, 4, 'Great sushi, very fresh. A bit pricey but worth it.', true, 'approved'),
    (123, 457, 5, 'Absolutely loved the dragon roll. Will be back!', true, 'approved'),
    (123, 456, 4, 'The atmosphere was lovely and the food was great. My photo upload might have been weird though.', false, 'pending_manual_review'),
    (125, 456, 1, 'This review contains inappropriate language.', false, 'rejected');
    ```

1. End the database session:

    ```bash
    \q
    ```

## Step 2: Configure Toolbox

Create a file named `tools.yaml`. This file defines the database connection, the
SQL tools available, and the prompts the agents will use.

```yaml
sources:
  my-foodiefind-db:
    kind: postgres
    host: 127.0.0.1
    port: 5432
    database: toolbox_db
    user: toolbox_user
    password: my-password
tools:
  find_user_by_email:
    kind: postgres-sql
    source: my-foodiefind-db
    description: Find a user's ID by their email address.
    parameters:
      - name: email
        type: string
        description: The email address of the user to find.
    statement: SELECT id FROM users WHERE email = $1;
  find_restaurant_by_name:
    kind: postgres-sql
    source: my-foodiefind-db
    description: Find a restaurant's ID by its exact name.
    parameters:
      - name: name
        type: string
        description: The name of the restaurant to find.
    statement: SELECT id FROM restaurants WHERE name = $1;
  find_review_by_user_and_restaurant:
    kind: postgres-sql
    source: my-foodiefind-db
    description: Find the full record for a specific review using the user's ID and the restaurant's ID.
    parameters:
      - name: user_id
        type: integer
        description: The numerical ID of the user.
      - name: restaurant_id
        type: integer
        description: The numerical ID of the restaurant.
    statement: SELECT * FROM reviews WHERE user_id = $1 AND restaurant_id = $2;
prompts:
  investigate_missing_review:
    description: "Investigates a user's missing review by finding the user, restaurant, and the review itself, then analyzing its status."
    arguments:
      - name: "user_email"
        description: "The email of the user who wrote the review."
      - name: "restaurant_name"
        description: "The name of the restaurant being reviewed."
    messages:
      - content: >-
          **Goal:** Find the review written by the user with email '{{.user_email}}' for the restaurant named '{{.restaurant_name}}' and understand its status.
          **Workflow:**
          1. Use the `find_user_by_email` tool with the email '{{.user_email}}' to get the `user_id`.
          2. Use the `find_restaurant_by_name` tool with the name '{{.restaurant_name}}' to get the `restaurant_id`.
          3. Use the `find_review_by_user_and_restaurant` tool with the `user_id` and `restaurant_id` you just found.
          4. Analyze the results from the final tool call. Examine the `is_published` and `moderation_status` fields and explain the review's status to the user in a clear, human-readable sentence.
```

## Step 3: Connect to Gemini CLI

Configure the Gemini CLI to talk to your local Toolbox MCP server. Make sure the
Toolbox server is running with your configuration first (for example,
`./toolbox --tools-file tools.yaml`; it listens on port 5000 by default).

1. Open or create your Gemini settings file: `~/.gemini/settings.json`.
2. Add the following configuration to the file:

    ```json
    {
      "mcpServers": {
        "MCPToolbox": {
          "httpUrl": "http://localhost:5000/mcp"
        }
      },
      "mcp": {
        "allowed": ["MCPToolbox"]
      }
    }
    ```

3. Start Gemini CLI:

    ```sh
    gemini
    ```

    If Gemini CLI is already running, use `/mcp refresh` to reload the MCP server.

4. Use Gemini slash commands to run your prompt:

    ```sh
    /investigate_missing_review --user_email="jane.d@example.com" --restaurant_name="The Corner Bistro"
    ```
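Once the prompt and tools are registered, you can sanity-check the connection from inside Gemini CLI. The `/mcp refresh` subcommand is referenced above; the bare `/mcp` command typically lists the configured servers and the tools they expose, though the exact output depends on your CLI version:

```sh
/mcp
```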
@@ -5,7 +5,7 @@ go 1.24.4

require (
	github.com/googleapis/mcp-toolbox-sdk-go v0.4.0
	google.golang.org/adk v0.1.0
	google.golang.org/genai v1.35.0
	google.golang.org/genai v1.36.0
)

require (

@@ -108,8 +108,8 @@ google.golang.org/adk v0.1.0 h1:+w/fHuqRVolotOATlujRA+2DKUuDrFH2poRdEX2QjB8=
google.golang.org/adk v0.1.0/go.mod h1:NvtSLoNx7UzZIiUAI1KoJQLMmt9sG3oCgiCx1TLqKFw=
google.golang.org/api v0.255.0 h1:OaF+IbRwOottVCYV2wZan7KUq7UeNUQn1BcPc4K7lE4=
google.golang.org/api v0.255.0/go.mod h1:d1/EtvCLdtiWEV4rAEHDHGh2bCnqsWhw+M8y2ECN4a8=
google.golang.org/genai v1.35.0 h1:Jo6g25CzVqFzGrX5mhWyBgQqXAUzxcx5jeK7U74zv9c=
google.golang.org/genai v1.35.0/go.mod h1:A3kkl0nyBjyFlNjgxIwKq70julKbIxpSxqKO5gw/gmk=
google.golang.org/genai v1.36.0 h1:sJCIjqTAmwrtAIaemtTiKkg2TO1RxnYEusTmEQ3nGxM=
google.golang.org/genai v1.36.0/go.mod h1:A3kkl0nyBjyFlNjgxIwKq70julKbIxpSxqKO5gw/gmk=
google.golang.org/genproto v0.0.0-20251014184007-4626949a642f h1:vLd1CJuJOUgV6qijD7KT5Y2ZtC97ll4dxjTUappMnbo=
google.golang.org/genproto v0.0.0-20251014184007-4626949a642f/go.mod h1:PI3KrSadr00yqfv6UDvgZGFsmLqeRIwt8x4p5Oo7CdM=
google.golang.org/genproto/googleapis/api v0.0.0-20251014184007-4626949a642f h1:OiFuztEyBivVKDvguQJYWq1yDcfAHIID/FVrPR4oiI0=
@@ -4,7 +4,7 @@ go 1.24.6

require (
	github.com/googleapis/mcp-toolbox-sdk-go v0.4.0
	google.golang.org/genai v1.35.0
	google.golang.org/genai v1.36.0
)

require (

@@ -102,8 +102,8 @@ gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
google.golang.org/api v0.255.0 h1:OaF+IbRwOottVCYV2wZan7KUq7UeNUQn1BcPc4K7lE4=
google.golang.org/api v0.255.0/go.mod h1:d1/EtvCLdtiWEV4rAEHDHGh2bCnqsWhw+M8y2ECN4a8=
google.golang.org/genai v1.35.0 h1:Jo6g25CzVqFzGrX5mhWyBgQqXAUzxcx5jeK7U74zv9c=
google.golang.org/genai v1.35.0/go.mod h1:A3kkl0nyBjyFlNjgxIwKq70julKbIxpSxqKO5gw/gmk=
google.golang.org/genai v1.36.0 h1:sJCIjqTAmwrtAIaemtTiKkg2TO1RxnYEusTmEQ3nGxM=
google.golang.org/genai v1.36.0/go.mod h1:A3kkl0nyBjyFlNjgxIwKq70julKbIxpSxqKO5gw/gmk=
google.golang.org/genproto v0.0.0-20251014184007-4626949a642f h1:vLd1CJuJOUgV6qijD7KT5Y2ZtC97ll4dxjTUappMnbo=
google.golang.org/genproto v0.0.0-20251014184007-4626949a642f/go.mod h1:PI3KrSadr00yqfv6UDvgZGFsmLqeRIwt8x4p5Oo7CdM=
google.golang.org/genproto/googleapis/api v0.0.0-20251014184007-4626949a642f h1:OiFuztEyBivVKDvguQJYWq1yDcfAHIID/FVrPR4oiI0=
@@ -33,12 +33,12 @@ require (
	go.opentelemetry.io/otel v1.38.0 // indirect
	go.opentelemetry.io/otel/metric v1.38.0 // indirect
	go.opentelemetry.io/otel/trace v1.38.0 // indirect
	golang.org/x/crypto v0.43.0 // indirect
	golang.org/x/net v0.46.0 // indirect
	golang.org/x/crypto v0.45.0 // indirect
	golang.org/x/net v0.47.0 // indirect
	golang.org/x/oauth2 v0.32.0 // indirect
	golang.org/x/sync v0.17.0 // indirect
	golang.org/x/sys v0.37.0 // indirect
	golang.org/x/text v0.30.0 // indirect
	golang.org/x/sync v0.18.0 // indirect
	golang.org/x/sys v0.38.0 // indirect
	golang.org/x/text v0.31.0 // indirect
	golang.org/x/time v0.14.0 // indirect
	google.golang.org/api v0.255.0 // indirect
	google.golang.org/genproto v0.0.0-20251014184007-4626949a642f // indirect

@@ -100,18 +100,18 @@ go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6
go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA=
go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE=
go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=
golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=
golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/oauth2 v0.32.0 h1:jsCblLleRMDrxMN29H3z/k1KliIvpLgCkE6R8FXXNgY=
golang.org/x/oauth2 v0.32.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
@@ -872,11 +872,12 @@
      }
    },
    "node_modules/jws": {
      "version": "4.0.0",
      "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
      "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
      "version": "4.0.1",
      "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
      "integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
      "license": "MIT",
      "dependencies": {
        "jwa": "^2.0.0",
        "jwa": "^2.0.1",
        "safe-buffer": "^5.0.1"
      }
    },
@@ -1,4 +1,4 @@
llama-index==0.14.8
llama-index==0.14.10
llama-index-llms-google-genai==0.7.3
toolbox-llamaindex==0.5.3
pytest==9.0.1
@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
@@ -49,19 +49,19 @@ to expose your developer assistant tools to a Looker instance:

{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
{{< /tab >}}

{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
{{< /tab >}}

{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
{{< /tab >}}

{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

@@ -323,6 +323,8 @@ instance and create new saved content.
   data
1. **make_dashboard**: Create a saved dashboard in Looker and return the URL
1. **add_dashboard_element**: Add a tile to a dashboard
1. **add_dashboard_filter**: Add a filter to a dashboard
1. **generate_embed_url**: Generate an embed url for content

### Looker Instance Health Tools
@@ -45,19 +45,19 @@ instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
{{< /tab >}}

{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
{{< /tab >}}

{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
{{< /tab >}}

{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

@@ -43,19 +43,19 @@ expose your developer assistant tools to a MySQL instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
{{< /tab >}}

{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
{{< /tab >}}

{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
{{< /tab >}}

{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

@@ -44,19 +44,19 @@ expose your developer assistant tools to a Neo4j instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
{{< /tab >}}

{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
{{< /tab >}}

{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
{{< /tab >}}

{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
{{< /tab >}}

{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
{{< /tab >}}

{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
{{< /tab >}}

{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

@@ -43,19 +43,19 @@ to expose your developer assistant tools to a SQLite instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
{{< /tab >}}

{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
{{< /tab >}}

{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
{{< /tab >}}

{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
@@ -79,12 +79,16 @@ There are a couple of steps to run and use a Collector.
   ```

1. Run toolbox with the `--telemetry-otlp` flag. Configure it to send telemetry to
   `http://127.0.0.1:4553` (for HTTP) or the Collector's URL.
   `127.0.0.1:4553` (for HTTP) or the Collector's URL.

   ```bash
   ./toolbox --telemetry-otlp=http://127.0.0.1:4553
   ./toolbox --telemetry-otlp=127.0.0.1:4553
   ```

   {{< notice tip >}}
   To pass an insecure endpoint, set environment variable `OTEL_EXPORTER_OTLP_INSECURE=true`.
   {{< /notice >}}
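   For example, combining the flag and the environment variable from the tip
   above into a single invocation (same Collector endpoint as the step above):

   ```bash
   OTEL_EXPORTER_OTLP_INSECURE=true ./toolbox --telemetry-otlp=127.0.0.1:4553
   ```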

1. Once telemetry data is collected, you can view it in your telemetry
   backend. If you are using GCP exporters, telemetry will be visible in the GCP
   dashboard at [Metrics Explorer][metrics-explorer] and [Trace
@@ -16,14 +16,14 @@ description: >
| | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
| | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
| `-p` | `--port` | Port the server will listen on. | `5000` |
| | `--prebuilt` | Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
| | `--prebuilt` | Use a prebuilt tool configuration by source type. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
| | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
| | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
| | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
| | `--telemetry-service-name` | Sets the value of the service.name resource attribute for telemetry data. | `toolbox` |
| | `--tools-file` | File path specifying the tool configuration. Cannot be used with --prebuilt, --tools-files, or --tools-folder. | |
| | `--tools-files` | Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --prebuilt, --tools-file, or --tools-folder. | |
| | `--tools-folder` | Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --prebuilt, --tools-file, or --tools-files. | |
| | `--tools-file` | File path specifying the tool configuration. Cannot be used with --tools-files or --tools-folder. | |
| | `--tools-files` | Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file or --tools-folder. | |
| | `--tools-folder` | Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file or --tools-files. | |
| | `--ui` | Launches the Toolbox UI web server. | |
| | `--allowed-origins` | Specifies a list of origins permitted to access this server. | `*` |
| `-v` | `--version` | version for toolbox | |
@@ -46,6 +46,9 @@ description: >
```bash
# Basic server with custom port configuration
./toolbox --tools-file "tools.yaml" --port 8080

# Server with prebuilt + custom tools configurations
./toolbox --tools-file tools.yaml --prebuilt alloydb-postgres
```
### Tool Configuration Sources

@@ -72,8 +75,8 @@ The CLI supports multiple mutually exclusive ways to specify tool configurations

{{< notice tip >}}
The CLI enforces mutual exclusivity between configuration source flags,
preventing simultaneous use of `--prebuilt` with file-based options, and
ensuring only one of `--tools-file`, `--tools-files`, or `--tools-folder` is
preventing simultaneous use of the file-based options, ensuring only one of
`--tools-file`, `--tools-files`, or `--tools-folder` is
used at a time.
{{< /notice >}}
@@ -13,6 +13,12 @@ allowing developers to interact with and take action on databases.
See the guide [Connect from your IDE](../how-to/connect-ide/_index.md) for
details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.

{{< notice tip >}}
You can now use `--prebuilt` alongside `--tools-file`, `--tools-files`, or
`--tools-folder` to combine prebuilt configs with custom tools.
See [Usage Examples](../reference/cli.md#examples).
{{< /notice >}}

## AlloyDB Postgres

* `--prebuilt` value: `alloydb-postgres`
@@ -50,6 +56,12 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
* `list_triggers`: Lists triggers in the database.
* `list_indexes`: List available user indexes in a PostgreSQL database.
* `list_sequences`: List sequences in a PostgreSQL database.
* `list_publication_tables`: List publication tables in a PostgreSQL database.
* `list_tablespaces`: Lists tablespaces in the database.
* `list_pg_settings`: List configuration parameters for the PostgreSQL server.
* `list_database_stats`: Lists the key performance and activity statistics for
  each database in the AlloyDB instance.
* `list_roles`: Lists all the user-created roles in a PostgreSQL database.

## AlloyDB Postgres Admin

@@ -227,6 +239,12 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
* `list_triggers`: Lists triggers in the database.
* `list_indexes`: List available user indexes in a PostgreSQL database.
* `list_sequences`: List sequences in a PostgreSQL database.
* `list_publication_tables`: List publication tables in a PostgreSQL database.
* `list_tablespaces`: Lists tablespaces in the database.
* `list_pg_settings`: List configuration parameters for the PostgreSQL server.
* `list_database_stats`: Lists the key performance and activity statistics for
  each database in the PostgreSQL instance.
* `list_roles`: Lists all the user-created roles in a PostgreSQL database.

## Cloud SQL for PostgreSQL Observability

@@ -404,6 +422,8 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
* `run_dashboard`: Runs the queries associated with a dashboard.
* `make_dashboard`: Creates a new dashboard.
* `add_dashboard_element`: Adds a tile to a dashboard.
* `add_dashboard_filter`: Adds a filter to a dashboard.
* `generate_embed_url`: Generate an embed url for content.
* `health_pulse`: Test the health of a Looker instance.
* `health_analyze`: Analyze the LookML usage of a Looker instance.
* `health_vacuum`: Suggest LookML elements that can be removed.

@@ -532,6 +552,12 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
* `list_triggers`: Lists triggers in the database.
* `list_indexes`: List available user indexes in a PostgreSQL database.
* `list_sequences`: List sequences in a PostgreSQL database.
* `list_publication_tables`: List publication tables in a PostgreSQL database.
* `list_tablespaces`: Lists tablespaces in the database.
* `list_pg_settings`: List configuration parameters for the PostgreSQL server.
* `list_database_stats`: Lists the key performance and activity statistics for
  each database in the PostgreSQL server.
* `list_roles`: Lists all the user-created roles in a PostgreSQL database.

## Google Cloud Serverless for Apache Spark
@@ -77,6 +77,25 @@ cluster][alloydb-free-trial].
- [`postgres-get-column-cardinality`](../tools/postgres/postgres-get-column-cardinality.md)
  List cardinality of columns in a table in a PostgreSQL database.

- [`postgres-list-table-stats`](../tools/postgres/postgres-list-table-stats.md)
  List statistics of a table in a PostgreSQL database.

- [`postgres-list-publication-tables`](../tools/postgres/postgres-list-publication-tables.md)
  List publication tables in a PostgreSQL database.

- [`postgres-list-tablespaces`](../tools/postgres/postgres-list-tablespaces.md)
  List tablespaces in an AlloyDB for PostgreSQL database.

- [`postgres-list-pg-settings`](../tools/postgres/postgres-list-pg-settings.md)
  List configuration parameters for the PostgreSQL server.

- [`postgres-list-database-stats`](../tools/postgres/postgres-list-database-stats.md)
  Lists the key performance and activity statistics for each database in the AlloyDB
  instance.

- [`postgres-list-roles`](../tools/postgres/postgres-list-roles.md)
  Lists all the user-created roles in a PostgreSQL database.

### Pre-built Configurations

- [AlloyDB using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/alloydb_pg_mcp/)
40 docs/en/resources/sources/cloud-gda.md Normal file
@@ -0,0 +1,40 @@
---
title: "Gemini Data Analytics"
type: docs
weight: 1
description: >
  A "cloud-gemini-data-analytics" source provides a client for the Gemini Data Analytics API.
aliases:
- /resources/sources/cloud-gemini-data-analytics
---

## About

The `cloud-gemini-data-analytics` source provides a client to interact with the [Gemini Data Analytics API](https://docs.cloud.google.com/gemini/docs/conversational-analytics-api/reference/rest). This allows tools to send natural language queries to the API.

Authentication can be handled in two ways:

1. **Application Default Credentials (ADC) (Recommended):** By default, the source uses ADC to authenticate with the API. The Toolbox server will fetch the credentials from its running environment (server-side authentication). This is the recommended method.
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source expects the authentication token to be provided by the caller when making a request to the Toolbox server (typically via an HTTP Bearer token). The Toolbox server will then forward this token to the underlying Gemini Data Analytics API calls.

## Example

```yaml
sources:
  my-gda-source:
    kind: cloud-gemini-data-analytics
    projectId: my-project-id

  my-oauth-gda-source:
    kind: cloud-gemini-data-analytics
    projectId: my-project-id
    useClientOAuth: true
```
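When `useClientOAuth` is `true`, the caller attaches its own token to each Toolbox request. The sketch below is illustrative only: the tool name `ask_gda` is a placeholder for a tool backed by this source, and the `/api/tool/<name>/invoke` path is assumed from Toolbox's HTTP tool-invocation API (check your server's API reference):

```bash
# Hypothetical client-side OAuth call; the Bearer token is forwarded
# by Toolbox to the Gemini Data Analytics API.
curl -X POST "http://127.0.0.1:5000/api/tool/ask_gda/invoke" \
  -H "Authorization: Bearer $(gcloud auth print-access-token)" \
  -H "Content-Type: application/json" \
  -d '{"query": "Which products sold best last month?"}'
```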
## Reference

| **field**      | **type** | **required** | **description** |
|----------------|:--------:|:------------:|-----------------|
| kind           | string   | true         | Must be "cloud-gemini-data-analytics". |
| projectId      | string   | true         | The Google Cloud Project ID where the API is enabled. |
| useClientOAuth | boolean  | false        | If true, the source uses the token provided by the caller (forwarded to the API). Otherwise, it uses server-side Application Default Credentials (ADC). Defaults to `false`. |
@@ -31,6 +31,9 @@ to a database by following these instructions][csql-mysql-quickstart].
- [`mysql-list-active-queries`](../tools/mysql/mysql-list-active-queries.md)
  List active queries in Cloud SQL for MySQL.

- [`mysql-get-query-plan`](../tools/mysql/mysql-get-query-plan.md)
  Provide information about how MySQL executes a SQL statement (EXPLAIN).

- [`mysql-list-tables`](../tools/mysql/mysql-list-tables.md)
  List tables in a Cloud SQL for MySQL database.

@@ -88,13 +91,40 @@ mTLS.
[public-ip]: https://cloud.google.com/sql/docs/mysql/configure-ip
[conn-overview]: https://cloud.google.com/sql/docs/mysql/connect-overview

### Database User
### Authentication

Currently, this source only uses standard authentication. You will need to [create
a MySQL user][cloud-sql-users] to log in to the database with.
This source supports both password-based authentication and IAM
authentication (using your [Application Default Credentials][adc]).

#### Standard Authentication

To connect using user/password, [create
a MySQL user][cloud-sql-users] and input your credentials in the `user` and
`password` fields.

```yaml
user: ${USER_NAME}
password: ${PASSWORD}
```

[cloud-sql-users]: https://cloud.google.com/sql/docs/mysql/create-manage-users

#### IAM Authentication

To connect using IAM authentication:

1. Prepare your database instance and user following this [guide][iam-guide].
2. You can choose one of two ways to log in:
   - Specify your IAM email as the `user`.
   - Leave your `user` field blank. Toolbox will fetch the [ADC][adc]
     automatically and log in using the email associated with it.

3. Leave the `password` field blank (a sketch of the resulting source entry follows below).

[iam-guide]: https://cloud.google.com/sql/docs/mysql/iam-logins
[cloudsql-users]: https://cloud.google.com/sql/docs/mysql/create-manage-users
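Putting the IAM option together, a source entry simply leaves `user` and `password` unset, as in the hypothetical sketch below. The `cloud-sql-mysql` kind is assumed from this source's name, and the project, region, and instance values are placeholders; see the reference table that follows for the field definitions:

```bash
# Append a hypothetical IAM-authenticated source; with user and password
# omitted, Toolbox logs in with the email associated with your ADC.
cat >> tools.yaml <<'EOF'
sources:
  my-cloud-sql-mysql-iam:
    kind: cloud-sql-mysql
    project: my-project
    region: us-central1
    instance: my-instance
    database: my_db
EOF
```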
## Example

```yaml
@@ -124,6 +154,6 @@ instead of hardcoding your secrets into the configuration file.
| region   | string | true  | Name of the GCP region that the cluster was created in (e.g. "us-central1"). |
| instance | string | true  | Name of the Cloud SQL instance within the cluster (e.g. "my-instance"). |
| database | string | true  | Name of the MySQL database to connect to (e.g. "my_db"). |
| user     | string | true  | Name of the MySQL user to connect as (e.g. "my-pg-user"). |
| password | string | true  | Password of the MySQL user (e.g. "my-password"). |
| user     | string | false | Name of the MySQL user to connect as (e.g. "my-mysql-user"). Defaults to IAM auth using [ADC][adc] email if unspecified. |
| password | string | false | Password of the MySQL user (e.g. "my-password"). Defaults to attempting IAM authentication if unspecified. |
| ipType   | string | false | IP Type of the Cloud SQL instance, must be either `public`, `private`, or `psc`. Default: `public`. |
@@ -58,6 +58,7 @@ to a database by following these instructions][csql-pg-quickstart].

- [`postgres-list-sequences`](../tools/postgres/postgres-list-sequences.md)
  List sequences in a PostgreSQL database.

- [`postgres-long-running-transactions`](../tools/postgres/postgres-long-running-transactions.md)
  List long running transactions in a PostgreSQL database.

@@ -73,6 +74,25 @@ to a database by following these instructions][csql-pg-quickstart].
- [`postgres-get-column-cardinality`](../tools/postgres/postgres-get-column-cardinality.md)
  List cardinality of columns in a table in a PostgreSQL database.

- [`postgres-list-table-stats`](../tools/postgres/postgres-list-table-stats.md)
  List statistics of a table in a PostgreSQL database.

- [`postgres-list-publication-tables`](../tools/postgres/postgres-list-publication-tables.md)
  List publication tables in a PostgreSQL database.

- [`postgres-list-tablespaces`](../tools/postgres/postgres-list-tablespaces.md)
  List tablespaces in a PostgreSQL database.

- [`postgres-list-pg-settings`](../tools/postgres/postgres-list-pg-settings.md)
  List configuration parameters for the PostgreSQL server.

- [`postgres-list-database-stats`](../tools/postgres/postgres-list-database-stats.md)
  Lists the key performance and activity statistics for each database in the PostgreSQL
  instance.

- [`postgres-list-roles`](../tools/postgres/postgres-list-roles.md)
  Lists all the user-created roles in a PostgreSQL database.

### Pre-built Configurations

- [Cloud SQL for Postgres using
@@ -91,18 +91,17 @@ instead of hardcoding your secrets into the configuration file.

## Reference

| **field** | **type** | **required** | **description** |
|----------------------|:--------:|:------------:|-----------------|
| kind | string | true | Must be "looker". |
| base_url | string | true | The URL of your Looker server with no trailing /. |
| client_id | string | false | The client id assigned by Looker. |
| client_secret | string | false | The client secret assigned by Looker. |
| verify_ssl | string | false | Whether to check the ssl certificate of the server. |
| project | string | false | The project id to use in Google Cloud. |
| location | string | false | The location to use in Google Cloud. (default: us) |
| timeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, 120s is applied. |
| use_client_oauth | string | false | Use OAuth tokens instead of client_id and client_secret. (default: false) If a header |
| | | | name is provided, it will be used instead of "Authorization". |
| show_hidden_models | string | false | Show or hide hidden models. (default: true) |
| show_hidden_explores | string | false | Show or hide hidden explores. (default: true) |
| show_hidden_fields | string | false | Show or hide hidden fields. (default: true) |
| **field** | **type** | **required** | **description** |
|----------------------|:--------:|:------------:|-----------------|
| kind | string | true | Must be "looker". |
| base_url | string | true | The URL of your Looker server with no trailing /. |
| client_id | string | false | The client id assigned by Looker. |
| client_secret | string | false | The client secret assigned by Looker. |
| verify_ssl | string | false | Whether to check the ssl certificate of the server. |
| project | string | false | The project id to use in Google Cloud. |
| location | string | false | The location to use in Google Cloud. (default: us) |
| timeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, 120s is applied. |
| use_client_oauth | string | false | Use OAuth tokens instead of client_id and client_secret. (default: false) If a header name is provided, it will be used instead of "Authorization". |
| show_hidden_models | string | false | Show or hide hidden models. (default: true) |
| show_hidden_explores | string | false | Show or hide hidden explores. (default: true) |
| show_hidden_fields | string | false | Show or hide hidden fields. (default: true) |
78 docs/en/resources/sources/mariadb.md Normal file
@@ -0,0 +1,78 @@
---
title: "MariaDB"
type: docs
weight: 1
description: >
  MariaDB is an open-source relational database compatible with MySQL.

---

## About

MariaDB is a relational database management system derived from MySQL. It
implements the MySQL protocol and client libraries and supports modern SQL
features with a focus on performance and reliability.

**Note**: MariaDB is supported using the MySQL source.

## Available Tools

- [`mysql-sql`](../tools/mysql/mysql-sql.md)
  Execute pre-defined prepared SQL queries in MariaDB.

- [`mysql-execute-sql`](../tools/mysql/mysql-execute-sql.md)
  Run parameterized SQL queries in MariaDB.

- [`mysql-list-active-queries`](../tools/mysql/mysql-list-active-queries.md)
  List active queries in MariaDB.

- [`mysql-list-tables`](../tools/mysql/mysql-list-tables.md)
  List tables in a MariaDB database.

- [`mysql-list-tables-missing-unique-indexes`](../tools/mysql/mysql-list-tables-missing-unique-indexes.md)
  List tables in a MariaDB database that do not have primary or unique indices.

- [`mysql-list-table-fragmentation`](../tools/mysql/mysql-list-table-fragmentation.md)
  List table fragmentation in MariaDB tables.

## Requirements

### Database User

This source only uses standard authentication. You will need to [create a
MariaDB user][mariadb-users] to log in to the database.

[mariadb-users]: https://mariadb.com/kb/en/create-user/

## Example

```yaml
sources:
  my_mariadb_db:
    kind: mysql
    host: 127.0.0.1
    port: 3306
    database: my_db
    user: ${MARIADB_USER}
    password: ${MARIADB_PASS}
    # Optional TLS and other driver parameters. For example, enable preferred TLS:
    # queryParams:
    #   tls: preferred
    queryTimeout: 30s # Optional: query timeout duration
```

{{< notice tip >}}
Use environment variables instead of committing credentials to source files.
{{< /notice >}}

## Reference

| **field**    | **type**           | **required** | **description** |
|--------------|:------------------:|:------------:|-----------------|
| kind         | string             | true         | Must be `mysql`. |
| host         | string             | true         | IP address to connect to (e.g. "127.0.0.1"). |
| port         | string             | true         | Port to connect to (e.g. "3307"). |
| database     | string             | true         | Name of the MariaDB database to connect to (e.g. "my_db"). |
| user         | string             | true         | Name of the MariaDB user to connect as (e.g. "my-mysql-user"). |
| password     | string             | true         | Password of the MariaDB user (e.g. "my-password"). |
| queryTimeout | string             | false        | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, no timeout is applied. |
| queryParams  | map<string,string> | false        | Arbitrary DSN parameters passed to the driver (e.g. `tls: preferred`, `charset: utf8mb4`). Useful for enabling TLS or other connection options. |
@@ -25,6 +25,9 @@ reliability, performance, and ease of use.
- [`mysql-list-active-queries`](../tools/mysql/mysql-list-active-queries.md)
  List active queries in MySQL.

- [`mysql-get-query-plan`](../tools/mysql/mysql-get-query-plan.md)
  Provide information about how MySQL executes a SQL statement (EXPLAIN).

- [`mysql-list-tables`](../tools/mysql/mysql-list-tables.md)
  List tables in a MySQL database.
@@ -18,10 +18,10 @@ DW) database workloads.
|
||||
## Available Tools
|
||||
|
||||
- [`oracle-sql`](../tools/oracle/oracle-sql.md)
|
||||
Execute pre-defined prepared SQL queries in Oracle.
|
||||
Execute pre-defined prepared SQL queries in Oracle.
|
||||
|
||||
- [`oracle-execute-sql`](../tools/oracle/oracle-execute-sql.md)
|
||||
Run parameterized SQL queries in Oracle.
|
||||
Run parameterized SQL queries in Oracle.
|
||||
|
||||
## Requirements
|
||||
|
||||
@@ -33,6 +33,25 @@ user][oracle-users] to log in to the database with the necessary permissions.
|
||||
[oracle-users]:
|
||||
https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/CREATE-USER.html
|
||||
|
||||
### Oracle Driver Requirement (Conditional)
|
||||
|
||||
The Oracle source offers two connection drivers:
|
||||
|
||||
1. **Pure Go Driver (`useOCI: false`, default):** Uses the `go-ora` library.
|
||||
This driver is simpler and does not require any local Oracle software
|
||||
installation, but it **lacks support for advanced features** like Oracle
|
||||
Wallets or Kerberos authentication.
|
||||
|
||||
2. **OCI-Based Driver (`useOCI: true`):** Uses the `godror` library, which
|
||||
provides access to **advanced Oracle features** like Digital Wallet support.
|
||||
|
||||
If you set `useOCI: true`, you **must** install the **Oracle Instant Client**
|
||||
libraries on the machine where this tool runs.
|
||||
|
||||
You can download the Instant Client from the official Oracle website: [Oracle
|
||||
Instant Client
|
||||
Downloads](https://www.oracle.com/database/technologies/instant-client/downloads.html)
|
||||
|
||||
## Connection Methods

You can configure the connection to your Oracle database using one of the
@@ -66,12 +85,15 @@ using a TNS (Transparent Network Substrate) alias.
   containing it. This setting will override the `TNS_ADMIN` environment
   variable.

## Examples

This example demonstrates the four connection methods you can choose from:

```yaml
sources:
  my-oracle-source:
    kind: oracle

    # --- Choose one connection method ---
    # 1. Host, Port, and Service Name
    host: 127.0.0.1
@@ -88,6 +110,43 @@ sources:
    user: ${USER_NAME}
    password: ${PASSWORD}

    # Optional: Set to true to use the OCI-based driver for advanced features (Requires Oracle Instant Client)
    # useOCI: true
```
### Using an Oracle Wallet

An Oracle Wallet allows you to store the credentials used for database connections. The wallet configuration differs depending on whether you are using the OCI-based driver.

#### Pure Go Driver (`useOCI: false`) - Oracle Wallet

The `go-ora` driver uses the `walletLocation` field to connect to a database secured with an Oracle Wallet without a standard username and password.

```yaml
sources:
  pure-go-wallet:
    kind: oracle
    connectionString: "127.0.0.1:1521/XEPDB1"
    user: ${USER_NAME}
    password: ${PASSWORD}
    # The TNS Alias is often required to connect to a service registered in tnsnames.ora
    tnsAlias: "SECURE_DB_ALIAS"
    walletLocation: "/path/to/my/wallet/directory"
```
#### OCI-Based Driver (`useOCI: true`) - Oracle Wallet

For the OCI-based driver, wallet authentication is triggered by setting `tnsAdmin` to the wallet directory and connecting via a `tnsAlias`.

```yaml
sources:
  oci-wallet:
    kind: oracle
    connectionString: "127.0.0.1:1521/XEPDB1"
    user: ${USER_NAME}
    password: ${PASSWORD}
    tnsAlias: "WALLET_DB_ALIAS"
    tnsAdmin: "/opt/oracle/wallet" # Directory containing tnsnames.ora, sqlnet.ora, and wallet files
    useOCI: true
```
{{< notice tip >}}
@@ -97,14 +156,15 @@ instead of hardcoding your secrets into the configuration file.

## Reference

| **field** | **type** | **required** | **description** |
|------------------|:--------:|:------------:|-----------------|
| kind | string | true | Must be "oracle". |
| user | string | true | Name of the Oracle user to connect as (e.g. "my-oracle-user"). |
| password | string | true | Password of the Oracle user (e.g. "my-password"). |
| host | string | false | IP address or hostname to connect to (e.g. "127.0.0.1"). Required if not using `connectionString` or `tnsAlias`. |
| port | integer | false | Port to connect to (e.g. "1521"). Required if not using `connectionString` or `tnsAlias`. |
| serviceName | string | false | The Oracle service name of the database to connect to. Required if not using `connectionString` or `tnsAlias`. |
| connectionString | string | false | A direct connection string (e.g. "hostname:port/servicename"). Use as an alternative to `host`, `port`, and `serviceName`. |
| tnsAlias | string | false | A TNS alias from a `tnsnames.ora` file. Use as an alternative to `host`/`port` or `connectionString`. |
| tnsAdmin | string | false | Path to the directory containing the `tnsnames.ora` file. This overrides the `TNS_ADMIN` environment variable if it is set. |
| useOCI | bool | false | If true, uses the OCI-based driver (godror) which supports Oracle Wallet/Kerberos but requires the Oracle Instant Client libraries to be installed. Defaults to false (pure Go driver). |
@@ -68,6 +68,25 @@ reputation for reliability, feature robustness, and performance.

- [`postgres-get-column-cardinality`](../tools/postgres/postgres-get-column-cardinality.md)
  List cardinality of columns in a table in a PostgreSQL database.

- [`postgres-list-table-stats`](../tools/postgres/postgres-list-table-stats.md)
  List statistics of a table in a PostgreSQL database.

- [`postgres-list-publication-tables`](../tools/postgres/postgres-list-publication-tables.md)
  List publication tables in a PostgreSQL database.

- [`postgres-list-tablespaces`](../tools/postgres/postgres-list-tablespaces.md)
  List tablespaces in a PostgreSQL database.

- [`postgres-list-pg-settings`](../tools/postgres/postgres-list-pg-settings.md)
  List configuration parameters for the PostgreSQL server.

- [`postgres-list-database-stats`](../tools/postgres/postgres-list-database-stats.md)
  List key performance and activity statistics for each database in the PostgreSQL server.

- [`postgres-list-roles`](../tools/postgres/postgres-list-roles.md)
  List all the user-created roles in a PostgreSQL database.

### Pre-built Configurations

- [PostgreSQL using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/postgres_mcp/)
@@ -21,6 +21,10 @@ Apache Spark.

  Get a Serverless Spark batch.

- [`serverless-spark-cancel-batch`](../tools/serverless-spark/serverless-spark-cancel-batch.md)
  Cancel a running Serverless Spark batch operation.

- [`serverless-spark-create-pyspark-batch`](../tools/serverless-spark/serverless-spark-create-pyspark-batch.md)
  Create a Serverless Spark PySpark batch operation.

- [`serverless-spark-create-spark-batch`](../tools/serverless-spark/serverless-spark-create-spark-batch.md)
  Create a Serverless Spark Java batch operation.

## Requirements
7
docs/en/resources/tools/cloudgda/_index.md
Normal file
@@ -0,0 +1,7 @@

---
title: "Gemini Data Analytics"
type: docs
weight: 1
description: >
  Tools for Gemini Data Analytics.
---

92
docs/en/resources/tools/cloudgda/cloud-gda-query.md
Normal file
@@ -0,0 +1,92 @@
---
title: "Gemini Data Analytics QueryData"
type: docs
weight: 1
description: >
  A tool to convert natural language queries into SQL statements using the Gemini Data Analytics QueryData API.
aliases:
- /resources/tools/cloud-gemini-data-analytics-query
---
## About

The `cloud-gemini-data-analytics-query` tool allows you to send natural language questions to the Gemini Data Analytics API and receive structured responses containing SQL queries, natural language answers, and explanations. For details on defining data agent context for database data sources, see the official [documentation](https://docs.cloud.google.com/gemini/docs/conversational-analytics-api/data-agent-authored-context-databases).

## Example

```yaml
tools:
  my-gda-query-tool:
    kind: cloud-gemini-data-analytics-query
    source: my-gda-source
    description: "Use this tool to send natural language queries to the Gemini Data Analytics API and receive SQL, natural language answers, and explanations."
    location: ${your_database_location}
    context:
      datasourceReferences:
        cloudSqlReference:
          databaseReference:
            projectId: "${your_project_id}"
            region: "${your_database_instance_region}"
            instanceId: "${your_database_instance_id}"
            databaseId: "${your_database_name}"
          engine: "POSTGRESQL"
      agentContextReference:
        contextSetId: "${your_context_set_id}" # E.g. projects/${project_id}/locations/${context_set_location}/contextSets/${context_set_id}
    generationOptions:
      generateQueryResult: true
      generateNaturalLanguageAnswer: true
      generateExplanation: true
      generateDisambiguationQuestion: true
```
### Usage Flow

When using this tool, a `prompt` parameter containing a natural language query is provided to the tool (typically by an agent). The tool then interacts with the Gemini Data Analytics API using the context defined in your configuration.

The structure of the response depends on the `generationOptions` configured in your tool definition (e.g., enabling `generateQueryResult` will include the SQL query results).

See the [Data Analytics API REST documentation](https://clouddocs.devsite.corp.google.com/gemini/docs/conversational-analytics-api/reference/rest/v1alpha/projects.locations/queryData?rep_location=global) for details.
**Example Input Prompt:**

```text
How many accounts who have region in Prague are eligible for loans? A3 contains the data of region.
```
**Example API Response:**

```json
{
  "generatedQuery": "SELECT COUNT(T1.account_id) FROM account AS T1 INNER JOIN loan AS T2 ON T1.account_id = T2.account_id INNER JOIN district AS T3 ON T1.district_id = T3.district_id WHERE T3.A3 = 'Prague'",
  "intentExplanation": "I found a template that matches the user's question. The template asks about the number of accounts who have region in a given city and are eligible for loans. The question asks about the number of accounts who have region in Prague and are eligible for loans. The template's parameterized SQL is 'SELECT COUNT(T1.account_id) FROM account AS T1 INNER JOIN loan AS T2 ON T1.account_id = T2.account_id INNER JOIN district AS T3 ON T1.district_id = T3.district_id WHERE T3.A3 = ?'. I will replace the named parameter '?' with 'Prague'.",
  "naturalLanguageAnswer": "There are 84 accounts from the Prague region that are eligible for loans.",
  "queryResult": {
    "columns": [
      {
        "type": "INT64"
      }
    ],
    "rows": [
      {
        "values": [
          {
            "value": "84"
          }
        ]
      }
    ],
    "totalRowCount": "1"
  }
}
```
## Reference

| **field** | **type** | **required** | **description** |
| ----------------- | :------: | :----------: | --------------- |
| kind | string | true | Must be "cloud-gemini-data-analytics-query". |
| source | string | true | The name of the `cloud-gemini-data-analytics` source to use. |
| description | string | true | A description of the tool's purpose. |
| location | string | true | The Google Cloud location of the target database resource (e.g., "us-central1"). This is used to construct the parent resource name in the API call. |
| context | object | true | The context for the query, including datasource references. See [QueryDataContext](https://github.com/googleapis/googleapis/blob/b32495a713a68dd0dff90cf0b24021debfca048a/google/cloud/geminidataanalytics/v1beta/data_chat_service.proto#L156) for details. |
| generationOptions | object | false | Options for generating the response. See [GenerationOptions](https://github.com/googleapis/googleapis/blob/b32495a713a68dd0dff90cf0b24021debfca048a/google/cloud/geminidataanalytics/v1beta/data_chat_service.proto#L135) for details. |
@@ -10,27 +10,18 @@ aliases:

## About

The `looker-add-dashboard-element` tool creates a new tile (element) within an existing Looker dashboard.
Tiles are added in the order this tool is called for a given `dashboard_id`.

CRITICAL ORDER OF OPERATIONS:

1. Create the dashboard using `make_dashboard`.
2. Add any dashboard-level filters using `add_dashboard_filter`.
3. Then, add elements (tiles) using this tool.

It's compatible with the following sources:

- [looker](../../sources/looker.md)

## Example

```yaml
@@ -39,24 +30,37 @@ tools:
    kind: looker-add-dashboard-element
    source: looker-source
    description: |
      This tool creates a new tile (element) within an existing Looker dashboard.
      Tiles are added in the order this tool is called for a given `dashboard_id`.

      CRITICAL ORDER OF OPERATIONS:
      1. Create the dashboard using `make_dashboard`.
      2. Add any dashboard-level filters using `add_dashboard_filter`.
      3. Then, add elements (tiles) using this tool.

      Required Parameters:
      - dashboard_id: The ID of the target dashboard, obtained from `make_dashboard`.
      - model_name, explore_name, fields: These query parameters are inherited
        from the `query` tool and are required to define the data for the tile.

      Optional Parameters:
      - title: An optional title for the dashboard tile.
      - pivots, filters, sorts, limit, query_timezone: These query parameters are
        inherited from the `query` tool and can be used to customize the tile's query.
      - vis_config: A JSON object defining the visualization settings for this tile.
        The structure and options are the same as for the `query_url` tool's `vis_config`.

      Connecting to Dashboard Filters:
      A dashboard element can be connected to one or more dashboard filters (created with
      `add_dashboard_filter`). To do this, specify the `name` of the dashboard filter
      and the `field` from the element's query that the filter should apply to.
      The format for specifying the field is `view_name.field_name`.
```
## Reference

| **field**   | **type** | **required** | **description**                                    |
|:------------|:--------:|:------------:|----------------------------------------------------|
| kind        | string   | true         | Must be "looker-add-dashboard-element".            |
| source      | string   | true         | Name of the source the SQL should execute on.      |
| description | string   | true         | Description of the tool that is passed to the LLM. |
@@ -0,0 +1,75 @@
---
title: "looker-add-dashboard-filter"
type: docs
weight: 1
description: >
  The "looker-add-dashboard-filter" tool adds a filter to a specified dashboard.
aliases:
- /resources/tools/looker-add-dashboard-filter
---

## About

The `looker-add-dashboard-filter` tool adds a filter to a specified Looker dashboard.

CRITICAL ORDER OF OPERATIONS:

1. Create a dashboard using `make_dashboard`.
2. Add all desired filters using this tool (`add_dashboard_filter`).
3. Finally, add dashboard elements (tiles) using `add_dashboard_element`.

It's compatible with the following sources:

- [looker](../../sources/looker.md)
## Parameters

| **parameter** | **type** | **required** | **default** | **description** |
|:----------------------|:--------:|:-----------------:|:--------------:|---------------------------------------------------------------------------------------------------------------------------------|
| dashboard_id | string | true | none | The ID of the dashboard to add the filter to, obtained from `make_dashboard`. |
| name | string | true | none | A unique internal identifier for the filter. This name is used later in `add_dashboard_element` to bind tiles to this filter. |
| title | string | true | none | The label displayed to users in the Looker UI. |
| filter_type | string | true | `field_filter` | The type of filter. Can be `date_filter`, `number_filter`, `string_filter`, or `field_filter`. |
| default_value | string | false | none | The initial value for the filter. |
| model | string | if `field_filter` | none | The name of the LookML model, obtained from `get_models`. |
| explore | string | if `field_filter` | none | The name of the explore within the model, obtained from `get_explores`. |
| dimension | string | if `field_filter` | none | The name of the field (e.g., `view_name.field_name`) to base the filter on, obtained from `get_dimensions`. |
| allow_multiple_values | boolean | false | true | Whether the dashboard filter should allow multiple values. |
| required | boolean | false | false | Whether the dashboard filter must be set in order to run the dashboard. |
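As an illustration, an agent invoking this tool for a field filter might pass arguments shaped like the hypothetical sketch below (all IDs and names are invented):

```json
{
  "dashboard_id": "123",
  "name": "order_date",
  "title": "Order Date",
  "filter_type": "field_filter",
  "default_value": "30 days",
  "model": "thelook",
  "explore": "orders",
  "dimension": "orders.created_date"
}
```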
## Example

```yaml
tools:
  add_dashboard_filter:
    kind: looker-add-dashboard-filter
    source: looker-source
    description: |
      This tool adds a filter to a Looker dashboard.

      CRITICAL ORDER OF OPERATIONS:
      1. Create a dashboard using `make_dashboard`.
      2. Add all desired filters using this tool (`add_dashboard_filter`).
      3. Finally, add dashboard elements (tiles) using `add_dashboard_element`.

      Parameters:
      - dashboard_id (required): The ID from `make_dashboard`.
      - name (required): A unique internal identifier for the filter. You will use this `name` later in `add_dashboard_element` to bind tiles to this filter.
      - title (required): The label displayed to users in the UI.
      - filter_type (required): One of `date_filter`, `number_filter`, `string_filter`, or `field_filter`.
      - default_value (optional): The initial value for the filter.

      Field Filters (`filter_type: field_filter`):
      If creating a field filter, you must also provide:
      - model
      - explore
      - dimension
      The filter will inherit suggestions and type information from this LookML field.
```
## Reference

| **field**   | **type** | **required** | **description**                                    |
|-------------|:--------:|:------------:|----------------------------------------------------|
| kind        | string   | true         | Must be "looker-add-dashboard-filter".             |
| source      | string   | true         | Name of the source the SQL should execute on.      |
| description | string   | true         | Description of the tool that is passed to the LLM. |
@@ -34,9 +34,10 @@ tools:
    kind: looker-conversational-analytics
    source: looker-source
    description: |
      Use this tool to ask questions about your data using the Looker Conversational
      Analytics API. You must provide a natural language query and a list of
      1 to 5 model and explore combinations (e.g. [{'model': 'the_model', 'explore': 'the_explore'}]).
      Use the 'get_models' and 'get_explores' tools to discover available models and explores.
```

## Reference
@@ -27,13 +27,18 @@ tools:
    kind: looker-create-project-file
    source: looker-source
    description: |
      This tool creates a new LookML file within a specified project, populating
      it with the provided content.

      Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first.

      Parameters:
      - project_id (required): The unique ID of the LookML project.
      - file_path (required): The desired path and filename for the new file within the project.
      - content (required): The full LookML content to write into the new file.

      Output:
      A confirmation message upon successful file creation.
```

## Reference
@@ -26,13 +26,17 @@ tools:
    kind: looker-delete-project-file
    source: looker-source
    description: |
      This tool permanently deletes a specified LookML file from within a project.
      Use with caution, as this action cannot be undone through the API.

      Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first.

      Parameters:
      - project_id (required): The unique ID of the LookML project.
      - file_path (required): The exact path to the LookML file to delete within the project.

      Output:
      A confirmation message upon successful file deletion.
```

## Reference
@@ -27,10 +27,13 @@ tools:
    kind: looker-dev-mode
    source: looker-source
    description: |
      This tool allows toggling the Looker IDE session between Development Mode and Production Mode.
      Development Mode enables making and testing changes to LookML projects.

      Parameters:
      - enable (required): A boolean value.
        - `true`: Switches the current session to Development Mode.
        - `false`: Switches the current session to Production Mode.
```

## Reference
@@ -36,11 +36,17 @@ tools:
    kind: looker-generate-embed-url
    source: looker-source
    description: |
      This tool generates a signed, private embed URL for specific Looker content,
      allowing users to access it directly.

      Parameters:
      - type (required): The type of content to embed. Common values include:
        - `dashboards`
        - `looks`
        - `explore`
      - id (required): The unique identifier for the content.
        - For dashboards and looks, use the numeric ID (e.g., "123").
        - For explores, use the format "model_name/explore_name".
```
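For example, a call to embed a dashboard might pass arguments like this hypothetical sketch (the ID is made up):

```json
{
  "type": "dashboards",
  "id": "123"
}
```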
## Reference
@@ -26,10 +26,16 @@ tools:
    kind: looker-get-connection-databases
    source: looker-source
    description: |
      This tool retrieves a list of databases available through a specified Looker connection.
      This is only applicable for connections that support multiple databases.
      Use `get_connections` to check if a connection supports multiple databases.

      Parameters:
      - connection_name (required): The name of the database connection, obtained from `get_connections`.

      Output:
      A JSON array of strings, where each string is the name of an available database.
      If the connection does not support multiple databases, an empty list or an error will be returned.
```

## Reference
@@ -26,10 +26,16 @@ tools:
    kind: looker-get-connection-schemas
    source: looker-source
    description: |
      This tool retrieves a list of database schemas available through a specified
      Looker connection.

      Parameters:
      - connection_name (required): The name of the database connection, obtained from `get_connections`.
      - database (optional): An optional database name to filter the schemas.
        Only applicable for connections that support multiple databases.

      Output:
      A JSON array of strings, where each string is the name of an available schema.
```

## Reference
@@ -26,11 +26,20 @@ tools:
    kind: looker-get-connection-table-columns
    source: looker-source
    description: |
      This tool retrieves a list of columns for one or more specified tables within a
      given database schema and connection.

      Parameters:
      - connection_name (required): The name of the database connection, obtained from `get_connections`.
      - schema (required): The name of the schema where the tables reside, obtained from `get_connection_schemas`.
      - tables (required): A comma-separated string of table names for which to retrieve columns
        (e.g., "users,orders,products"), obtained from `get_connection_tables`.
      - database (optional): The name of the database to filter by. Only applicable for connections
        that support multiple databases (check with `get_connections`).

      Output:
      A JSON array of objects, where each object represents a column and contains details
      such as `table_name`, `column_name`, `data_type`, and `is_nullable`.
```

## Reference
@@ -27,10 +27,17 @@ tools:
    kind: looker-get-connection-tables
    source: looker-source
    description: |
      This tool retrieves a list of tables available within a specified database schema
      through a Looker connection.

      Parameters:
      - connection_name (required): The name of the database connection, obtained from `get_connections`.
      - schema (required): The name of the schema to list tables from, obtained from `get_connection_schemas`.
      - database (optional): The name of the database to filter by. Only applicable for connections
        that support multiple databases (check with `get_connections`).

      Output:
      A JSON array of strings, where each string is the name of an available table.
```

## Reference
@@ -26,11 +26,18 @@ tools:
    kind: looker-get-connections
    source: looker-source
    description: |
      This tool retrieves a list of all database connections configured in the Looker system.

      Parameters:
      This tool takes no parameters.

      Output:
      A JSON array of objects, each representing a database connection and including details such as:
      - `name`: The connection's unique identifier.
      - `dialect`: The database dialect (e.g., "mysql", "postgresql", "bigquery").
      - `default_schema`: The default schema for the connection.
      - `database`: The associated database name (if applicable).
      - `supports_multiple_databases`: A boolean indicating if the connection can access multiple databases.
```

## Reference
@@ -29,25 +29,29 @@ default to 100 and 0.

```yaml
tools:
  get_dashboards:
    kind: looker-get-dashboards
    source: looker-source
    description: |
      This tool searches for saved dashboards in a Looker instance. It returns a list of JSON objects, each representing a dashboard.

      Search Parameters:
      - title (optional): Filter by dashboard title (supports wildcards).
      - folder_id (optional): Filter by the ID of the folder where the dashboard is saved.
      - user_id (optional): Filter by the ID of the user who created the dashboard.
      - description (optional): Filter by description content (supports wildcards).
      - id (optional): Filter by specific dashboard ID.
      - limit (optional): Maximum number of results to return. Defaults to a system limit.
      - offset (optional): Starting point for pagination.

      String Search Behavior:
      - Case-insensitive matching.
      - Supports SQL LIKE pattern match wildcards:
        - `%`: Matches any sequence of zero or more characters. (e.g., `"finan%"` matches "financial", "finance")
        - `_`: Matches any single character. (e.g., `"s_les"` matches "sales")
      - Special expressions for null checks:
        - `"IS NULL"`: Matches dashboards where the field is null.
        - `"NOT NULL"`: Excludes dashboards where the field is null.
```
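As an illustration, a search that combines a wildcard, a null check, and pagination might pass arguments like the hypothetical sketch below (values and types are illustrative):

```json
{
  "title": "sales%",
  "folder_id": "NOT NULL",
  "limit": "25",
  "offset": "0"
}
```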

## Reference
@@ -28,16 +28,20 @@ tools:
    kind: looker-get-dimensions
    source: looker-source
    description: |
      This tool retrieves a list of dimensions defined within a specific Looker explore.
      Dimensions are non-aggregatable attributes or characteristics of your data
      (e.g., product name, order date, customer city) that can be used for grouping,
      filtering, or segmenting query results.

      Parameters:
      - model_name (required): The name of the LookML model, obtained from `get_models`.
      - explore_name (required): The name of the explore within the model, obtained from `get_explores`.

      Output Details:
      - If a dimension includes a `suggestions` field, its contents are valid values
        that can be used directly as filters for that dimension.
      - If a `suggest_explore` and `suggest_dimension` are provided, you can query
        that specified explore and dimension to retrieve a list of valid filter values.
```
@@ -40,10 +40,13 @@ tools:
    kind: looker-get-explores
    source: looker-source
    description: |
      This tool retrieves a list of explores defined within a specific LookML model.
      Explores represent a curated view of your data, typically joining several
      tables together to allow for focused analysis on a particular subject area.
      The output provides details like the explore's `name` and `label`.

      Parameters:
      - model_name (required): The name of the LookML model, obtained from `get_models`.
```

## Reference
@@ -24,15 +24,22 @@ It's compatible with the following sources:

```yaml
tools:
  get_filters:
    kind: looker-get-filters
    source: looker-source
    description: |
      This tool retrieves a list of "filter-only fields" defined within a specific
      Looker explore. These are special fields defined in LookML specifically to
      create user-facing filter controls that do not directly affect the `GROUP BY`
      clause of the SQL query. They are often used in conjunction with liquid templating
      to create dynamic queries.

      Note: Regular dimensions and measures can also be used as filters in a query.
      This tool *only* returns fields explicitly defined as `filter:` in LookML.

      Parameters:
      - model_name (required): The name of the LookML model, obtained from `get_models`.
      - explore_name (required): The name of the explore within the model, obtained from `get_explores`.
```

The response is a json array with the following elements:
@@ -34,21 +34,26 @@ tools:
    kind: looker-get-looks
    source: looker-source
    description: |
      This tool searches for saved Looks (pre-defined queries and visualizations)
      in a Looker instance. It returns a list of JSON objects, each representing a Look.

      Search Parameters:
      - title (optional): Filter by Look title (supports wildcards).
      - folder_id (optional): Filter by the ID of the folder where the Look is saved.
      - user_id (optional): Filter by the ID of the user who created the Look.
      - description (optional): Filter by description content (supports wildcards).
      - id (optional): Filter by specific Look ID.
      - limit (optional): Maximum number of results to return. Defaults to a system limit.
      - offset (optional): Starting point for pagination.

      String Search Behavior:
      - Case-insensitive matching.
      - Supports SQL LIKE pattern match wildcards:
        - `%`: Matches any sequence of zero or more characters. (e.g., `"dan%"` matches "danger", "Danzig")
        - `_`: Matches any single character. (e.g., `"D_m%"` matches "Damage", "dump")
      - Special expressions for null checks:
        - `"IS NULL"`: Matches Looks where the field is null.
        - `"NOT NULL"`: Excludes Looks where the field is null.
```

## Reference
@@ -28,16 +28,19 @@ tools:
    kind: looker-get-measures
    source: looker-source
    description: |
      This tool retrieves a list of measures defined within a specific Looker explore.
      Measures are aggregatable metrics (e.g., total sales, average price, count of users)
      that are used for calculations and quantitative analysis in your queries.

      Parameters:
      - model_name (required): The name of the LookML model, obtained from `get_models`.
      - explore_name (required): The name of the explore within the model, obtained from `get_explores`.

      Output Details:
      - If a measure includes a `suggestions` field, its contents are valid values
        that can be used directly as filters for that measure.
      - If a `suggest_explore` and `suggest_dimension` are provided, you can query
        that specified explore and dimension to retrieve a list of valid filter values.
```
@@ -26,9 +26,12 @@ tools:
    kind: looker-get-models
    source: looker-source
    description: |
      This tool retrieves a list of available LookML models in the Looker instance.
      LookML models define the data structure and relationships that users can query.
      The output includes details like the model's `name` and `label`, which are
      essential for subsequent calls to tools like `get_explores` or `query`.

      This tool takes no parameters.
```

## Reference
@@ -28,11 +28,15 @@ tools:
    kind: looker-get-parameters
    source: looker-source
    description: |
      This tool retrieves a list of parameters defined within a specific Looker explore.
      LookML parameters are dynamic input fields that allow users to influence query
      behavior without directly modifying the underlying LookML. They are often used
      with `liquid` templating to create flexible dashboards and reports, enabling
      users to choose dimensions, measures, or other query components at runtime.

      Parameters:
      - model_name (required): The name of the LookML model, obtained from `get_models`.
      - explore_name (required): The name of the explore within the model, obtained from `get_explores`.
```

The response is a json array with the following elements:
@@ -26,10 +26,15 @@ tools:
    kind: looker-get-project-file
    source: looker-source
    description: |
      This tool retrieves the raw content of a specific LookML file from within a project.

      Parameters:
      - project_id (required): The unique ID of the LookML project, obtained from `get_projects`.
      - file_path (required): The path to the LookML file within the project,
        typically obtained from `get_project_files`.

      Output:
      The raw text content of the specified LookML file.
```

## Reference
@@ -26,10 +26,15 @@ tools:
    kind: looker-get-project-files
    source: looker-source
    description: |
      This tool retrieves a list of all LookML files within a specified project,
      providing details about each file.

      Parameters:
      - project_id (required): The unique ID of the LookML project, obtained from `get_projects`.

      Output:
      A JSON array of objects, each representing a LookML file and containing
      details such as `path`, `id`, `type`, and `git_status`.
```

## Reference
@@ -26,10 +26,16 @@ tools:
    kind: looker-get-projects
    source: looker-source
    description: |
      This tool retrieves a list of all LookML projects available on the Looker instance.
      It is useful for identifying projects before performing actions like retrieving
      project files or making modifications.

      Parameters:
      This tool takes no parameters.

      Output:
      A JSON array of objects, each containing the `project_id` and `project_name`
      for a LookML project.
```

## Reference
@@ -42,17 +42,18 @@ tools:
    kind: looker-health-analyze
    source: looker-source
    description: |
      This tool calculates the usage statistics for Looker projects, models, and explores.

      Parameters:
      - action (required): The type of resource to analyze. Can be `"projects"`, `"models"`, or `"explores"`.
      - project (optional): The specific project ID to analyze.
      - model (optional): The specific model name to analyze. Requires `project` if used without `explore`.
      - explore (optional): The specific explore name to analyze. Requires `model` if used.
      - timeframe (optional): The lookback period in days for usage data. Defaults to `90` days.
      - min_queries (optional): The minimum number of queries for a resource to be considered active. Defaults to `1`.

      Output:
      The result is a JSON object containing usage metrics for the specified resources.
```

## Reference
@@ -49,20 +49,22 @@ tools:
    kind: looker-health-pulse
    source: looker-source
    description: |
      This tool performs various health checks on a Looker instance.

      Parameters:
      - action (required): Specifies the type of health check to perform.
        Choose one of the following:
        - `check_db_connections`: Verifies database connectivity.
        - `check_dashboard_performance`: Assesses dashboard loading performance.
        - `check_dashboard_errors`: Identifies errors within dashboards.
        - `check_explore_performance`: Evaluates explore query performance.
        - `check_schedule_failures`: Reports on failed scheduled deliveries.
        - `check_legacy_features`: Checks for the usage of legacy features.

      Note on `check_legacy_features`:
      This action is exclusively available in Looker Core instances. If invoked
      on a non-Looker Core instance, it will return a notice rather than an error.
      This notice should be considered normal behavior and not an indication of an issue.
```

## Reference
@@ -39,20 +39,19 @@ tools:
    kind: looker-health-vacuum
    source: looker-source
    description: |
      This tool identifies and suggests LookML models or explores that can be
      safely removed due to inactivity or low usage.

      Parameters:
      - action (required): The type of resource to analyze for removal candidates. Can be `"models"` or `"explores"`.
      - project (optional): The specific project ID to consider.
      - model (optional): The specific model name to consider. Requires `project` if used without `explore`.
      - explore (optional): The specific explore name to consider. Requires `model` if used.
      - timeframe (optional): The lookback period in days to assess usage. Defaults to `90` days.
      - min_queries (optional): The minimum number of queries for a resource to be considered active. Defaults to `1`.

      Output:
      A JSON array of objects, each representing a model or explore that is a candidate for deletion due to low usage.
```

| **field** | **type** | **required** | **description** |
@@ -30,18 +30,19 @@ tools:
    kind: looker-make-dashboard
    source: looker-source
    description: |
      This tool creates a new, empty dashboard in Looker. Dashboards are stored
      in the user's personal folder, and the dashboard name must be unique.
      After creation, use `add_dashboard_filter` to add filters and
      `add_dashboard_element` to add content tiles.

      Required Parameters:
      - title (required): A unique title for the new dashboard.
      - description (required): A brief description of the dashboard's purpose.

      Output:
      A JSON object containing a link (`url`) to the newly created dashboard and
      its unique `id`. This `dashboard_id` is crucial for subsequent calls to
      `add_dashboard_filter` and `add_dashboard_element`.
```

## Reference
@@ -40,20 +40,24 @@ tools:
    kind: looker-make-look
    source: looker-source
    description: |
      This tool creates a new Look (saved query with visualization) in Looker.
      The Look will be saved in the user's personal folder, and its name must be unique.

      Required Parameters:
      - title: A unique title for the new Look.
      - description: A brief description of the Look's purpose.
      - model_name: The name of the LookML model (from `get_models`).
      - explore_name: The name of the explore (from `get_explores`).
      - fields: A list of field names (dimensions, measures, filters, or parameters) to include in the query.

      Optional Parameters:
      - pivots, filters, sorts, limit, query_timezone: These parameters are identical
        to those described for the `query` tool.
      - vis_config: A JSON object defining the visualization settings for the Look.
        The structure and options are the same as for the `query_url` tool's `vis_config`.

      Output:
      A JSON object containing a link (`url`) to the newly created Look, along with its `id` and `slug`.
```

## Reference
@@ -41,38 +41,17 @@ tools:
    kind: looker-query-sql
    source: looker-source
    description: |
      This tool generates the underlying SQL query that Looker would execute
      against the database for a given set of parameters. It is useful for
      understanding how Looker translates a request into SQL.

      Parameters:
      All parameters for this tool are identical to those of the `query` tool.
      This includes `model_name`, `explore_name`, `fields` (required),
      and optional parameters like `pivots`, `filters`, `sorts`, `limit`, and `query_timezone`.

      Output:
      The result of this tool is the raw SQL text.
```

## Reference
@@ -37,17 +37,21 @@ tools:
    kind: looker-query-url
    source: looker-source
    description: |
      This tool generates a shareable URL for a Looker query, allowing users to
      explore the query further within the Looker UI. It returns the generated URL,
      along with the `query_id` and `slug`.

      Parameters:
      All query parameters (e.g., `model_name`, `explore_name`, `fields`, `pivots`,
      `filters`, `sorts`, `limit`, `query_timezone`) are the same as the `query` tool.

      Additionally, it accepts an optional `vis_config` parameter:
      - vis_config (optional): A JSON object that controls the default visualization
        settings for the generated query.

      vis_config Details:
      The `vis_config` object supports a wide range of properties for various chart types.
      Here are some notes on making visualizations.

      ### Cartesian Charts (Area, Bar, Column, Line, Scatter)
@@ -41,38 +41,24 @@ tools:
    kind: looker-query
    source: looker-source
    description: |
      This tool runs a query against a LookML model and returns the results in JSON format.

      Required Parameters:
      - model_name: The name of the LookML model (from `get_models`).
      - explore_name: The name of the explore (from `get_explores`).
      - fields: A list of field names (dimensions, measures, filters, or parameters) to include in the query.

      Optional Parameters:
      - pivots: A list of fields to pivot the results by. These fields must also be included in the `fields` list.
      - filters: A map of filter expressions, e.g., `{"view.field": "value", "view.date": "7 days"}`.
        - Do not quote field names.
        - Use `not null` instead of `-NULL`.
        - If a value contains a comma, enclose it in single quotes (e.g., "'New York, NY'").
      - sorts: A list of fields to sort by, optionally including direction (e.g., `["view.field desc"]`).
      - limit: Row limit (default 500). Use "-1" for unlimited.
      - query_timezone: specific timezone for the query (e.g. `America/Los_Angeles`).

      Note: Use `get_dimensions`, `get_measures`, `get_filters`, and `get_parameters` to find valid fields.

      The result of the query tool is JSON.
```
||||
@@ -27,11 +27,15 @@ tools:
    kind: looker-run-dashboard
    source: looker-source
    description: |
      run_dashboard Tool
      This tool executes the queries associated with each tile in a specified dashboard
      and returns the aggregated data in a JSON structure.

      This tool runs the query associated with each tile in a dashboard
      and returns the data in a JSON structure. It accepts the dashboard_id
      as the parameter.
      Parameters:
      - dashboard_id (required): The unique identifier of the dashboard to run,
        typically obtained from the `get_dashboards` tool.

      Output:
      The data from all dashboard tiles is returned as a JSON object.
```

## Reference

@@ -27,11 +27,15 @@ tools:
    kind: looker-run-look
    source: looker-source
    description: |
      run_look Tool
      This tool executes the query associated with a saved Look and
      returns the resulting data in a JSON structure.

      This tool runs the query associated with a look and returns
      the data in a JSON structure. It accepts the look_id as the
      parameter.
      Parameters:
      - look_id (required): The unique identifier of the Look to run,
        typically obtained from the `get_looks` tool.

      Output:
      The query results are returned as a JSON object.
```

## Reference

@@ -27,13 +27,17 @@ tools:
    kind: looker-update-project-file
    source: looker-source
    description: |
      update_project_file Tool
      This tool modifies the content of an existing LookML file within a specified project.

      Given a project_id and a file path within the project, as well as the content
      of a LookML file, this tool will modify the file within the project.
      Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first.

      This tool must be called after the dev_mode tool has changed the session to
      dev mode.
      Parameters:
      - project_id (required): The unique ID of the LookML project.
      - file_path (required): The exact path to the LookML file to modify within the project.
      - content (required): The new, complete LookML content to overwrite the existing file.

      Output:
      A confirmation message upon successful file modification.
```

## Reference

@@ -64,5 +64,3 @@ tools:
| filterParams | list | false | A list of parameter objects that define the variables used in the `filterPayload`. |
| projectPayload | string | false | An optional MongoDB projection document to specify which fields to include (1) or exclude (0) in the result. |
| projectParams | list | false | A list of parameter objects for the `projectPayload`. |
| sortPayload | string | false | An optional MongoDB sort document. Useful for selecting which document to return if the filter matches multiple (e.g., get the most recent). |
| sortParams | list | false | A list of parameter objects for the `sortPayload`. |

@@ -48,11 +48,11 @@ in the `data` parameter, like this:

## Reference

| **field** | **type** | **required** | **description** |
|:------------|:---------|:-------------|:----------------|
| kind | string | true | Must be `mongodb-insert-many`. |
| source | string | true | The name of the `mongodb` source to use. |
| description | string | true | A description of the tool that is passed to the LLM. |
| database | string | true | The name of the MongoDB database containing the collection. |
| collection | string | true | The name of the MongoDB collection into which the documents will be inserted. |
| canonical | bool | true | Determines if the data string is parsed using MongoDB's Canonical or Relaxed Extended JSON format. |
| **field** | **type** | **required** | **description** |
|:------------|:---------|:-------------|:----------------|
| kind | string | true | Must be `mongodb-insert-many`. |
| source | string | true | The name of the `mongodb` source to use. |
| description | string | true | A description of the tool that is passed to the LLM. |
| database | string | true | The name of the MongoDB database containing the collection. |
| collection | string | true | The name of the MongoDB collection into which the documents will be inserted. |
| canonical | bool | false | Determines if the data string is parsed using MongoDB's Canonical or Relaxed Extended JSON format. Defaults to `false`. |

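To make the `canonical` flag concrete, a minimal sketch of an insert-many config that keeps the relaxed default (the source, database, and collection names here are hypothetical):

```yaml
tools:
  insert_events:
    kind: mongodb-insert-many
    source: my-mongodb-source
    description: Inserts a batch of event documents passed as a JSON array.
    database: analytics
    collection: events
    # Relaxed parsing accepts plain literals such as 42; canonical parsing
    # would instead require typed forms such as {"$numberInt": "42"}.
    canonical: false
```
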
@@ -43,11 +43,11 @@ An LLM would call this tool by providing the document as a JSON string in the

## Reference

| **field** | **type** | **required** | **description** |
|:------------|:---------|:-------------|:----------------|
| kind | string | true | Must be `mongodb-insert-one`. |
| source | string | true | The name of the `mongodb` source to use. |
| description | string | true | A description of the tool that is passed to the LLM. |
| database | string | true | The name of the MongoDB database containing the collection. |
| collection | string | true | The name of the MongoDB collection into which the document will be inserted. |
| canonical | bool | true | Determines if the data string is parsed using MongoDB's Canonical or Relaxed Extended JSON format. |
| **field** | **type** | **required** | **description** |
|:------------|:---------|:-------------|:----------------|
| kind | string | true | Must be `mongodb-insert-one`. |
| source | string | true | The name of the `mongodb` source to use. |
| description | string | true | A description of the tool that is passed to the LLM. |
| database | string | true | The name of the MongoDB database containing the collection. |
| collection | string | true | The name of the MongoDB collection into which the document will be inserted. |
| canonical | bool | false | Determines if the data string is parsed using MongoDB's Canonical or Relaxed Extended JSON format. Defaults to `false`. |

@@ -57,16 +57,16 @@ tools:

## Reference

| **field** | **type** | **required** | **description** |
|:--------------|:---------|:-------------|:----------------|
| kind | string | true | Must be `mongodb-update-many`. |
| source | string | true | The name of the `mongodb` source to use. |
| description | string | true | A description of the tool that is passed to the LLM. |
| database | string | true | The name of the MongoDB database containing the collection. |
| collection | string | true | The name of the MongoDB collection in which to update documents. |
| filterPayload | string | true | The MongoDB query filter document to select the documents for updating. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
| filterParams | list | false | A list of parameter objects that define the variables used in the `filterPayload`. |
| updatePayload | string | true | The MongoDB update document. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
| updateParams | list | true | A list of parameter objects that define the variables used in the `updatePayload`. |
| canonical | bool | true | Determines if the `filterPayload` and `updatePayload` strings are parsed using MongoDB's Canonical or Relaxed Extended JSON format. **Canonical** is stricter about type representation, while **Relaxed** is more lenient. |
| upsert | bool | false | If `true`, a new document is created if no document matches the `filterPayload`. Defaults to `false`. |
| **field** | **type** | **required** | **description** |
|:--------------|:---------|:-------------|:----------------|
| kind | string | true | Must be `mongodb-update-many`. |
| source | string | true | The name of the `mongodb` source to use. |
| description | string | true | A description of the tool that is passed to the LLM. |
| database | string | true | The name of the MongoDB database containing the collection. |
| collection | string | true | The name of the MongoDB collection in which to update documents. |
| filterPayload | string | true | The MongoDB query filter document to select the documents for updating. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
| filterParams | list | false | A list of parameter objects that define the variables used in the `filterPayload`. |
| updatePayload | string | true | The MongoDB update document. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
| updateParams | list | true | A list of parameter objects that define the variables used in the `updatePayload`. |
| canonical | bool | false | Determines if the `filterPayload` and `updatePayload` strings are parsed using MongoDB's Canonical or Relaxed Extended JSON format. **Canonical** is stricter about type representation, while **Relaxed** is more lenient. Defaults to `false`. |
| upsert | bool | false | If `true`, a new document is created if no document matches the `filterPayload`. Defaults to `false`. |

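To make the template mechanics concrete, a minimal sketch of an update-many config (database, collection, and parameter names are hypothetical; the parameter objects are sketched with the usual `name`/`type`/`description` fields):

```yaml
tools:
  mark_orders_shipped:
    kind: mongodb-update-many
    source: my-mongodb-source
    description: Marks all of a customer's orders with a new status.
    database: sales
    collection: orders
    # {{json .customer_id}} is replaced with the JSON-encoded parameter value.
    filterPayload: |
      { "customer_id": {{json .customer_id}} }
    filterParams:
      - name: customer_id
        type: string
        description: The customer whose orders to update.
    updatePayload: |
      { "$set": { "status": {{json .new_status}} } }
    updateParams:
      - name: new_status
        type: string
        description: The new order status.
    canonical: false
    upsert: false
```
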
@@ -57,16 +57,16 @@ tools:

## Reference

| **field** | **type** | **required** | **description** |
|:--------------|:---------|:-------------|:----------------|
| kind | string | true | Must be `mongodb-update-one`. |
| source | string | true | The name of the `mongodb` source to use. |
| description | string | true | A description of the tool that is passed to the LLM. |
| database | string | true | The name of the MongoDB database containing the collection. |
| collection | string | true | The name of the MongoDB collection to update a document in. |
| filterPayload | string | true | The MongoDB query filter document to select the document for updating. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
| filterParams | list | false | A list of parameter objects that define the variables used in the `filterPayload`. |
| updatePayload | string | true | The MongoDB update document, which specifies the modifications. This often uses update operators like `$set`. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
| updateParams | list | true | A list of parameter objects that define the variables used in the `updatePayload`. |
| canonical | bool | true | Determines if the `updatePayload` string is parsed using MongoDB's Canonical or Relaxed Extended JSON format. **Canonical** is stricter about type representation (e.g., `{"$numberInt": "42"}`), while **Relaxed** is more lenient (e.g., `42`). |
| upsert | bool | false | If `true`, a new document is created if no document matches the `filterPayload`. Defaults to `false`. |
| **field** | **type** | **required** | **description** |
|:--------------|:---------|:-------------|:----------------|
| kind | string | true | Must be `mongodb-update-one`. |
| source | string | true | The name of the `mongodb` source to use. |
| description | string | true | A description of the tool that is passed to the LLM. |
| database | string | true | The name of the MongoDB database containing the collection. |
| collection | string | true | The name of the MongoDB collection to update a document in. |
| filterPayload | string | true | The MongoDB query filter document to select the document for updating. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
| filterParams | list | false | A list of parameter objects that define the variables used in the `filterPayload`. |
| updatePayload | string | true | The MongoDB update document, which specifies the modifications. This often uses update operators like `$set`. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
| updateParams | list | true | A list of parameter objects that define the variables used in the `updatePayload`. |
| canonical | bool | false | Determines if the `updatePayload` string is parsed using MongoDB's Canonical or Relaxed Extended JSON format. **Canonical** is stricter about type representation (e.g., `{"$numberInt": "42"}`), while **Relaxed** is more lenient (e.g., `42`). Defaults to `false`. |
| upsert | bool | false | If `true`, a new document is created if no document matches the `filterPayload`. Defaults to `false`. |

docs/en/resources/tools/mysql/mysql-get-query-plan.md (new file, 39 lines)
@@ -0,0 +1,39 @@
---
title: "mysql-get-query-plan"
type: docs
weight: 1
description: >
  A "mysql-get-query-plan" tool gets the execution plan for a SQL statement against a MySQL
  database.
aliases:
- /resources/tools/mysql-get-query-plan
---

## About

A `mysql-get-query-plan` tool gets the execution plan for a SQL statement against a MySQL
database. It's compatible with any of the following sources:

- [cloud-sql-mysql](../../sources/cloud-sql-mysql.md)
- [mysql](../../sources/mysql.md)

`mysql-get-query-plan` takes one input parameter `sql_statement` and gets the execution plan for the SQL
statement against the `source`.

## Example

```yaml
tools:
  get_query_plan_tool:
    kind: mysql-get-query-plan
    source: my-mysql-instance
    description: Use this tool to get the execution plan for a sql statement.
```

## Reference

| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| kind | string | true | Must be "mysql-get-query-plan". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

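For illustration, a hypothetical invocation payload for the tool configured above (the statement itself is arbitrary; the tool returns the execution plan for it):

```yaml
sql_statement: "SELECT o.id, c.name FROM orders o JOIN customers c ON o.customer_id = c.id WHERE o.status = 'open'"
```
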
@@ -0,0 +1,95 @@
---
title: "postgres-list-database-stats"
type: docs
weight: 1
description: >
  The "postgres-list-database-stats" tool lists key performance and activity statistics of PostgreSQL databases.
aliases:
- /resources/tools/postgres-list-database-stats
---

## About

The `postgres-list-database-stats` tool lists the key performance and activity statistics for each PostgreSQL database in the instance, offering insights into cache efficiency, transaction throughput, row-level activity, temporary file usage, and contention. It's compatible with
any of the following sources:

- [alloydb-postgres](../../sources/alloydb-pg.md)
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
- [postgres](../../sources/postgres.md)

`postgres-list-database-stats` lists detailed information as JSON for each database. The tool
takes the following input parameters:

- `database_name` (optional): A text to filter results by database name. Default: `""`
- `include_templates` (optional): Boolean, set to `true` to include template databases in the results. Default: `false`
- `database_owner` (optional): A text to filter results by database owner. Default: `""`
- `default_tablespace` (optional): A text to filter results by the default tablespace name. Default: `""`
- `order_by` (optional): Specifies the sorting order. Valid values are `'size'` (descending) or `'commit'` (descending). Default: `database_name` ascending.
- `limit` (optional): The maximum number of databases to return. Default: `10`

## Example

```yaml
tools:
  list_database_stats:
    kind: postgres-list-database-stats
    source: postgres-source
    description: |
      Lists the key performance and activity statistics for each PostgreSQL
      database in the instance, offering insights into cache efficiency,
      transaction throughput, row-level activity, temporary file usage, and
      contention. It returns: the database name, whether the database is
      connectable, database owner, default tablespace name, the percentage of
      data blocks found in the buffer cache rather than being read from disk
      (a higher value indicates better cache performance), the total number of
      disk blocks read from disk, the total number of times disk blocks were
      found already in the cache; the total number of committed transactions,
      the total number of rolled back transactions, the percentage of rolled
      back transactions compared to the total number of completed
      transactions, the total number of rows returned by queries, the total
      number of live rows fetched by scans, the total number of rows inserted,
      the total number of rows updated, the total number of rows deleted, the
      number of temporary files created by queries, the total size of
      temporary files used by queries in bytes, the number of query
      cancellations due to conflicts with recovery, the number of deadlocks
      detected, the current number of active backend connections, the
      timestamp when the database statistics were last reset, and the total
      database size in bytes.
```

The response is a json array with the following elements:

```json
{
  "database_name": "Name of the database",
  "is_connectable": "Boolean indicating whether the database allows connections",
  "database_owner": "Username of the database owner",
  "default_tablespace": "Name of the default tablespace for the database",
  "cache_hit_ratio_percent": "The percentage of data blocks found in the buffer cache rather than being read from disk",
  "blocks_read_from_disk": "The total number of disk blocks read for this database",
  "blocks_hit_in_cache": "The total number of times disk blocks were found already in the cache.",
  "xact_commit": "The total number of committed transactions",
  "xact_rollback": "The total number of rolled back transactions",
  "rollback_ratio_percent": "The percentage of rolled back transactions compared to the total number of completed transactions",
  "rows_returned_by_queries": "The total number of rows returned by queries",
  "rows_fetched_by_scans": "The total number of live rows fetched by scans",
  "tup_inserted": "The total number of rows inserted",
  "tup_updated": "The total number of rows updated",
  "tup_deleted": "The total number of rows deleted",
  "temp_files": "The number of temporary files created by queries",
  "temp_size_bytes": "The total size of temporary files used by queries in bytes",
  "conflicts": "Number of query cancellations due to conflicts",
  "deadlocks": "Number of deadlocks detected",
  "active_connections": "The current number of active backend connections",
  "statistics_last_reset": "The timestamp when the database statistics were last reset",
  "database_size_bytes": "The total disk size of the database in bytes"
}
```

## Reference

| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|------------------------------------------------------|
| kind | string | true | Must be "postgres-list-database-stats". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | false | Description of the tool that is passed to the agent. |

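For illustration, a hypothetical invocation payload that surfaces the five largest non-template databases (parameter names as documented above):

```yaml
include_templates: false   # leave template0/template1 out of the results
order_by: size             # sort by total database size, descending
limit: 5
```
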
@@ -21,12 +21,10 @@ any of the following sources:
`postgres-list-indexes` lists detailed information as JSON for indexes. The tool
takes the following input parameters:

- `table_name` (optional): A text to filter results by table name. The input is
  used within a LIKE clause. Default: `""`
- `index_name` (optional): A text to filter results by index name. The input is
  used within a LIKE clause. Default: `""`
- `schema_name` (optional): A text to filter results by schema name. The input
  is used within a LIKE clause. Default: `""`
- `table_name` (optional): A text to filter results by table name. Default: `""`
- `index_name` (optional): A text to filter results by index name. Default: `""`
- `schema_name` (optional): A text to filter results by schema name. Default: `""`
- `only_unused` (optional): If true, returns indexes that have never been used.
- `limit` (optional): The maximum number of rows to return. Default: `50`.

## Example

@@ -0,0 +1,59 @@
---
title: "postgres-list-pg-settings"
type: docs
weight: 1
description: >
  The "postgres-list-pg-settings" tool lists PostgreSQL run-time configuration settings.
aliases:
- /resources/tools/postgres-list-pg-settings
---

## About

The `postgres-list-pg-settings` tool lists the configuration parameters for the postgres server, their current values, and related information. It's compatible with any of the following sources:

- [alloydb-postgres](../../sources/alloydb-pg.md)
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
- [postgres](../../sources/postgres.md)

`postgres-list-pg-settings` lists detailed information as JSON for each setting. The tool
takes the following input parameters:

- `setting_name` (optional): A text to filter results by setting name. Default: `""`
- `limit` (optional): The maximum number of rows to return. Default: `50`.

## Example

```yaml
tools:
  list_pg_settings:
    kind: postgres-list-pg-settings
    source: postgres-source
    description: |
      Lists configuration parameters for the postgres server ordered lexicographically,
      with a default limit of 50 rows. It returns the parameter name, its current setting,
      unit of measurement, a short description, the source of the current setting (e.g.,
      default, configuration file, session), and whether a restart is required when the
      parameter value is changed.
```

The response is a json array with the following elements:

```json
{
  "name": "Setting name",
  "current_value": "Current value of the setting",
  "unit": "Unit of the setting",
  "short_desc": "Short description of the setting",
  "source": "Source of the current value (e.g., default, configuration file, session)",
  "requires_restart": "Indicates if a server restart is required to apply a change ('Yes', 'No', or 'No (Reload sufficient)')"
}
```

## Reference

| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|------------------------------------------------------|
| kind | string | true | Must be "postgres-list-pg-settings". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | false | Description of the tool that is passed to the agent. |

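A hypothetical invocation payload, narrowing the listing to autovacuum-related settings (this assumes `setting_name` does substring-style matching like the other list tools):

```yaml
setting_name: autovacuum   # text filter on the setting name
limit: 20
```
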
@@ -0,0 +1,66 @@
---
title: "postgres-list-publication-tables"
type: docs
weight: 1
description: >
  The "postgres-list-publication-tables" tool lists publication tables in a Postgres database.
aliases:
- /resources/tools/postgres-list-publication-tables
---

## About

The `postgres-list-publication-tables` tool lists all publication tables in the database. It's compatible with any of the following sources:

- [alloydb-postgres](../../sources/alloydb-pg.md)
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
- [postgres](../../sources/postgres.md)

`postgres-list-publication-tables` lists detailed information as JSON for publication tables. A publication table in PostgreSQL is a
table that is explicitly included as a source for replication within a publication (a set of changes generated from a table or group
of tables) as part of the logical replication feature. The tool takes the following input parameters:

- `table_names` (optional): Filters by a comma-separated list of table names. Default: `""`
- `publication_names` (optional): Filters by a comma-separated list of publication names. Default: `""`
- `schema_names` (optional): Filters by a comma-separated list of schema names. Default: `""`
- `limit` (optional): The maximum number of rows to return. Default: `50`

## Example

```yaml
tools:
  list_publication_tables:
    kind: postgres-list-publication-tables
    source: postgres-source
    description: |
      Lists all tables that are explicitly part of a publication in the database.
      Tables that are part of a publication via 'FOR ALL TABLES' are not included,
      unless they are also explicitly added to the publication.
      Returns the publication name, schema name, and table name, along with
      definition details indicating if it publishes all tables, whether it
      replicates inserts, updates, deletes, or truncates, and the publication
      owner.
```

The response is a JSON array with the following elements:

```json
{
  "publication_name": "Name of the publication",
  "schema_name": "Name of the schema the table belongs to",
  "table_name": "Name of the table",
  "publishes_all_tables": "boolean indicating if the publication was created with FOR ALL TABLES",
  "publishes_inserts": "boolean indicating if INSERT operations are replicated",
  "publishes_updates": "boolean indicating if UPDATE operations are replicated",
  "publishes_deletes": "boolean indicating if DELETE operations are replicated",
  "publishes_truncates": "boolean indicating if TRUNCATE operations are replicated",
  "publication_owner": "Username of the database role that owns the publication"
}
```

## Reference

| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|------------------------------------------------------|
| kind | string | true | Must be "postgres-list-publication-tables". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | false | Description of the tool that is passed to the agent. |

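A hypothetical invocation payload that narrows the listing to one schema and table (the names are made up):

```yaml
schema_names: public
table_names: orders   # comma-separated list; a single name works too
limit: 10
```
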
docs/en/resources/tools/postgres/postgres-list-roles.md (new file, 70 lines)
@@ -0,0 +1,70 @@
---
title: "postgres-list-roles"
type: docs
weight: 1
description: >
  The "postgres-list-roles" tool lists user-created roles in a Postgres database.
aliases:
- /resources/tools/postgres-list-roles
---

## About

The `postgres-list-roles` tool lists all the user-created roles in the instance, excluding system roles (like `cloudsql%` or `pg_%`). It provides details about each role's attributes and memberships. It's compatible with
any of the following sources:

- [alloydb-postgres](../../sources/alloydb-pg.md)
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
- [postgres](../../sources/postgres.md)

`postgres-list-roles` lists detailed information as JSON for each role. The tool
takes the following input parameters:

- `role_name` (optional): A text to filter results by role name. Default: `""`
- `limit` (optional): The maximum number of roles to return. Default: `50`

## Example

```yaml
tools:
  list_roles:
    kind: postgres-list-roles
    source: postgres-source
    description: |
      Lists all the user-created roles in the instance. It returns the role name,
      Object ID, the maximum number of concurrent connections the role can make,
      along with boolean indicators for: superuser status, privilege inheritance
      from member roles, ability to create roles, ability to create databases,
      ability to log in, replication privilege, and the ability to bypass
      row-level security, the password expiration timestamp, a list of direct
      members belonging to this role, and a list of other roles/groups that this
      role is a member of.
```

The response is a json array with the following elements:

```json
{
  "role_name": "Name of the role",
  "oid": "Object ID of the role",
  "connection_limit": "Maximum concurrent connections allowed (-1 for no limit)",
  "is_superuser": "Boolean, true if the role is a superuser",
  "inherits_privileges": "Boolean, true if the role inherits privileges of roles it is a member of",
  "can_create_roles": "Boolean, true if the role can create other roles",
  "can_create_db": "Boolean, true if the role can create databases",
  "can_login": "Boolean, true if the role can log in",
  "is_replication_role": "Boolean, true if this is a replication role",
  "bypass_rls": "Boolean, true if the role bypasses row-level security policies",
  "valid_until": "Timestamp until the password is valid (null if forever)",
  "direct_members": ["Array of role names that are direct members of this role"],
  "member_of": ["Array of role names that this role is a member of"]
}
```

## Reference

| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|------------------------------------------------------|
| kind | string | true | Must be "postgres-list-roles". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | false | Description of the tool that is passed to the agent. |

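A hypothetical invocation payload that looks up roles whose name contains "app":

```yaml
role_name: app   # text filter on the role name
limit: 10
```
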
@@ -21,9 +21,9 @@ the following sources:
`postgres-list-schemas` lists detailed information as JSON for each schema. The
tool takes the following input parameters:

- `schema_name` (optional): A pattern to filter schema names using SQL LIKE
  operator. If omitted, all user-defined schemas are returned.
- `schema_name` (optional): A text to filter results by schema name. Default: `""`
- `owner` (optional): A text to filter results by owner name. Default: `""`
- `limit` (optional): The maximum number of rows to return. Default: `50`.

## Example

@@ -20,9 +20,9 @@ Postgres database. It's compatible with any of the following sources:
`postgres-list-sequences` lists detailed information as JSON for all sequences.
The tool takes the following input parameters:

- `sequencename` (optional): A text to filter results by sequence name. The
- `sequence_name` (optional): A text to filter results by sequence name. The
  input is used within a LIKE clause. Default: `""`
- `schemaname` (optional): A text to filter results by schema name. The input is
- `schema_name` (optional): A text to filter results by schema name. The input is
  used within a LIKE clause. Default: `""`
- `limit` (optional): The maximum number of rows to return. Default: `50`.

@@ -45,9 +45,9 @@ The response is a json array with the following elements:

```json
{
  "sequencename": "sequence name",
  "schemaname": "schema name",
  "sequenceowner": "owner of the sequence",
  "sequence_name": "sequence name",
  "schema_name": "schema name",
  "sequence_owner": "owner of the sequence",
  "data_type": "data type of the sequence",
  "start_value": "starting value of the sequence",
  "min_value": "minimum value of the sequence",

docs/en/resources/tools/postgres/postgres-list-table-stats.md (new file, 171 lines)
@@ -0,0 +1,171 @@
---
title: "postgres-list-table-stats"
type: docs
weight: 1
description: >
  The "postgres-list-table-stats" tool reports table statistics including size, scan metrics, and bloat indicators for PostgreSQL tables.
aliases:
- /resources/tools/postgres-list-table-stats
---

## About

The `postgres-list-table-stats` tool queries `pg_stat_all_tables` to provide comprehensive statistics about tables in the database. It calculates useful metrics like index scan ratio and dead row ratio to help identify performance issues and table bloat.

Compatible sources:

- [alloydb-postgres](../../sources/alloydb-pg.md)
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
- [postgres](../../sources/postgres.md)

The tool returns a JSON array where each element represents statistics for a table, including scan metrics, row counts, and vacuum history. Results are sorted by sequential scans by default and limited to 50 rows.

## Example

```yaml
tools:
  list_table_stats:
    kind: postgres-list-table-stats
    source: postgres-source
    description: "Lists table statistics including size, scans, and bloat metrics."
```

### Example Requests

**List default tables in public schema:**

```json
{}
```

**Filter by specific table name:**

```json
{
  "table_name": "users"
}
```

**Filter by owner and sort by size:**

```json
{
  "owner": "app_user",
  "sort_by": "size",
  "limit": 10
}
```

**Find tables with high dead row ratio:**

```json
{
  "sort_by": "dead_rows",
  "limit": 20
}
```

### Example Response

```json
[
  {
    "schema_name": "public",
    "table_name": "users",
    "owner": "postgres",
    "total_size_bytes": 8388608,
    "seq_scan": 150,
    "idx_scan": 450,
    "idx_scan_ratio_percent": 75.0,
    "live_rows": 50000,
    "dead_rows": 1200,
    "dead_row_ratio_percent": 2.34,
    "n_tup_ins": 52000,
    "n_tup_upd": 12500,
    "n_tup_del": 800,
    "last_vacuum": "2025-11-27T10:30:00Z",
    "last_autovacuum": "2025-11-27T09:15:00Z",
    "last_autoanalyze": "2025-11-27T09:16:00Z"
  },
  {
    "schema_name": "public",
    "table_name": "orders",
    "owner": "postgres",
    "total_size_bytes": 16777216,
    "seq_scan": 50,
    "idx_scan": 1200,
    "idx_scan_ratio_percent": 96.0,
    "live_rows": 100000,
    "dead_rows": 5000,
    "dead_row_ratio_percent": 4.76,
    "n_tup_ins": 120000,
    "n_tup_upd": 45000,
    "n_tup_del": 15000,
    "last_vacuum": "2025-11-26T14:22:00Z",
    "last_autovacuum": "2025-11-27T02:30:00Z",
    "last_autoanalyze": "2025-11-27T02:31:00Z"
  }
]
```

## Parameters

| parameter | type | required | default | description |
|-------------|---------|----------|----------|-------------|
| schema_name | string | false | "public" | Optional: A specific schema name to filter by (supports partial matching) |
| table_name | string | false | null | Optional: A specific table name to filter by (supports partial matching) |
| owner | string | false | null | Optional: A specific owner to filter by (supports partial matching) |
| sort_by | string | false | null | Optional: The column to sort by. Valid values: `size`, `dead_rows`, `seq_scan`, `idx_scan` (defaults to `seq_scan`) |
| limit | integer | false | 50 | Optional: The maximum number of results to return |

## Output Fields Reference

| field | type | description |
|------------------------|-----------|-------------|
| schema_name | string | Name of the schema containing the table. |
| table_name | string | Name of the table. |
| owner | string | PostgreSQL user who owns the table. |
| total_size_bytes | integer | Total size of the table including all indexes in bytes. |
| seq_scan | integer | Number of sequential (full table) scans performed on this table. |
| idx_scan | integer | Number of index scans performed on this table. |
| idx_scan_ratio_percent | decimal | Percentage of total scans (seq_scan + idx_scan) that used an index. A low ratio may indicate missing or ineffective indexes. |
| live_rows | integer | Number of live (non-deleted) rows in the table. |
| dead_rows | integer | Number of dead (deleted but not yet vacuumed) rows in the table. |
| dead_row_ratio_percent | decimal | Percentage of dead rows relative to total rows. High values indicate potential table bloat. |
| n_tup_ins | integer | Total number of rows inserted into this table. |
| n_tup_upd | integer | Total number of rows updated in this table. |
| n_tup_del | integer | Total number of rows deleted from this table. |
| last_vacuum | timestamp | Timestamp of the last manual VACUUM operation on this table (null if never manually vacuumed). |
| last_autovacuum | timestamp | Timestamp of the last automatic vacuum operation on this table. |
| last_autoanalyze | timestamp | Timestamp of the last automatic analyze operation on this table. |

## Interpretation Guide

### Index Scan Ratio (`idx_scan_ratio_percent`)

- **High ratio (> 80%)**: Table queries are efficiently using indexes. This is typically desirable.
- **Low ratio (< 20%)**: Many sequential scans indicate missing indexes or queries that cannot use existing indexes effectively. Consider adding indexes to frequently searched columns.
- **0%**: No index scans performed; all queries performed sequential scans. May warrant index investigation.

### Dead Row Ratio (`dead_row_ratio_percent`)

- **< 2%**: Healthy table with minimal bloat.
- **2-5%**: Moderate bloat; consider running VACUUM if not recent.
- **> 5%**: High bloat; may benefit from manual VACUUM or VACUUM FULL.

### Vacuum History

- **Null `last_vacuum`**: Table has never been manually vacuumed; relies on autovacuum.
- **Recent `last_autovacuum`**: Autovacuum is actively managing the table.
- **Stale timestamps**: Consider running manual VACUUM and ANALYZE if maintenance windows exist.

## Performance Considerations

- Statistics are collected from `pg_stat_all_tables`, which resets on PostgreSQL restart.
- Run `ANALYZE` on tables to update statistics for accurate query planning.
- The tool defaults to limiting results to 50 rows; adjust the `limit` parameter for larger result sets.
- Filtering by schema, table name, or owner uses `LIKE` pattern matching (supports partial matches).

## Use Cases

- **Finding ineffective indexes**: Identify tables with low `idx_scan_ratio_percent` to evaluate index strategy.
- **Detecting table bloat**: Sort by `dead_rows` to find tables needing VACUUM.
- **Monitoring growth**: Track `total_size_bytes` over time for capacity planning.
- **Audit maintenance**: Check `last_autovacuum` and `last_autoanalyze` timestamps to ensure maintenance tasks are running.
- **Understanding workload**: Examine `seq_scan` vs `idx_scan` ratios to understand query patterns.

@@ -0,0 +1,56 @@
---
title: "postgres-list-tablespaces"
type: docs
weight: 1
description: >
  The "postgres-list-tablespaces" tool lists tablespaces in a Postgres database.
aliases:
- /resources/tools/postgres-list-tablespaces
---

## About

The `postgres-list-tablespaces` tool lists available tablespaces in the database. It's compatible with any of the following sources:

- [alloydb-postgres](../../sources/alloydb-pg.md)
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
- [postgres](../../sources/postgres.md)

`postgres-list-tablespaces` lists detailed information as JSON for tablespaces. The tool takes the following input parameters:

- `tablespace_name` (optional): A text to filter results by tablespace name. Default: `""`
- `limit` (optional): The maximum number of tablespaces to return. Default: `50`

## Example

```yaml
tools:
  list_tablespaces:
    kind: postgres-list-tablespaces
    source: postgres-source
    description: |
      Lists all tablespaces in the database. Returns the tablespace name,
      owner name, size in bytes (if the current user has CREATE privileges on
      the tablespace, otherwise NULL), internal object ID, the access control
      list regarding permissions, and any specific tablespace options.
```

The response is a json array with the following elements:

```json
{
  "tablespace_name": "name of the tablespace",
  "owner_username": "owner of the tablespace",
  "size_in_bytes": "size in bytes if the current user has CREATE privileges on the tablespace, otherwise NULL",
  "oid": "Object ID of the tablespace",
  "spcacl": "Access privileges",
  "spcoptions": "Tablespace-level options (e.g., seq_page_cost, random_page_cost)"
}
```

## Reference

| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|------------------------------------------------------|
| kind | string | true | Must be "postgres-list-tablespaces". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | false | Description of the tool that is passed to the agent. |

@@ -19,11 +19,11 @@ a Postgres database, excluding those in system schemas (`pg_catalog`,
- [postgres](../../sources/postgres.md)

`postgres-list-views` lists detailed view information (schemaname, viewname,
ownername) as JSON for views in a database. The tool takes the following input
ownername, definition) as JSON for views in a database. The tool takes the following input
parameters:

- `viewname` (optional): A string pattern to filter view names. The search uses
  SQL LIKE operator to filter the views. Default: `""`
- `view_name` (optional): A string pattern to filter view names. Default: `""`
- `schema_name` (optional): A string pattern to filter schema names. Default: `""`
- `limit` (optional): The maximum number of rows to return. Default: `50`.

## Example

@@ -9,3 +9,5 @@ description: >
- [serverless-spark-get-batch](./serverless-spark-get-batch.md)
- [serverless-spark-list-batches](./serverless-spark-list-batches.md)
- [serverless-spark-cancel-batch](./serverless-spark-cancel-batch.md)
- [serverless-spark-create-pyspark-batch](./serverless-spark-create-pyspark-batch.md)
- [serverless-spark-create-spark-batch](./serverless-spark-create-spark-batch.md)

@@ -0,0 +1,97 @@
---
title: "serverless-spark-create-pyspark-batch"
type: docs
weight: 2
description: >
  A "serverless-spark-create-pyspark-batch" tool submits a Spark batch to run asynchronously.
aliases:
- /resources/tools/serverless-spark-create-pyspark-batch
---

## About

A `serverless-spark-create-pyspark-batch` tool submits a Spark batch to a Google
Cloud Serverless for Apache Spark source. The workload executes asynchronously
and takes around a minute to begin executing; status can be polled using the
[get batch](serverless-spark-get-batch.md) tool.

It's compatible with the following sources:

- [serverless-spark](../../sources/serverless-spark.md)

`serverless-spark-create-pyspark-batch` accepts the following parameters:

- **`mainFile`**: The path to the main Python file, as a gs://... URI.
- **`args`**: Optional. A list of arguments passed to the main file.
- **`version`**: Optional. The Serverless [runtime
  version](https://docs.cloud.google.com/dataproc-serverless/docs/concepts/versions/dataproc-serverless-versions)
  to execute with.
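For illustration, a hypothetical submission payload using these parameters (the bucket, files, and runtime version are made up):

```yaml
mainFile: gs://my-bucket/jobs/wordcount.py
args:
  - gs://my-bucket/data/input.txt   # positional argument read by the script
version: "2.2"                      # a Serverless runtime version
```
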
## Custom Configuration

This tool supports custom
[`runtimeConfig`](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/RuntimeConfig)
and
[`environmentConfig`](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/EnvironmentConfig)
settings, which can be specified in a `tools.yaml` file. These configurations
are parsed as YAML and passed to the Dataproc API.

**Note:** If your project requires custom runtime or environment configuration,
you must write a custom `tools.yaml`; you cannot use the `serverless-spark`
prebuilt config.

### Example `tools.yaml`

```yaml
tools:
  - name: "serverless-spark-create-pyspark-batch"
    kind: "serverless-spark-create-pyspark-batch"
    source: "my-serverless-spark-source"
    runtimeConfig:
      properties:
        spark.driver.memory: "1024m"
    environmentConfig:
      executionConfig:
        networkUri: "my-network"
```

## Response Format

The response contains the
[operation](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/projects.locations.operations#resource:-operation)
metadata JSON object corresponding to [batch operation
metadata](https://pkg.go.dev/cloud.google.com/go/dataproc/v2/apiv1/dataprocpb#BatchOperationMetadata),
plus additional fields `consoleUrl` and `logsUrl` where a human can go for more
detailed information.

```json
{
  "opMetadata": {
    "batch": "projects/myproject/locations/us-central1/batches/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
    "batchUuid": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
    "createTime": "2025-11-19T16:36:47.607119Z",
    "description": "Batch",
    "labels": {
      "goog-dataproc-batch-uuid": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
      "goog-dataproc-location": "us-central1"
    },
    "operationType": "BATCH",
    "warnings": [
      "No runtime version specified. Using the default runtime version."
    ]
  },
  "consoleUrl": "https://console.cloud.google.com/dataproc/batches/...",
  "logsUrl": "https://console.cloud.google.com/logs/viewer?..."
}
```

## Reference

| **field** | **type** | **required** | **description** |
| ----------------- | :------: | :----------: | --------------- |
| kind | string | true | Must be "serverless-spark-create-pyspark-batch". |
| source | string | true | Name of the source the tool should use. |
| description | string | false | Description of the tool that is passed to the LLM. |
| runtimeConfig | map | false | [Runtime config](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/RuntimeConfig) for all batches created with this tool. |
| environmentConfig | map | false | [Environment config](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/EnvironmentConfig) for all batches created with this tool. |
| authRequired | string[] | false | List of auth services required to invoke this tool. |

Some files were not shown because too many files have changed in this diff.