Mirror of https://github.com/googleapis/genai-toolbox.git (synced 2026-01-13 09:28:12 -05:00)

Compare commits: pgtriggers ... auth-files (40 commits)
| SHA1 |
|---|
| 271f4d6f2f |
| 881c3a1ac3 |
| 914b3eefda |
| 776a5ca438 |
| d08dd144ad |
| fbd92c68ba |
| af3d3c5204 |
| 466aef024f |
| a6830744fc |
| 615b5f0130 |
| 2b45266598 |
| 26ead2ed78 |
| 1f31c2c9b2 |
| 78e015d7df |
| c6ccf4bd87 |
| 5605eabd69 |
| e29c0616d6 |
| 285aa46b88 |
| c5a6daa768 |
| 78b02f08c3 |
| 18d0440f4e |
| 7a135ce078 |
| ea9e2d12bd |
| bea9705450 |
| 489117d747 |
| 32367a472f |
| 3b40fea25e |
| f6b6a9fb5d |
| 1dd971b8d5 |
| cb4529cbaa |
| ac375114fd |
| 8a0eba9d62 |
| 5ad7c6127b |
| f4b1f0a680 |
| 17a979207d |
| 3bf3fe8fa7 |
| 1bf0b51f03 |
| 744214e04c |
| 155bff80c1 |
| e84252feb4 |
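Any commit in the range can be inspected from a local clone of the mirror with plain git; for example, to see the files touched by the BigQuery formatting fix:

```sh
git show 155bff80c1 --stat
```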
```diff
@@ -589,6 +589,26 @@ steps:
           firestore \
           firestore
+
+  - id: "mongodb"
+    name: golang:1
+    waitFor: ["compile-test-binary"]
+    entrypoint: /bin/bash
+    env:
+      - "GOPATH=/gopath"
+      - "MONGODB_DATABASE=$_DATABASE_NAME"
+      - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
+    secretEnv: ["MONGODB_URI", "CLIENT_ID"]
+    volumes:
+      - name: "go"
+        path: "/gopath"
+    args:
+      - -c
+      - |
+        .ci/test_with_coverage.sh \
+          "MongoDB" \
+          mongodb \
+          mongodb
 
   - id: "looker"
     name: golang:1
     waitFor: ["compile-test-binary"]
@@ -867,6 +887,26 @@ steps:
           singlestore \
           singlestore
+
+  - id: "mariadb"
+    name: golang:1
+    waitFor: ["compile-test-binary"]
+    entrypoint: /bin/bash
+    env:
+      - "GOPATH=/gopath"
+      - "MARIADB_DATABASE=$_MARIADB_DATABASE"
+      - "MARIADB_PORT=$_MARIADB_PORT"
+      - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
+    secretEnv: ["MARIADB_USER", "MARIADB_PASS", "MARIADB_HOST", "CLIENT_ID"]
+    volumes:
+      - name: "go"
+        path: "/gopath"
+    args:
+      - -c
+      - |
+        # skip coverage check as it re-uses current MySQL implementation
+        go test ./tests/mariadb
+
 
 availableSecrets:
   secretManager:
     - versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_user/versions/latest
@@ -979,6 +1019,14 @@ availableSecrets:
       env: SINGLESTORE_PASSWORD
     - versionName: projects/$PROJECT_ID/secrets/singlestore_host/versions/latest
       env: SINGLESTORE_HOST
+    - versionName: projects/$PROJECT_ID/secrets/mariadb_user/versions/latest
+      env: MARIADB_USER
+    - versionName: projects/$PROJECT_ID/secrets/mariadb_pass/versions/latest
+      env: MARIADB_PASS
+    - versionName: projects/$PROJECT_ID/secrets/mariadb_host/versions/latest
+      env: MARIADB_HOST
+    - versionName: projects/$PROJECT_ID/secrets/mongodb_uri/versions/latest
+      env: MONGODB_URI
 
 options:
   logging: CLOUD_LOGGING_ONLY
@@ -1039,3 +1087,6 @@ substitutions:
   _SINGLESTORE_PORT: "3308"
   _SINGLESTORE_DATABASE: "singlestore"
   _SINGLESTORE_USER: "root"
+  _MARIADB_PORT: "3307"
+  _MARIADB_DATABASE: test_database
```
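The new `mongodb` and `mariadb` steps follow the existing pattern in this file: wait for the shared `compile-test-binary` step, pull credentials from Secret Manager via `secretEnv`, and run one suite per database. A rough sketch of running the MariaDB suite outside Cloud Build, assuming you export the same variables the step receives (values here are illustrative, not taken from this diff):

```bash
export MARIADB_DATABASE=test_database
export MARIADB_PORT=3307
export MARIADB_HOST=127.0.0.1   # illustrative; CI reads this from Secret Manager
export MARIADB_USER=root        # illustrative
export MARIADB_PASS=changeme    # illustrative
go test ./tests/mariadb
```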
```diff
@@ -1 +1,9 @@
 @import 'td/code-dark';
+
+// Make tabs scrollable horizontally instead of wrapping
+.nav-tabs {
+  flex-wrap: nowrap;
+  white-space: nowrap;
+  overflow-x: auto;
+  overflow-y: hidden;
+}
```

```diff
@@ -51,6 +51,14 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick
 # Add a new version block here before every release
 # The order of versions in this file is mirrored into the dropdown
 
+[[params.versions]]
+  version = "v0.23.0"
+  url = "https://googleapis.github.io/genai-toolbox/v0.23.0/"
+
 [[params.versions]]
   version = "v0.22.0"
   url = "https://googleapis.github.io/genai-toolbox/v0.22.0/"
 
 [[params.versions]]
   version = "v0.21.0"
   url = "https://googleapis.github.io/genai-toolbox/v0.21.0/"
```
CHANGELOG.md (62 changed lines)
```diff
@@ -1,5 +1,67 @@
 # Changelog
 
+## [0.23.0](https://github.com/googleapis/genai-toolbox/compare/v0.22.0...v0.23.0) (2025-12-11)
+
+
+### ⚠ BREAKING CHANGES
+
+* **serverless-spark:** add URLs to create batch tool outputs
+* **serverless-spark:** add URLs to list_batches output
+* **serverless-spark:** add Cloud Console and Logging URLs to get_batch
+* **tools/postgres:** Add additional filter params for existing postgres tools ([#2033](https://github.com/googleapis/genai-toolbox/issues/2033))
+
+
+### Features
+
+* **tools/postgres:** Add list-table-stats-tool to list table statistics. ([#2055](https://github.com/googleapis/genai-toolbox/issues/2055)) ([78b02f0](https://github.com/googleapis/genai-toolbox/commit/78b02f08c3cc3062943bb2f91cf60d5149c8d28d))
+* **looker/tools:** Enhance dashboard creation with dashboard filters ([#2133](https://github.com/googleapis/genai-toolbox/issues/2133)) ([285aa46](https://github.com/googleapis/genai-toolbox/commit/285aa46b887d9acb2da8766e107bbf1ab75b8812))
+* **serverless-spark:** Add Cloud Console and Logging URLs to get_batch ([e29c061](https://github.com/googleapis/genai-toolbox/commit/e29c0616d6b9ecda2badcaf7b69614e511ac031b))
+* **serverless-spark:** Add URLs to create batch tool outputs ([c6ccf4b](https://github.com/googleapis/genai-toolbox/commit/c6ccf4bd87026484143a2d0f5527b2edab03b54a))
+* **serverless-spark:** Add URLs to list_batches output ([5605eab](https://github.com/googleapis/genai-toolbox/commit/5605eabd696696ade07f52431a28ef65c0fb1f77))
+* **sources/mariadb:** Add MariaDB source and MySQL tools integration ([#1908](https://github.com/googleapis/genai-toolbox/issues/1908)) ([3b40fea](https://github.com/googleapis/genai-toolbox/commit/3b40fea25edae607e02c1e8fc2b0c957fa2c8e9a))
+* **tools/postgres:** Add additional filter params for existing postgres tools ([#2033](https://github.com/googleapis/genai-toolbox/issues/2033)) ([489117d](https://github.com/googleapis/genai-toolbox/commit/489117d74711ac9260e7547163ca463eb45eeaa2))
+* **tools/postgres:** Add list_pg_settings, list_database_stats tools for postgres ([#2030](https://github.com/googleapis/genai-toolbox/issues/2030)) ([32367a4](https://github.com/googleapis/genai-toolbox/commit/32367a472fae9653fed7f126428eba0252978bd5))
+* **tools/postgres:** Add new postgres-list-roles tool ([#2038](https://github.com/googleapis/genai-toolbox/issues/2038)) ([bea9705](https://github.com/googleapis/genai-toolbox/commit/bea97054502cfa236aa10e2ebc8ff58eb00ad035))
+
+
+### Bug Fixes
+
+* List tables tools null fix ([#2107](https://github.com/googleapis/genai-toolbox/issues/2107)) ([2b45266](https://github.com/googleapis/genai-toolbox/commit/2b452665983154041d4cd0ed7d82532e4af682eb))
+* **tools/mongodb:** Removed sortPayload and sortParams ([#1238](https://github.com/googleapis/genai-toolbox/issues/1238)) ([c5a6daa](https://github.com/googleapis/genai-toolbox/commit/c5a6daa7683d2f9be654300d977692c368e55e31))
+
+
+### Miscellaneous Chores
+
+* **looker:** Upgrade to latest go sdk ([#2159](https://github.com/googleapis/genai-toolbox/issues/2159)) ([78e015d](https://github.com/googleapis/genai-toolbox/commit/78e015d7dfd9cce7e2b444ed934da17eb355bc86))
+
+## [0.22.0](https://github.com/googleapis/genai-toolbox/compare/v0.21.0...v0.22.0) (2025-12-04)
+
+
+### Features
+
+* **tools/postgres:** Add allowed-origins flag ([#1984](https://github.com/googleapis/genai-toolbox/issues/1984)) ([862868f](https://github.com/googleapis/genai-toolbox/commit/862868f28476ea981575ce412faa7d6a03138f31))
+* **tools/postgres:** Add list-query-stats and get-column-cardinality functions ([#1976](https://github.com/googleapis/genai-toolbox/issues/1976)) ([9f76026](https://github.com/googleapis/genai-toolbox/commit/9f760269253a8cc92a357e995c6993ccc4a0fb7b))
+* **tools/spanner:** Add spanner list graphs to prebuiltconfigs ([#2056](https://github.com/googleapis/genai-toolbox/issues/2056)) ([0e7fbf4](https://github.com/googleapis/genai-toolbox/commit/0e7fbf465c488397aa9d8cab2e55165fff4eb53c))
+* **prebuilt/cloud-sql:** Add clone instance tool for cloud sql ([#1845](https://github.com/googleapis/genai-toolbox/issues/1845)) ([5e43630](https://github.com/googleapis/genai-toolbox/commit/5e43630907aa2d7bc6818142483a33272eab060b))
+* **serverless-spark:** Add create_pyspark_batch tool ([1bf0b51](https://github.com/googleapis/genai-toolbox/commit/1bf0b51f033c956790be1577bf5310d0b17e9c12))
+* **serverless-spark:** Add create_spark_batch tool ([17a9792](https://github.com/googleapis/genai-toolbox/commit/17a979207dbc4fe70acd0ebda164d1a8d34c1ed3))
+* Support alternate accessToken header name ([#1968](https://github.com/googleapis/genai-toolbox/issues/1968)) ([18017d6](https://github.com/googleapis/genai-toolbox/commit/18017d6545335a6fc1c472617101c35254d9a597))
+* Support for annotations ([#2007](https://github.com/googleapis/genai-toolbox/issues/2007)) ([ac21335](https://github.com/googleapis/genai-toolbox/commit/ac21335f4e88ca52d954d7f8143a551a35661b94))
+* **tool/mssql:** Set default host and port for MSSQL source ([#1943](https://github.com/googleapis/genai-toolbox/issues/1943)) ([7a9cc63](https://github.com/googleapis/genai-toolbox/commit/7a9cc633768d9ae9a7ff8230002da69d6a36ca86))
+* **tools/cloudsqlpg:** Add CloudSQL PostgreSQL pre-check tool ([#1722](https://github.com/googleapis/genai-toolbox/issues/1722)) ([8752e05](https://github.com/googleapis/genai-toolbox/commit/8752e05ab6e98812d95673a6f1ff67e9a6ae48d2))
+* **tools/postgres-list-publication-tables:** Add new postgres-list-publication-tables tool ([#1919](https://github.com/googleapis/genai-toolbox/issues/1919)) ([f4b1f0a](https://github.com/googleapis/genai-toolbox/commit/f4b1f0a68000ca2fc0325f55a1905705417c38a2))
+* **tools/postgres-list-tablespaces:** Add new postgres-list-tablespaces tool ([#1934](https://github.com/googleapis/genai-toolbox/issues/1934)) ([5ad7c61](https://github.com/googleapis/genai-toolbox/commit/5ad7c6127b3e47504fc4afda0b7f3de1dff78b8b))
+* **tools/spanner-list-graph:** Tool impl + docs + tests ([#1923](https://github.com/googleapis/genai-toolbox/issues/1923)) ([a0f44d3](https://github.com/googleapis/genai-toolbox/commit/a0f44d34ea3f044dd08501be616f70ddfd63ab45))
+
+
+### Bug Fixes
+
+* Add import for firebirdsql ([#2045](https://github.com/googleapis/genai-toolbox/issues/2045)) ([fb7aae9](https://github.com/googleapis/genai-toolbox/commit/fb7aae9d35b760d3471d8379642f835a0d84ec41))
+* Correct FAQ to mention HTTP tools ([#2036](https://github.com/googleapis/genai-toolbox/issues/2036)) ([7b44237](https://github.com/googleapis/genai-toolbox/commit/7b44237d4a21bfbf8d3cebe4d32a15affa29584d))
+* Format BigQuery numeric output as decimal strings ([#2084](https://github.com/googleapis/genai-toolbox/issues/2084)) ([155bff8](https://github.com/googleapis/genai-toolbox/commit/155bff80c1da4fae1e169e425fd82e1dc3373041))
+* Set default annotations for tools in code if annotation not provided in yaml ([#2049](https://github.com/googleapis/genai-toolbox/issues/2049)) ([565460c](https://github.com/googleapis/genai-toolbox/commit/565460c4ea8953dbe80070a8e469f957c0f7a70c))
+* **tools/alloydb-postgres-list-tables:** Exclude google_ml schema from list_tables ([#2046](https://github.com/googleapis/genai-toolbox/issues/2046)) ([a03984c](https://github.com/googleapis/genai-toolbox/commit/a03984cc15254c928f30085f8fa509ded6a79a0c))
+* **tools/alloydbcreateuser:** Remove duplication of project praram ([#2028](https://github.com/googleapis/genai-toolbox/issues/2028)) ([730ac6d](https://github.com/googleapis/genai-toolbox/commit/730ac6d22805fd50b4a675b74c1865f4e7689e7c))
+* **tools/mongodb:** Remove `required` tag from the `canonical` field ([#2099](https://github.com/googleapis/genai-toolbox/issues/2099)) ([744214e](https://github.com/googleapis/genai-toolbox/commit/744214e04cd12b11d166e6eb7da8ce4714904abc))
+
 ## [0.21.0](https://github.com/googleapis/genai-toolbox/compare/v0.20.0...v0.21.0) (2025-11-19)
```
````diff
@@ -109,7 +109,7 @@ golangci-lint run --fix
 Execute unit tests locally:
 
 ```bash
-go test -race -v ./...
+go test -race -v ./cmd/... ./internal/...
 ```
 
 ### Integration Tests
````
README.md (57 changed lines)
````diff
@@ -105,6 +105,21 @@ redeploying your application.
 
 ## Getting Started
 
+### (Non-production) Running Toolbox
+
+You can run Toolbox directly with a [configuration file](#configuration):
+
+```sh
+npx @toolbox-sdk/server --tools-file tools.yaml
+```
+
+This runs the latest version of the toolbox server with your configuration file.
+
+> [!NOTE]
+> This method should only be used for non-production use cases such as
+> experimentation. For any production use-cases, please consider [Installing the
+> server](#installing-the-server) and then [running it](#running-the-server).
+
 ### Installing the server
 
 For the latest version, check the [releases page][releases] and use the
@@ -125,7 +140,7 @@ To install Toolbox as a binary:
 >
 > ```sh
 > # see releases page for other versions
-> export VERSION=0.21.0
+> export VERSION=0.23.0
 > curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
 > chmod +x toolbox
 > ```
@@ -138,7 +153,7 @@ To install Toolbox as a binary:
 >
 > ```sh
 > # see releases page for other versions
-> export VERSION=0.21.0
+> export VERSION=0.23.0
 > curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
 > chmod +x toolbox
 > ```
@@ -151,21 +166,33 @@ To install Toolbox as a binary:
 >
 > ```sh
 > # see releases page for other versions
-> export VERSION=0.21.0
+> export VERSION=0.23.0
 > curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
 > chmod +x toolbox
 > ```
 >
 > </details>
 > <details>
-> <summary>Windows (AMD64)</summary>
+> <summary>Windows (Command Prompt)</summary>
 >
-> To install Toolbox as a binary on Windows (AMD64):
+> To install Toolbox as a binary on Windows (Command Prompt):
 >
+> ```cmd
+> :: see releases page for other versions
+> set VERSION=0.23.0
+> curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
+> ```
+>
+> </details>
+> <details>
+> <summary>Windows (PowerShell)</summary>
+>
+> To install Toolbox as a binary on Windows (PowerShell):
+>
 > ```powershell
-> :: see releases page for other versions
-> set VERSION=0.21.0
-> curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
+> # see releases page for other versions
+> $VERSION = "0.23.0"
+> curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
 > ```
 >
 > </details>
@@ -177,7 +204,7 @@ You can also install Toolbox as a container:
 
 ```sh
 # see releases page for other versions
-export VERSION=0.21.0
+export VERSION=0.23.0
 docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
 ```
 
@@ -201,7 +228,7 @@ To install from source, ensure you have the latest version of
 [Go installed](https://go.dev/doc/install), and then run the following command:
 
 ```sh
-go install github.com/googleapis/genai-toolbox@v0.21.0
+go install github.com/googleapis/genai-toolbox@v0.23.0
 ```
 <!-- {x-release-please-end} -->
 
@@ -291,6 +318,16 @@ toolbox --tools-file "tools.yaml"
 
 </details>
 
+<details>
+<summary>NPM</summary>
+
+To run Toolbox directly without manually downloading the binary (requires Node.js):
+```sh
+npx @toolbox-sdk/server --tools-file tools.yaml
+```
+
+</details>
+
 <details>
 
 <summary>Gemini CLI</summary>
````
cmd/root.go (11 changed lines)
```diff
@@ -120,6 +120,7 @@ import (
 	_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/http"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardelement"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardfilter"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerconversationalanalytics"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookercreateprojectfile"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdeleteprojectfile"
@@ -184,13 +185,19 @@ import (
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresgetcolumncardinality"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistactivequeries"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistavailableextensions"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistdatabasestats"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistindexes"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistinstalledextensions"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistlocks"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpgsettings"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpublicationtables"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistquerystats"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistroles"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistschemas"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistsequences"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablespaces"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablestats"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttriggers"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistviews"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslongrunningtransactions"
@@ -198,6 +205,8 @@ import (
 	_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/redis"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcancelbatch"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatepysparkbatch"
+	_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatesparkbatch"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkgetbatch"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparklistbatches"
 	_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoreexecutesql"
@@ -365,6 +374,8 @@ func NewCommand(opts ...Option) *Command {
 	flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
 	flags.BoolVar(&cmd.cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
 	flags.StringSliceVar(&cmd.cfg.AllowedOrigins, "allowed-origins", []string{"*"}, "Specifies a list of origins permitted to access this server. Defaults to '*'.")
+	flags.StringVar(&cmd.cfg.OAuthProtectedResource, "oauth-protected-resource", "", "Specifies a yaml file that contains what should be returned from /.well-known/oauth-protected-resource")
+	flags.StringVar(&cmd.cfg.OAuthAuthorizationServer, "oauth-authorization-server", "", "Specifies a yaml file that contains what should be returned from /.well-known/oauth-authorization-server")
 
 	// wrap RunE command so that we have access to original Command object
 	cmd.RunE = func(*cobra.Command, []string) error { return run(cmd) }
```
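The two new flags hand YAML files straight through to the OAuth discovery endpoints, so wiring them up is just a matter of pointing the server at those files. A minimal sketch of the invocation, assuming `protected-resource.yaml` and `auth-server.yaml` are files you have written (the filenames are illustrative, not taken from this diff):

```sh
toolbox --tools-file tools.yaml \
  --oauth-protected-resource protected-resource.yaml \
  --oauth-authorization-server auth-server.yaml
```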
```diff
@@ -1488,7 +1488,7 @@ func TestPrebuiltTools(t *testing.T) {
 			wantToolset: server.ToolsetConfigs{
 				"alloydb_postgres_database_tools": tools.ToolsetConfig{
 					Name:      "alloydb_postgres_database_tools",
-					ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality"},
+					ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats"},
 				},
 			},
 		},
@@ -1518,7 +1518,7 @@ func TestPrebuiltTools(t *testing.T) {
 			wantToolset: server.ToolsetConfigs{
 				"cloud_sql_postgres_database_tools": tools.ToolsetConfig{
 					Name:      "cloud_sql_postgres_database_tools",
-					ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality"},
+					ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats"},
 				},
 			},
 		},
@@ -1558,7 +1558,7 @@ func TestPrebuiltTools(t *testing.T) {
 			wantToolset: server.ToolsetConfigs{
 				"serverless_spark_tools": tools.ToolsetConfig{
 					Name:      "serverless_spark_tools",
-					ToolNames: []string{"list_batches", "get_batch", "cancel_batch"},
+					ToolNames: []string{"list_batches", "get_batch", "cancel_batch", "create_pyspark_batch", "create_spark_batch"},
 				},
 			},
 		},
@@ -1598,7 +1598,7 @@ func TestPrebuiltTools(t *testing.T) {
 			wantToolset: server.ToolsetConfigs{
 				"looker_tools": tools.ToolsetConfig{
 					Name:      "looker_tools",
-					ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look", "make_look", "get_dashboards", "run_dashboard", "make_dashboard", "add_dashboard_element", "health_pulse", "health_analyze", "health_vacuum", "dev_mode", "get_projects", "get_project_files", "get_project_file", "create_project_file", "update_project_file", "delete_project_file", "get_connections", "get_connection_schemas", "get_connection_databases", "get_connection_tables", "get_connection_table_columns"},
+					ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look", "make_look", "get_dashboards", "run_dashboard", "make_dashboard", "add_dashboard_element", "add_dashboard_filter", "generate_embed_url", "health_pulse", "health_analyze", "health_vacuum", "dev_mode", "get_projects", "get_project_files", "get_project_file", "create_project_file", "update_project_file", "delete_project_file", "get_connections", "get_connection_schemas", "get_connection_databases", "get_connection_tables", "get_connection_table_columns"},
 				},
 			},
 		},
@@ -1618,7 +1618,7 @@ func TestPrebuiltTools(t *testing.T) {
 			wantToolset: server.ToolsetConfigs{
 				"postgres_database_tools": tools.ToolsetConfig{
 					Name:      "postgres_database_tools",
-					ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality"},
+					ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats"},
 				},
 			},
 		},
```
```diff
@@ -1 +1 @@
-0.21.0
+0.23.0
```
```diff
@@ -11,11 +11,11 @@ The MCP Toolbox for Databases Server gives AI-powered development tools the abil
 
 ## Install & Configuration
 
-1. In the Antigravity MCP Store, click the "Install" button.
+1. In the Antigravity MCP Store, click the **Install** button. A configuration window will appear.
 
 2. Create your [`tools.yaml` configuration file](https://googleapis.github.io/genai-toolbox/getting-started/configure/).
 
-3. Click "View raw config" and update the `tools.yaml` path with the full absolute path to your file.
+3. In the configuration window, enter the full absolute path to your `tools.yaml` file and click **Save**.
 
 > [!NOTE]
 > If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
```
```diff
@@ -234,7 +234,7 @@
 },
 "outputs": [],
 "source": [
-    "version = \"0.21.0\" # x-release-please-version\n",
+    "version = \"0.23.0\" # x-release-please-version\n",
     "! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
     "\n",
     "# Make the binary executable\n",
```
````diff
@@ -71,6 +71,22 @@ redeploying your application.
 
 ## Getting Started
 
+### (Non-production) Running Toolbox
+
+You can run Toolbox directly with a [configuration file](../configure.md):
+
+```sh
+npx @toolbox-sdk/server --tools-file tools.yaml
+```
+
+This runs the latest version of the toolbox server with your configuration file.
+
+{{< notice note >}}
+This method should only be used for non-production use cases such as
+experimentation. For any production use-cases, please consider [Installing the
+server](#installing-the-server) and then [running it](#running-the-server).
+{{< /notice >}}
+
 ### Installing the server
 
 For the latest version, check the [releases page][releases] and use the
@@ -87,7 +103,7 @@ To install Toolbox as a binary on Linux (AMD64):
 
 ```sh
 # see releases page for other versions
-export VERSION=0.21.0
+export VERSION=0.23.0
 curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
 chmod +x toolbox
 ```
@@ -98,7 +114,7 @@ To install Toolbox as a binary on macOS (Apple Silicon):
 
 ```sh
 # see releases page for other versions
-export VERSION=0.21.0
+export VERSION=0.23.0
 curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
 chmod +x toolbox
 ```
@@ -109,19 +125,29 @@ To install Toolbox as a binary on macOS (Intel):
 
 ```sh
 # see releases page for other versions
-export VERSION=0.21.0
+export VERSION=0.23.0
 curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
 chmod +x toolbox
 ```
 
 {{% /tab %}}
-{{% tab header="Windows (AMD64)" lang="en" %}}
-To install Toolbox as a binary on Windows (AMD64):
+{{% tab header="Windows (Command Prompt)" lang="en" %}}
+To install Toolbox as a binary on Windows (Command Prompt):
 
+```cmd
+:: see releases page for other versions
+set VERSION=0.23.0
+curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
+```
+
+{{% /tab %}}
+{{% tab header="Windows (PowerShell)" lang="en" %}}
+To install Toolbox as a binary on Windows (PowerShell):
+
 ```powershell
-:: see releases page for other versions
-set VERSION=0.21.0
-curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
+# see releases page for other versions
+$VERSION = "0.23.0"
+curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
 ```
 
 {{% /tab %}}
@@ -132,7 +158,7 @@ You can also install Toolbox as a container:
 
 ```sh
 # see releases page for other versions
-export VERSION=0.21.0
+export VERSION=0.23.0
 docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
 ```
 
@@ -151,7 +177,7 @@ To install from source, ensure you have the latest version of
 [Go installed](https://go.dev/doc/install), and then run the following command:
 
 ```sh
-go install github.com/googleapis/genai-toolbox@v0.21.0
+go install github.com/googleapis/genai-toolbox@v0.23.0
 ```
 
 {{% /tab %}}
````
````diff
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
 <!-- {x-release-please-start-version} -->
 ```bash
 export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/$OS/toolbox
 ```
 <!-- {x-release-please-end} -->
````
docs/en/getting-started/prompts_quickstart_gemini_cli.md (new file, 245 lines)
---
title: "Prompts using Gemini CLI"
type: docs
weight: 5
description: >
  How to get started using Toolbox prompts locally with PostgreSQL and [Gemini CLI](https://pypi.org/project/gemini-cli/).
---

## Before you begin

This guide assumes you have already done the following:

1. Installed [PostgreSQL 16+ and the `psql` client][install-postgres].

[install-postgres]: https://www.postgresql.org/download/

## Step 1: Set up your database

In this section, we will create a database, insert some data that needs to be
accessed by our agent, and create a database user for Toolbox to connect with.

1. Connect to postgres using the `psql` command:

    ```bash
    psql -h 127.0.0.1 -U postgres
    ```

    Here, `postgres` denotes the default postgres superuser.

    {{< notice info >}}

    #### **Having trouble connecting?**

    * **Password Prompt:** If you are prompted for a password for the `postgres`
      user and do not know it (or a blank password doesn't work), your PostgreSQL
      installation might require a password or a different authentication method.
    * **`FATAL: role "postgres" does not exist`:** This error means the default
      `postgres` superuser role isn't available under that name on your system.
    * **`Connection refused`:** Ensure your PostgreSQL server is actually running.
      You can typically check with `sudo systemctl status postgresql` and start it
      with `sudo systemctl start postgresql` on Linux systems.

    <br/>

    #### **Common Solution**

    For password issues or if the `postgres` role seems inaccessible directly, try
    switching to the `postgres` operating system user first. This user often has
    permission to connect without a password for local connections (this is called
    peer authentication).

    ```bash
    sudo -i -u postgres
    psql -h 127.0.0.1
    ```

    Once you are in the `psql` shell using this method, you can proceed with the
    database creation steps below. Afterwards, type `\q` to exit `psql`, and then
    `exit` to return to your normal user shell.

    If desired, once connected to `psql` as the `postgres` OS user, you can set a
    password for the `postgres` *database* user using: `ALTER USER postgres WITH
    PASSWORD 'your_chosen_password';`. This would allow direct connection with `-U
    postgres` and a password next time.
    {{< /notice >}}

1. Create a new database and a new user:

    {{< notice tip >}}
    For a real application, it's best to follow the principle of least permission
    and only grant the privileges your application needs.
    {{< /notice >}}

    ```sql
    CREATE USER toolbox_user WITH PASSWORD 'my-password';

    CREATE DATABASE toolbox_db;
    GRANT ALL PRIVILEGES ON DATABASE toolbox_db TO toolbox_user;

    ALTER DATABASE toolbox_db OWNER TO toolbox_user;
    ```

1. End the database session:

    ```bash
    \q
    ```

    (If you used `sudo -i -u postgres` and then `psql`, remember you might also
    need to type `exit` after `\q` to leave the `postgres` user's shell
    session.)

1. Connect to your database with your new user:

    ```bash
    psql -h 127.0.0.1 -U toolbox_user -d toolbox_db
    ```

1. Create the required tables using the following commands:

    ```sql
    CREATE TABLE users (
      id SERIAL PRIMARY KEY,
      username VARCHAR(50) NOT NULL,
      email VARCHAR(100) UNIQUE NOT NULL,
      created_at TIMESTAMPTZ DEFAULT NOW()
    );

    CREATE TABLE restaurants (
      id SERIAL PRIMARY KEY,
      name VARCHAR(100) NOT NULL,
      location VARCHAR(100)
    );

    CREATE TABLE reviews (
      id SERIAL PRIMARY KEY,
      user_id INT REFERENCES users(id),
      restaurant_id INT REFERENCES restaurants(id),
      rating INT CHECK (rating >= 1 AND rating <= 5),
      review_text TEXT,
      is_published BOOLEAN DEFAULT false,
      moderation_status VARCHAR(50) DEFAULT 'pending_manual_review',
      created_at TIMESTAMPTZ DEFAULT NOW()
    );
    ```

1. Insert dummy data into the tables.

    ```sql
    INSERT INTO users (id, username, email) VALUES
      (123, 'jane_d', 'jane.d@example.com'),
      (124, 'john_s', 'john.s@example.com'),
      (125, 'sam_b', 'sam.b@example.com');

    INSERT INTO restaurants (id, name, location) VALUES
      (455, 'Pizza Palace', '123 Main St'),
      (456, 'The Corner Bistro', '456 Oak Ave'),
      (457, 'Sushi Spot', '789 Pine Ln');

    INSERT INTO reviews (user_id, restaurant_id, rating, review_text, is_published, moderation_status) VALUES
      (124, 455, 5, 'Best pizza in town! The crust was perfect.', true, 'approved'),
      (125, 457, 4, 'Great sushi, very fresh. A bit pricey but worth it.', true, 'approved'),
      (123, 457, 5, 'Absolutely loved the dragon roll. Will be back!', true, 'approved'),
      (123, 456, 4, 'The atmosphere was lovely and the food was great. My photo upload might have been weird though.', false, 'pending_manual_review'),
      (125, 456, 1, 'This review contains inappropriate language.', false, 'rejected');
    ```
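    Optionally, confirm the rows landed by joining the three tables (a quick
    sanity check; this query is illustrative and not part of the quickstart
    steps):

    ```sql
    SELECT u.username, rest.name, r.rating, r.moderation_status
    FROM reviews r
    JOIN users u ON u.id = r.user_id
    JOIN restaurants rest ON rest.id = r.restaurant_id;
    ```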
1. End the database session:

    ```bash
    \q
    ```

## Step 2: Configure Toolbox

Create a file named `tools.yaml`. This file defines the database connection, the
SQL tools available, and the prompts the agents will use.

```yaml
sources:
  my-foodiefind-db:
    kind: postgres
    host: 127.0.0.1
    port: 5432
    database: toolbox_db
    user: toolbox_user
    password: my-password
tools:
  find_user_by_email:
    kind: postgres-sql
    source: my-foodiefind-db
    description: Find a user's ID by their email address.
    parameters:
      - name: email
        type: string
        description: The email address of the user to find.
    statement: SELECT id FROM users WHERE email = $1;
  find_restaurant_by_name:
    kind: postgres-sql
    source: my-foodiefind-db
    description: Find a restaurant's ID by its exact name.
    parameters:
      - name: name
        type: string
        description: The name of the restaurant to find.
    statement: SELECT id FROM restaurants WHERE name = $1;
  find_review_by_user_and_restaurant:
    kind: postgres-sql
    source: my-foodiefind-db
    description: Find the full record for a specific review using the user's ID and the restaurant's ID.
    parameters:
      - name: user_id
        type: integer
        description: The numerical ID of the user.
      - name: restaurant_id
        type: integer
        description: The numerical ID of the restaurant.
    statement: SELECT * FROM reviews WHERE user_id = $1 AND restaurant_id = $2;
prompts:
  investigate_missing_review:
    description: "Investigates a user's missing review by finding the user, restaurant, and the review itself, then analyzing its status."
    arguments:
      - name: "user_email"
        description: "The email of the user who wrote the review."
      - name: "restaurant_name"
        description: "The name of the restaurant being reviewed."
    messages:
      - content: >-
          **Goal:** Find the review written by the user with email '{{.user_email}}' for the restaurant named '{{.restaurant_name}}' and understand its status.
          **Workflow:**
          1. Use the `find_user_by_email` tool with the email '{{.user_email}}' to get the `user_id`.
          2. Use the `find_restaurant_by_name` tool with the name '{{.restaurant_name}}' to get the `restaurant_id`.
          3. Use the `find_review_by_user_and_restaurant` tool with the `user_id` and `restaurant_id` you just found.
          4. Analyze the results from the final tool call. Examine the `is_published` and `moderation_status` fields and explain the review's status to the user in a clear, human-readable sentence.
```
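With `tools.yaml` in place, Toolbox itself has to be running before a client can connect. A minimal sketch, assuming the `toolbox` binary from the installation docs is on your `PATH` (the MCP endpoint defaults to port 5000, which is what the Gemini CLI configuration below points at):

```bash
toolbox --tools-file tools.yaml
```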
## Step 3: Connect to Gemini CLI

Configure the Gemini CLI to talk to your local Toolbox MCP server.

1. Open or create your Gemini settings file: `~/.gemini/settings.json`.
2. Add the following configuration to the file:

    ```json
    {
      "mcpServers": {
        "MCPToolbox": {
          "httpUrl": "http://localhost:5000/mcp"
        }
      },
      "mcp": {
        "allowed": ["MCPToolbox"]
      }
    }
    ```

3. Start Gemini CLI using

    ```sh
    gemini
    ```

    In case Gemini CLI is already running, use `/mcp refresh` to refresh the MCP server.

4. Use gemini slash commands to run your prompt:

    ```sh
    /investigate_missing_review --user_email="jane.d@example.com" --restaurant_name="The Corner Bistro"
    ```
```diff
@@ -5,7 +5,7 @@ go 1.24.4
 require (
 	github.com/googleapis/mcp-toolbox-sdk-go v0.4.0
 	google.golang.org/adk v0.1.0
-	google.golang.org/genai v1.35.0
+	google.golang.org/genai v1.36.0
 )
 
 require (
```
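Version bumps like this one (and the `go.sum` churn that follows) are normally produced with the standard Go tooling rather than edited by hand; a generic sketch, not taken from this diff:

```sh
go get google.golang.org/genai@v1.36.0
go mod tidy
```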
```diff
@@ -108,8 +108,8 @@ google.golang.org/adk v0.1.0 h1:+w/fHuqRVolotOATlujRA+2DKUuDrFH2poRdEX2QjB8=
 google.golang.org/adk v0.1.0/go.mod h1:NvtSLoNx7UzZIiUAI1KoJQLMmt9sG3oCgiCx1TLqKFw=
 google.golang.org/api v0.255.0 h1:OaF+IbRwOottVCYV2wZan7KUq7UeNUQn1BcPc4K7lE4=
 google.golang.org/api v0.255.0/go.mod h1:d1/EtvCLdtiWEV4rAEHDHGh2bCnqsWhw+M8y2ECN4a8=
-google.golang.org/genai v1.35.0 h1:Jo6g25CzVqFzGrX5mhWyBgQqXAUzxcx5jeK7U74zv9c=
-google.golang.org/genai v1.35.0/go.mod h1:A3kkl0nyBjyFlNjgxIwKq70julKbIxpSxqKO5gw/gmk=
+google.golang.org/genai v1.36.0 h1:sJCIjqTAmwrtAIaemtTiKkg2TO1RxnYEusTmEQ3nGxM=
+google.golang.org/genai v1.36.0/go.mod h1:A3kkl0nyBjyFlNjgxIwKq70julKbIxpSxqKO5gw/gmk=
 google.golang.org/genproto v0.0.0-20251014184007-4626949a642f h1:vLd1CJuJOUgV6qijD7KT5Y2ZtC97ll4dxjTUappMnbo=
 google.golang.org/genproto v0.0.0-20251014184007-4626949a642f/go.mod h1:PI3KrSadr00yqfv6UDvgZGFsmLqeRIwt8x4p5Oo7CdM=
 google.golang.org/genproto/googleapis/api v0.0.0-20251014184007-4626949a642f h1:OiFuztEyBivVKDvguQJYWq1yDcfAHIID/FVrPR4oiI0=
```
```diff
@@ -4,7 +4,7 @@ go 1.24.6
 
 require (
 	github.com/googleapis/mcp-toolbox-sdk-go v0.4.0
-	google.golang.org/genai v1.35.0
+	google.golang.org/genai v1.36.0
 )
 
 require (
```
```diff
@@ -102,8 +102,8 @@ gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
 gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
 google.golang.org/api v0.255.0 h1:OaF+IbRwOottVCYV2wZan7KUq7UeNUQn1BcPc4K7lE4=
 google.golang.org/api v0.255.0/go.mod h1:d1/EtvCLdtiWEV4rAEHDHGh2bCnqsWhw+M8y2ECN4a8=
-google.golang.org/genai v1.35.0 h1:Jo6g25CzVqFzGrX5mhWyBgQqXAUzxcx5jeK7U74zv9c=
-google.golang.org/genai v1.35.0/go.mod h1:A3kkl0nyBjyFlNjgxIwKq70julKbIxpSxqKO5gw/gmk=
+google.golang.org/genai v1.36.0 h1:sJCIjqTAmwrtAIaemtTiKkg2TO1RxnYEusTmEQ3nGxM=
+google.golang.org/genai v1.36.0/go.mod h1:A3kkl0nyBjyFlNjgxIwKq70julKbIxpSxqKO5gw/gmk=
 google.golang.org/genproto v0.0.0-20251014184007-4626949a642f h1:vLd1CJuJOUgV6qijD7KT5Y2ZtC97ll4dxjTUappMnbo=
 google.golang.org/genproto v0.0.0-20251014184007-4626949a642f/go.mod h1:PI3KrSadr00yqfv6UDvgZGFsmLqeRIwt8x4p5Oo7CdM=
 google.golang.org/genproto/googleapis/api v0.0.0-20251014184007-4626949a642f h1:OiFuztEyBivVKDvguQJYWq1yDcfAHIID/FVrPR4oiI0=
```
```diff
@@ -33,12 +33,12 @@ require (
 	go.opentelemetry.io/otel v1.38.0 // indirect
 	go.opentelemetry.io/otel/metric v1.38.0 // indirect
 	go.opentelemetry.io/otel/trace v1.38.0 // indirect
-	golang.org/x/crypto v0.43.0 // indirect
-	golang.org/x/net v0.46.0 // indirect
+	golang.org/x/crypto v0.45.0 // indirect
+	golang.org/x/net v0.47.0 // indirect
 	golang.org/x/oauth2 v0.32.0 // indirect
-	golang.org/x/sync v0.17.0 // indirect
-	golang.org/x/sys v0.37.0 // indirect
-	golang.org/x/text v0.30.0 // indirect
+	golang.org/x/sync v0.18.0 // indirect
+	golang.org/x/sys v0.38.0 // indirect
+	golang.org/x/text v0.31.0 // indirect
 	golang.org/x/time v0.14.0 // indirect
 	google.golang.org/api v0.255.0 // indirect
 	google.golang.org/genproto v0.0.0-20251014184007-4626949a642f // indirect
```
```diff
@@ -100,18 +100,18 @@ go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6
 go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA=
 go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE=
 go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
-golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=
-golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
-golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=
-golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210=
+golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
+golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
+golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
+golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
 golang.org/x/oauth2 v0.32.0 h1:jsCblLleRMDrxMN29H3z/k1KliIvpLgCkE6R8FXXNgY=
 golang.org/x/oauth2 v0.32.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
-golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
-golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
-golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
-golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
-golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
-golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
+golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
+golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
+golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
+golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
+golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
+golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
 golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
 golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
 gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
```
```diff
@@ -1,4 +1,4 @@
-llama-index==0.14.8
+llama-index==0.14.10
 llama-index-llms-google-genai==0.7.3
 toolbox-llamaindex==0.5.3
 pytest==9.0.1
```
````diff
@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a
 <!-- {x-release-please-start-version} -->
 ```bash
 export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/$OS/toolbox
 ```
 <!-- {x-release-please-end} -->
````
```diff
@@ -49,19 +49,19 @@ to expose your developer assistant tools to a Looker instance:
 
 {{< tabpane persist=header >}}
 {{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/linux/amd64/toolbox
 {{< /tab >}}
 
 {{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/darwin/arm64/toolbox
 {{< /tab >}}
 
 {{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/darwin/amd64/toolbox
 {{< /tab >}}
 
 {{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/windows/amd64/toolbox.exe
 {{< /tab >}}
 {{< /tabpane >}}
 <!-- {x-release-please-end} -->
@@ -323,6 +323,8 @@ instance and create new saved content.
    data
 1. **make_dashboard**: Create a saved dashboard in Looker and return the URL
 1. **add_dashboard_element**: Add a tile to a dashboard
+1. **add_dashboard_filter**: Add a filter to a dashboard
+1. **generate_embed_url**: Generate an embed url for content
 
 ### Looker Instance Health Tools
```
```diff
@@ -45,19 +45,19 @@ instance:
 <!-- {x-release-please-start-version} -->
 {{< tabpane persist=header >}}
 {{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/linux/amd64/toolbox
 {{< /tab >}}
 
 {{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/darwin/arm64/toolbox
 {{< /tab >}}
 
 {{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/darwin/amd64/toolbox
 {{< /tab >}}
 
 {{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/windows/amd64/toolbox.exe
 {{< /tab >}}
 {{< /tabpane >}}
 <!-- {x-release-please-end} -->
```
```diff
@@ -43,19 +43,19 @@ expose your developer assistant tools to a MySQL instance:
 <!-- {x-release-please-start-version} -->
 {{< tabpane persist=header >}}
 {{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/linux/amd64/toolbox
 {{< /tab >}}
 
 {{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/darwin/arm64/toolbox
 {{< /tab >}}
 
 {{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/darwin/amd64/toolbox
 {{< /tab >}}
 
 {{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/windows/amd64/toolbox.exe
 {{< /tab >}}
 {{< /tabpane >}}
 <!-- {x-release-please-end} -->
```
```diff
@@ -44,19 +44,19 @@ expose your developer assistant tools to a Neo4j instance:
 <!-- {x-release-please-start-version} -->
 {{< tabpane persist=header >}}
 {{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/linux/amd64/toolbox
 {{< /tab >}}
 
 {{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/darwin/arm64/toolbox
 {{< /tab >}}
 
 {{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/darwin/amd64/toolbox
 {{< /tab >}}
 
 {{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/windows/amd64/toolbox.exe
 {{< /tab >}}
 {{< /tabpane >}}
 <!-- {x-release-please-end} -->
```
```diff
@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
 <!-- {x-release-please-start-version} -->
 {{< tabpane persist=header >}}
 {{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/linux/amd64/toolbox
 {{< /tab >}}
 
 {{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/darwin/arm64/toolbox
 {{< /tab >}}
 
 {{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/darwin/amd64/toolbox
 {{< /tab >}}
 
 {{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/windows/amd64/toolbox.exe
 {{< /tab >}}
 {{< /tabpane >}}
 <!-- {x-release-please-end} -->
```
```diff
@@ -43,19 +43,19 @@ to expose your developer assistant tools to a SQLite instance:
 <!-- {x-release-please-start-version} -->
 {{< tabpane persist=header >}}
 {{< tab header="linux/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/linux/amd64/toolbox
 {{< /tab >}}
 
 {{< tab header="darwin/arm64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/darwin/arm64/toolbox
 {{< /tab >}}
 
 {{< tab header="darwin/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/darwin/amd64/toolbox
 {{< /tab >}}
 
 {{< tab header="windows/amd64" lang="bash" >}}
-curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
+curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/windows/amd64/toolbox.exe
 {{< /tab >}}
 {{< /tabpane >}}
 <!-- {x-release-please-end} -->
```
@@ -50,6 +50,12 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
* `list_triggers`: Lists triggers in the database.
* `list_indexes`: Lists available user indexes in a PostgreSQL database.
* `list_sequences`: Lists sequences in a PostgreSQL database.
* `list_publication_tables`: Lists publication tables in a PostgreSQL database.
* `list_tablespaces`: Lists tablespaces in the database.
* `list_pg_settings`: Lists configuration parameters for the PostgreSQL server.
* `list_database_stats`: Lists the key performance and activity statistics for
  each database in the AlloyDB instance.
* `list_roles`: Lists all the user-created roles in a PostgreSQL database.

## AlloyDB Postgres Admin

@@ -227,6 +233,12 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
* `list_triggers`: Lists triggers in the database.
* `list_indexes`: Lists available user indexes in a PostgreSQL database.
* `list_sequences`: Lists sequences in a PostgreSQL database.
* `list_publication_tables`: Lists publication tables in a PostgreSQL database.
* `list_tablespaces`: Lists tablespaces in the database.
* `list_pg_settings`: Lists configuration parameters for the PostgreSQL server.
* `list_database_stats`: Lists the key performance and activity statistics for
  each database in the PostgreSQL instance.
* `list_roles`: Lists all the user-created roles in a PostgreSQL database.

## Cloud SQL for PostgreSQL Observability

@@ -404,6 +416,8 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
* `run_dashboard`: Runs the queries associated with a dashboard.
* `make_dashboard`: Creates a new dashboard.
* `add_dashboard_element`: Adds a tile to a dashboard.
* `add_dashboard_filter`: Adds a filter to a dashboard.
* `generate_embed_url`: Generates an embed URL for content.
* `health_pulse`: Tests the health of a Looker instance.
* `health_analyze`: Analyzes the LookML usage of a Looker instance.
* `health_vacuum`: Suggests LookML elements that can be removed.
@@ -532,6 +546,12 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
* `list_triggers`: Lists triggers in the database.
* `list_indexes`: Lists available user indexes in a PostgreSQL database.
* `list_sequences`: Lists sequences in a PostgreSQL database.
* `list_publication_tables`: Lists publication tables in a PostgreSQL database.
* `list_tablespaces`: Lists tablespaces in the database.
* `list_pg_settings`: Lists configuration parameters for the PostgreSQL server.
* `list_database_stats`: Lists the key performance and activity statistics for
  each database in the PostgreSQL server.
* `list_roles`: Lists all the user-created roles in a PostgreSQL database.

## Google Cloud Serverless for Apache Spark

@@ -77,6 +77,25 @@ cluster][alloydb-free-trial].
- [`postgres-get-column-cardinality`](../tools/postgres/postgres-get-column-cardinality.md)
  Lists cardinality of columns in a table in a PostgreSQL database.

- [`postgres-list-table-stats`](../tools/postgres/postgres-list-table-stats.md)
  Lists statistics of a table in a PostgreSQL database.

- [`postgres-list-publication-tables`](../tools/postgres/postgres-list-publication-tables.md)
  Lists publication tables in a PostgreSQL database.

- [`postgres-list-tablespaces`](../tools/postgres/postgres-list-tablespaces.md)
  Lists tablespaces in an AlloyDB for PostgreSQL database.

- [`postgres-list-pg-settings`](../tools/postgres/postgres-list-pg-settings.md)
  Lists configuration parameters for the PostgreSQL server.

- [`postgres-list-database-stats`](../tools/postgres/postgres-list-database-stats.md)
  Lists the key performance and activity statistics for each database in the AlloyDB
  instance.

- [`postgres-list-roles`](../tools/postgres/postgres-list-roles.md)
  Lists all the user-created roles in a PostgreSQL database.

### Pre-built Configurations

- [AlloyDB using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/alloydb_pg_mcp/)

@@ -88,13 +88,40 @@ mTLS.
[public-ip]: https://cloud.google.com/sql/docs/mysql/configure-ip
[conn-overview]: https://cloud.google.com/sql/docs/mysql/connect-overview

### Database User
### Authentication

Currently, this source only uses standard authentication. You will need to [create
a MySQL user][cloud-sql-users] to login to the database with.
This source supports both password-based authentication and IAM
authentication (using your [Application Default Credentials][adc]).

#### Standard Authentication

To connect using user/password, [create
a MySQL user][cloud-sql-users] and input your credentials in the `user` and
`password` fields.

```yaml
user: ${USER_NAME}
password: ${PASSWORD}
```

[cloud-sql-users]: https://cloud.google.com/sql/docs/mysql/create-manage-users

#### IAM Authentication

To connect using IAM authentication:

1. Prepare your database instance and user following this [guide][iam-guide].
2. Choose one of two ways to log in:
   - Specify your IAM email as the `user`.
   - Leave your `user` field blank. Toolbox will fetch the [ADC][adc]
     automatically and log in using the email associated with it.

3. Leave the `password` field blank.

[iam-guide]: https://cloud.google.com/sql/docs/mysql/iam-logins
[cloudsql-users]: https://cloud.google.com/sql/docs/mysql/create-manage-users
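
For instance, an IAM-based source entry could look like the following minimal sketch; the `cloud-sql-mysql` kind and all values shown are illustrative assumptions (see the reference table below for the actual fields):

```yaml
sources:
  my-cloud-sql-mysql:
    kind: cloud-sql-mysql  # assumed kind name for this source
    project: my-project    # hypothetical project ID
    region: us-central1
    instance: my-instance
    database: my_db
    # user and password are intentionally omitted: Toolbox then attempts
    # IAM authentication using the email associated with your ADC.
```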

## Example

```yaml
@@ -124,6 +151,6 @@ instead of hardcoding your secrets into the configuration file.
| region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). |
| instance | string | true | Name of the Cloud SQL instance within the cluster (e.g. "my-instance"). |
| database | string | true | Name of the MySQL database to connect to (e.g. "my_db"). |
| user | string | true | Name of the MySQL user to connect as (e.g. "my-pg-user"). |
| password | string | true | Password of the MySQL user (e.g. "my-password"). |
| user | string | false | Name of the MySQL user to connect as (e.g. "my-mysql-user"). Defaults to IAM auth using [ADC][adc] email if unspecified. |
| password | string | false | Password of the MySQL user (e.g. "my-password"). Defaults to attempting IAM authentication if unspecified. |
| ipType | string | false | IP Type of the Cloud SQL instance, must be either `public`, `private`, or `psc`. Default: `public`. |

@@ -58,6 +58,7 @@ to a database by following these instructions][csql-pg-quickstart].

- [`postgres-list-sequences`](../tools/postgres/postgres-list-sequences.md)
  Lists sequences in a PostgreSQL database.

- [`postgres-long-running-transactions`](../tools/postgres/postgres-long-running-transactions.md)
  Lists long-running transactions in a PostgreSQL database.

@@ -73,6 +74,25 @@ to a database by following these instructions][csql-pg-quickstart].
- [`postgres-get-column-cardinality`](../tools/postgres/postgres-get-column-cardinality.md)
  Lists cardinality of columns in a table in a PostgreSQL database.

- [`postgres-list-table-stats`](../tools/postgres/postgres-list-table-stats.md)
  Lists statistics of a table in a PostgreSQL database.

- [`postgres-list-publication-tables`](../tools/postgres/postgres-list-publication-tables.md)
  Lists publication tables in a PostgreSQL database.

- [`postgres-list-tablespaces`](../tools/postgres/postgres-list-tablespaces.md)
  Lists tablespaces in a PostgreSQL database.

- [`postgres-list-pg-settings`](../tools/postgres/postgres-list-pg-settings.md)
  Lists configuration parameters for the PostgreSQL server.

- [`postgres-list-database-stats`](../tools/postgres/postgres-list-database-stats.md)
  Lists the key performance and activity statistics for each database in the PostgreSQL
  instance.

- [`postgres-list-roles`](../tools/postgres/postgres-list-roles.md)
  Lists all the user-created roles in a PostgreSQL database.

### Pre-built Configurations

- [Cloud SQL for Postgres using

@@ -91,18 +91,17 @@ instead of hardcoding your secrets into the configuration file.

## Reference

| **field** | **type** | **required** | **description** |
|----------------------|:--------:|:------------:|-------------------------------------------------------------------------------------------|
| kind | string | true | Must be "looker". |
| base_url | string | true | The URL of your Looker server with no trailing /. |
| client_id | string | false | The client id assigned by Looker. |
| client_secret | string | false | The client secret assigned by Looker. |
| verify_ssl | string | false | Whether to check the ssl certificate of the server. |
| project | string | false | The project id to use in Google Cloud. |
| location | string | false | The location to use in Google Cloud. (default: us) |
| timeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, 120s is applied. |
| use_client_oauth | string | false | Use OAuth tokens instead of client_id and client_secret. (default: false) If a header |
| | | | name is provided, it will be used instead of "Authorization". |
| show_hidden_models | string | false | Show or hide hidden models. (default: true) |
| show_hidden_explores | string | false | Show or hide hidden explores. (default: true) |
| show_hidden_fields | string | false | Show or hide hidden fields. (default: true) |
| **field** | **type** | **required** | **description** |
|----------------------|:--------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "looker". |
| base_url | string | true | The URL of your Looker server with no trailing /. |
| client_id | string | false | The client id assigned by Looker. |
| client_secret | string | false | The client secret assigned by Looker. |
| verify_ssl | string | false | Whether to check the SSL certificate of the server. |
| project | string | false | The project id to use in Google Cloud. |
| location | string | false | The location to use in Google Cloud. (default: us) |
| timeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, 120s is applied. |
| use_client_oauth | string | false | Use OAuth tokens instead of client_id and client_secret. (default: false) If a header name is provided, it will be used instead of "Authorization". |
| show_hidden_models | string | false | Show or hide hidden models. (default: true) |
| show_hidden_explores | string | false | Show or hide hidden explores. (default: true) |
| show_hidden_fields | string | false | Show or hide hidden fields. (default: true) |
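
For instance, OAuth-based access could be configured roughly like the following sketch; all values here are illustrative assumptions:

```yaml
sources:
  looker-source:
    kind: looker
    base_url: https://looker.example.com  # hypothetical server URL
    # Pass end-user OAuth tokens instead of client_id/client_secret:
    use_client_oauth: true
```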
docs/en/resources/sources/mariadb.md
@@ -0,0 +1,78 @@
---
title: "MariaDB"
type: docs
weight: 1
description: >
  MariaDB is an open-source relational database compatible with MySQL.
---

## About

MariaDB is a relational database management system derived from MySQL. It
implements the MySQL protocol and client libraries and supports modern SQL
features with a focus on performance and reliability.

**Note**: MariaDB is supported using the MySQL source.

## Available Tools

- [`mysql-sql`](../tools/mysql/mysql-sql.md)
  Execute pre-defined prepared SQL queries in MariaDB.

- [`mysql-execute-sql`](../tools/mysql/mysql-execute-sql.md)
  Run parameterized SQL queries in MariaDB.

- [`mysql-list-active-queries`](../tools/mysql/mysql-list-active-queries.md)
  List active queries in MariaDB.

- [`mysql-list-tables`](../tools/mysql/mysql-list-tables.md)
  List tables in a MariaDB database.

- [`mysql-list-tables-missing-unique-indexes`](../tools/mysql/mysql-list-tables-missing-unique-indexes.md)
  List tables in a MariaDB database that do not have primary or unique indices.

- [`mysql-list-table-fragmentation`](../tools/mysql/mysql-list-table-fragmentation.md)
  List table fragmentation in MariaDB tables.

## Requirements

### Database User

This source only uses standard authentication. You will need to [create a
MariaDB user][mariadb-users] to log in to the database.

[mariadb-users]: https://mariadb.com/kb/en/create-user/

## Example

```yaml
sources:
  my_mariadb_db:
    kind: mysql
    host: 127.0.0.1
    port: 3306
    database: my_db
    user: ${MARIADB_USER}
    password: ${MARIADB_PASS}
    # Optional TLS and other driver parameters. For example, enable preferred TLS:
    # queryParams:
    #   tls: preferred
    queryTimeout: 30s # Optional: query timeout duration
```

{{< notice tip >}}
Use environment variables instead of committing credentials to source files.
{{< /notice >}}
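
For instance, the source above could back a pre-defined `mysql-sql` tool, along the lines of this sketch; the tool name, table, and query are hypothetical:

```yaml
tools:
  list_recent_orders:
    kind: mysql-sql
    source: my_mariadb_db  # references the MariaDB source defined above
    description: Lists the ten most recent orders.
    # hypothetical table; replace with a table from your own schema
    statement: SELECT * FROM orders ORDER BY created_at DESC LIMIT 10;
```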

## Reference

| **field** | **type** | **required** | **description** |
| ------------ | :------: | :----------: | ----------------------------------------------------------------------------------------------- |
| kind | string | true | Must be `mysql`. |
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
| port | string | true | Port to connect to (e.g. "3307"). |
| database | string | true | Name of the MariaDB database to connect to (e.g. "my_db"). |
| user | string | true | Name of the MariaDB user to connect as (e.g. "my-mysql-user"). |
| password | string | true | Password of the MariaDB user (e.g. "my-password"). |
| queryTimeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, no timeout is applied. |
| queryParams | map<string,string> | false | Arbitrary DSN parameters passed to the driver (e.g. `tls: preferred`, `charset: utf8mb4`). Useful for enabling TLS or other connection options. |
@@ -68,6 +68,25 @@ reputation for reliability, feature robustness, and performance.
- [`postgres-get-column-cardinality`](../tools/postgres/postgres-get-column-cardinality.md)
  Lists cardinality of columns in a table in a PostgreSQL database.

- [`postgres-list-table-stats`](../tools/postgres/postgres-list-table-stats.md)
  Lists statistics of a table in a PostgreSQL database.

- [`postgres-list-publication-tables`](../tools/postgres/postgres-list-publication-tables.md)
  Lists publication tables in a PostgreSQL database.

- [`postgres-list-tablespaces`](../tools/postgres/postgres-list-tablespaces.md)
  Lists tablespaces in a PostgreSQL database.

- [`postgres-list-pg-settings`](../tools/postgres/postgres-list-pg-settings.md)
  Lists configuration parameters for the PostgreSQL server.

- [`postgres-list-database-stats`](../tools/postgres/postgres-list-database-stats.md)
  Lists the key performance and activity statistics for each database in the PostgreSQL
  server.

- [`postgres-list-roles`](../tools/postgres/postgres-list-roles.md)
  Lists all the user-created roles in a PostgreSQL database.

### Pre-built Configurations

- [PostgreSQL using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/postgres_mcp/)

@@ -21,6 +21,10 @@ Apache Spark.
  Get a Serverless Spark batch.
- [`serverless-spark-cancel-batch`](../tools/serverless-spark/serverless-spark-cancel-batch.md)
  Cancel a running Serverless Spark batch operation.
- [`serverless-spark-create-pyspark-batch`](../tools/serverless-spark/serverless-spark-create-pyspark-batch.md)
  Create a Serverless Spark PySpark batch operation.
- [`serverless-spark-create-spark-batch`](../tools/serverless-spark/serverless-spark-create-spark-batch.md)
  Create a Serverless Spark Java batch operation.

## Requirements

@@ -10,27 +10,18 @@ aliases:

## About

The `looker-add-dashboard-element` creates a dashboard element
in the given dashboard.
The `looker-add-dashboard-element` tool creates a new tile (element) within an existing Looker dashboard.
Tiles are added in the order this tool is called for a given `dashboard_id`.

CRITICAL ORDER OF OPERATIONS:
1. Create the dashboard using `make_dashboard`.
2. Add any dashboard-level filters using `add_dashboard_filter`.
3. Then, add elements (tiles) using this tool.

It's compatible with the following sources:

- [looker](../../sources/looker.md)

`looker-add-dashboard-element` takes eleven parameters:

1. the `model`
2. the `explore`
3. the `fields` list
4. an optional set of `filters`
5. an optional set of `pivots`
6. an optional set of `sorts`
7. an optional `limit`
8. an optional `tz`
9. an optional `vis_config`
10. the `title`
11. the `dashboard_id`

## Example

```yaml
@@ -39,24 +30,37 @@ tools:
    kind: looker-add-dashboard-element
    source: looker-source
    description: |
      add_dashboard_element Tool
      This tool creates a new tile (element) within an existing Looker dashboard.
      Tiles are added in the order this tool is called for a given `dashboard_id`.

      This tool creates a new tile in a Looker dashboard using
      the query parameters and the vis_config specified.
      CRITICAL ORDER OF OPERATIONS:
      1. Create the dashboard using `make_dashboard`.
      2. Add any dashboard-level filters using `add_dashboard_filter`.
      3. Then, add elements (tiles) using this tool.

      Most of the parameters are the same as the query_url
      tool. In addition, there is a title that may be provided.
      The dashboard_id must be specified. That is obtained
      from calling make_dashboard.
      Required Parameters:
      - dashboard_id: The ID of the target dashboard, obtained from `make_dashboard`.
      - model_name, explore_name, fields: These query parameters are inherited
        from the `query` tool and are required to define the data for the tile.

      This tool can be called many times for one dashboard_id
      and the resulting tiles will be added in order.
      Optional Parameters:
      - title: An optional title for the dashboard tile.
      - pivots, filters, sorts, limit, query_timezone: These query parameters are
        inherited from the `query` tool and can be used to customize the tile's query.
      - vis_config: A JSON object defining the visualization settings for this tile.
        The structure and options are the same as for the `query_url` tool's `vis_config`.

      Connecting to Dashboard Filters:
      A dashboard element can be connected to one or more dashboard filters (created with
      `add_dashboard_filter`). To do this, specify the `name` of the dashboard filter
      and the `field` from the element's query that the filter should apply to.
      The format for specifying the field is `view_name.field_name`.
```

## Reference

| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| kind | string | true | Must be "looker-add-dashboard-element" |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
|:------------|:--------:|:------------:|----------------------------------------------------|
| kind | string | true | Must be "looker-add-dashboard-element". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
@@ -0,0 +1,75 @@
---
title: "looker-add-dashboard-filter"
type: docs
weight: 1
description: >
  The "looker-add-dashboard-filter" tool adds a filter to a specified dashboard.
aliases:
- /resources/tools/looker-add-dashboard-filter
---

## About

The `looker-add-dashboard-filter` tool adds a filter to a specified Looker dashboard.

CRITICAL ORDER OF OPERATIONS:
1. Create a dashboard using `make_dashboard`.
2. Add all desired filters using this tool (`add_dashboard_filter`).
3. Finally, add dashboard elements (tiles) using `add_dashboard_element`.

It's compatible with the following sources:

- [looker](../../sources/looker.md)

## Parameters

| **parameter** | **type** | **required** | **default** | **description** |
|:----------------------|:--------:|:-----------------:|:--------------:|-------------------------------------------------------------------------------------------------------------------------------|
| dashboard_id | string | true | none | The ID of the dashboard to add the filter to, obtained from `make_dashboard`. |
| name | string | true | none | A unique internal identifier for the filter. This name is used later in `add_dashboard_element` to bind tiles to this filter. |
| title | string | true | none | The label displayed to users in the Looker UI. |
| filter_type | string | true | `field_filter` | The type of filter. Can be `date_filter`, `number_filter`, `string_filter`, or `field_filter`. |
| default_value | string | false | none | The initial value for the filter. |
| model | string | if `field_filter` | none | The name of the LookML model, obtained from `get_models`. |
| explore | string | if `field_filter` | none | The name of the explore within the model, obtained from `get_explores`. |
| dimension | string | if `field_filter` | none | The name of the field (e.g., `view_name.field_name`) to base the filter on, obtained from `get_dimensions`. |
| allow_multiple_values | boolean | false | true | Whether the dashboard filter should allow multiple values. |
| required | boolean | false | false | Whether the dashboard filter is required to run the dashboard. |

## Example

```yaml
tools:
  add_dashboard_filter:
    kind: looker-add-dashboard-filter
    source: looker-source
    description: |
      This tool adds a filter to a Looker dashboard.

      CRITICAL ORDER OF OPERATIONS:
      1. Create a dashboard using `make_dashboard`.
      2. Add all desired filters using this tool (`add_dashboard_filter`).
      3. Finally, add dashboard elements (tiles) using `add_dashboard_element`.

      Parameters:
      - dashboard_id (required): The ID from `make_dashboard`.
      - name (required): A unique internal identifier for the filter. You will use this `name` later in `add_dashboard_element` to bind tiles to this filter.
      - title (required): The label displayed to users in the UI.
      - filter_type (required): One of `date_filter`, `number_filter`, `string_filter`, or `field_filter`.
      - default_value (optional): The initial value for the filter.

      Field Filters (`filter_type: field_filter`):
      If creating a field filter, you must also provide:
      - model
      - explore
      - dimension
      The filter will inherit suggestions and type information from this LookML field.
```

## Reference

| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| kind | string | true | Must be "looker-add-dashboard-filter". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
@@ -34,9 +34,10 @@ tools:
    kind: looker-conversational-analytics
    source: looker-source
    description: |
      Use this tool to perform data analysis, get insights,
      or answer complex questions about the contents of specific
      Looker explores.
      Use this tool to ask questions about your data using the Looker Conversational
      Analytics API. You must provide a natural language query and a list of
      1 to 5 model and explore combinations (e.g. [{'model': 'the_model', 'explore': 'the_explore'}]).
      Use the 'get_models' and 'get_explores' tools to discover available models and explores.
```

## Reference

@@ -27,13 +27,18 @@ tools:
    kind: looker-create-project-file
    source: looker-source
    description: |
      create_project_file Tool
      This tool creates a new LookML file within a specified project, populating
      it with the provided content.

      Given a project_id and a file path within the project, as well as the content
      of a LookML file, this tool will create a new file within the project.
      Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first.

      This tool must be called after the dev_mode tool has changed the session to
      dev mode.
      Parameters:
      - project_id (required): The unique ID of the LookML project.
      - file_path (required): The desired path and filename for the new file within the project.
      - content (required): The full LookML content to write into the new file.

      Output:
      A confirmation message upon successful file creation.
```

## Reference

@@ -26,13 +26,17 @@ tools:
    kind: looker-delete-project-file
    source: looker-source
    description: |
      delete_project_file Tool
      This tool permanently deletes a specified LookML file from within a project.
      Use with caution, as this action cannot be undone through the API.

      Given a project_id and a file path within the project, this tool will delete
      the file from the project.
      Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first.

      This tool must be called after the dev_mode tool has changed the session to
      dev mode.
      Parameters:
      - project_id (required): The unique ID of the LookML project.
      - file_path (required): The exact path to the LookML file to delete within the project.

      Output:
      A confirmation message upon successful file deletion.
```

## Reference

@@ -27,10 +27,13 @@ tools:
    kind: looker-dev-mode
    source: looker-source
    description: |
      dev_mode Tool
      This tool allows toggling the Looker IDE session between Development Mode and Production Mode.
      Development Mode enables making and testing changes to LookML projects.

      Passing true to this tool switches the session to dev mode. Passing false to this tool switches the
      session to production mode.
      Parameters:
      - enable (required): A boolean value.
        - `true`: Switches the current session to Development Mode.
        - `false`: Switches the current session to Production Mode.
```

## Reference

@@ -36,11 +36,17 @@ tools:
    kind: looker-generate-embed-url
    source: looker-source
    description: |
      generate_embed_url Tool
      This tool generates a signed, private embed URL for specific Looker content,
      allowing users to access it directly.

      This tool generates an embeddable URL for Looker content.
      You need to provide the type of content (e.g., 'dashboards', 'looks', 'query-visualization')
      and the ID of the content.
      Parameters:
      - type (required): The type of content to embed. Common values include:
        - `dashboards`
        - `looks`
        - `explore`
      - id (required): The unique identifier for the content.
        - For dashboards and looks, use the numeric ID (e.g., "123").
        - For explores, use the format "model_name/explore_name".
```

## Reference

@@ -26,10 +26,16 @@ tools:
    kind: looker-get-connection-databases
    source: looker-source
    description: |
      get_connection_databases Tool
      This tool retrieves a list of databases available through a specified Looker connection.
      This is only applicable for connections that support multiple databases.
      Use `get_connections` to check if a connection supports multiple databases.

      This tool will list the databases available from a connection if the connection
      supports multiple databases.
      Parameters:
      - connection_name (required): The name of the database connection, obtained from `get_connections`.

      Output:
      A JSON array of strings, where each string is the name of an available database.
      If the connection does not support multiple databases, an empty list or an error will be returned.
```

## Reference

@@ -26,10 +26,16 @@ tools:
    kind: looker-get-connection-schemas
    source: looker-source
    description: |
      get_connection_schemas Tool
      This tool retrieves a list of database schemas available through a specified
      Looker connection.

      This tool will list the schemas available from a connection, filtered by
      an optional database name.
      Parameters:
      - connection_name (required): The name of the database connection, obtained from `get_connections`.
      - database (optional): An optional database name to filter the schemas.
        Only applicable for connections that support multiple databases.

      Output:
      A JSON array of strings, where each string is the name of an available schema.
```

## Reference

@@ -26,11 +26,20 @@ tools:
    kind: looker-get-connection-table-columns
    source: looker-source
    description: |
      get_connection_table_columns Tool
      This tool retrieves a list of columns for one or more specified tables within a
      given database schema and connection.

      This tool will list the columns available from a connection, for all the tables
      given in a comma separated list of table names, filtered by the
      schema name and optional database name.
      Parameters:
      - connection_name (required): The name of the database connection, obtained from `get_connections`.
      - schema (required): The name of the schema where the tables reside, obtained from `get_connection_schemas`.
      - tables (required): A comma-separated string of table names for which to retrieve columns
        (e.g., "users,orders,products"), obtained from `get_connection_tables`.
      - database (optional): The name of the database to filter by. Only applicable for connections
        that support multiple databases (check with `get_connections`).

      Output:
      A JSON array of objects, where each object represents a column and contains details
      such as `table_name`, `column_name`, `data_type`, and `is_nullable`.
```

## Reference

@@ -27,10 +27,17 @@ tools:
    kind: looker-get-connection-tables
    source: looker-source
    description: |
      get_connection_tables Tool
      This tool retrieves a list of tables available within a specified database schema
      through a Looker connection.

      This tool will list the tables available from a connection, filtered by the
      schema name and optional database name.
      Parameters:
      - connection_name (required): The name of the database connection, obtained from `get_connections`.
      - schema (required): The name of the schema to list tables from, obtained from `get_connection_schemas`.
      - database (optional): The name of the database to filter by. Only applicable for connections
        that support multiple databases (check with `get_connections`).

      Output:
      A JSON array of strings, where each string is the name of an available table.
```

## Reference

@@ -26,11 +26,18 @@ tools:
    kind: looker-get-connections
    source: looker-source
    description: |
      get_connections Tool
      This tool retrieves a list of all database connections configured in the Looker system.

      This tool will list all the connections available in the Looker system, as
      well as the dialect name, the default schema, the database if applicable,
      and whether the connection supports multiple databases.
      Parameters:
      This tool takes no parameters.

      Output:
      A JSON array of objects, each representing a database connection and including details such as:
      - `name`: The connection's unique identifier.
      - `dialect`: The database dialect (e.g., "mysql", "postgresql", "bigquery").
      - `default_schema`: The default schema for the connection.
      - `database`: The associated database name (if applicable).
      - `supports_multiple_databases`: A boolean indicating if the connection can access multiple databases.
```

## Reference

@@ -29,25 +29,29 @@ default to 100 and 0.

```yaml
tools:
  get_dashboards:
    kind: looker-get-dashboards
    source: looker-source
    description: |
      get_dashboards Tool

      This tool is used to search for saved dashboards in a Looker instance.
      String search params use case-insensitive matching. String search
      params can contain % and '_' as SQL LIKE pattern match wildcard
      expressions. example="dan%" will match "danger" and "Danzig" but
      not "David" example="D_m%" will match "Damage" and "dump".

      Most search params can accept "IS NULL" and "NOT NULL" as special
      expressions to match or exclude (respectively) rows where the
      column is null.

      The limit and offset are used to paginate the results.

      The result of the get_dashboards tool is a list of json objects.
  get_dashboards:
    kind: looker-get-dashboards
    source: looker-source
    description: |
      This tool searches for saved dashboards in a Looker instance. It returns a list of JSON objects, each representing a dashboard.

      Search Parameters:
      - title (optional): Filter by dashboard title (supports wildcards).
      - folder_id (optional): Filter by the ID of the folder where the dashboard is saved.
      - user_id (optional): Filter by the ID of the user who created the dashboard.
      - description (optional): Filter by description content (supports wildcards).
      - id (optional): Filter by specific dashboard ID.
      - limit (optional): Maximum number of results to return. Defaults to a system limit.
      - offset (optional): Starting point for pagination.

      String Search Behavior:
      - Case-insensitive matching.
      - Supports SQL LIKE pattern match wildcards:
        - `%`: Matches any sequence of zero or more characters. (e.g., `"finan%"` matches "financial", "finance")
        - `_`: Matches any single character. (e.g., `"s_les"` matches "sales")
      - Special expressions for null checks:
        - `"IS NULL"`: Matches dashboards where the field is null.
        - `"NOT NULL"`: Excludes dashboards where the field is null.
```

## Reference

@@ -28,16 +28,20 @@ tools:
    kind: looker-get-dimensions
    source: looker-source
    description: |
      The get_dimensions tool retrieves the list of dimensions defined in
      an explore.
      This tool retrieves a list of dimensions defined within a specific Looker explore.
      Dimensions are non-aggregatable attributes or characteristics of your data
      (e.g., product name, order date, customer city) that can be used for grouping,
      filtering, or segmenting query results.

      It takes two parameters, the model_name looked up from get_models and the
      explore_name looked up from get_explores.
      Parameters:
      - model_name (required): The name of the LookML model, obtained from `get_models`.
      - explore_name (required): The name of the explore within the model, obtained from `get_explores`.

      If this returns a suggestions field for a dimension, the contents of suggestions
      can be used as filters for this field. If this returns a suggest_explore and
      suggest_dimension, a query against that explore and dimension can be used to find
      valid filters for this field.
      Output Details:
      - If a dimension includes a `suggestions` field, its contents are valid values
        that can be used directly as filters for that dimension.
      - If a `suggest_explore` and `suggest_dimension` are provided, you can query
        that specified explore and dimension to retrieve a list of valid filter values.

```

@@ -40,10 +40,13 @@ tools:
    kind: looker-get-explores
    source: looker-source
    description: |
      The get_explores tool retrieves the list of explores defined in a LookML model
      in the Looker system.
      This tool retrieves a list of explores defined within a specific LookML model.
      Explores represent a curated view of your data, typically joining several
      tables together to allow for focused analysis on a particular subject area.
      The output provides details like the explore's `name` and `label`.

      It takes one parameter, the model_name looked up from get_models.
      Parameters:
      - model_name (required): The name of the LookML model, obtained from `get_models`.
```

## Reference

@@ -24,15 +24,22 @@ It's compatible with the following sources:

```yaml
tools:
  get_dimensions:
  get_filters:
    kind: looker-get-filters
    source: looker-source
    description: |
      The get_filters tool retrieves the list of filters defined in
      an explore.
      This tool retrieves a list of "filter-only fields" defined within a specific
      Looker explore. These are special fields defined in LookML specifically to
      create user-facing filter controls that do not directly affect the `GROUP BY`
      clause of the SQL query. They are often used in conjunction with liquid templating
      to create dynamic queries.

      It takes two parameters, the model_name looked up from get_models and the
      explore_name looked up from get_explores.
      Note: Regular dimensions and measures can also be used as filters in a query.
      This tool *only* returns fields explicitly defined as `filter:` in LookML.

      Parameters:
      - model_name (required): The name of the LookML model, obtained from `get_models`.
      - explore_name (required): The name of the explore within the model, obtained from `get_explores`.
```

The response is a JSON array with the following elements:

@@ -34,21 +34,26 @@ tools:
    kind: looker-get-looks
    source: looker-source
    description: |
      get_looks Tool
      This tool searches for saved Looks (pre-defined queries and visualizations)
      in a Looker instance. It returns a list of JSON objects, each representing a Look.

      This tool is used to search for saved looks in a Looker instance.
      String search params use case-insensitive matching. String search
      params can contain % and '_' as SQL LIKE pattern match wildcard
      expressions. example="dan%" will match "danger" and "Danzig" but
      not "David" example="D_m%" will match "Damage" and "dump".
      Search Parameters:
      - title (optional): Filter by Look title (supports wildcards).
      - folder_id (optional): Filter by the ID of the folder where the Look is saved.
      - user_id (optional): Filter by the ID of the user who created the Look.
      - description (optional): Filter by description content (supports wildcards).
      - id (optional): Filter by specific Look ID.
      - limit (optional): Maximum number of results to return. Defaults to a system limit.
      - offset (optional): Starting point for pagination.

      Most search params can accept "IS NULL" and "NOT NULL" as special
      expressions to match or exclude (respectively) rows where the
      column is null.

      The limit and offset are used to paginate the results.

      The result of the get_looks tool is a list of json objects.
      String Search Behavior:
      - Case-insensitive matching.
      - Supports SQL LIKE pattern match wildcards:
        - `%`: Matches any sequence of zero or more characters. (e.g., `"dan%"` matches "danger", "Danzig")
        - `_`: Matches any single character. (e.g., `"D_m%"` matches "Damage", "dump")
      - Special expressions for null checks:
        - `"IS NULL"`: Matches Looks where the field is null.
        - `"NOT NULL"`: Excludes Looks where the field is null.
```

## Reference

@@ -28,16 +28,19 @@ tools:
    kind: looker-get-measures
    source: looker-source
    description: |
      The get_measures tool retrieves the list of measures defined in
      an explore.
      This tool retrieves a list of measures defined within a specific Looker explore.
      Measures are aggregatable metrics (e.g., total sales, average price, count of users)
      that are used for calculations and quantitative analysis in your queries.

      It takes two parameters, the model_name looked up from get_models and the
      explore_name looked up from get_explores.
      Parameters:
      - model_name (required): The name of the LookML model, obtained from `get_models`.
      - explore_name (required): The name of the explore within the model, obtained from `get_explores`.

      If this returns a suggestions field for a measure, the contents of suggestions
      can be used as filters for this field. If this returns a suggest_explore and
      suggest_dimension, a query against that explore and dimension can be used to find
      valid filters for this field.
      Output Details:
      - If a measure includes a `suggestions` field, its contents are valid values
        that can be used directly as filters for that measure.
      - If a `suggest_explore` and `suggest_dimension` are provided, you can query
        that specified explore and dimension to retrieve a list of valid filter values.

```

@@ -26,9 +26,12 @@ tools:
    kind: looker-get-models
    source: looker-source
    description: |
      The get_models tool retrieves the list of LookML models in the Looker system.
      This tool retrieves a list of available LookML models in the Looker instance.
      LookML models define the data structure and relationships that users can query.
      The output includes details like the model's `name` and `label`, which are
      essential for subsequent calls to tools like `get_explores` or `query`.

      It takes no parameters.
      This tool takes no parameters.
```

## Reference

@@ -28,11 +28,15 @@ tools:
    kind: looker-get-parameters
    source: looker-source
    description: |
      The get_parameters tool retrieves the list of parameters defined in
      an explore.
      This tool retrieves a list of parameters defined within a specific Looker explore.
      LookML parameters are dynamic input fields that allow users to influence query
      behavior without directly modifying the underlying LookML. They are often used
      with `liquid` templating to create flexible dashboards and reports, enabling
      users to choose dimensions, measures, or other query components at runtime.

      It takes two parameters, the model_name looked up from get_models and the
      explore_name looked up from get_explores.
      Parameters:
      - model_name (required): The name of the LookML model, obtained from `get_models`.
      - explore_name (required): The name of the explore within the model, obtained from `get_explores`.
```

The response is a JSON array with the following elements:

@@ -26,10 +26,15 @@ tools:
    kind: looker-get-project-file
    source: looker-source
    description: |
      get_project_file Tool
      This tool retrieves the raw content of a specific LookML file from within a project.

      Given a project_id and a file path within the project, this tool returns
      the contents of the LookML file.
      Parameters:
      - project_id (required): The unique ID of the LookML project, obtained from `get_projects`.
      - file_path (required): The path to the LookML file within the project,
        typically obtained from `get_project_files`.

      Output:
      The raw text content of the specified LookML file.
```

## Reference

@@ -26,10 +26,15 @@ tools:
    kind: looker-get-project-files
    source: looker-source
    description: |
      get_project_files Tool
      This tool retrieves a list of all LookML files within a specified project,
      providing details about each file.

      Given a project_id this tool returns the details about
      the LookML files that make up that project.
      Parameters:
      - project_id (required): The unique ID of the LookML project, obtained from `get_projects`.

      Output:
      A JSON array of objects, each representing a LookML file and containing
      details such as `path`, `id`, `type`, and `git_status`.
```

## Reference

@@ -26,10 +26,16 @@ tools:
    kind: looker-get-projects
    source: looker-source
    description: |
      get_projects Tool
      This tool retrieves a list of all LookML projects available on the Looker instance.
      It is useful for identifying projects before performing actions like retrieving
      project files or making modifications.

      This tool returns the project_id and project_name for
      all the LookML projects on the looker instance.
      Parameters:
      This tool takes no parameters.

      Output:
      A JSON array of objects, each containing the `project_id` and `project_name`
      for a LookML project.
```

## Reference

@@ -42,17 +42,18 @@ tools:
    kind: looker-health-analyze
    source: looker-source
    description: |
      health-analyze Tool
      This tool calculates the usage statistics for Looker projects, models, and explores.

      This tool calculates the usage of projects, models and explores.
      Parameters:
      - action (required): The type of resource to analyze. Can be `"projects"`, `"models"`, or `"explores"`.
      - project (optional): The specific project ID to analyze.
      - model (optional): The specific model name to analyze. Requires `project` if used without `explore`.
      - explore (optional): The specific explore name to analyze. Requires `model` if used.
      - timeframe (optional): The lookback period in days for usage data. Defaults to `90` days.
      - min_queries (optional): The minimum number of queries for a resource to be considered active. Defaults to `1`.

      It accepts 6 parameters:
      1. `action`: can be "projects", "models", or "explores"
      2. `project`: the project to analyze (optional)
      3. `model`: the model to analyze (optional)
      4. `explore`: the explore to analyze (optional)
      5. `timeframe`: the lookback period in days, default is 90
      6. `min_queries`: the minimum number of queries to consider a resource as active, default is 1
      Output:
      The result is a JSON object containing usage metrics for the specified resources.
```

## Reference

@@ -49,20 +49,22 @@ tools:
    kind: looker-health-pulse
    source: looker-source
    description: |
      health-pulse Tool
      This tool performs various health checks on a Looker instance.

      This tool takes the pulse of a Looker instance by taking
      one of the following actions:
      1. `check_db_connections`,
      2. `check_dashboard_performance`,
      3. `check_dashboard_errors`,
      4. `check_explore_performance`,
      5. `check_schedule_failures`, or
      6. `check_legacy_features`

      The `check_legacy_features` action is only available in Looker Core. If
      it is called on a Looker Core instance, you will get a notice. That notice
      should not be reported as an error.
      Parameters:
      - action (required): Specifies the type of health check to perform.
        Choose one of the following:
        - `check_db_connections`: Verifies database connectivity.
        - `check_dashboard_performance`: Assesses dashboard loading performance.
        - `check_dashboard_errors`: Identifies errors within dashboards.
        - `check_explore_performance`: Evaluates explore query performance.
        - `check_schedule_failures`: Reports on failed scheduled deliveries.
        - `check_legacy_features`: Checks for the usage of legacy features.

      Note on `check_legacy_features`:
      This action is exclusively available in Looker Core instances. If invoked
      on a non-Looker Core instance, it will return a notice rather than an error.
      This notice should be considered normal behavior and not an indication of an issue.
```

## Reference

@@ -39,20 +39,19 @@ tools:
    kind: looker-health-vacuum
    source: looker-source
    description: |
      health-vacuum Tool
      This tool identifies and suggests LookML models or explores that can be
      safely removed due to inactivity or low usage.

      This tool suggests models or explores that can removed
      because they are unused.
      Parameters:
      - action (required): The type of resource to analyze for removal candidates. Can be `"models"` or `"explores"`.
      - project (optional): The specific project ID to consider.
      - model (optional): The specific model name to consider. Requires `project` if used without `explore`.
      - explore (optional): The specific explore name to consider. Requires `model` if used.
      - timeframe (optional): The lookback period in days to assess usage. Defaults to `90` days.
      - min_queries (optional): The minimum number of queries for a resource to be considered active. Defaults to `1`.

      It accepts 6 parameters:
      1. `action`: can be "models" or "explores"
      2. `project`: the project to vacuum (optional)
      3. `model`: the model to vacuum (optional)
      4. `explore`: the explore to vacuum (optional)
      5. `timeframe`: the lookback period in days, default is 90
      6. `min_queries`: the minimum number of queries to consider a resource as active, default is 1

      The result is a list of objects that are candidates for deletion.
      Output:
      A JSON array of objects, each representing a model or explore that is a candidate for deletion due to low usage.
```

| **field** | **type** | **required** | **description** |

@@ -30,18 +30,19 @@ tools:
    kind: looker-make-dashboard
    source: looker-source
    description: |
      make_dashboard Tool
      This tool creates a new, empty dashboard in Looker. Dashboards are stored
      in the user's personal folder, and the dashboard name must be unique.
      After creation, use `add_dashboard_filter` to add filters and
      `add_dashboard_element` to add content tiles.

      This tool creates a new dashboard in Looker. The dashboard is
      initially empty and the add_dashboard_element tool is used to
      add content to the dashboard.
      Required Parameters:
      - title (required): A unique title for the new dashboard.
      - description (required): A brief description of the dashboard's purpose.

      The newly created dashboard will be created in the user's
      personal folder in looker. The dashboard name must be unique.

      The result is a json document with a link to the newly
      created dashboard and the id of the dashboard. Use the id
      when calling add_dashboard_element.
      Output:
      A JSON object containing a link (`url`) to the newly created dashboard and
      its unique `id`. This `dashboard_id` is crucial for subsequent calls to
      `add_dashboard_filter` and `add_dashboard_element`.
```

## Reference

@@ -40,20 +40,24 @@ tools:
    kind: looker-make-look
    source: looker-source
    description: |
      make_look Tool
      This tool creates a new Look (saved query with visualization) in Looker.
      The Look will be saved in the user's personal folder, and its name must be unique.

      This tool creates a new look in Looker, using the query
      parameters and the vis_config specified.
      Required Parameters:
      - title: A unique title for the new Look.
      - description: A brief description of the Look's purpose.
      - model_name: The name of the LookML model (from `get_models`).
      - explore_name: The name of the explore (from `get_explores`).
      - fields: A list of field names (dimensions, measures, filters, or parameters) to include in the query.

      Most of the parameters are the same as the query_url
      tool. In addition, there is a title and a description
      that must be provided.
      Optional Parameters:
      - pivots, filters, sorts, limit, query_timezone: These parameters are identical
        to those described for the `query` tool.
      - vis_config: A JSON object defining the visualization settings for the Look.
        The structure and options are the same as for the `query_url` tool's `vis_config`.

      The newly created look will be created in the user's
      personal folder in looker. The look name must be unique.

      The result is a json document with a link to the newly
      created look.
      Output:
      A JSON object containing a link (`url`) to the newly created Look, along with its `id` and `slug`.
```

## Reference

@@ -41,38 +41,17 @@ tools:
    kind: looker-query-sql
    source: looker-source
    description: |
      Query SQL Tool
      This tool generates the underlying SQL query that Looker would execute
      against the database for a given set of parameters. It is useful for
      understanding how Looker translates a request into SQL.

      This tool is used to generate a sql query against the LookML model. The
      model, explore, and fields list must be specified. Pivots,
      filters and sorts are optional.
      Parameters:
      All parameters for this tool are identical to those of the `query` tool.
      This includes `model_name`, `explore_name`, `fields` (required),
      and optional parameters like `pivots`, `filters`, `sorts`, `limit`, and `query_timezone`.

      The model can be found from the get_models tool. The explore
      can be found from the get_explores tool passing in the model.
      The fields can be found from the get_dimensions, get_measures,
      get_filters, and get_parameters tools, passing in the model
      and the explore.

      Provide a model_id and explore_name, then a list
      of fields. Optionally a list of pivots can be provided.
      The pivots must also be included in the fields list.

      Filters are provided as a map of {"field.id": "condition",
      "field.id2": "condition2", ...}. Do not put the field.id in
      quotes. Filter expressions can be found at
      https://cloud.google.com/looker/docs/filter-expressions.

      Sorts can be specified like [ "field.id desc 0" ].

      An optional row limit can be added. If not provided the limit
      will default to 500. "-1" can be specified for unlimited.

      An optional query timezone can be added. The query_timezone
      will default to that of the workstation where this MCP server
      is running, or Etc/UTC if that can't be determined. Not all
      models support custom timezones.

      The result of the query tool is the sql string.
      Output:
      The result of this tool is the raw SQL text.
```

## Reference

@@ -37,17 +37,21 @@ tools:
    kind: looker-query-url
    source: looker-source
    description: |
      Query URL Tool

      This tool generates a shareable URL for a Looker query, allowing users to
      explore the query further within the Looker UI. It returns the generated URL,
      along with the `query_id` and `slug`.

      Parameters:
      All query parameters (e.g., `model_name`, `explore_name`, `fields`, `pivots`,
      `filters`, `sorts`, `limit`, `query_timezone`) are the same as the `query` tool.
      Additionally, it accepts an optional `vis_config` parameter:
      - vis_config (optional): A JSON object that controls the default visualization
        settings for the generated query.

      vis_config Details:
      The `vis_config` object supports a wide range of properties for various chart types.
      Here are some notes on making visualizations.

      ### Cartesian Charts (Area, Bar, Column, Line, Scatter)

@@ -41,38 +41,24 @@ tools:
    kind: looker-query
    source: looker-source
    description: |
      Query Tool

      This tool runs a query against a LookML model and returns the results in JSON format.

      Required Parameters:
      - model_name: The name of the LookML model (from `get_models`).
      - explore_name: The name of the explore (from `get_explores`).
      - fields: A list of field names (dimensions, measures, filters, or parameters) to include in the query.

      Optional Parameters:
      - pivots: A list of fields to pivot the results by. These fields must also be included in the `fields` list.
      - filters: A map of filter expressions, e.g., `{"view.field": "value", "view.date": "7 days"}`.
        - Do not quote field names.
        - Use `not null` instead of `-NULL`.
        - If a value contains a comma, enclose it in single quotes (e.g., "'New York, NY'").
      - sorts: A list of fields to sort by, optionally including direction (e.g., `["view.field desc"]`).
      - limit: Row limit (default 500). Use "-1" for unlimited.
      - query_timezone: specific timezone for the query (e.g. `America/Los_Angeles`).

      Note: Use `get_dimensions`, `get_measures`, `get_filters`, and `get_parameters` to find valid fields.

      The result of the query tool is JSON.
```
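
To make the parameter shapes above concrete, here is a sketch of a request an agent might send, assuming a hypothetical `orders` explore on an `ecommerce` model (all field names and values are illustrative):

```json
{
  "model_name": "ecommerce",
  "explore_name": "orders",
  "fields": ["orders.created_date", "orders.status", "orders.count"],
  "pivots": ["orders.status"],
  "filters": {"orders.created_date": "30 days", "users.city": "'New York, NY'"},
  "sorts": ["orders.count desc"],
  "limit": "100"
}
```

Note that the pivot field also appears in `fields`, and the comma-containing filter value is wrapped in single quotes, as the description requires.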
@@ -27,11 +27,15 @@ tools:
    kind: looker-run-dashboard
    source: looker-source
    description: |
      run_dashboard Tool

      This tool executes the queries associated with each tile in a specified dashboard
      and returns the aggregated data in a JSON structure.

      Parameters:
      - dashboard_id (required): The unique identifier of the dashboard to run,
        typically obtained from the `get_dashboards` tool.

      Output:
      The data from all dashboard tiles is returned as a JSON object.
```

## Reference

@@ -27,11 +27,15 @@ tools:
    kind: looker-run-look
    source: looker-source
    description: |
      run_look Tool

      This tool executes the query associated with a saved Look and
      returns the resulting data in a JSON structure.

      Parameters:
      - look_id (required): The unique identifier of the Look to run,
        typically obtained from the `get_looks` tool.

      Output:
      The query results are returned as a JSON object.
```

## Reference

@@ -27,13 +27,17 @@ tools:
    kind: looker-update-project-file
    source: looker-source
    description: |
      update_project_file Tool

      This tool modifies the content of an existing LookML file within a specified project.
      Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first.

      Parameters:
      - project_id (required): The unique ID of the LookML project.
      - file_path (required): The exact path to the LookML file to modify within the project.
      - content (required): The new, complete LookML content to overwrite the existing file.

      Output:
      A confirmation message upon successful file modification.
```
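
As an illustration, a call to this tool might carry parameters like the following sketch; the project ID, file path, and LookML content are hypothetical:

```json
{
  "project_id": "my_lookml_project",
  "file_path": "views/orders.view.lkml",
  "content": "view: orders {\n  sql_table_name: public.orders ;;\n}"
}
```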

## Reference

@@ -64,5 +64,3 @@ tools:
| filterParams   | list   | false | A list of parameter objects that define the variables used in the `filterPayload`. |
| projectPayload | string | false | An optional MongoDB projection document to specify which fields to include (1) or exclude (0) in the result. |
| projectParams  | list   | false | A list of parameter objects for the `projectPayload`. |
| sortPayload    | string | false | An optional MongoDB sort document. Useful for selecting which document to return if the filter matches multiple (e.g., get the most recent). |
| sortParams     | list   | false | A list of parameter objects for the `sortPayload`. |
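
To illustrate how the optional payloads combine, the following sketch shows a rendered projection and sort that would return only the `name` field of the most recently created matching document; the field names are illustrative, not part of any fixed schema:

```json
{
  "projectPayload": {"name": 1, "_id": 0},
  "sortPayload": {"created_at": -1}
}
```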
@@ -48,11 +48,11 @@ in the `data` parameter, like this:

## Reference

| **field**   | **type** | **required** | **description** |
|:------------|:---------|:-------------|:----------------|
| kind        | string   | true         | Must be `mongodb-insert-many`. |
| source      | string   | true         | The name of the `mongodb` source to use. |
| description | string   | true         | A description of the tool that is passed to the LLM. |
| database    | string   | true         | The name of the MongoDB database containing the collection. |
| collection  | string   | true         | The name of the MongoDB collection into which the documents will be inserted. |
| canonical   | bool     | false        | Determines if the data string is parsed using MongoDB's Canonical or Relaxed Extended JSON format. Defaults to `false`. |
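
To see what the `canonical` flag changes, the same hypothetical document is shown below in both Extended JSON flavors; the top-level keys `canonical` and `relaxed` merely label the two renderings:

```json
{
  "canonical": {"qty": {"$numberInt": "42"}, "price": {"$numberDouble": "9.99"}},
  "relaxed": {"qty": 42, "price": 9.99}
}
```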
@@ -43,11 +43,11 @@ An LLM would call this tool by providing the document as a JSON string in the

## Reference

| **field**   | **type** | **required** | **description** |
|:------------|:---------|:-------------|:----------------|
| kind        | string   | true         | Must be `mongodb-insert-one`. |
| source      | string   | true         | The name of the `mongodb` source to use. |
| description | string   | true         | A description of the tool that is passed to the LLM. |
| database    | string   | true         | The name of the MongoDB database containing the collection. |
| collection  | string   | true         | The name of the MongoDB collection into which the document will be inserted. |
| canonical   | bool     | false        | Determines if the data string is parsed using MongoDB's Canonical or Relaxed Extended JSON format. Defaults to `false`. |

@@ -57,16 +57,16 @@ tools:

## Reference

| **field**      | **type** | **required** | **description** |
|:---------------|:---------|:-------------|:----------------|
| kind           | string   | true         | Must be `mongodb-update-many`. |
| source         | string   | true         | The name of the `mongodb` source to use. |
| description    | string   | true         | A description of the tool that is passed to the LLM. |
| database       | string   | true         | The name of the MongoDB database containing the collection. |
| collection     | string   | true         | The name of the MongoDB collection in which to update documents. |
| filterPayload  | string   | true         | The MongoDB query filter document to select the documents for updating. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
| filterParams   | list     | false        | A list of parameter objects that define the variables used in the `filterPayload`. |
| updatePayload  | string   | true         | The MongoDB update document. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
| updateParams   | list     | true         | A list of parameter objects that define the variables used in the `updatePayload`. |
| canonical      | bool     | false        | Determines if the `filterPayload` and `updatePayload` strings are parsed using MongoDB's Canonical or Relaxed Extended JSON format. **Canonical** is stricter about type representation, while **Relaxed** is more lenient. Defaults to `false`. |
| upsert         | bool     | false        | If `true`, a new document is created if no document matches the `filterPayload`. Defaults to `false`. |

@@ -57,16 +57,16 @@ tools:

## Reference

| **field**      | **type** | **required** | **description** |
|:---------------|:---------|:-------------|:----------------|
| kind           | string   | true         | Must be `mongodb-update-one`. |
| source         | string   | true         | The name of the `mongodb` source to use. |
| description    | string   | true         | A description of the tool that is passed to the LLM. |
| database       | string   | true         | The name of the MongoDB database containing the collection. |
| collection     | string   | true         | The name of the MongoDB collection to update a document in. |
| filterPayload  | string   | true         | The MongoDB query filter document to select the document for updating. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
| filterParams   | list     | false        | A list of parameter objects that define the variables used in the `filterPayload`. |
| updatePayload  | string   | true         | The MongoDB update document, which specifies the modifications. This often uses update operators like `$set`. It's written as a Go template, using `{{json .param_name}}` to insert parameters. |
| updateParams   | list     | true         | A list of parameter objects that define the variables used in the `updatePayload`. |
| canonical      | bool     | false        | Determines if the `updatePayload` string is parsed using MongoDB's Canonical or Relaxed Extended JSON format. **Canonical** is stricter about type representation (e.g., `{"$numberInt": "42"}`), while **Relaxed** is more lenient (e.g., `42`). Defaults to `false`. |
| upsert         | bool     | false        | If `true`, a new document is created if no document matches the `filterPayload`. Defaults to `false`. |
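
As a sketch of the template mechanics, a `filterPayload` of `{"_id": {{json .order_id}}}` and an `updatePayload` of `{"$set": {"status": {{json .new_status}}}}` (hypothetical parameter names) might render to documents like these at call time:

```json
{
  "filter": {"_id": {"$oid": "665f1c0ffee0ddba11ca7e90"}},
  "update": {"$set": {"status": "shipped"}}
}
```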
@@ -0,0 +1,95 @@
---
title: "postgres-list-database-stats"
type: docs
weight: 1
description: >
  The "postgres-list-database-stats" tool lists key performance and activity statistics of PostgreSQL databases.
aliases:
- /resources/tools/postgres-list-database-stats
---

## About

The `postgres-list-database-stats` tool lists the key performance and activity statistics for each PostgreSQL database in the instance, offering insights into cache efficiency, transaction throughput, row-level activity, temporary file usage, and contention. It's compatible with
any of the following sources:

- [alloydb-postgres](../../sources/alloydb-pg.md)
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
- [postgres](../../sources/postgres.md)

`postgres-list-database-stats` lists detailed information as JSON for each database. The tool
takes the following input parameters:

- `database_name` (optional): A text to filter results by database name. Default: `""`
- `include_templates` (optional): Boolean, set to `true` to include template databases in the results. Default: `false`
- `database_owner` (optional): A text to filter results by database owner. Default: `""`
- `default_tablespace` (optional): A text to filter results by the default tablespace name. Default: `""`
- `order_by` (optional): Specifies the sorting order. Valid values are `'size'` (descending) or `'commit'` (descending). Default: `database_name` ascending.
- `limit` (optional): The maximum number of databases to return. Default: `10`
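
For example, a request for the five largest non-template databases owned by a hypothetical `app_owner` role could look like this:

```json
{
  "database_owner": "app_owner",
  "include_templates": false,
  "order_by": "size",
  "limit": 5
}
```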

## Example

```yaml
tools:
  list_database_stats:
    kind: postgres-list-database-stats
    source: postgres-source
    description: |
      Lists the key performance and activity statistics for each PostgreSQL
      database in the instance, offering insights into cache efficiency,
      transaction throughput, row-level activity, temporary file usage, and
      contention. It returns: the database name, whether the database is
      connectable, database owner, default tablespace name, the percentage of
      data blocks found in the buffer cache rather than being read from disk
      (a higher value indicates better cache performance), the total number of
      disk blocks read from disk, the total number of times disk blocks were
      found already in the cache; the total number of committed transactions,
      the total number of rolled back transactions, the percentage of rolled
      back transactions compared to the total number of completed
      transactions, the total number of rows returned by queries, the total
      number of live rows fetched by scans, the total number of rows inserted,
      the total number of rows updated, the total number of rows deleted, the
      number of temporary files created by queries, the total size of
      temporary files used by queries in bytes, the number of query
      cancellations due to conflicts with recovery, the number of deadlocks
      detected, the current number of active backend connections, the
      timestamp when the database statistics were last reset, and the total
      database size in bytes.
```

The response is a JSON array with the following elements:

```json
{
  "database_name": "Name of the database",
  "is_connectable": "Boolean indicating whether the database allows connections",
  "database_owner": "Username of the database owner",
  "default_tablespace": "Name of the default tablespace for the database",
  "cache_hit_ratio_percent": "The percentage of data blocks found in the buffer cache rather than being read from disk",
  "blocks_read_from_disk": "The total number of disk blocks read for this database",
  "blocks_hit_in_cache": "The total number of times disk blocks were found already in the cache.",
  "xact_commit": "The total number of committed transactions",
  "xact_rollback": "The total number of rolled back transactions",
  "rollback_ratio_percent": "The percentage of rolled back transactions compared to the total number of completed transactions",
  "rows_returned_by_queries": "The total number of rows returned by queries",
  "rows_fetched_by_scans": "The total number of live rows fetched by scans",
  "tup_inserted": "The total number of rows inserted",
  "tup_updated": "The total number of rows updated",
  "tup_deleted": "The total number of rows deleted",
  "temp_files": "The number of temporary files created by queries",
  "temp_size_bytes": "The total size of temporary files used by queries in bytes",
  "conflicts": "Number of query cancellations due to conflicts",
  "deadlocks": "Number of deadlocks detected",
  "active_connections": "The current number of active backend connections",
  "statistics_last_reset": "The timestamp when the database statistics were last reset",
  "database_size_bytes": "The total disk size of the database in bytes"
}
```

## Reference

| **field**   | **type** | **required** | **description**                                      |
|-------------|:--------:|:------------:|------------------------------------------------------|
| kind        | string   | true         | Must be "postgres-list-database-stats".              |
| source      | string   | true         | Name of the source the SQL should execute on.        |
| description | string   | false        | Description of the tool that is passed to the agent. |
@@ -21,12 +21,10 @@ any of the following sources:
`postgres-list-indexes` lists detailed information as JSON for indexes. The tool
takes the following input parameters:

- `table_name` (optional): A text to filter results by table name. Default: `""`
- `index_name` (optional): A text to filter results by index name. Default: `""`
- `schema_name` (optional): A text to filter results by schema name. Default: `""`
- `only_unused` (optional): If true, returns indexes that have never been used.
- `limit` (optional): The maximum number of rows to return. Default: `50`.

## Example

@@ -0,0 +1,59 @@
---
title: "postgres-list-pg-settings"
type: docs
weight: 1
description: >
  The "postgres-list-pg-settings" tool lists PostgreSQL run-time configuration settings.
aliases:
- /resources/tools/postgres-list-pg-settings
---

## About

The `postgres-list-pg-settings` tool lists the configuration parameters for the postgres server, their current values, and related information. It's compatible with any of the following sources:

- [alloydb-postgres](../../sources/alloydb-pg.md)
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
- [postgres](../../sources/postgres.md)

`postgres-list-pg-settings` lists detailed information as JSON for each setting. The tool
takes the following input parameters:

- `setting_name` (optional): A text to filter results by setting name. Default: `""`
- `limit` (optional): The maximum number of rows to return. Default: `50`.
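
For instance, a request that looks up a single well-known setting might pass:

```json
{
  "setting_name": "shared_buffers",
  "limit": 1
}
```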

## Example

```yaml
tools:
  list_pg_settings:
    kind: postgres-list-pg-settings
    source: postgres-source
    description: |
      Lists configuration parameters for the postgres server ordered lexicographically,
      with a default limit of 50 rows. It returns the parameter name, its current setting,
      unit of measurement, a short description, the source of the current setting (e.g.,
      default, configuration file, session), and whether a restart is required when the
      parameter value is changed.
```

The response is a JSON array with the following elements:

```json
{
  "name": "Setting name",
  "current_value": "Current value of the setting",
  "unit": "Unit of the setting",
  "short_desc": "Short description of the setting",
  "source": "Source of the current value (e.g., default, configuration file, session)",
  "requires_restart": "Indicates if a server restart is required to apply a change ('Yes', 'No', or 'No (Reload sufficient)')"
}
```

## Reference

| **field**   | **type** | **required** | **description**                                      |
|-------------|:--------:|:------------:|------------------------------------------------------|
| kind        | string   | true         | Must be "postgres-list-pg-settings".                 |
| source      | string   | true         | Name of the source the SQL should execute on.        |
| description | string   | false        | Description of the tool that is passed to the agent. |
@@ -0,0 +1,66 @@
---
title: "postgres-list-publication-tables"
type: docs
weight: 1
description: >
  The "postgres-list-publication-tables" tool lists publication tables in a Postgres database.
aliases:
- /resources/tools/postgres-list-publication-tables
---

## About

The `postgres-list-publication-tables` tool lists all publication tables in the database. It's compatible with any of the following sources:

- [alloydb-postgres](../../sources/alloydb-pg.md)
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
- [postgres](../../sources/postgres.md)

`postgres-list-publication-tables` lists detailed information as JSON for publication tables. A publication table in PostgreSQL is a
table that is explicitly included as a source for replication within a publication (a set of changes generated from a table or group
of tables) as part of the logical replication feature. The tool takes the following input parameters:

- `table_names` (optional): Filters by a comma-separated list of table names. Default: `""`
- `publication_names` (optional): Filters by a comma-separated list of publication names. Default: `""`
- `schema_names` (optional): Filters by a comma-separated list of schema names. Default: `""`
- `limit` (optional): The maximum number of rows to return. Default: `50`
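
For example, a request scoped to a hypothetical `orders_pub` publication in the `public` schema could look like this:

```json
{
  "publication_names": "orders_pub",
  "schema_names": "public",
  "limit": 10
}
```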

## Example

```yaml
tools:
  list_publication_tables:
    kind: postgres-list-publication-tables
    source: postgres-source
    description: |
      Lists all tables that are explicitly part of a publication in the database.
      Tables that are part of a publication via 'FOR ALL TABLES' are not included,
      unless they are also explicitly added to the publication.
      Returns the publication name, schema name, and table name, along with
      definition details indicating if it publishes all tables, whether it
      replicates inserts, updates, deletes, or truncates, and the publication
      owner.
```

The response is a JSON array with the following elements:

```json
{
  "publication_name": "Name of the publication",
  "schema_name": "Name of the schema the table belongs to",
  "table_name": "Name of the table",
  "publishes_all_tables": "boolean indicating if the publication was created with FOR ALL TABLES",
  "publishes_inserts": "boolean indicating if INSERT operations are replicated",
  "publishes_updates": "boolean indicating if UPDATE operations are replicated",
  "publishes_deletes": "boolean indicating if DELETE operations are replicated",
  "publishes_truncates": "boolean indicating if TRUNCATE operations are replicated",
  "publication_owner": "Username of the database role that owns the publication"
}
```

## Reference

| **field**   | **type** | **required** | **description**                                      |
|-------------|:--------:|:------------:|------------------------------------------------------|
| kind        | string   | true         | Must be "postgres-list-publication-tables".          |
| source      | string   | true         | Name of the source the SQL should execute on.        |
| description | string   | false        | Description of the tool that is passed to the agent. |

docs/en/resources/tools/postgres/postgres-list-roles.md (new file, 70 lines)

@@ -0,0 +1,70 @@
---
title: "postgres-list-roles"
type: docs
weight: 1
description: >
  The "postgres-list-roles" tool lists user-created roles in a Postgres database.
aliases:
- /resources/tools/postgres-list-roles
---

## About

The `postgres-list-roles` tool lists all the user-created roles in the instance, excluding system roles (like `cloudsql%` or `pg_%`). It provides details about each role's attributes and memberships. It's compatible with
any of the following sources:

- [alloydb-postgres](../../sources/alloydb-pg.md)
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
- [postgres](../../sources/postgres.md)

`postgres-list-roles` lists detailed information as JSON for each role. The tool
takes the following input parameters:

- `role_name` (optional): A text to filter results by role name. Default: `""`
- `limit` (optional): The maximum number of roles to return. Default: `50`
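
For example, a request for roles whose names match a hypothetical `app_` prefix could look like this:

```json
{
  "role_name": "app_",
  "limit": 20
}
```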

## Example

```yaml
tools:
  list_roles:
    kind: postgres-list-roles
    source: postgres-source
    description: |
      Lists all the user-created roles in the instance. It returns the role name,
      Object ID, the maximum number of concurrent connections the role can make,
      along with boolean indicators for: superuser status, privilege inheritance
      from member roles, ability to create roles, ability to create databases,
      ability to log in, replication privilege, and the ability to bypass
      row-level security, the password expiration timestamp, a list of direct
      members belonging to this role, and a list of other roles/groups that this
      role is a member of.
```

The response is a JSON array with the following elements:

```json
{
  "role_name": "Name of the role",
  "oid": "Object ID of the role",
  "connection_limit": "Maximum concurrent connections allowed (-1 for no limit)",
  "is_superuser": "Boolean, true if the role is a superuser",
  "inherits_privileges": "Boolean, true if the role inherits privileges of roles it is a member of",
  "can_create_roles": "Boolean, true if the role can create other roles",
  "can_create_db": "Boolean, true if the role can create databases",
  "can_login": "Boolean, true if the role can log in",
  "is_replication_role": "Boolean, true if this is a replication role",
  "bypass_rls": "Boolean, true if the role bypasses row-level security policies",
  "valid_until": "Timestamp until the password is valid (null if forever)",
  "direct_members": ["Array of role names that are direct members of this role"],
  "member_of": ["Array of role names that this role is a member of"]
}
```

## Reference

| **field**   | **type** | **required** | **description**                                      |
|-------------|:--------:|:------------:|------------------------------------------------------|
| kind        | string   | true         | Must be "postgres-list-roles".                       |
| source      | string   | true         | Name of the source the SQL should execute on.        |
| description | string   | false        | Description of the tool that is passed to the agent. |
@@ -21,9 +21,9 @@ the following sources:
`postgres-list-schemas` lists detailed information as JSON for each schema. The
tool takes the following input parameters:

- `schema_name` (optional): A text to filter results by schema name. Default: `""`
- `owner` (optional): A text to filter results by owner name. Default: `""`
- `limit` (optional): The maximum number of rows to return. Default: `50`.

## Example

@@ -20,9 +20,9 @@ Postgres database. It's compatible with any of the following sources:
`postgres-list-sequences` lists detailed information as JSON for all sequences.
The tool takes the following input parameters:

- `sequence_name` (optional): A text to filter results by sequence name. The
  input is used within a LIKE clause. Default: `""`
- `schema_name` (optional): A text to filter results by schema name. The input is
  used within a LIKE clause. Default: `""`
- `limit` (optional): The maximum number of rows to return. Default: `50`.
@@ -45,9 +45,9 @@ The response is a json array with the following elements:

```json
{
  "sequence_name": "sequence name",
  "schema_name": "schema name",
  "sequence_owner": "owner of the sequence",
  "data_type": "data type of the sequence",
  "start_value": "starting value of the sequence",
  "min_value": "minimum value of the sequence",

docs/en/resources/tools/postgres/postgres-list-table-stats.md (new file, 171 lines)

@@ -0,0 +1,171 @@
---
title: "postgres-list-table-stats"
type: docs
weight: 1
description: >
  The "postgres-list-table-stats" tool reports table statistics including size, scan metrics, and bloat indicators for PostgreSQL tables.
aliases:
- /resources/tools/postgres-list-table-stats
---

## About

The `postgres-list-table-stats` tool queries `pg_stat_all_tables` to provide comprehensive statistics about tables in the database. It calculates useful metrics like index scan ratio and dead row ratio to help identify performance issues and table bloat.

Compatible sources:

- [alloydb-postgres](../../sources/alloydb-pg.md)
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
- [postgres](../../sources/postgres.md)

The tool returns a JSON array where each element represents statistics for a table, including scan metrics, row counts, and vacuum history. Results are sorted by sequential scans by default and limited to 50 rows.

## Example

```yaml
tools:
  list_table_stats:
    kind: postgres-list-table-stats
    source: postgres-source
    description: "Lists table statistics including size, scans, and bloat metrics."
```

### Example Requests

**List default tables in public schema:**

```json
{}
```

**Filter by specific table name:**

```json
{
  "table_name": "users"
}
```

**Filter by owner and sort by size:**

```json
{
  "owner": "app_user",
  "sort_by": "size",
  "limit": 10
}
```

**Find tables with high dead row ratio:**

```json
{
  "sort_by": "dead_rows",
  "limit": 20
}
```

### Example Response

```json
[
  {
    "schema_name": "public",
    "table_name": "users",
    "owner": "postgres",
    "total_size_bytes": 8388608,
    "seq_scan": 150,
    "idx_scan": 450,
    "idx_scan_ratio_percent": 75.0,
    "live_rows": 50000,
    "dead_rows": 1200,
    "dead_row_ratio_percent": 2.34,
    "n_tup_ins": 52000,
    "n_tup_upd": 12500,
    "n_tup_del": 800,
    "last_vacuum": "2025-11-27T10:30:00Z",
    "last_autovacuum": "2025-11-27T09:15:00Z",
    "last_autoanalyze": "2025-11-27T09:16:00Z"
  },
  {
    "schema_name": "public",
    "table_name": "orders",
    "owner": "postgres",
    "total_size_bytes": 16777216,
    "seq_scan": 50,
    "idx_scan": 1200,
    "idx_scan_ratio_percent": 96.0,
    "live_rows": 100000,
    "dead_rows": 5000,
    "dead_row_ratio_percent": 4.76,
    "n_tup_ins": 120000,
    "n_tup_upd": 45000,
    "n_tup_del": 15000,
    "last_vacuum": "2025-11-26T14:22:00Z",
    "last_autovacuum": "2025-11-27T02:30:00Z",
    "last_autoanalyze": "2025-11-27T02:31:00Z"
  }
]
```

## Parameters

| parameter   | type    | required | default  | description |
|-------------|---------|----------|----------|-------------|
| schema_name | string  | false    | "public" | Optional: A specific schema name to filter by (supports partial matching) |
| table_name  | string  | false    | null     | Optional: A specific table name to filter by (supports partial matching) |
| owner       | string  | false    | null     | Optional: A specific owner to filter by (supports partial matching) |
| sort_by     | string  | false    | null     | Optional: The column to sort by. Valid values: `size`, `dead_rows`, `seq_scan`, `idx_scan` (defaults to `seq_scan`) |
| limit       | integer | false    | 50       | Optional: The maximum number of results to return |

## Output Fields Reference

| field                  | type      | description |
|------------------------|-----------|-------------|
| schema_name            | string    | Name of the schema containing the table. |
| table_name             | string    | Name of the table. |
| owner                  | string    | PostgreSQL user who owns the table. |
| total_size_bytes       | integer   | Total size of the table including all indexes in bytes. |
| seq_scan               | integer   | Number of sequential (full table) scans performed on this table. |
| idx_scan               | integer   | Number of index scans performed on this table. |
| idx_scan_ratio_percent | decimal   | Percentage of total scans (seq_scan + idx_scan) that used an index. A low ratio may indicate missing or ineffective indexes. |
| live_rows              | integer   | Number of live (non-deleted) rows in the table. |
| dead_rows              | integer   | Number of dead (deleted but not yet vacuumed) rows in the table. |
| dead_row_ratio_percent | decimal   | Percentage of dead rows relative to total rows. High values indicate potential table bloat. |
| n_tup_ins              | integer   | Total number of rows inserted into this table. |
| n_tup_upd              | integer   | Total number of rows updated in this table. |
| n_tup_del              | integer   | Total number of rows deleted from this table. |
| last_vacuum            | timestamp | Timestamp of the last manual VACUUM operation on this table (null if never manually vacuumed). |
| last_autovacuum        | timestamp | Timestamp of the last automatic vacuum operation on this table. |
| last_autoanalyze       | timestamp | Timestamp of the last automatic analyze operation on this table. |

## Interpretation Guide

### Index Scan Ratio (`idx_scan_ratio_percent`)

- **High ratio (> 80%)**: Table queries are efficiently using indexes. This is typically desirable.
- **Low ratio (< 20%)**: Many sequential scans indicate missing indexes or queries that cannot use existing indexes effectively. Consider adding indexes to frequently searched columns.
- **0%**: No index scans performed; all queries performed sequential scans. May warrant index investigation.

### Dead Row Ratio (`dead_row_ratio_percent`)

- **< 2%**: Healthy table with minimal bloat.
- **2-5%**: Moderate bloat; consider running VACUUM if not recent.
- **> 5%**: High bloat; may benefit from manual VACUUM or VACUUM FULL.

### Vacuum History

- **Null `last_vacuum`**: Table has never been manually vacuumed; relies on autovacuum.
- **Recent `last_autovacuum`**: Autovacuum is actively managing the table.
- **Stale timestamps**: Consider running manual VACUUM and ANALYZE if maintenance windows exist.

## Performance Considerations

- Statistics are collected from `pg_stat_all_tables`, which resets on PostgreSQL restart.
- Run `ANALYZE` on tables to update statistics for accurate query planning.
- The tool defaults to limiting results to 50 rows; adjust the `limit` parameter for larger result sets.
- Filtering by schema, table name, or owner uses `LIKE` pattern matching (supports partial matches).

## Use Cases

- **Finding ineffective indexes**: Identify tables with low `idx_scan_ratio_percent` to evaluate index strategy.
- **Detecting table bloat**: Sort by `dead_rows` to find tables needing VACUUM.
- **Monitoring growth**: Track `total_size_bytes` over time for capacity planning.
- **Audit maintenance**: Check `last_autovacuum` and `last_autoanalyze` timestamps to ensure maintenance tasks are running.
- **Understanding workload**: Examine `seq_scan` vs `idx_scan` ratios to understand query patterns.
@@ -0,0 +1,56 @@
---
title: "postgres-list-tablespaces"
type: docs
weight: 1
description: >
  The "postgres-list-tablespaces" tool lists tablespaces in a Postgres database.
aliases:
- /resources/tools/postgres-list-tablespaces
---

## About

The `postgres-list-tablespaces` tool lists available tablespaces in the database. It's compatible with any of the following sources:

- [alloydb-postgres](../../sources/alloydb-pg.md)
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
- [postgres](../../sources/postgres.md)

`postgres-list-tablespaces` lists detailed information as JSON for tablespaces. The tool takes the following input parameters:

- `tablespace_name` (optional): A text to filter results by tablespace name. Default: `""`
- `limit` (optional): The maximum number of tablespaces to return. Default: `50`
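
For example, a request for the built-in `pg_default` tablespace could look like this:

```json
{
  "tablespace_name": "pg_default",
  "limit": 10
}
```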

## Example

```yaml
tools:
  list_tablespaces:
    kind: postgres-list-tablespaces
    source: postgres-source
    description: |
      Lists all tablespaces in the database. Returns the tablespace name,
      owner name, size in bytes (if the current user has CREATE privileges on
      the tablespace, otherwise NULL), internal object ID, the access control
      list regarding permissions, and any specific tablespace options.
```

The response is a JSON array with the following elements:

```json
{
  "tablespace_name": "name of the tablespace",
  "owner_username": "owner of the tablespace",
  "size_in_bytes": "size in bytes if the current user has CREATE privileges on the tablespace, otherwise NULL",
  "oid": "Object ID of the tablespace",
  "spcacl": "Access privileges",
  "spcoptions": "Tablespace-level options (e.g., seq_page_cost, random_page_cost)"
}
```

## Reference

| **field**   | **type** | **required** | **description**                                      |
|-------------|:--------:|:------------:|------------------------------------------------------|
| kind        | string   | true         | Must be "postgres-list-tablespaces".                 |
| source      | string   | true         | Name of the source the SQL should execute on.        |
| description | string   | false        | Description of the tool that is passed to the agent. |
@@ -19,11 +19,11 @@ a Postgres database, excluding those in system schemas (`pg_catalog`,
- [postgres](../../sources/postgres.md)

`postgres-list-views` lists detailed view information (schemaname, viewname,
ownername, definition) as JSON for views in a database. The tool takes the following input
parameters:

- `view_name` (optional): A string pattern to filter view names. Default: `""`
- `schema_name` (optional): A string pattern to filter schema names. Default: `""`
- `limit` (optional): The maximum number of rows to return. Default: `50`.

## Example

@@ -9,3 +9,5 @@ description: >
- [serverless-spark-get-batch](./serverless-spark-get-batch.md)
- [serverless-spark-list-batches](./serverless-spark-list-batches.md)
- [serverless-spark-cancel-batch](./serverless-spark-cancel-batch.md)
- [serverless-spark-create-pyspark-batch](./serverless-spark-create-pyspark-batch.md)
- [serverless-spark-create-spark-batch](./serverless-spark-create-spark-batch.md)

@@ -0,0 +1,97 @@
---
title: "serverless-spark-create-pyspark-batch"
type: docs
weight: 2
description: >
  A "serverless-spark-create-pyspark-batch" tool submits a Spark batch to run asynchronously.
aliases:
- /resources/tools/serverless-spark-create-pyspark-batch
---

## About

A `serverless-spark-create-pyspark-batch` tool submits a Spark batch to a Google
Cloud Serverless for Apache Spark source. The workload executes asynchronously
and takes around a minute to begin executing; status can be polled using the
[get batch](serverless-spark-get-batch.md) tool.

It's compatible with the following sources:

- [serverless-spark](../../sources/serverless-spark.md)

`serverless-spark-create-pyspark-batch` accepts the following parameters:

- **`mainFile`**: The path to the main Python file, as a gs://... URI.
- **`args`**: Optional. A list of arguments passed to the main file.
- **`version`**: Optional. The Serverless [runtime
  version](https://docs.cloud.google.com/dataproc-serverless/docs/concepts/versions/dataproc-serverless-versions)
  to execute with.
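
A submission using these parameters might look like the following sketch; the bucket paths and runtime version are illustrative:

```json
{
  "mainFile": "gs://my-bucket/jobs/wordcount.py",
  "args": ["gs://my-bucket/input/", "gs://my-bucket/output/"],
  "version": "2.2"
}
```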

## Custom Configuration

This tool supports custom
[`runtimeConfig`](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/RuntimeConfig)
and
[`environmentConfig`](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/EnvironmentConfig)
settings, which can be specified in a `tools.yaml` file. These configurations
are parsed as YAML and passed to the Dataproc API.

**Note:** If your project requires custom runtime or environment configuration,
you must write a custom `tools.yaml`; you cannot use the `serverless-spark`
prebuilt config.

### Example `tools.yaml`

```yaml
tools:
  - name: "serverless-spark-create-pyspark-batch"
    kind: "serverless-spark-create-pyspark-batch"
    source: "my-serverless-spark-source"
    runtimeConfig:
      properties:
        spark.driver.memory: "1024m"
    environmentConfig:
      executionConfig:
        networkUri: "my-network"
```

## Response Format

The response contains the
[operation](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/projects.locations.operations#resource:-operation)
metadata JSON object corresponding to [batch operation
metadata](https://pkg.go.dev/cloud.google.com/go/dataproc/v2/apiv1/dataprocpb#BatchOperationMetadata),
plus additional fields `consoleUrl` and `logsUrl` where a human can go for more
detailed information.

```json
{
  "opMetadata": {
    "batch": "projects/myproject/locations/us-central1/batches/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
    "batchUuid": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
    "createTime": "2025-11-19T16:36:47.607119Z",
    "description": "Batch",
    "labels": {
      "goog-dataproc-batch-uuid": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
      "goog-dataproc-location": "us-central1"
    },
    "operationType": "BATCH",
    "warnings": [
      "No runtime version specified. Using the default runtime version."
    ]
  },
  "consoleUrl": "https://console.cloud.google.com/dataproc/batches/...",
  "logsUrl": "https://console.cloud.google.com/logs/viewer?..."
}
```

## Reference

| **field**         | **type** | **required** | **description** |
| ----------------- | :------: | :----------: | --------------- |
| kind              | string   | true         | Must be "serverless-spark-create-pyspark-batch". |
| source            | string   | true         | Name of the source the tool should use. |
| description       | string   | false        | Description of the tool that is passed to the LLM. |
| runtimeConfig     | map      | false        | [Runtime config](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/RuntimeConfig) for all batches created with this tool. |
| environmentConfig | map      | false        | [Environment config](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/EnvironmentConfig) for all batches created with this tool. |
| authRequired      | string[] | false        | List of auth services required to invoke this tool. |
@@ -0,0 +1,102 @@
---
title: "serverless-spark-create-spark-batch"
type: docs
weight: 2
description: >
  A "serverless-spark-create-spark-batch" tool submits a Spark batch to run asynchronously.
aliases:
- /resources/tools/serverless-spark-create-spark-batch
---

## About

A `serverless-spark-create-spark-batch` tool submits a Java Spark batch to a
Google Cloud Serverless for Apache Spark source. The workload executes
asynchronously and takes around a minute to begin executing; status can be
polled using the [get batch](serverless-spark-get-batch.md) tool.

It's compatible with the following sources:

- [serverless-spark](../../sources/serverless-spark.md)

`serverless-spark-create-spark-batch` accepts the following parameters:

- **`mainJarFile`**: Optional. The gs:// URI of the jar file that contains the
  main class. Exactly one of mainJarFile or mainClass must be specified.
- **`mainClass`**: Optional. The name of the driver's main class. Exactly one of
  mainJarFile or mainClass must be specified.
- **`jarFiles`**: Optional. A list of gs:// URIs of jar files to add to the CLASSPATHs of
  the Spark driver and tasks.
- **`args`**: Optional. A list of arguments passed to the driver.
- **`version`**: Optional. The Serverless [runtime
  version](https://docs.cloud.google.com/dataproc-serverless/docs/concepts/versions/dataproc-serverless-versions)
  to execute with.
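
A submission using these parameters might look like the following sketch; the class name and bucket paths are illustrative (note that `mainClass` is given, so `mainJarFile` is omitted):

```json
{
  "mainClass": "com.example.WordCount",
  "jarFiles": ["gs://my-bucket/jars/wordcount.jar"],
  "args": ["gs://my-bucket/input/", "gs://my-bucket/output/"]
}
```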
|
||||
|
||||
## Custom Configuration

This tool supports custom
[`runtimeConfig`](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/RuntimeConfig)
and
[`environmentConfig`](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/EnvironmentConfig)
settings, which can be specified in a `tools.yaml` file. These configurations
are parsed as YAML and passed to the Dataproc API.

**Note:** If your project requires custom runtime or environment configuration,
you must write a custom `tools.yaml`; you cannot use the `serverless-spark`
prebuilt config.

### Example `tools.yaml`

```yaml
tools:
  - name: "serverless-spark-create-spark-batch"
    kind: "serverless-spark-create-spark-batch"
    source: "my-serverless-spark-source"
    runtimeConfig:
      properties:
        spark.driver.memory: "1024m"
    environmentConfig:
      executionConfig:
        networkUri: "my-network"
```

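Once a Toolbox server is running with this configuration, the tool can be invoked over HTTP. A minimal sketch, assuming the server listens on its default port `5000` and exposes the standard `/api/tool/<name>/invoke` endpoint; adjust both for your deployment:

```bash
# Submit a Spark batch through a locally running Toolbox server
# (endpoint path and port are assumptions, not part of this doc).
curl -s -X POST http://127.0.0.1:5000/api/tool/serverless-spark-create-spark-batch/invoke \
  -H "Content-Type: application/json" \
  -d '{"mainClass": "org.apache.spark.examples.SparkPi",
       "jarFiles": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"]}'
```
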
## Response Format

The response contains the
[operation](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/projects.locations.operations#resource:-operation)
metadata JSON object corresponding to [batch operation
metadata](https://pkg.go.dev/cloud.google.com/go/dataproc/v2/apiv1/dataprocpb#BatchOperationMetadata),
plus additional fields `consoleUrl` and `logsUrl` where a human can go for more
detailed information.

```json
{
  "opMetadata": {
    "batch": "projects/myproject/locations/us-central1/batches/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
    "batchUuid": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
    "createTime": "2025-11-19T16:36:47.607119Z",
    "description": "Batch",
    "labels": {
      "goog-dataproc-batch-uuid": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
      "goog-dataproc-location": "us-central1"
    },
    "operationType": "BATCH",
    "warnings": [
      "No runtime version specified. Using the default runtime version."
    ]
  },
  "consoleUrl": "https://console.cloud.google.com/dataproc/batches/...",
  "logsUrl": "https://console.cloud.google.com/logs/viewer?..."
}
```

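Because the workload executes asynchronously, a caller usually captures the batch resource name from `opMetadata.batch` and hands it to the [get batch](serverless-spark-get-batch.md) tool for status polling. A small sketch, assuming the create response above is stored in a `$response` shell variable and `jq` is installed:

```bash
# Pull out the fully qualified batch name for follow-up status polling.
batch=$(echo "$response" | jq -r '.opMetadata.batch')
echo "Poll this batch with serverless-spark-get-batch: $batch"
```
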
## Reference

| **field**         | **type** | **required** | **description**                                                                                                                                          |
| ----------------- | :------: | :----------: | -------------------------------------------------------------------------------------------------------------------------------------------------------- |
| kind              |  string  |     true     | Must be "serverless-spark-create-spark-batch".                                                                                                           |
| source            |  string  |     true     | Name of the source the tool should use.                                                                                                                  |
| description       |  string  |    false     | Description of the tool that is passed to the LLM.                                                                                                       |
| runtimeConfig     |   map    |    false     | [Runtime config](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/RuntimeConfig) for all batches created with this tool.         |
| environmentConfig |   map    |    false     | [Environment config](https://docs.cloud.google.com/dataproc-serverless/docs/reference/rest/v1/EnvironmentConfig) for all batches created with this tool. |
| authRequired      | string[] |    false     | List of auth services required to invoke this tool.                                                                                                      |

@@ -34,43 +34,50 @@ tools:

## Response Format

The response is a full Batch JSON object as defined in the [API
spec](https://cloud.google.com/dataproc-serverless/docs/reference/rest/v1/projects.locations.batches#Batch).
Example with a reduced set of fields:
The response contains the full Batch object as defined in the [API
spec](https://cloud.google.com/dataproc-serverless/docs/reference/rest/v1/projects.locations.batches#Batch),
plus additional fields `consoleUrl` and `logsUrl` where a human can go for more
detailed information.

```json
{
  "createTime": "2025-10-10T15:15:21.303146Z",
  "creator": "alice@example.com",
  "labels": {
    "goog-dataproc-batch-uuid": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
    "goog-dataproc-location": "us-central1"
  },
  "name": "projects/google.com:hadoop-cloud-dev/locations/us-central1/batches/alice-20251010-abcd",
  "operation": "projects/google.com:hadoop-cloud-dev/regions/us-central1/operations/11111111-2222-3333-4444-555555555555",
  "runtimeConfig": {
    "properties": {
      "spark:spark.driver.cores": "4",
      "spark:spark.driver.memory": "12200m"
    }
  },
  "sparkBatch": {
    "jarFileUris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
    "mainClass": "org.apache.spark.examples.SparkPi"
  },
  "state": "SUCCEEDED",
  "stateHistory": [
    {
      "state": "PENDING",
      "stateStartTime": "2025-10-10T15:15:21.303146Z"
  "batch": {
    "createTime": "2025-10-10T15:15:21.303146Z",
    "creator": "alice@example.com",
    "labels": {
      "goog-dataproc-batch-uuid": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
      "goog-dataproc-location": "us-central1"
    },
    {
      "state": "RUNNING",
      "stateStartTime": "2025-10-10T15:16:41.291747Z"
    }
  ],
  "stateTime": "2025-10-10T15:17:21.265493Z",
  "uuid": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"
    "name": "projects/google.com:hadoop-cloud-dev/locations/us-central1/batches/alice-20251010-abcd",
    "operation": "projects/google.com:hadoop-cloud-dev/regions/us-central1/operations/11111111-2222-3333-4444-555555555555",
    "runtimeConfig": {
      "properties": {
        "spark:spark.driver.cores": "4",
        "spark:spark.driver.memory": "12200m"
      }
    },
    "sparkBatch": {
      "jarFileUris": [
        "file:///usr/lib/spark/examples/jars/spark-examples.jar"
      ],
      "mainClass": "org.apache.spark.examples.SparkPi"
    },
    "state": "SUCCEEDED",
    "stateHistory": [
      {
        "state": "PENDING",
        "stateStartTime": "2025-10-10T15:15:21.303146Z"
      },
      {
        "state": "RUNNING",
        "stateStartTime": "2025-10-10T15:16:41.291747Z"
      }
    ],
    "stateTime": "2025-10-10T15:17:21.265493Z",
    "uuid": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"
  },
  "consoleUrl": "https://console.cloud.google.com/dataproc/batches/...",
  "logsUrl": "https://console.cloud.google.com/logs/viewer?..."
}
```

@@ -50,14 +50,18 @@ tools:
      "uuid": "a1b2c3d4-e5f6-7890-1234-567890abcdef",
      "state": "SUCCEEDED",
      "creator": "alice@example.com",
      "createTime": "2023-10-27T10:00:00Z"
      "createTime": "2023-10-27T10:00:00Z",
      "consoleUrl": "https://console.cloud.google.com/dataproc/batches/us-central1/batch-abc-123/summary?project=my-project",
      "logsUrl": "https://console.cloud.google.com/logs/viewer?advancedFilter=resource.type%3D%22cloud_dataproc_batch%22%0Aresource.labels.project_id%3D%22my-project%22%0Aresource.labels.location%3D%22us-central1%22%0Aresource.labels.batch_id%3D%22batch-abc-123%22%0Atimestamp%3E%3D%222023-10-27T09%3A59%3A00Z%22%0Atimestamp%3C%3D%222023-10-27T10%3A10%3A00Z%22&project=my-project&resource=cloud_dataproc_batch%2Fbatch_id%2Fbatch-abc-123"
    },
    {
      "name": "projects/my-project/locations/us-central1/batches/batch-def-456",
      "uuid": "b2c3d4e5-f6a7-8901-2345-678901bcdefa",
      "state": "FAILED",
      "creator": "alice@example.com",
      "createTime": "2023-10-27T11:30:00Z"
      "createTime": "2023-10-27T11:30:00Z",
      "consoleUrl": "https://console.cloud.google.com/dataproc/batches/us-central1/batch-def-456/summary?project=my-project",
      "logsUrl": "https://console.cloud.google.com/logs/viewer?advancedFilter=resource.type%3D%22cloud_dataproc_batch%22%0Aresource.labels.project_id%3D%22my-project%22%0Aresource.labels.location%3D%22us-central1%22%0Aresource.labels.batch_id%3D%22batch-def-456%22%0Atimestamp%3E%3D%222023-10-27T11%3A29%3A00Z%22%0Atimestamp%3C%3D%222023-10-27T11%3A40%3A00Z%22&project=my-project&resource=cloud_dataproc_batch%2Fbatch_id%2Fbatch-def-456"
    }
  ],
  "nextPageToken": "abcd1234"

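The hunk above gives every batch summary its own `consoleUrl` and `logsUrl`, which makes triage easy to script. A sketch, assuming the summaries sit under a `batches` key (the enclosing key is outside this hunk, so that name is a guess) and the list response is stored in `$response`:

```bash
# Print the logs URL of every failed batch in the listing; the `.batches`
# key is an assumption based on the fragment shown above.
echo "$response" | jq -r '.batches[] | select(.state == "FAILED") | .logsUrl'
```
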
@@ -771,7 +771,7 @@
    },
    "outputs": [],
    "source": [
     "version = \"0.21.0\" # x-release-please-version\n",
     "version = \"0.23.0\" # x-release-please-version\n",
     "! curl -L -o /content/toolbox https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
     "\n",
     "# Make the binary executable\n",

@@ -123,7 +123,7 @@ In this section, we will download and install the Toolbox binary.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
export VERSION="0.21.0"
export VERSION="0.23.0"
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -220,7 +220,7 @@
    },
    "outputs": [],
    "source": [
     "version = \"0.21.0\" # x-release-please-version\n",
     "version = \"0.23.0\" # x-release-please-version\n",
     "! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
     "\n",
     "# Make the binary executable\n",

@@ -179,7 +179,7 @@ to use BigQuery, and then run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -98,7 +98,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -48,7 +48,7 @@ In this section, we will download Toolbox and run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -34,7 +34,7 @@ In this section, we will download Toolbox and run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.23.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

@@ -1,6 +1,6 @@
{
  "name": "mcp-toolbox-for-databases",
  "version": "0.21.0",
  "version": "0.23.0",
  "description": "MCP Toolbox for Databases is an open-source MCP server for more than 30 different datasources.",
  "contextFileName": "MCP-TOOLBOX-EXTENSION.md"
}

@@ -2,7 +2,7 @@ module github.com/googleapis/genai-toolbox

go 1.24.7

toolchain go1.25.3
toolchain go1.25.5

require (
	cloud.google.com/go/alloydbconn v1.15.5
@@ -37,7 +37,7 @@ require (
	github.com/google/uuid v1.6.0
	github.com/jackc/pgx/v5 v5.7.6
	github.com/json-iterator/go v1.1.12
	github.com/looker-open-source/sdk-codegen/go v0.25.18
	github.com/looker-open-source/sdk-codegen/go v0.25.21
	github.com/microsoft/go-mssqldb v1.9.3
	github.com/nakagami/firebirdsql v0.9.15
	github.com/neo4j/neo4j-go-driver/v5 v5.28.4

Some files were not shown because too many files have changed in this diff.