Merge branch 'main' into sdk-docs-migrate

Anmol Shukla
2026-01-07 14:11:20 +05:30
committed by GitHub
406 changed files with 16054 additions and 11553 deletions

View File

@@ -305,4 +305,4 @@ substitutions:
_AR_HOSTNAME: ${_REGION}-docker.pkg.dev
_AR_REPO_NAME: toolbox-dev
_BUCKET_NAME: genai-toolbox-dev
_DOCKER_URI: ${_AR_HOSTNAME}/${PROJECT_ID}/${_AR_REPO_NAME}/toolbox

View File

@@ -212,6 +212,26 @@ steps:
bigquery \
bigquery
- id: "cloud-gda"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "CLOUD_GDA_PROJECT=$PROJECT_ID"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Cloud Gemini Data Analytics" \
cloudgda \
cloudgda
- id: "dataplex"
name: golang:1
waitFor: ["compile-test-binary"]
@@ -318,7 +338,7 @@ steps:
.ci/test_with_coverage.sh \
"Spanner" \
spanner \
spanner
spanner || echo "Integration tests failed." # ignore test failures
- id: "neo4j"
name: golang:1
@@ -826,8 +846,8 @@ steps:
cassandra
- id: "oracle"
name: golang:1
waitFor: ["compile-test-binary"]
name: ghcr.io/oracle/oraclelinux9-instantclient:23
waitFor: ["install-dependencies"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
@@ -840,10 +860,25 @@ steps:
args:
- -c
- |
.ci/test_with_coverage.sh \
"Oracle" \
oracle \
oracle
# Install the C compiler and Oracle SDK headers needed for cgo
dnf install -y gcc oracle-instantclient-devel
# Install Go
curl -L -o go.tar.gz "https://go.dev/dl/go1.25.1.linux-amd64.tar.gz"
tar -C /usr/local -xzf go.tar.gz
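# Note: Cloud Build expands $VAR substitutions itself, so the doubled $$ below yields a literal $ at runtime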
export PATH="/usr/local/go/bin:$$PATH"
go test -v ./internal/sources/oracle/... \
-coverprofile=oracle_coverage.out \
-coverpkg=./internal/sources/oracle/...,./internal/tools/oracle/...
# Coverage check
total_coverage=$(go tool cover -func=oracle_coverage.out | grep "total:" | awk '{print $3}')
echo "Oracle total coverage: $total_coverage"
coverage_numeric=$(echo "$total_coverage" | sed 's/%//')
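# The awk program exits 0 (shell "true") only when cov < 20, so the if-body runs only on low coverage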
if awk -v cov="$coverage_numeric" 'BEGIN {exit !(cov < 20)}'; then
echo "Coverage failure: $total_coverage is below 20%."
exit 1
fi
- id: "serverless-spark"
name: golang:1

View File

@@ -24,5 +24,23 @@
],
pinDigests: true,
},
{
groupName: 'Go',
matchManagers: [
'gomod',
],
},
{
groupName: 'Node',
matchManagers: [
'npm',
],
},
{
groupName: 'Pip',
matchManagers: [
'pip_requirements',
],
},
],
}

View File

@@ -40,7 +40,7 @@ jobs:
group: docs-deployment
cancel-in-progress: false
steps:
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
with:
fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod
@@ -51,12 +51,12 @@ jobs:
extended: true
- name: Setup Node
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
with:
node-version: "22"
- name: Cache dependencies
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}

View File

@@ -30,14 +30,14 @@ jobs:
steps:
- name: Checkout main branch (for latest templates and theme)
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
with:
ref: 'main'
submodules: 'recursive'
fetch-depth: 0
- name: Checkout old content from tag into a temporary directory
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
with:
ref: ${{ github.event.inputs.version_tag }}
path: 'old_version_source' # Checkout into a temp subdir
@@ -57,7 +57,7 @@ jobs:
with:
hugo-version: "0.145.0"
extended: true
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
with:
node-version: "22"

View File

@@ -30,7 +30,7 @@ jobs:
cancel-in-progress: false
steps:
- name: Checkout Code at Tag
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
with:
ref: ${{ github.event.release.tag_name }}
@@ -44,7 +44,7 @@ jobs:
extended: true
- name: Setup Node
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
with:
node-version: "22"

View File

@@ -34,7 +34,7 @@ jobs:
group: "preview-${{ github.event.number }}"
cancel-in-progress: true
steps:
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
with:
ref: versioned-gh-pages

View File

@@ -49,7 +49,7 @@ jobs:
group: "preview-${{ github.event.number }}"
cancel-in-progress: true
steps:
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
with:
# Checkout the PR's HEAD commit (supports forks).
ref: ${{ github.event.pull_request.head.sha }}
@@ -62,12 +62,12 @@ jobs:
extended: true
- name: Setup Node
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
with:
node-version: "22"
- name: Cache dependencies
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}

View File

@@ -0,0 +1,59 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: Link Checker
on:
pull_request:
jobs:
link-check:
runs-on: ubuntu-latest
steps:
- name: Checkout Repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
- name: Restore lychee cache
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
with:
path: .lycheecache
key: cache-lychee-${{ github.sha }}
restore-keys: cache-lychee-
- name: Link Checker
uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 # v2
with:
args: >
--verbose
--no-progress
--cache
--max-cache-age 1d
README.md
docs/
output: /tmp/foo.txt
fail: true
jobSummary: true
debug: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# This step only runs if the link check step fails, ensuring the
# context note only appears when the developer needs to troubleshoot.
- name: Display Link Context Note on Failure
if: ${{ failure() }}
run: |
echo "## Link Resolution Note" >> $GITHUB_STEP_SUMMARY
echo "Local links and directory changes work differently on GitHub than on the docsite." >> $GITHUB_STEP_SUMMARY
echo "You must ensure fixes pass the **GitHub check** and also work with **\`hugo server\`**." >> $GITHUB_STEP_SUMMARY
echo "---" >> $GITHUB_STEP_SUMMARY

View File

@@ -55,7 +55,7 @@ jobs:
with:
go-version: "1.25"
- name: Checkout code
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: ${{ github.event.pull_request.head.sha }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
@@ -66,7 +66,7 @@ jobs:
run: |
go mod tidy && git diff --exit-code
- name: golangci-lint
uses: golangci/golangci-lint-action@4afd733a84b1f43292c63897423277bb7f4313a9 # v8.0.0
uses: golangci/golangci-lint-action@1e7e51e771db61008b38414a730f564565cf7c20 # v9.2.0
with:
version: latest
args: --timeout 10m

View File

@@ -29,7 +29,7 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
- name: Wait for image in Artifact Registry
shell: bash

View File

@@ -29,7 +29,7 @@ jobs:
issues: 'write'
pull-requests: 'write'
steps:
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: micnncim/action-label-syncer@3abd5ab72fda571e69fffd97bd4e0033dd5f495c # v1.3.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -62,7 +62,7 @@ jobs:
go-version: "1.24"
- name: Checkout code
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: ${{ github.event.pull_request.head.sha }}
repository: ${{ github.event.pull_request.head.repo.full_name }}

View File

@@ -51,6 +51,14 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick
# Add a new version block here before every release
# The order of versions in this file is mirrored into the dropdown
[[params.versions]]
version = "v0.24.0"
url = "https://googleapis.github.io/genai-toolbox/v0.24.0/"
[[params.versions]]
version = "v0.23.0"
url = "https://googleapis.github.io/genai-toolbox/v0.23.0/"
[[params.versions]]
version = "v0.22.0"
url = "https://googleapis.github.io/genai-toolbox/v0.22.0/"

.lycheeignore (new file)
View File

@@ -0,0 +1,45 @@
# Ignore documentation placeholders and generic example domains
^https?://([a-zA-Z0-9-]+\.)?example\.com(:\d+)?(/.*)?$
^http://example\.net
# Shields.io badges often trigger rate limits or intermittent 503s
^https://img\.shields\.io/.*
# PDF files are ignored as lychee cannot reliably parse internal PDF links
\.pdf$
# Standard mailto: protocol is not a web URL
^mailto:
# Ignore local development endpoints that won't resolve in CI/CD environments
^https?://(127\.0\.0\.1|localhost)(:\d+)?(/.*)?$
# Placeholder for Google Cloud Run service discovery
https://cloud-run-url.app/
# DGraph Cloud and private instance endpoints
https://xxx.cloud.dgraph.io/
https://cloud.dgraph.io/login
https://dgraph.io/docs
# MySQL Community downloads and main site (often protected by bot mitigation)
https://dev.mysql.com/downloads/installer/
https://www.mysql.com/
# Claude desktop download link
https://claude.ai/download
# Google Cloud Run product page
https://cloud.google.com/run
# These specific deep links are known to cause redirect loops or 403s in automated scrapers
https://dev.mysql.com/doc/refman/8.4/en/sql-prepared-statements.html
https://dev.mysql.com/doc/refman/8.4/en/user-names.html
# npmjs links can occasionally trigger rate limiting during high-frequency CI builds
https://www.npmjs.com/package/@toolbox-sdk/core
https://www.npmjs.com/package/@toolbox-sdk/adk
# Ignore social media and blog profiles to reduce external request overhead
https://medium.com/@mcp_toolbox

View File

@@ -1,5 +1,54 @@
# Changelog
## [0.24.0](https://github.com/googleapis/genai-toolbox/compare/v0.23.0...v0.24.0) (2025-12-19)
### Features
* **sources/cloud-gemini-data-analytics:** Add the Gemini Data Analytics (GDA) integration for DB NL2SQL conversion to Toolbox ([#2181](https://github.com/googleapis/genai-toolbox/issues/2181)) ([aa270b2](https://github.com/googleapis/genai-toolbox/commit/aa270b2630da2e3d618db804ca95550445367dbc))
* **source/cloudsqlmysql:** Add support for IAM authentication in Cloud SQL MySQL source ([#2050](https://github.com/googleapis/genai-toolbox/issues/2050)) ([af3d3c5](https://github.com/googleapis/genai-toolbox/commit/af3d3c52044bea17781b89ce4ab71ff0f874ac20))
* **sources/oracle:** Add Oracle OCI and Wallet support ([#1945](https://github.com/googleapis/genai-toolbox/issues/1945)) ([8ea39ec](https://github.com/googleapis/genai-toolbox/commit/8ea39ec32fbbaa97939c626fec8c5d86040ed464))
* Support combining prebuilt and custom tool configurations ([#2188](https://github.com/googleapis/genai-toolbox/issues/2188)) ([5788605](https://github.com/googleapis/genai-toolbox/commit/57886058188aa5d2a51d5846a98bc6d8a650edd1))
* **tools/mysql-get-query-plan:** Add new `mysql-get-query-plan` tool for MySQL source ([#2123](https://github.com/googleapis/genai-toolbox/issues/2123)) ([0641da0](https://github.com/googleapis/genai-toolbox/commit/0641da0353857317113b2169e547ca69603ddfde))
### Bug Fixes
* **spanner:** Move list graphs validation to runtime ([#2154](https://github.com/googleapis/genai-toolbox/issues/2154)) ([914b3ee](https://github.com/googleapis/genai-toolbox/commit/914b3eefda40a650efe552d245369e007277dab5))
## [0.23.0](https://github.com/googleapis/genai-toolbox/compare/v0.22.0...v0.23.0) (2025-12-11)
### ⚠ BREAKING CHANGES
* **serverless-spark:** add URLs to create batch tool outputs
* **serverless-spark:** add URLs to list_batches output
* **serverless-spark:** add Cloud Console and Logging URLs to get_batch
* **tools/postgres:** Add additional filter params for existing postgres tools ([#2033](https://github.com/googleapis/genai-toolbox/issues/2033))
### Features
* **tools/postgres:** Add list-table-stats-tool to list table statistics. ([#2055](https://github.com/googleapis/genai-toolbox/issues/2055)) ([78b02f0](https://github.com/googleapis/genai-toolbox/commit/78b02f08c3cc3062943bb2f91cf60d5149c8d28d))
* **looker/tools:** Enhance dashboard creation with dashboard filters ([#2133](https://github.com/googleapis/genai-toolbox/issues/2133)) ([285aa46](https://github.com/googleapis/genai-toolbox/commit/285aa46b887d9acb2da8766e107bbf1ab75b8812))
* **serverless-spark:** Add Cloud Console and Logging URLs to get_batch ([e29c061](https://github.com/googleapis/genai-toolbox/commit/e29c0616d6b9ecda2badcaf7b69614e511ac031b))
* **serverless-spark:** Add URLs to create batch tool outputs ([c6ccf4b](https://github.com/googleapis/genai-toolbox/commit/c6ccf4bd87026484143a2d0f5527b2edab03b54a))
* **serverless-spark:** Add URLs to list_batches output ([5605eab](https://github.com/googleapis/genai-toolbox/commit/5605eabd696696ade07f52431a28ef65c0fb1f77))
* **sources/mariadb:** Add MariaDB source and MySQL tools integration ([#1908](https://github.com/googleapis/genai-toolbox/issues/1908)) ([3b40fea](https://github.com/googleapis/genai-toolbox/commit/3b40fea25edae607e02c1e8fc2b0c957fa2c8e9a))
* **tools/postgres:** Add additional filter params for existing postgres tools ([#2033](https://github.com/googleapis/genai-toolbox/issues/2033)) ([489117d](https://github.com/googleapis/genai-toolbox/commit/489117d74711ac9260e7547163ca463eb45eeaa2))
* **tools/postgres:** Add list_pg_settings, list_database_stats tools for postgres ([#2030](https://github.com/googleapis/genai-toolbox/issues/2030)) ([32367a4](https://github.com/googleapis/genai-toolbox/commit/32367a472fae9653fed7f126428eba0252978bd5))
* **tools/postgres:** Add new postgres-list-roles tool ([#2038](https://github.com/googleapis/genai-toolbox/issues/2038)) ([bea9705](https://github.com/googleapis/genai-toolbox/commit/bea97054502cfa236aa10e2ebc8ff58eb00ad035))
### Bug Fixes
* List tables tools null fix ([#2107](https://github.com/googleapis/genai-toolbox/issues/2107)) ([2b45266](https://github.com/googleapis/genai-toolbox/commit/2b452665983154041d4cd0ed7d82532e4af682eb))
* **tools/mongodb:** Removed sortPayload and sortParams ([#1238](https://github.com/googleapis/genai-toolbox/issues/1238)) ([c5a6daa](https://github.com/googleapis/genai-toolbox/commit/c5a6daa7683d2f9be654300d977692c368e55e31))
### Miscellaneous Chores
* **looker:** Upgrade to latest go sdk ([#2159](https://github.com/googleapis/genai-toolbox/issues/2159)) ([78e015d](https://github.com/googleapis/genai-toolbox/commit/78e015d7dfd9cce7e2b444ed934da17eb355bc86))
## [0.22.0](https://github.com/googleapis/genai-toolbox/compare/v0.21.0...v0.22.0) (2025-12-04)

View File

@@ -167,15 +167,15 @@ tools.
[integration.cloudbuild.yaml](.ci/integration.cloudbuild.yaml).
[tool-get]:
https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L31
https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L41
[tool-call]:
<https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L79>
https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L229
[mcp-call]:
https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L554
https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L789
[execute-sql]:
<https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L431>
https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L609
[temp-param]:
<https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L297>
https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L454
[temp-param-doc]:
https://googleapis.github.io/genai-toolbox/resources/tools/#template-parameters

View File

@@ -207,6 +207,30 @@ variables for each source.
* SQLite - setup in the integration test, where we create a temporary database
file
### Link Checking and Fixing with Lychee
We use **[lychee](https://github.com/lycheeverse/lychee-action)** for repository link checks.
* To run the checker **locally**, see the [command-line usage guide](https://github.com/lycheeverse/lychee?tab=readme-ov-file#commandline-usage).
#### Fixing Broken Links
1. **Update the Link:** Correct the broken URL or update the content where it is used.
2. **Ignore the Link:** If you can't fix the link (e.g., it hits **external rate limits** or is a **local-only URL**), tell Lychee to **ignore** it.
* List **regular expressions** or **direct links** in the **[.lycheeignore](https://github.com/googleapis/genai-toolbox/blob/main/.lycheeignore)** file, one entry per line.
* **Always add a comment** explaining **why** the link is being skipped; this keeps the ignore list auditable and helps prevent link rot. **Example `.lycheeignore`:**
```text
# These are email addresses, not standard web URLs, and usually cause check failures.
^mailto:.*
```
> [!NOTE]
> To avoid build failures in GitHub Actions, follow this linking pattern: <br>
> **Avoid** (works in Hugo, breaks the link checker): `[Read more](docs/setup)` or `[Read more](docs/setup/)` <br>
> **Reason:** The link checker cannot find a file named "setup", or a directory of that name containing an index. <br>
> **Preferred:** `[Read more](docs/setup.md)` <br>
> **Reason:** The GitHub Action finds the physical file, and Hugo then uses its internal logic (or render hooks) to resolve it to the correct `/docs/setup/` web URL. <br>
### Other GitHub Checks
* License header check (`.github/header-checker-lint.yml`) - Ensures files have
@@ -280,6 +304,7 @@ There are 3 GHA workflows we use to achieve document versioning:
Request a repo owner to run the preview deployment workflow on your PR. A
preview link will be automatically added as a comment to your PR.
#### Maintainers
1. **Inspect Changes:** Review the proposed changes in the PR to ensure they are

View File

@@ -105,6 +105,21 @@ redeploying your application.
## Getting Started
### (Non-production) Running Toolbox
You can run Toolbox directly with a [configuration file](#configuration):
```sh
npx @toolbox-sdk/server --tools-file tools.yaml
```
This runs the latest version of the Toolbox server with your configuration file.
> [!NOTE]
> This method should only be used for non-production use cases such as
> experimentation. For any production use-cases, please consider [Installing the
> server](#installing-the-server) and then [running it](#running-the-server).
### Installing the server
For the latest version, check the [releases page][releases] and use the
@@ -125,7 +140,7 @@ To install Toolbox as a binary:
>
> ```sh
> # see releases page for other versions
> export VERSION=0.22.0
> export VERSION=0.24.0
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
> chmod +x toolbox
> ```
@@ -138,7 +153,7 @@ To install Toolbox as a binary:
>
> ```sh
> # see releases page for other versions
> export VERSION=0.22.0
> export VERSION=0.24.0
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
> chmod +x toolbox
> ```
@@ -151,7 +166,7 @@ To install Toolbox as a binary:
>
> ```sh
> # see releases page for other versions
> export VERSION=0.22.0
> export VERSION=0.24.0
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
> chmod +x toolbox
> ```
@@ -164,7 +179,7 @@ To install Toolbox as a binary:
>
> ```cmd
> :: see releases page for other versions
> set VERSION=0.22.0
> set VERSION=0.24.0
> curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
> ```
>
@@ -176,7 +191,7 @@ To install Toolbox as a binary:
>
> ```powershell
> # see releases page for other versions
> $VERSION = "0.21.0"
> $VERSION = "0.24.0"
> curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
> ```
>
@@ -189,7 +204,7 @@ You can also install Toolbox as a container:
```sh
# see releases page for other versions
export VERSION=0.22.0
export VERSION=0.24.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```
@@ -213,7 +228,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:
```sh
go install github.com/googleapis/genai-toolbox@v0.22.0
go install github.com/googleapis/genai-toolbox@v0.24.0
```
<!-- {x-release-please-end} -->
@@ -303,6 +318,16 @@ toolbox --tools-file "tools.yaml"
</details>
<details>
<summary>NPM</summary>
To run Toolbox directly without manually downloading the binary (requires Node.js):
```sh
npx @toolbox-sdk/server --tools-file tools.yaml
```
</details>
<details>
<summary>Gemini CLI</summary>
@@ -1010,12 +1035,12 @@ The version will be incremented as follows:
### Post-1.0.0 Versioning
Once the project reaches a stable `1.0.0` release, the versioning will follow
the more common convention:
Once the project reaches a stable `1.0.0` release, the version number
**`MAJOR.MINOR.PATCH`** will follow the more common convention:
- **`MAJOR.MINOR.PATCH`**: Incremented for incompatible API changes.
- **`MAJOR.MINOR.PATCH`**: Incremented for new, backward-compatible functionality.
- **`MAJOR.MINOR.PATCH`**: Incremented for backward-compatible bug fixes.
- **`MAJOR`**: Incremented for incompatible API changes.
- **`MINOR`**: Incremented for new, backward-compatible functionality.
- **`PATCH`**: Incremented for backward-compatible bug fixes.
The public API that this applies to is the CLI associated with Toolbox, the
interactions with official SDKs, and the definitions in the `tools.yaml` file.

View File

@@ -33,6 +33,7 @@ import (
"github.com/fsnotify/fsnotify"
yaml "github.com/goccy/go-yaml"
"github.com/googleapis/genai-toolbox/internal/auth"
"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
"github.com/googleapis/genai-toolbox/internal/log"
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
"github.com/googleapis/genai-toolbox/internal/prompts"
@@ -73,6 +74,7 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselistdatabases"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhousesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudgda"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirfetchpage"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatienteverything"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatientsearch"
@@ -120,6 +122,7 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules"
_ "github.com/googleapis/genai-toolbox/internal/tools/http"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardelement"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardfilter"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerconversationalanalytics"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookercreateprojectfile"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdeleteprojectfile"
@@ -167,6 +170,7 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqllisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlgetqueryplan"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllistactivequeries"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttablefragmentation"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttables"
@@ -194,8 +198,10 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistroles"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistschemas"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistsequences"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresliststoredprocedure"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablespaces"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablestats"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttriggers"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistviews"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslongrunningtransactions"
@@ -231,6 +237,7 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/sources/bigtable"
_ "github.com/googleapis/genai-toolbox/internal/sources/cassandra"
_ "github.com/googleapis/genai-toolbox/internal/sources/clickhouse"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudgda"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudhealthcare"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudmonitoring"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqladmin"
@@ -351,12 +358,12 @@ func NewCommand(opts ...Option) *Command {
flags.StringVarP(&cmd.cfg.Address, "address", "a", "127.0.0.1", "Address of the interface the server will listen on.")
flags.IntVarP(&cmd.cfg.Port, "port", "p", 5000, "Port the server will listen on.")
flags.StringVar(&cmd.tools_file, "tools_file", "", "File path specifying the tool configuration. Cannot be used with --prebuilt.")
flags.StringVar(&cmd.tools_file, "tools_file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.")
// deprecate tools_file
_ = flags.MarkDeprecated("tools_file", "please use --tools-file instead")
flags.StringVar(&cmd.tools_file, "tools-file", "", "File path specifying the tool configuration. Cannot be used with --prebuilt, --tools-files, or --tools-folder.")
flags.StringSliceVar(&cmd.tools_files, "tools-files", []string{}, "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --prebuilt, --tools-file, or --tools-folder.")
flags.StringVar(&cmd.tools_folder, "tools-folder", "", "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --prebuilt, --tools-file, or --tools-files.")
flags.StringVar(&cmd.tools_file, "tools-file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.")
flags.StringSliceVar(&cmd.tools_files, "tools-files", []string{}, "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file, or --tools-folder.")
flags.StringVar(&cmd.tools_folder, "tools-folder", "", "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file, or --tools-files.")
flags.Var(&cmd.cfg.LogLevel, "log-level", "Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'.")
flags.Var(&cmd.cfg.LoggingFormat, "logging-format", "Specify logging format to use. Allowed: 'standard' or 'JSON'.")
flags.BoolVar(&cmd.cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.")
@@ -364,7 +371,7 @@ func NewCommand(opts ...Option) *Command {
flags.StringVar(&cmd.cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
// Fetch prebuilt tools sources to customize the help description
prebuiltHelp := fmt.Sprintf(
"Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. Allowed: '%s'.",
"Use a prebuilt tool configuration by source type. Allowed: '%s'.",
strings.Join(prebuiltconfigs.GetPrebuiltSources(), "', '"),
)
flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", prebuiltHelp)
@@ -380,12 +387,13 @@ func NewCommand(opts ...Option) *Command {
}
type ToolsFile struct {
Sources server.SourceConfigs `yaml:"sources"`
AuthSources server.AuthServiceConfigs `yaml:"authSources"` // Deprecated: Kept for compatibility.
AuthServices server.AuthServiceConfigs `yaml:"authServices"`
Tools server.ToolConfigs `yaml:"tools"`
Toolsets server.ToolsetConfigs `yaml:"toolsets"`
Prompts server.PromptConfigs `yaml:"prompts"`
Sources server.SourceConfigs `yaml:"sources"`
AuthSources server.AuthServiceConfigs `yaml:"authSources"` // Deprecated: Kept for compatibility.
AuthServices server.AuthServiceConfigs `yaml:"authServices"`
EmbeddingModels server.EmbeddingModelConfigs `yaml:"embeddingModels"`
Tools server.ToolConfigs `yaml:"tools"`
Toolsets server.ToolsetConfigs `yaml:"toolsets"`
Prompts server.PromptConfigs `yaml:"prompts"`
}
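For orientation, here is a hedged, minimal sketch of how yaml tags like those above map a tools file onto struct fields. It uses the same goccy/go-yaml package imported earlier, but with simplified `map[string]any` field types and example resource names; the real configs use the typed `server.*Configs` maps.

```go
// Hedged sketch (assumed simplification): how yaml tags map a config
// document onto ToolsFile-style fields.
package main

import (
	"fmt"

	yaml "github.com/goccy/go-yaml"
)

type miniToolsFile struct {
	Sources         map[string]any `yaml:"sources"`
	EmbeddingModels map[string]any `yaml:"embeddingModels"`
	Tools           map[string]any `yaml:"tools"`
}

func main() {
	raw := []byte(`
sources:
  my-http: {kind: http}
embeddingModels:
  model1: {kind: gemini}
tools:
  custom_tool: {kind: http, source: my-http}
`)
	var f miniToolsFile
	if err := yaml.Unmarshal(raw, &f); err != nil {
		panic(err)
	}
	// Each top-level key lands in the field whose yaml tag matches it.
	fmt.Println(len(f.Sources), len(f.EmbeddingModels), len(f.Tools)) // 1 1 1
}
```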
// parseEnv replaces environment variables ${ENV_NAME} with their values.
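The comment above describes `${ENV_NAME}` substitution; below is a minimal, illustrative sketch of that behavior, not the project's actual implementation. The regex, function name, and example variable are assumptions for demonstration.

```go
// Illustrative sketch only (not the project's parseEnv): replace ${ENV_NAME}
// references in raw config text with values from the process environment.
package main

import (
	"fmt"
	"os"
	"regexp"
)

var envRef = regexp.MustCompile(`\$\{(\w+)\}`)

func expandEnvRefs(raw string) string {
	return envRef.ReplaceAllStringFunc(raw, func(m string) string {
		name := envRef.FindStringSubmatch(m)[1]
		return os.Getenv(name) // unset variables expand to the empty string
	})
}

func main() {
	os.Setenv("SQLITE_DATABASE", "test.db")
	fmt.Println(expandEnvRefs("database: ${SQLITE_DATABASE}")) // database: test.db
}
```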
@@ -434,11 +442,12 @@ func parseToolsFile(ctx context.Context, raw []byte) (ToolsFile, error) {
// All resource names (sources, authServices, tools, toolsets) must be unique across all files.
func mergeToolsFiles(files ...ToolsFile) (ToolsFile, error) {
merged := ToolsFile{
Sources: make(server.SourceConfigs),
AuthServices: make(server.AuthServiceConfigs),
Tools: make(server.ToolConfigs),
Toolsets: make(server.ToolsetConfigs),
Prompts: make(server.PromptConfigs),
Sources: make(server.SourceConfigs),
AuthServices: make(server.AuthServiceConfigs),
EmbeddingModels: make(server.EmbeddingModelConfigs),
Tools: make(server.ToolConfigs),
Toolsets: make(server.ToolsetConfigs),
Prompts: make(server.PromptConfigs),
}
var conflicts []string
@@ -458,6 +467,9 @@ func mergeToolsFiles(files ...ToolsFile) (ToolsFile, error) {
if _, exists := merged.AuthSources[name]; exists {
conflicts = append(conflicts, fmt.Sprintf("authSource '%s' (file #%d)", name, fileIndex+1))
} else {
if merged.AuthSources == nil {
merged.AuthSources = make(server.AuthServiceConfigs)
}
merged.AuthSources[name] = authSource
}
}
@@ -471,6 +483,15 @@ func mergeToolsFiles(files ...ToolsFile) (ToolsFile, error) {
}
}
// Check for conflicts and merge embeddingModels
for name, em := range file.EmbeddingModels {
if _, exists := merged.EmbeddingModels[name]; exists {
conflicts = append(conflicts, fmt.Sprintf("embedding model '%s' (file #%d)", name, fileIndex+1))
} else {
merged.EmbeddingModels[name] = em
}
}
// Check for conflicts and merge tools
for name, tool := range file.Tools {
if _, exists := merged.Tools[name]; exists {
@@ -575,14 +596,14 @@ func handleDynamicReload(ctx context.Context, toolsFile ToolsFile, s *server.Ser
panic(err)
}
sourcesMap, authServicesMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := validateReloadEdits(ctx, toolsFile)
sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := validateReloadEdits(ctx, toolsFile)
if err != nil {
errMsg := fmt.Errorf("unable to validate reloaded edits: %w", err)
logger.WarnContext(ctx, errMsg.Error())
return err
}
s.ResourceMgr.SetResources(sourcesMap, authServicesMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap)
s.ResourceMgr.SetResources(sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap)
return nil
}
@@ -590,7 +611,7 @@ func handleDynamicReload(ctx context.Context, toolsFile ToolsFile, s *server.Ser
// validateReloadEdits checks that the reloaded tools file configs can be initialized without failing
func validateReloadEdits(
ctx context.Context, toolsFile ToolsFile,
) (map[string]sources.Source, map[string]auth.AuthService, map[string]tools.Tool, map[string]tools.Toolset, map[string]prompts.Prompt, map[string]prompts.Promptset, error,
) (map[string]sources.Source, map[string]auth.AuthService, map[string]embeddingmodels.EmbeddingModel, map[string]tools.Tool, map[string]tools.Toolset, map[string]prompts.Prompt, map[string]prompts.Promptset, error,
) {
logger, err := util.LoggerFromContext(ctx)
if err != nil {
@@ -608,22 +629,23 @@ func validateReloadEdits(
defer span.End()
reloadedConfig := server.ServerConfig{
Version: versionString,
SourceConfigs: toolsFile.Sources,
AuthServiceConfigs: toolsFile.AuthServices,
ToolConfigs: toolsFile.Tools,
ToolsetConfigs: toolsFile.Toolsets,
PromptConfigs: toolsFile.Prompts,
Version: versionString,
SourceConfigs: toolsFile.Sources,
AuthServiceConfigs: toolsFile.AuthServices,
EmbeddingModelConfigs: toolsFile.EmbeddingModels,
ToolConfigs: toolsFile.Tools,
ToolsetConfigs: toolsFile.Toolsets,
PromptConfigs: toolsFile.Prompts,
}
sourcesMap, authServicesMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, reloadedConfig)
sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, reloadedConfig)
if err != nil {
errMsg := fmt.Errorf("unable to initialize reloaded configs: %w", err)
logger.WarnContext(ctx, errMsg.Error())
return nil, nil, nil, nil, nil, nil, err
return nil, nil, nil, nil, nil, nil, nil, err
}
return sourcesMap, authServicesMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, nil
return sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, nil
}
// watchChanges checks for changes in the provided yaml tools file(s) or folder.
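As context for the watcher described above, here is a hedged sketch of watching a single config file with the fsnotify package imported earlier. The real `watchChanges` also handles directories, multiple files, and kicks off the dynamic reload; the file name and logging here are illustrative.

```go
// Hedged sketch of the watching described above, reduced to one file.
package main

import (
	"log"

	"github.com/fsnotify/fsnotify"
)

func main() {
	watcher, err := fsnotify.NewWatcher()
	if err != nil {
		log.Fatal(err)
	}
	defer watcher.Close()
	if err := watcher.Add("tools.yaml"); err != nil {
		log.Fatal(err)
	}
	// Block on filesystem events; a write indicates the config changed.
	for ev := range watcher.Events {
		if ev.Op&fsnotify.Write != 0 {
			log.Printf("%s changed; a reload would be triggered here", ev.Name)
		}
	}
}
```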
@@ -834,16 +856,10 @@ func run(cmd *Command) error {
}
}()
var toolsFile ToolsFile
var allToolsFiles []ToolsFile
// Load Prebuilt Configuration
if cmd.prebuiltConfig != "" {
// Make sure --prebuilt and --tools-file/--tools-files/--tools-folder flags are mutually exclusive
if cmd.tools_file != "" || len(cmd.tools_files) > 0 || cmd.tools_folder != "" {
errMsg := fmt.Errorf("--prebuilt and --tools-file/--tools-files/--tools-folder flags cannot be used simultaneously")
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
// Use prebuilt tools
buf, err := prebuiltconfigs.Get(cmd.prebuiltConfig)
if err != nil {
cmd.logger.ErrorContext(ctx, err.Error())
@@ -854,72 +870,96 @@ func run(cmd *Command) error {
// Append prebuilt.source to Version string for the User Agent
cmd.cfg.Version += "+prebuilt." + cmd.prebuiltConfig
toolsFile, err = parseToolsFile(ctx, buf)
parsed, err := parseToolsFile(ctx, buf)
if err != nil {
errMsg := fmt.Errorf("unable to parse prebuilt tool configuration: %w", err)
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
} else if len(cmd.tools_files) > 0 {
// Make sure --tools-file, --tools-files, and --tools-folder flags are mutually exclusive
if cmd.tools_file != "" || cmd.tools_folder != "" {
errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously")
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
// Use multiple tools files
cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging %d tool configuration files", len(cmd.tools_files)))
var err error
toolsFile, err = loadAndMergeToolsFiles(ctx, cmd.tools_files)
if err != nil {
cmd.logger.ErrorContext(ctx, err.Error())
return err
}
} else if cmd.tools_folder != "" {
// Make sure --tools-folder and other flags are mutually exclusive
if cmd.tools_file != "" || len(cmd.tools_files) > 0 {
errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously")
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
// Use tools folder
cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging all YAML files from directory: %s", cmd.tools_folder))
var err error
toolsFile, err = loadAndMergeToolsFolder(ctx, cmd.tools_folder)
if err != nil {
cmd.logger.ErrorContext(ctx, err.Error())
return err
}
} else {
// Set default value of tools-file flag to tools.yaml
if cmd.tools_file == "" {
cmd.tools_file = "tools.yaml"
}
// Read single tool file contents
buf, err := os.ReadFile(cmd.tools_file)
if err != nil {
errMsg := fmt.Errorf("unable to read tool file at %q: %w", cmd.tools_file, err)
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
toolsFile, err = parseToolsFile(ctx, buf)
if err != nil {
errMsg := fmt.Errorf("unable to parse tool file at %q: %w", cmd.tools_file, err)
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
allToolsFiles = append(allToolsFiles, parsed)
}
cmd.cfg.SourceConfigs, cmd.cfg.AuthServiceConfigs, cmd.cfg.ToolConfigs, cmd.cfg.ToolsetConfigs, cmd.cfg.PromptConfigs = toolsFile.Sources, toolsFile.AuthServices, toolsFile.Tools, toolsFile.Toolsets, toolsFile.Prompts
// Determine if Custom Files should be loaded
// Check for explicit custom flags
isCustomConfigured := cmd.tools_file != "" || len(cmd.tools_files) > 0 || cmd.tools_folder != ""
authSourceConfigs := toolsFile.AuthSources
// Determine if default 'tools.yaml' should be used (No prebuilt AND No custom flags)
useDefaultToolsFile := cmd.prebuiltConfig == "" && !isCustomConfigured
if useDefaultToolsFile {
cmd.tools_file = "tools.yaml"
isCustomConfigured = true
}
// Load Custom Configurations
if isCustomConfigured {
// Enforce exclusivity among custom flags (tools-file vs tools-files vs tools-folder)
if (cmd.tools_file != "" && len(cmd.tools_files) > 0) ||
(cmd.tools_file != "" && cmd.tools_folder != "") ||
(len(cmd.tools_files) > 0 && cmd.tools_folder != "") {
errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously")
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
var customTools ToolsFile
var err error
if len(cmd.tools_files) > 0 {
// Use tools-files
cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging %d tool configuration files", len(cmd.tools_files)))
customTools, err = loadAndMergeToolsFiles(ctx, cmd.tools_files)
} else if cmd.tools_folder != "" {
// Use tools-folder
cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging all YAML files from directory: %s", cmd.tools_folder))
customTools, err = loadAndMergeToolsFolder(ctx, cmd.tools_folder)
} else {
// Use single file (tools-file or default `tools.yaml`)
buf, readFileErr := os.ReadFile(cmd.tools_file)
if readFileErr != nil {
errMsg := fmt.Errorf("unable to read tool file at %q: %w", cmd.tools_file, readFileErr)
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
customTools, err = parseToolsFile(ctx, buf)
if err != nil {
err = fmt.Errorf("unable to parse tool file at %q: %w", cmd.tools_file, err)
}
}
if err != nil {
cmd.logger.ErrorContext(ctx, err.Error())
return err
}
allToolsFiles = append(allToolsFiles, customTools)
}
// Merge Everything
// This will error if custom tools collide with prebuilt tools
finalToolsFile, err := mergeToolsFiles(allToolsFiles...)
if err != nil {
cmd.logger.ErrorContext(ctx, err.Error())
return err
}
cmd.cfg.SourceConfigs = finalToolsFile.Sources
cmd.cfg.AuthServiceConfigs = finalToolsFile.AuthServices
cmd.cfg.ToolConfigs = finalToolsFile.Tools
cmd.cfg.ToolsetConfigs = finalToolsFile.Toolsets
cmd.cfg.PromptConfigs = finalToolsFile.Prompts
authSourceConfigs := finalToolsFile.AuthSources
if authSourceConfigs != nil {
cmd.logger.WarnContext(ctx, "`authSources` is deprecated, use `authServices` instead")
cmd.cfg.AuthServiceConfigs = authSourceConfigs
for k, v := range authSourceConfigs {
if _, exists := cmd.cfg.AuthServiceConfigs[k]; exists {
errMsg := fmt.Errorf("resource conflict detected: authSource '%s' has the same name as an existing authService. Please rename your authSource", k)
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
cmd.cfg.AuthServiceConfigs[k] = v
}
}
instrumentation, err := telemetry.CreateTelemetryInstrumentation(versionString)
@@ -970,9 +1010,8 @@ func run(cmd *Command) error {
}()
}
watchDirs, watchedFiles := resolveWatcherInputs(cmd.tools_file, cmd.tools_files, cmd.tools_folder)
if !cmd.cfg.DisableReload {
if isCustomConfigured && !cmd.cfg.DisableReload {
watchDirs, watchedFiles := resolveWatcherInputs(cmd.tools_file, cmd.tools_files, cmd.tools_folder)
// start watching the file(s) or folder for changes to trigger dynamic reloading
go watchChanges(ctx, watchDirs, watchedFiles, s)
}
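The `mergeToolsFiles` hunks earlier in this file repeat one merge-with-conflict-detection loop per resource type (sources, authServices, embeddingModels, tools, toolsets, prompts). As an illustration only, the same pattern can be expressed once with a generic helper; the helper name and message format below are assumptions, not the project's code.

```go
// Illustration of the repeated merge loop as a single generic helper.
package main

import "fmt"

func mergeInto[V any](dst, src map[string]V, kind string, fileIndex int, conflicts *[]string) {
	for name, v := range src {
		if _, exists := dst[name]; exists {
			// Record the collision instead of overwriting the earlier entry.
			*conflicts = append(*conflicts, fmt.Sprintf("%s '%s' (file #%d)", kind, name, fileIndex+1))
			continue
		}
		dst[name] = v
	}
}

func main() {
	merged := map[string]string{"list_tables": "prebuilt"}
	custom := map[string]string{"list_tables": "custom", "custom_tool": "custom"}
	var conflicts []string
	mergeInto(merged, custom, "tool", 1, &conflicts)
	fmt.Println(conflicts) // [tool 'list_tables' (file #2)]
}
```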

View File

@@ -32,6 +32,7 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/googleapis/genai-toolbox/internal/auth/google"
"github.com/googleapis/genai-toolbox/internal/embeddingmodels/gemini"
"github.com/googleapis/genai-toolbox/internal/log"
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
"github.com/googleapis/genai-toolbox/internal/prompts"
@@ -92,6 +93,21 @@ func invokeCommand(args []string) (*Command, string, error) {
return c, buf.String(), err
}
// invokeCommandWithContext executes the command with a context and returns the captured output.
func invokeCommandWithContext(ctx context.Context, args []string) (*Command, string, error) {
// Capture output using a buffer
buf := new(bytes.Buffer)
c := NewCommand(WithStreams(buf, buf))
c.SetArgs(args)
c.SilenceUsage = true
c.SilenceErrors = true
c.SetContext(ctx)
err := c.Execute()
return c, buf.String(), err
}
func TestVersion(t *testing.T) {
data, err := os.ReadFile("version.txt")
if err != nil {
@@ -1488,7 +1504,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"alloydb_postgres_database_tools": tools.ToolsetConfig{
Name: "alloydb_postgres_database_tools",
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles"},
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats", "list_stored_procedure"},
},
},
},
@@ -1518,7 +1534,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"cloud_sql_postgres_database_tools": tools.ToolsetConfig{
Name: "cloud_sql_postgres_database_tools",
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles"},
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats", "list_stored_procedure"},
},
},
},
@@ -1598,7 +1614,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"looker_tools": tools.ToolsetConfig{
Name: "looker_tools",
ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look", "make_look", "get_dashboards", "run_dashboard", "make_dashboard", "add_dashboard_element", "health_pulse", "health_analyze", "health_vacuum", "dev_mode", "get_projects", "get_project_files", "get_project_file", "create_project_file", "update_project_file", "delete_project_file", "get_connections", "get_connection_schemas", "get_connection_databases", "get_connection_tables", "get_connection_table_columns"},
ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look", "make_look", "get_dashboards", "run_dashboard", "make_dashboard", "add_dashboard_element", "add_dashboard_filter", "generate_embed_url", "health_pulse", "health_analyze", "health_vacuum", "dev_mode", "get_projects", "get_project_files", "get_project_file", "create_project_file", "update_project_file", "delete_project_file", "get_connections", "get_connection_schemas", "get_connection_databases", "get_connection_tables", "get_connection_table_columns"},
},
},
},
@@ -1618,7 +1634,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"postgres_database_tools": tools.ToolsetConfig{
Name: "postgres_database_tools",
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles"},
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats", "list_stored_procedure"},
},
},
},
@@ -1755,11 +1771,6 @@ func TestMutuallyExclusiveFlags(t *testing.T) {
args []string
errString string
}{
{
desc: "--prebuilt and --tools-file",
args: []string{"--prebuilt", "alloydb", "--tools-file", "my.yaml"},
errString: "--prebuilt and --tools-file/--tools-files/--tools-folder flags cannot be used simultaneously",
},
{
desc: "--tools-file and --tools-files",
args: []string{"--tools-file", "my.yaml", "--tools-files", "a.yaml,b.yaml"},
@@ -1820,9 +1831,10 @@ func TestFileLoadingErrors(t *testing.T) {
func TestMergeToolsFiles(t *testing.T) {
file1 := ToolsFile{
Sources: server.SourceConfigs{"source1": httpsrc.Config{Name: "source1"}},
Tools: server.ToolConfigs{"tool1": http.Config{Name: "tool1"}},
Toolsets: server.ToolsetConfigs{"set1": tools.ToolsetConfig{Name: "set1"}},
Sources: server.SourceConfigs{"source1": httpsrc.Config{Name: "source1"}},
Tools: server.ToolConfigs{"tool1": http.Config{Name: "tool1"}},
Toolsets: server.ToolsetConfigs{"set1": tools.ToolsetConfig{Name: "set1"}},
EmbeddingModels: server.EmbeddingModelConfigs{"model1": gemini.Config{Name: "gemini-text"}},
}
file2 := ToolsFile{
AuthServices: server.AuthServiceConfigs{"auth1": google.Config{Name: "auth1"}},
@@ -1844,11 +1856,12 @@ func TestMergeToolsFiles(t *testing.T) {
name: "merge two distinct files",
files: []ToolsFile{file1, file2},
want: ToolsFile{
Sources: server.SourceConfigs{"source1": httpsrc.Config{Name: "source1"}},
AuthServices: server.AuthServiceConfigs{"auth1": google.Config{Name: "auth1"}},
Tools: server.ToolConfigs{"tool1": http.Config{Name: "tool1"}, "tool2": http.Config{Name: "tool2"}},
Toolsets: server.ToolsetConfigs{"set1": tools.ToolsetConfig{Name: "set1"}, "set2": tools.ToolsetConfig{Name: "set2"}},
Prompts: server.PromptConfigs{},
Sources: server.SourceConfigs{"source1": httpsrc.Config{Name: "source1"}},
AuthServices: server.AuthServiceConfigs{"auth1": google.Config{Name: "auth1"}},
Tools: server.ToolConfigs{"tool1": http.Config{Name: "tool1"}, "tool2": http.Config{Name: "tool2"}},
Toolsets: server.ToolsetConfigs{"set1": tools.ToolsetConfig{Name: "set1"}, "set2": tools.ToolsetConfig{Name: "set2"}},
Prompts: server.PromptConfigs{},
EmbeddingModels: server.EmbeddingModelConfigs{"model1": gemini.Config{Name: "gemini-text"}},
},
wantErr: false,
},
@@ -1861,22 +1874,24 @@ func TestMergeToolsFiles(t *testing.T) {
name: "merge single file",
files: []ToolsFile{file1},
want: ToolsFile{
Sources: file1.Sources,
AuthServices: make(server.AuthServiceConfigs),
Tools: file1.Tools,
Toolsets: file1.Toolsets,
Prompts: server.PromptConfigs{},
Sources: file1.Sources,
AuthServices: make(server.AuthServiceConfigs),
EmbeddingModels: server.EmbeddingModelConfigs{"model1": gemini.Config{Name: "gemini-text"}},
Tools: file1.Tools,
Toolsets: file1.Toolsets,
Prompts: server.PromptConfigs{},
},
},
{
name: "merge empty list",
files: []ToolsFile{},
want: ToolsFile{
Sources: make(server.SourceConfigs),
AuthServices: make(server.AuthServiceConfigs),
Tools: make(server.ToolConfigs),
Toolsets: make(server.ToolsetConfigs),
Prompts: server.PromptConfigs{},
Sources: make(server.SourceConfigs),
AuthServices: make(server.AuthServiceConfigs),
EmbeddingModels: make(server.EmbeddingModelConfigs),
Tools: make(server.ToolConfigs),
Toolsets: make(server.ToolsetConfigs),
Prompts: server.PromptConfigs{},
},
},
}
@@ -1902,3 +1917,228 @@ func TestMergeToolsFiles(t *testing.T) {
})
}
}
func TestPrebuiltAndCustomTools(t *testing.T) {
t.Setenv("SQLITE_DATABASE", "test.db")
// Setup custom tools file
customContent := `
tools:
custom_tool:
kind: http
source: my-http
method: GET
path: /
description: "A custom tool for testing"
sources:
my-http:
kind: http
baseUrl: http://example.com
`
customFile := filepath.Join(t.TempDir(), "custom.yaml")
if err := os.WriteFile(customFile, []byte(customContent), 0644); err != nil {
t.Fatal(err)
}
// Tool Conflict File
// SQLite prebuilt has a tool named 'list_tables'
toolConflictContent := `
tools:
list_tables:
kind: http
source: my-http
method: GET
path: /
description: "Conflicting tool"
sources:
my-http:
kind: http
baseUrl: http://example.com
`
toolConflictFile := filepath.Join(t.TempDir(), "tool_conflict.yaml")
if err := os.WriteFile(toolConflictFile, []byte(toolConflictContent), 0644); err != nil {
t.Fatal(err)
}
// Source Conflict File
// SQLite prebuilt has a source named 'sqlite-source'
sourceConflictContent := `
sources:
sqlite-source:
kind: http
baseUrl: http://example.com
tools:
dummy_tool:
kind: http
source: sqlite-source
method: GET
path: /
description: "Dummy"
`
sourceConflictFile := filepath.Join(t.TempDir(), "source_conflict.yaml")
if err := os.WriteFile(sourceConflictFile, []byte(sourceConflictContent), 0644); err != nil {
t.Fatal(err)
}
// Toolset Conflict File
// SQLite prebuilt has a toolset named 'sqlite_database_tools'
toolsetConflictContent := `
sources:
dummy-src:
kind: http
baseUrl: http://example.com
tools:
dummy_tool:
kind: http
source: dummy-src
method: GET
path: /
description: "Dummy"
toolsets:
sqlite_database_tools:
- dummy_tool
`
toolsetConflictFile := filepath.Join(t.TempDir(), "toolset_conflict.yaml")
if err := os.WriteFile(toolsetConflictFile, []byte(toolsetConflictContent), 0644); err != nil {
t.Fatal(err)
}
// Legacy Auth File
authContent := `
authSources:
legacy-auth:
kind: google
clientId: "test-client-id"
`
authFile := filepath.Join(t.TempDir(), "auth.yaml")
if err := os.WriteFile(authFile, []byte(authContent), 0644); err != nil {
t.Fatal(err)
}
testCases := []struct {
desc string
args []string
wantErr bool
errString string
cfgCheck func(server.ServerConfig) error
}{
{
desc: "success mixed",
args: []string{"--prebuilt", "sqlite", "--tools-file", customFile},
wantErr: false,
cfgCheck: func(cfg server.ServerConfig) error {
if _, ok := cfg.ToolConfigs["custom_tool"]; !ok {
return fmt.Errorf("custom tool not found")
}
if _, ok := cfg.ToolConfigs["list_tables"]; !ok {
return fmt.Errorf("prebuilt tool 'list_tables' not found")
}
return nil
},
},
{
desc: "tool conflict error",
args: []string{"--prebuilt", "sqlite", "--tools-file", toolConflictFile},
wantErr: true,
errString: "resource conflicts detected",
},
{
desc: "source conflict error",
args: []string{"--prebuilt", "sqlite", "--tools-file", sourceConflictFile},
wantErr: true,
errString: "resource conflicts detected",
},
{
desc: "toolset conflict error",
args: []string{"--prebuilt", "sqlite", "--tools-file", toolsetConflictFile},
wantErr: true,
errString: "resource conflicts detected",
},
{
desc: "legacy auth additive",
args: []string{"--prebuilt", "sqlite", "--tools-file", authFile},
wantErr: false,
cfgCheck: func(cfg server.ServerConfig) error {
if _, ok := cfg.AuthServiceConfigs["legacy-auth"]; !ok {
return fmt.Errorf("legacy auth source not merged into auth services")
}
return nil
},
},
}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond)
defer cancel()
cmd, output, err := invokeCommandWithContext(ctx, tc.args)
if tc.wantErr {
if err == nil {
t.Fatalf("expected an error but got none")
}
if !strings.Contains(err.Error(), tc.errString) {
t.Errorf("expected error message to contain %q, but got %q", tc.errString, err.Error())
}
} else {
if err != nil && err != context.DeadlineExceeded && err != context.Canceled {
t.Fatalf("unexpected error: %v", err)
}
if !strings.Contains(output, "Server ready to serve!") {
t.Errorf("server did not start successfully (no ready message found). Output:\n%s", output)
}
if tc.cfgCheck != nil {
if err := tc.cfgCheck(cmd.cfg); err != nil {
t.Errorf("config check failed: %v", err)
}
}
}
})
}
}
func TestDefaultToolsFileBehavior(t *testing.T) {
t.Setenv("SQLITE_DATABASE", "test.db")
testCases := []struct {
desc string
args []string
expectRun bool
errString string
}{
{
desc: "no flags (defaults to tools.yaml)",
args: []string{},
expectRun: false,
errString: "tools.yaml", // Expect error because tools.yaml doesn't exist in test env
},
{
desc: "prebuilt only (skips tools.yaml)",
args: []string{"--prebuilt", "sqlite"},
expectRun: true,
},
}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond)
defer cancel()
_, output, err := invokeCommandWithContext(ctx, tc.args)
if tc.expectRun {
if err != nil && err != context.DeadlineExceeded && err != context.Canceled {
t.Fatalf("expected server start, got error: %v", err)
}
// Verify it actually started
if !strings.Contains(output, "Server ready to serve!") {
t.Errorf("server did not start successfully (no ready message found). Output:\n%s", output)
}
} else {
if err == nil {
t.Fatalf("expected error reading default file, got nil")
}
if !strings.Contains(err.Error(), tc.errString) {
t.Errorf("expected error message to contain %q, but got %q", tc.errString, err.Error())
}
}
})
}
}

View File

@@ -1 +1 @@
0.22.0
0.24.0

View File

@@ -68,6 +68,7 @@ The BigQuery MCP server is configured using environment variables.
export BIGQUERY_PROJECT="<your-gcp-project-id>"
export BIGQUERY_LOCATION="<your-dataset-location>" # Optional
export BIGQUERY_USE_CLIENT_OAUTH="true" # Optional
export BIGQUERY_SCOPES="<comma-separated-scopes>" # Optional
```
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
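The exact snippet falls outside this hunk. As an illustrative sketch only — assuming the prebuilt `bigquery` server is launched via `npx @toolbox-sdk/server`, the same pattern used for the Looker server later in this changeset — it might resemble:
```json
{
  "mcpServers": {
    "bigquery": {
      "command": "npx",
      "args": ["-y", "@toolbox-sdk/server", "--prebuilt", "bigquery", "--stdio"],
      "env": {
        "BIGQUERY_PROJECT": "<your-gcp-project-id>"
      }
    }
  }
}
```
Adjust the server name and environment variables to match the table above and your own setup.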

View File

@@ -11,11 +11,11 @@ The MCP Toolbox for Databases Server gives AI-powered development tools the abil
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
1. In the Antigravity MCP Store, click the **Install** button. A configuration window will appear.
2. Create your [`tools.yaml` configuration file](https://googleapis.github.io/genai-toolbox/getting-started/configure/).
3. Click "View raw config" and update the `tools.yaml` path with the full absolute path to your file.
3. In the configuration window, enter the full absolute path to your `tools.yaml` file and click **Save**.
> [!NOTE]
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.

View File

@@ -183,11 +183,11 @@ Protocol (OTLP). If you would like to use a collector, please refer to this
The following flags are used to determine Toolbox's telemetry configuration:
| **flag** | **type** | **description** |
|----------------------------|----------|------------------------------------------------------------------------------------------------------------------|
| `--telemetry-gcp` | bool | Enable exporting directly to Google Cloud Monitoring. Default is `false`. |
| `--telemetry-otlp` | string | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. "<http://127.0.0.1:4318>"). |
| `--telemetry-service-name` | string | Sets the value of the `service.name` resource attribute. Default is `toolbox`. |
| **flag** | **type** | **description** |
|----------------------------|----------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `--telemetry-gcp` | bool | Enable exporting directly to Google Cloud Monitoring. Default is `false`. |
| `--telemetry-otlp` | string | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. "127.0.0.1:4318"). To pass an insecure endpoint here, set environment variable `OTEL_EXPORTER_OTLP_INSECURE=true`. |
| `--telemetry-service-name` | string | Sets the value of the `service.name` resource attribute. Default is `toolbox`. |
In addition to the flags noted above, you can also make additional configuration
for OpenTelemetry via the [General SDK Configuration][sdk-configuration] through
@@ -207,5 +207,5 @@ To enable Google Cloud Exporter:
To enable OTLP Exporter, provide Collector endpoint:
```bash
./toolbox --telemetry-otlp="http://127.0.0.1:4553"
./toolbox --telemetry-otlp="127.0.0.1:4553"
```
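If the Collector endpoint is plain HTTP (insecure), also set the environment variable noted in the flags table above:
```bash
export OTEL_EXPORTER_OTLP_INSECURE=true
./toolbox --telemetry-otlp="127.0.0.1:4553"
```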

View File

@@ -234,7 +234,7 @@
},
"outputs": [],
"source": [
"version = \"0.22.0\" # x-release-please-version\n",
"version = \"0.24.0\" # x-release-please-version\n",
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
"\n",
"# Make the binary executable\n",

View File

@@ -71,6 +71,22 @@ redeploying your application.
## Getting Started
### (Non-production) Running Toolbox
You can run Toolbox directly with a [configuration file](../configure.md):
```sh
npx @toolbox-sdk/server --tools-file tools.yaml
```
This runs the latest version of the toolbox server with your configuration file.
{{< notice note >}}
This method should only be used for non-production use cases such as
experimentation. For any production use cases, please consider [Installing the
server](#installing-the-server) and then [running it](#running-the-server).
{{< /notice >}}
### Installing the server
For the latest version, check the [releases page][releases] and use the
@@ -87,7 +103,7 @@ To install Toolbox as a binary on Linux (AMD64):
```sh
# see releases page for other versions
export VERSION=0.22.0
export VERSION=0.24.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```
@@ -98,7 +114,7 @@ To install Toolbox as a binary on macOS (Apple Silicon):
```sh
# see releases page for other versions
export VERSION=0.22.0
export VERSION=0.24.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
chmod +x toolbox
```
@@ -109,7 +125,7 @@ To install Toolbox as a binary on macOS (Intel):
```sh
# see releases page for other versions
export VERSION=0.22.0
export VERSION=0.24.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
chmod +x toolbox
```
@@ -120,7 +136,7 @@ To install Toolbox as a binary on Windows (Command Prompt):
```cmd
:: see releases page for other versions
set VERSION=0.22.0
set VERSION=0.24.0
curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
```
@@ -130,7 +146,7 @@ To install Toolbox as a binary on Windows (PowerShell):
```powershell
# see releases page for other versions
$VERSION = "0.21.0"
$VERSION = "0.24.0"
curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
```
@@ -142,7 +158,7 @@ You can also install Toolbox as a container:
```sh
# see releases page for other versions
export VERSION=0.22.0
export VERSION=0.24.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```
@@ -161,7 +177,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:
```sh
go install github.com/googleapis/genai-toolbox@v0.22.0
go install github.com/googleapis/genai-toolbox@v0.24.0
```
{{% /tab %}}

View File

@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

View File

@@ -0,0 +1,245 @@
---
title: "Prompts using Gemini CLI"
type: docs
weight: 5
description: >
How to get started using Toolbox prompts locally with PostgreSQL and [Gemini CLI](https://github.com/google-gemini/gemini-cli).
---
## Before you begin
This guide assumes you have already done the following:
1. Installed [PostgreSQL 16+ and the `psql` client][install-postgres].
[install-postgres]: https://www.postgresql.org/download/
## Step 1: Set up your database
In this section, we will create a database, insert some data that needs to be
accessed by our agent, and create a database user for Toolbox to connect with.
1. Connect to postgres using the `psql` command:
```bash
psql -h 127.0.0.1 -U postgres
```
Here, `postgres` denotes the default postgres superuser.
{{< notice info >}}
#### **Having trouble connecting?**
* **Password Prompt:** If you are prompted for a password for the `postgres`
user and do not know it (or a blank password doesn't work), your PostgreSQL
installation might require a password or a different authentication method.
* **`FATAL: role "postgres" does not exist`:** This error means the default
`postgres` superuser role isn't available under that name on your system.
* **`Connection refused`:** Ensure your PostgreSQL server is actually running.
You can typically check with `sudo systemctl status postgresql` and start it
with `sudo systemctl start postgresql` on Linux systems.
<br/>
#### **Common Solution**
For password issues or if the `postgres` role seems inaccessible directly, try
switching to the `postgres` operating system user first. This user often has
permission to connect without a password for local connections (this is called
peer authentication).
```bash
sudo -i -u postgres
psql -h 127.0.0.1
```
Once you are in the `psql` shell using this method, you can proceed with the
database creation steps below. Afterwards, type `\q` to exit `psql`, and then
`exit` to return to your normal user shell.
If desired, once connected to `psql` as the `postgres` OS user, you can set a
password for the `postgres` *database* user using: `ALTER USER postgres WITH
PASSWORD 'your_chosen_password';`. This would allow direct connection with `-U
postgres` and a password next time.
{{< /notice >}}
1. Create a new database and a new user:
{{< notice tip >}}
For a real application, it's best to follow the principle of least privilege
and only grant the privileges your application needs.
{{< /notice >}}
```sql
CREATE USER toolbox_user WITH PASSWORD 'my-password';
CREATE DATABASE toolbox_db;
GRANT ALL PRIVILEGES ON DATABASE toolbox_db TO toolbox_user;
ALTER DATABASE toolbox_db OWNER TO toolbox_user;
```
1. End the database session:
```bash
\q
```
(If you used `sudo -i -u postgres` and then `psql`, remember you might also
need to type `exit` after `\q` to leave the `postgres` user's shell
session.)
1. Connect to your database with your new user:
```bash
psql -h 127.0.0.1 -U toolbox_user -d toolbox_db
```
1. Create the required tables using the following commands:
```sql
CREATE TABLE users (
id SERIAL PRIMARY KEY,
username VARCHAR(50) NOT NULL,
email VARCHAR(100) UNIQUE NOT NULL,
created_at TIMESTAMPTZ DEFAULT NOW()
);
CREATE TABLE restaurants (
id SERIAL PRIMARY KEY,
name VARCHAR(100) NOT NULL,
location VARCHAR(100)
);
CREATE TABLE reviews (
id SERIAL PRIMARY KEY,
user_id INT REFERENCES users(id),
restaurant_id INT REFERENCES restaurants(id),
rating INT CHECK (rating >= 1 AND rating <= 5),
review_text TEXT,
is_published BOOLEAN DEFAULT false,
moderation_status VARCHAR(50) DEFAULT 'pending_manual_review',
created_at TIMESTAMPTZ DEFAULT NOW()
);
```
1. Insert dummy data into the tables.
```sql
INSERT INTO users (id, username, email) VALUES
(123, 'jane_d', 'jane.d@example.com'),
(124, 'john_s', 'john.s@example.com'),
(125, 'sam_b', 'sam.b@example.com');
INSERT INTO restaurants (id, name, location) VALUES
(455, 'Pizza Palace', '123 Main St'),
(456, 'The Corner Bistro', '456 Oak Ave'),
(457, 'Sushi Spot', '789 Pine Ln');
INSERT INTO reviews (user_id, restaurant_id, rating, review_text, is_published, moderation_status) VALUES
(124, 455, 5, 'Best pizza in town! The crust was perfect.', true, 'approved'),
(125, 457, 4, 'Great sushi, very fresh. A bit pricey but worth it.', true, 'approved'),
(123, 457, 5, 'Absolutely loved the dragon roll. Will be back!', true, 'approved'),
(123, 456, 4, 'The atmosphere was lovely and the food was great. My photo upload might have been weird though.', false, 'pending_manual_review'),
(125, 456, 1, 'This review contains inappropriate language.', false, 'rejected');
```
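(Optional) To sanity-check the seeded data before moving on, you can join the three tables and confirm all five reviews are present:
```sql
SELECT u.username, r.name AS restaurant, rv.rating, rv.moderation_status
FROM reviews rv
JOIN users u ON u.id = rv.user_id
JOIN restaurants r ON r.id = rv.restaurant_id
ORDER BY rv.id;
```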
1. End the database session:
```bash
\q
```
## Step 2: Configure Toolbox
Create a file named `tools.yaml`. This file defines the database connection, the
SQL tools available, and the prompts the agents will use.
```yaml
sources:
my-foodiefind-db:
kind: postgres
host: 127.0.0.1
port: 5432
database: toolbox_db
user: toolbox_user
password: my-password
tools:
find_user_by_email:
kind: postgres-sql
source: my-foodiefind-db
description: Find a user's ID by their email address.
parameters:
- name: email
type: string
description: The email address of the user to find.
statement: SELECT id FROM users WHERE email = $1;
find_restaurant_by_name:
kind: postgres-sql
source: my-foodiefind-db
description: Find a restaurant's ID by its exact name.
parameters:
- name: name
type: string
description: The name of the restaurant to find.
statement: SELECT id FROM restaurants WHERE name = $1;
find_review_by_user_and_restaurant:
kind: postgres-sql
source: my-foodiefind-db
description: Find the full record for a specific review using the user's ID and the restaurant's ID.
parameters:
- name: user_id
type: integer
description: The numerical ID of the user.
- name: restaurant_id
type: integer
description: The numerical ID of the restaurant.
statement: SELECT * FROM reviews WHERE user_id = $1 AND restaurant_id = $2;
prompts:
investigate_missing_review:
description: "Investigates a user's missing review by finding the user, restaurant, and the review itself, then analyzing its status."
arguments:
- name: "user_email"
description: "The email of the user who wrote the review."
- name: "restaurant_name"
description: "The name of the restaurant being reviewed."
messages:
- content: >-
**Goal:** Find the review written by the user with email '{{.user_email}}' for the restaurant named '{{.restaurant_name}}' and understand its status.
**Workflow:**
1. Use the `find_user_by_email` tool with the email '{{.user_email}}' to get the `user_id`.
2. Use the `find_restaurant_by_name` tool with the name '{{.restaurant_name}}' to get the `restaurant_id`.
3. Use the `find_review_by_user_and_restaurant` tool with the `user_id` and `restaurant_id` you just found.
4. Analyze the results from the final tool call. Examine the `is_published` and `moderation_status` fields and explain the review's status to the user in a clear, human-readable sentence.
```
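Before wiring up a client, start Toolbox with this file so the MCP endpoint is listening on the default port (5000). One minimal way to do this, reusing the non-production `npx` runner shown elsewhere in this changeset:
```sh
npx @toolbox-sdk/server --tools-file tools.yaml
```
The `httpUrl` used in the next step assumes this server is reachable at `http://localhost:5000/mcp`.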
## Step 3: Connect to Gemini CLI
Configure the Gemini CLI to talk to your local Toolbox MCP server.
1. Open or create your Gemini settings file: `~/.gemini/settings.json`.
2. Add the following configuration to the file:
```json
{
"mcpServers": {
"MCPToolbox": {
"httpUrl": "http://localhost:5000/mcp"
}
},
"mcp": {
"allowed": ["MCPToolbox"]
}
}
```
3. Start Gemini CLI:
```sh
gemini
```
If Gemini CLI is already running, use `/mcp refresh` to reload the MCP servers.
4. Use Gemini CLI slash commands to run your prompt:
```sh
/investigate_missing_review --user_email="jane.d@example.com" --restaurant_name="The Corner Bistro"
```

View File

@@ -28,11 +28,11 @@ require (
go.opentelemetry.io/otel/metric v1.38.0 // indirect
go.opentelemetry.io/otel/sdk v1.38.0 // indirect
go.opentelemetry.io/otel/trace v1.38.0 // indirect
golang.org/x/crypto v0.43.0 // indirect
golang.org/x/net v0.46.0 // indirect
golang.org/x/crypto v0.45.0 // indirect
golang.org/x/net v0.47.0 // indirect
golang.org/x/oauth2 v0.32.0 // indirect
golang.org/x/sys v0.37.0 // indirect
golang.org/x/text v0.30.0 // indirect
golang.org/x/sys v0.38.0 // indirect
golang.org/x/text v0.31.0 // indirect
google.golang.org/api v0.255.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20251029180050-ab9386a59fda // indirect
google.golang.org/grpc v1.76.0 // indirect

View File

@@ -88,18 +88,18 @@ go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJr
go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=
golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=
golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/oauth2 v0.32.0 h1:jsCblLleRMDrxMN29H3z/k1KliIvpLgCkE6R8FXXNgY=
golang.org/x/oauth2 v0.32.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=

View File

@@ -39,11 +39,11 @@ require (
go.opentelemetry.io/otel/metric v1.38.0 // indirect
go.opentelemetry.io/otel/sdk v1.38.0 // indirect
go.opentelemetry.io/otel/trace v1.38.0 // indirect
golang.org/x/crypto v0.43.0 // indirect
golang.org/x/net v0.46.0 // indirect
golang.org/x/crypto v0.45.0 // indirect
golang.org/x/net v0.47.0 // indirect
golang.org/x/oauth2 v0.32.0 // indirect
golang.org/x/sys v0.37.0 // indirect
golang.org/x/text v0.30.0 // indirect
golang.org/x/sys v0.38.0 // indirect
golang.org/x/text v0.31.0 // indirect
google.golang.org/api v0.255.0 // indirect
google.golang.org/genai v1.34.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20251029180050-ab9386a59fda // indirect

View File

@@ -123,18 +123,18 @@ go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJr
go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=
golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=
golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/oauth2 v0.32.0 h1:jsCblLleRMDrxMN29H3z/k1KliIvpLgCkE6R8FXXNgY=
golang.org/x/oauth2 v0.32.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=

View File

@@ -26,11 +26,11 @@ require (
go.opentelemetry.io/otel v1.38.0 // indirect
go.opentelemetry.io/otel/metric v1.38.0 // indirect
go.opentelemetry.io/otel/trace v1.38.0 // indirect
golang.org/x/crypto v0.43.0 // indirect
golang.org/x/net v0.46.0 // indirect
golang.org/x/crypto v0.45.0 // indirect
golang.org/x/net v0.47.0 // indirect
golang.org/x/oauth2 v0.32.0 // indirect
golang.org/x/sys v0.37.0 // indirect
golang.org/x/text v0.30.0 // indirect
golang.org/x/sys v0.38.0 // indirect
golang.org/x/text v0.31.0 // indirect
google.golang.org/api v0.255.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20251029180050-ab9386a59fda // indirect
google.golang.org/grpc v1.76.0 // indirect

View File

@@ -94,18 +94,18 @@ go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6
go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA=
go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE=
go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=
golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=
golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/oauth2 v0.32.0 h1:jsCblLleRMDrxMN29H3z/k1KliIvpLgCkE6R8FXXNgY=
golang.org/x/oauth2 v0.32.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=

View File

@@ -18,7 +18,6 @@
"resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-5.0.2.tgz",
"integrity": "sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg==",
"license": "Apache-2.0",
"peer": true,
"dependencies": {
"arrify": "^2.0.0",
"extend": "^3.0.2"
@@ -32,7 +31,6 @@
"resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz",
"integrity": "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==",
"license": "Apache-2.0",
"peer": true,
"engines": {
"node": ">=14.0.0"
}
@@ -42,7 +40,6 @@
"resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.0.0.tgz",
"integrity": "sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g==",
"license": "Apache-2.0",
"peer": true,
"engines": {
"node": ">=14"
}
@@ -52,7 +49,6 @@
"resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.18.0.tgz",
"integrity": "sha512-r3ZwDMiz4nwW6R922Z1pwpePxyRwE5GdevYX63hRmAQUkUQJcBH/79EnQPDv5cOv1mFBgevdNWQfi3tie3dHrQ==",
"license": "Apache-2.0",
"peer": true,
"dependencies": {
"@google-cloud/paginator": "^5.0.0",
"@google-cloud/projectify": "^4.0.0",
@@ -79,7 +75,6 @@
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
"license": "MIT",
"peer": true,
"bin": {
"uuid": "dist/bin/uuid"
}
@@ -102,6 +97,7 @@
"resolved": "https://registry.npmjs.org/@google/genai/-/genai-1.14.0.tgz",
"integrity": "sha512-jirYprAAJU1svjwSDVCzyVq+FrJpJd5CSxR/g2Ga/gZ0ZYZpcWjMS75KJl9y71K1mDN+tcx6s21CzCbB2R840g==",
"license": "Apache-2.0",
"peer": true,
"dependencies": {
"google-auth-library": "^9.14.2",
"ws": "^8.18.0"
@@ -140,6 +136,7 @@
"resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.17.5.tgz",
"integrity": "sha512-QakrKIGniGuRVfWBdMsDea/dx1PNE739QJ7gCM41s9q+qaCYTHCdsIBXQVVXry3mfWAiaM9kT22Hyz53Uw8mfg==",
"license": "MIT",
"peer": true,
"dependencies": {
"ajv": "^6.12.6",
"content-type": "^1.0.5",
@@ -302,7 +299,6 @@
"resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz",
"integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">= 10"
}
@@ -311,15 +307,13 @@
"version": "0.12.5",
"resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz",
"integrity": "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==",
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/@types/node": {
"version": "24.10.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.1.tgz",
"integrity": "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"undici-types": "~7.16.0"
}
@@ -329,7 +323,6 @@
"resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.13.tgz",
"integrity": "sha512-FGJ6udDNUCjd19pp0Q3iTiDkwhYup7J8hpMW9c4k53NrccQFFWKRho6hvtPPEhnXWKvukfwAlB6DbDz4yhH5Gg==",
"license": "MIT",
"peer": true,
"dependencies": {
"@types/caseless": "*",
"@types/node": "*",
@@ -342,7 +335,6 @@
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.5.tgz",
"integrity": "sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A==",
"license": "MIT",
"peer": true,
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
@@ -360,7 +352,6 @@
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">= 0.6"
}
@@ -370,7 +361,6 @@
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"license": "MIT",
"peer": true,
"dependencies": {
"mime-db": "1.52.0"
},
@@ -382,15 +372,13 @@
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz",
"integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==",
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/abort-controller": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
"integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==",
"license": "MIT",
"peer": true,
"dependencies": {
"event-target-shim": "^5.0.0"
},
@@ -465,7 +453,6 @@
"resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz",
"integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">=8"
}
@@ -475,7 +462,6 @@
"resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz",
"integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==",
"license": "MIT",
"peer": true,
"dependencies": {
"retry": "0.13.1"
}
@@ -768,7 +754,6 @@
"resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz",
"integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==",
"license": "MIT",
"peer": true,
"dependencies": {
"end-of-stream": "^1.4.1",
"inherits": "^2.0.3",
@@ -817,7 +802,6 @@
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz",
"integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==",
"license": "MIT",
"peer": true,
"dependencies": {
"once": "^1.4.0"
}
@@ -887,7 +871,6 @@
"resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
"integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">=6"
}
@@ -918,6 +901,7 @@
"resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz",
"integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==",
"license": "MIT",
"peer": true,
"dependencies": {
"accepts": "^2.0.0",
"body-parser": "^2.2.0",
@@ -999,7 +983,6 @@
}
],
"license": "MIT",
"peer": true,
"dependencies": {
"strnum": "^1.1.1"
},
@@ -1350,8 +1333,7 @@
"url": "https://patreon.com/mdevils"
}
],
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/http-errors": {
"version": "2.0.0",
@@ -1383,7 +1365,6 @@
"resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz",
"integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==",
"license": "MIT",
"peer": true,
"dependencies": {
"@tootallnate/once": "2",
"agent-base": "6",
@@ -1398,7 +1379,6 @@
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"debug": "4"
},
@@ -1525,12 +1505,12 @@
}
},
"node_modules/jws": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
"integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
"integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
"license": "MIT",
"dependencies": {
"jwa": "^2.0.0",
"jwa": "^2.0.1",
"safe-buffer": "^5.0.1"
}
},
@@ -1575,7 +1555,6 @@
"resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz",
"integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==",
"license": "MIT",
"peer": true,
"bin": {
"mime": "cli.js"
},
@@ -1736,7 +1715,6 @@
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
"integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"yocto-queue": "^0.1.0"
},
@@ -1835,9 +1813,9 @@
}
},
"node_modules/qs": {
"version": "6.14.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz",
"integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==",
"version": "6.14.1",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.14.1.tgz",
"integrity": "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==",
"license": "BSD-3-Clause",
"dependencies": {
"side-channel": "^1.1.0"
@@ -1878,7 +1856,6 @@
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
"integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
"license": "MIT",
"peer": true,
"dependencies": {
"inherits": "^2.0.3",
"string_decoder": "^1.1.1",
@@ -1893,7 +1870,6 @@
"resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz",
"integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">= 4"
}
@@ -1903,7 +1879,6 @@
"resolved": "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz",
"integrity": "sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==",
"license": "MIT",
"peer": true,
"dependencies": {
"@types/request": "^2.48.8",
"extend": "^3.0.2",
@@ -2132,7 +2107,6 @@
"resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz",
"integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==",
"license": "MIT",
"peer": true,
"dependencies": {
"stubs": "^3.0.0"
}
@@ -2141,15 +2115,13 @@
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz",
"integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==",
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/string_decoder": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
"license": "MIT",
"peer": true,
"dependencies": {
"safe-buffer": "~5.2.0"
}
@@ -2260,22 +2232,19 @@
"url": "https://github.com/sponsors/NaturalIntelligence"
}
],
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/stubs": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz",
"integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==",
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/teeny-request": {
"version": "9.0.0",
"resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz",
"integrity": "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==",
"license": "Apache-2.0",
"peer": true,
"dependencies": {
"http-proxy-agent": "^5.0.0",
"https-proxy-agent": "^5.0.0",
@@ -2292,7 +2261,6 @@
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"debug": "4"
},
@@ -2305,7 +2273,6 @@
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz",
"integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==",
"license": "MIT",
"peer": true,
"dependencies": {
"agent-base": "6",
"debug": "4"
@@ -2347,8 +2314,7 @@
"version": "7.16.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz",
"integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==",
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/unpipe": {
"version": "1.0.0",
@@ -2372,8 +2338,7 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/uuid": {
"version": "9.0.1",
@@ -2560,7 +2525,6 @@
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
"integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">=10"
},
@@ -2573,6 +2537,7 @@
"resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
"license": "MIT",
"peer": true,
"funding": {
"url": "https://github.com/sponsors/colinhacks"
}

View File

@@ -3376,22 +3376,23 @@
}
},
"node_modules/body-parser": {
"version": "1.20.3",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz",
"integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==",
"version": "1.20.4",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.4.tgz",
"integrity": "sha512-ZTgYYLMOXY9qKU/57FAo8F+HA2dGX7bqGc71txDRC1rS4frdFI5R7NhluHxH6M0YItAP0sHB4uqAOcYKxO6uGA==",
"license": "MIT",
"dependencies": {
"bytes": "3.1.2",
"bytes": "~3.1.2",
"content-type": "~1.0.5",
"debug": "2.6.9",
"depd": "2.0.0",
"destroy": "1.2.0",
"http-errors": "2.0.0",
"iconv-lite": "0.4.24",
"on-finished": "2.4.1",
"qs": "6.13.0",
"raw-body": "2.5.2",
"destroy": "~1.2.0",
"http-errors": "~2.0.1",
"iconv-lite": "~0.4.24",
"on-finished": "~2.4.1",
"qs": "~6.14.0",
"raw-body": "~2.5.3",
"type-is": "~1.6.18",
"unpipe": "1.0.0"
"unpipe": "~1.0.0"
},
"engines": {
"node": ">= 0.8",
@@ -3406,11 +3407,40 @@
"ms": "2.0.0"
}
},
"node_modules/body-parser/node_modules/http-errors": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz",
"integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==",
"license": "MIT",
"dependencies": {
"depd": "~2.0.0",
"inherits": "~2.0.4",
"setprototypeof": "~1.2.0",
"statuses": "~2.0.2",
"toidentifier": "~1.0.1"
},
"engines": {
"node": ">= 0.8"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/body-parser/node_modules/ms": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="
},
"node_modules/body-parser/node_modules/statuses": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz",
"integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==",
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
},
"node_modules/buffer-equal-constant-time": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
@@ -3434,6 +3464,7 @@
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
"integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==",
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
@@ -3830,38 +3861,39 @@
}
},
"node_modules/express": {
"version": "4.21.2",
"resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz",
"integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==",
"version": "4.22.1",
"resolved": "https://registry.npmjs.org/express/-/express-4.22.1.tgz",
"integrity": "sha512-F2X8g9P1X7uCPZMA3MVf9wcTqlyNp7IhH5qPCI0izhaOIYXaW9L535tGA3qmjRzpH+bZczqq7hVKxTR4NWnu+g==",
"license": "MIT",
"dependencies": {
"accepts": "~1.3.8",
"array-flatten": "1.1.1",
"body-parser": "1.20.3",
"content-disposition": "0.5.4",
"body-parser": "~1.20.3",
"content-disposition": "~0.5.4",
"content-type": "~1.0.4",
"cookie": "0.7.1",
"cookie-signature": "1.0.6",
"cookie": "~0.7.1",
"cookie-signature": "~1.0.6",
"debug": "2.6.9",
"depd": "2.0.0",
"encodeurl": "~2.0.0",
"escape-html": "~1.0.3",
"etag": "~1.8.1",
"finalhandler": "1.3.1",
"fresh": "0.5.2",
"http-errors": "2.0.0",
"finalhandler": "~1.3.1",
"fresh": "~0.5.2",
"http-errors": "~2.0.0",
"merge-descriptors": "1.0.3",
"methods": "~1.1.2",
"on-finished": "2.4.1",
"on-finished": "~2.4.1",
"parseurl": "~1.3.3",
"path-to-regexp": "0.1.12",
"path-to-regexp": "~0.1.12",
"proxy-addr": "~2.0.7",
"qs": "6.13.0",
"qs": "~6.14.0",
"range-parser": "~1.2.1",
"safe-buffer": "5.2.1",
"send": "0.19.0",
"serve-static": "1.16.2",
"send": "~0.19.0",
"serve-static": "~1.16.2",
"setprototypeof": "1.2.0",
"statuses": "2.0.1",
"statuses": "~2.0.1",
"type-is": "~1.6.18",
"utils-merge": "1.0.1",
"vary": "~1.1.2"
@@ -4904,6 +4936,7 @@
"version": "0.4.24",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
"integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
"license": "MIT",
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3"
},
@@ -5661,11 +5694,12 @@
}
},
"node_modules/qs": {
"version": "6.13.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz",
"integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==",
"version": "6.14.1",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.14.1.tgz",
"integrity": "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==",
"license": "BSD-3-Clause",
"dependencies": {
"side-channel": "^1.0.6"
"side-channel": "^1.1.0"
},
"engines": {
"node": ">=0.6"
@@ -5683,19 +5717,49 @@
}
},
"node_modules/raw-body": {
"version": "2.5.2",
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz",
"integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==",
"version": "2.5.3",
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.3.tgz",
"integrity": "sha512-s4VSOf6yN0rvbRZGxs8Om5CWj6seneMwK3oDb4lWDH0UPhWcxwOWw5+qk24bxq87szX1ydrwylIOp2uG1ojUpA==",
"license": "MIT",
"dependencies": {
"bytes": "3.1.2",
"http-errors": "2.0.0",
"iconv-lite": "0.4.24",
"unpipe": "1.0.0"
"bytes": "~3.1.2",
"http-errors": "~2.0.1",
"iconv-lite": "~0.4.24",
"unpipe": "~1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/raw-body/node_modules/http-errors": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz",
"integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==",
"license": "MIT",
"dependencies": {
"depd": "~2.0.0",
"inherits": "~2.0.4",
"setprototypeof": "~1.2.0",
"statuses": "~2.0.2",
"toidentifier": "~1.0.1"
},
"engines": {
"node": ">= 0.8"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/raw-body/node_modules/statuses": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz",
"integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==",
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
},
"node_modules/readable-stream": {
"version": "3.6.2",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
@@ -5813,7 +5877,8 @@
"node_modules/safer-buffer": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
"license": "MIT"
},
"node_modules/semver": {
"version": "7.7.2",

View File

@@ -45,9 +45,9 @@
}
},
"node_modules/@langchain/core": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@langchain/core/-/core-1.1.0.tgz",
"integrity": "sha512-yJ6JHcU9psjnQbzRFkXjIdNTA+3074dA+2pHdH8ewvQCSleSk6JcjkCMIb5+NASjeMoi1ZuntlLKVsNqF38YxA==",
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/@langchain/core/-/core-1.1.8.tgz",
"integrity": "sha512-kIUidOgc0ZdyXo4Ahn9Zas+OayqOfk4ZoKPi7XaDipNSWSApc2+QK5BVcjvwtzxstsNOrmXJiJWEN6WPF/MvAw==",
"license": "MIT",
"peer": true,
"dependencies": {
@@ -56,10 +56,9 @@
"camelcase": "6",
"decamelize": "1.2.0",
"js-tiktoken": "^1.0.12",
"langsmith": "^0.3.64",
"langsmith": ">=0.4.0 <1.0.0",
"mustache": "^4.2.0",
"p-queue": "^6.6.2",
"p-retry": "^7.0.0",
"uuid": "^10.0.0",
"zod": "^3.25.76 || ^4"
},
@@ -67,25 +66,10 @@
"node": ">=20"
}
},
"node_modules/@langchain/core/node_modules/p-retry": {
"version": "7.1.0",
"resolved": "https://registry.npmjs.org/p-retry/-/p-retry-7.1.0.tgz",
"integrity": "sha512-xL4PiFRQa/f9L9ZvR4/gUCRNus4N8YX80ku8kv9Jqz+ZokkiZLM0bcvX0gm1F3PDi9SPRsww1BDsTWgE6Y1GLQ==",
"license": "MIT",
"dependencies": {
"is-network-error": "^1.1.0"
},
"engines": {
"node": ">=20"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/@langchain/google-genai": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@langchain/google-genai/-/google-genai-2.0.0.tgz",
"integrity": "sha512-PaAWkogQdF+Y2bhhXWXUrC2nO7sTgWLtobBbZl/0V8Aa1F/KG2wrMECie3S17bAdFu/6VmQOuFFrlgSMwQC5KA==",
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/@langchain/google-genai/-/google-genai-2.1.3.tgz",
"integrity": "sha512-ZdlFK/N10GyU6ATzkM01Sk1rlHBoy36Q/MawGD1SyXdD2lQxZxuQZjFWewj6uzWQ2Nnjj70EvU/kmmHVPn6sfQ==",
"license": "MIT",
"dependencies": {
"@google/generative-ai": "^0.24.0",
@@ -95,7 +79,7 @@
"node": ">=20"
},
"peerDependencies": {
"@langchain/core": "1.1.0"
"@langchain/core": "1.1.8"
}
},
"node_modules/@langchain/google-genai/node_modules/uuid": {
@@ -814,18 +798,6 @@
"node": ">=8"
}
},
"node_modules/is-network-error": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/is-network-error/-/is-network-error-1.3.0.tgz",
"integrity": "sha512-6oIwpsgRfnDiyEDLMay/GqCl3HoAtH5+RUKW29gYkL0QA+ipzpDLA16yQs7/RHCSu+BwgbJaOUqa4A99qNVQVw==",
"license": "MIT",
"engines": {
"node": ">=16"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/isexe": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
@@ -872,22 +844,24 @@
}
},
"node_modules/jws": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
"integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
"integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
"license": "MIT",
"dependencies": {
"jwa": "^2.0.0",
"jwa": "^2.0.1",
"safe-buffer": "^5.0.1"
}
},
"node_modules/langchain": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/langchain/-/langchain-1.0.2.tgz",
"integrity": "sha512-He/xvjVl8DHESvdaW6Dpyba72OaLCAfS2CyOm1aWrlJ4C38dKXyTIxphtld8hiii6MWX7qMSmu2EaUwWBx2STg==",
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/langchain/-/langchain-1.2.3.tgz",
"integrity": "sha512-3k986xJuqg4az53JxV5LnGlOzIXF1d9Kq6Y9s7XjitvzhpsbFuTDV5/kiF4cx3pkNGyw0mUXC4tLz9RxucO0hw==",
"license": "MIT",
"dependencies": {
"@langchain/langgraph": "^1.0.0",
"@langchain/langgraph-checkpoint": "^1.0.0",
"langsmith": "~0.3.74",
"langsmith": ">=0.4.0 <1.0.0",
"uuid": "^10.0.0",
"zod": "^3.25.76 || ^4"
},
@@ -895,19 +869,19 @@
"node": ">=20"
},
"peerDependencies": {
"@langchain/core": "^1.0.0"
"@langchain/core": "1.1.8"
}
},
"node_modules/langsmith": {
"version": "0.3.77",
"resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.3.77.tgz",
"integrity": "sha512-wbS/9IX/hOAsOEOtPj8kCS8H0tFHaelwQ97gTONRtIfoPPLd9MMUmhk0KQB5DdsGAI5abg966+f0dZ/B+YRRzg==",
"version": "0.4.3",
"resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.4.3.tgz",
"integrity": "sha512-vuBAagBZulXj0rpZhUTxmHhrYIBk53z8e2Q8ty4OHVkahN4ul7Im3OZxD9jsXZB0EuncK1xRYtY8J3BW4vj1zw==",
"license": "MIT",
"dependencies": {
"@types/uuid": "^10.0.0",
"chalk": "^4.1.2",
"console-table-printer": "^2.12.1",
"p-queue": "^6.6.2",
"p-retry": "4",
"semver": "^7.6.3",
"uuid": "^10.0.0"
},

View File

@@ -1,3 +1,3 @@
google-adk==1.19.0
toolbox-core==0.5.3
pytest==9.0.1
google-adk==1.21.0
toolbox-core==0.5.4
pytest==9.0.2

View File

@@ -1,3 +1,3 @@
google-genai==1.52.0
toolbox-core==0.5.3
pytest==9.0.1
google-genai==1.56.0
toolbox-core==0.5.4
pytest==9.0.2

View File

@@ -1,5 +1,5 @@
langchain==1.1.0
langchain-google-vertexai==3.1.0
langgraph==1.0.4
toolbox-langchain==0.5.3
pytest==9.0.1
langchain==1.2.0
langchain-google-vertexai==3.2.0
langgraph==1.0.5
toolbox-langchain==0.5.4
pytest==9.0.2

View File

@@ -1,4 +1,4 @@
llama-index==0.14.8
llama-index-llms-google-genai==0.7.3
toolbox-llamaindex==0.5.3
pytest==9.0.1
llama-index==0.14.12
llama-index-llms-google-genai==0.8.3
toolbox-llamaindex==0.5.4
pytest==9.0.2

View File

@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

View File

@@ -18,6 +18,7 @@ to expose your developer assistant tools to a Looker instance:
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Antigravity][antigravity]
[toolbox]: https://github.com/googleapis/genai-toolbox
[gemini-cli]: #configure-your-mcp-client
@@ -27,6 +28,7 @@ to expose your developer assistant tools to a Looker instance:
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[antigravity]: #connect-with-antigravity
## Set up Looker
@@ -38,6 +40,55 @@ to expose your developer assistant tools to a Looker instance:
listening at a different port, and you will need to use
`https://looker.example.com:19999` instead.
## Connect with Antigravity
You can connect Looker to Antigravity in the following ways:
* Using the MCP Store
* Using a custom configuration
{{< notice note >}}
You don't need to download the MCP Toolbox binary to use these methods.
{{< /notice >}}
{{< tabpane text=true >}}
{{% tab header="MCP Store" lang="en" %}}
The most straightforward way to connect to Looker in Antigravity is by using the built-in MCP Store.
1. Open Antigravity and open the editor's agent panel.
1. Click the **"..."** icon at the top of the panel and select **MCP Servers**.
1. Locate **Looker** in the list of available servers and click **Install**.
1. Follow the on-screen prompts to securely link your accounts where applicable.
After you install Looker in the MCP Store, resources and tools from the server are automatically available to the editor.
{{% /tab %}}
{{% tab header="Custom config" lang="en" %}}
To connect to a custom MCP server, follow these steps:
1. Open Antigravity and navigate to the MCP store using the **"..."** drop-down at the top of the editor's agent panel.
1. To open the **mcp_config.json** file, click **MCP Servers** and then click **Manage MCP Servers > View raw config**.
1. Add the following configuration, replacing the environment variable values with your own, and save.
```json
{
"mcpServers": {
"looker": {
"command": "npx",
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "looker", "--stdio"],
"env": {
"LOOKER_BASE_URL": "https://looker.example.com",
"LOOKER_CLIENT_ID": "your-client-id",
"LOOKER_CLIENT_SECRET": "your-client-secret"
}
}
}
}
```
{{% /tab %}}
{{< /tabpane >}}
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct
@@ -49,19 +100,19 @@ to expose your developer assistant tools to a Looker instance:
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
@@ -290,7 +341,7 @@ assistant to list models, explores, dimensions, and measures. Run a
query, retrieve the SQL for a query, and run a saved Look.
The full tool list is available in the [Prebuilt Tools
Reference](../../reference/prebuilt-tools/#looker).
Reference](../../reference/prebuilt-tools.md/#looker).
The following tools are available to the LLM:
@@ -323,6 +374,8 @@ instance and create new saved content.
data
1. **make_dashboard**: Create a saved dashboard in Looker and return the URL
1. **add_dashboard_element**: Add a tile to a dashboard
1. **add_dashboard_filter**: Add a filter to a dashboard
1. **generate_embed_url**: Generate an embed url for content
### Looker Instance Health Tools

View File

@@ -45,19 +45,19 @@ instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

View File

@@ -43,19 +43,19 @@ expose your developer assistant tools to a MySQL instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

View File

@@ -44,19 +44,19 @@ expose your developer assistant tools to a Neo4j instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

View File

@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

View File

@@ -43,19 +43,19 @@ to expose your developer assistant tools to a SQLite instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.22.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.24.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

View File

@@ -79,12 +79,16 @@ There are a couple of steps to run and use a Collector.
```
1. Run toolbox with the `--telemetry-otlp` flag. Configure it to send them to
`http://127.0.0.1:4553` (for HTTP) or the Collector's URL.
`127.0.0.1:4553` (for HTTP) or the Collector's URL.
```bash
./toolbox --telemetry-otlp=http://127.0.0.1:4553
./toolbox --telemetry-otlp=127.0.0.1:4553
```
{{< notice tip >}}
To pass an insecure endpoint, set environment variable `OTEL_EXPORTER_OTLP_INSECURE=true`.
{{< /notice >}}
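For example, a minimal sketch combining the environment variable with the flag (the endpoint address is illustrative):
```bash
# Allow an insecure (non-TLS) OTLP endpoint, then start Toolbox.
export OTEL_EXPORTER_OTLP_INSECURE=true
./toolbox --telemetry-otlp=127.0.0.1:4553
```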
1. Once telemetry data is collected, you can view it in your telemetry
backend. If you are using GCP exporters, telemetry will be visible in the GCP
dashboard at [Metrics Explorer][metrics-explorer] and [Trace

View File

@@ -16,14 +16,14 @@ description: >
| | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
| | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
| `-p` | `--port` | Port the server will listen on. | `5000` |
| | `--prebuilt` | Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
| | `--prebuilt` | Use a prebuilt tool configuration by source type. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
| | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
| | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
| | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
| | `--telemetry-service-name` | Sets the value of the service.name resource attribute for telemetry data. | `toolbox` |
| | `--tools-file` | File path specifying the tool configuration. Cannot be used with --prebuilt, --tools-files, or --tools-folder. | |
| | `--tools-files` | Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --prebuilt, --tools-file, or --tools-folder. | |
| | `--tools-folder` | Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --prebuilt, --tools-file, or --tools-files. | |
| | `--tools-file` | File path specifying the tool configuration. Cannot be used with --tools-files or --tools-folder. | |
| | `--tools-files` | Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file or --tools-folder. | |
| | `--tools-folder` | Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file or --tools-files. | |
| | `--ui` | Launches the Toolbox UI web server. | |
| | `--allowed-origins` | Specifies a list of origins permitted to access this server. | `*` |
| `-v` | `--version` | version for toolbox | |
@@ -46,6 +46,9 @@ description: >
```bash
# Basic server with custom port configuration
./toolbox --tools-file "tools.yaml" --port 8080
# Server with prebuilt + custom tools configurations
./toolbox --tools-file tools.yaml --prebuilt alloydb-postgres
```
### Tool Configuration Sources
@@ -72,8 +75,8 @@ The CLI supports multiple mutually exclusive ways to specify tool configurations
{{< notice tip >}}
The CLI enforces mutual exclusivity between configuration source flags,
preventing simultaneous use of `--prebuilt` with file-based options, and
ensuring only one of `--tools-file`, `--tools-files`, or `--tools-folder` is
preventing simultaneous use of the file-based options, ensuring only one of
`--tools-file`, `--tools-files`, or `--tools-folder` is
used at a time.
{{< /notice >}}

View File

@@ -13,6 +13,12 @@ allowing developers to interact with and take action on databases.
See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for
details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
{{< notice tip >}}
You can now use `--prebuilt` alongside `--tools-file`, `--tools-files`, or
`--tools-folder` to combine prebuilt configs with custom tools.
See [Usage Examples](../reference/cli.md#examples).
{{< /notice >}}
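For example, a sketch serving a prebuilt configuration alongside a custom tools file (the file name is illustrative):
```bash
./toolbox --prebuilt alloydb-postgres --tools-file my_tools.yaml
```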
## AlloyDB Postgres
* `--prebuilt` value: `alloydb-postgres`
@@ -99,6 +105,8 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
* `BIGQUERY_LOCATION`: (Optional) The dataset location.
* `BIGQUERY_USE_CLIENT_OAUTH`: (Optional) If `true`, forwards the client's
OAuth access token for authentication. Defaults to `false`.
* `BIGQUERY_SCOPES`: (Optional) A comma-separated list of OAuth scopes to
use for authentication.
* **Permissions:**
* **BigQuery User** (`roles/bigquery.user`) to execute queries and view
metadata.
@@ -416,6 +424,8 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
* `run_dashboard`: Runs the queries associated with a dashboard.
* `make_dashboard`: Creates a new dashboard.
* `add_dashboard_element`: Adds a tile to a dashboard.
* `add_dashboard_filter`: Adds a filter to a dashboard.
* `generate_embed_url`: Generate an embed URL for content.
* `health_pulse`: Test the health of a Looker instance.
* `health_analyze`: Analyze the LookML usage of a Looker instance.
* `health_vacuum`: Suggest LookML elements that can be removed.

View File

@@ -0,0 +1,84 @@
---
title: "EmbeddingModels"
type: docs
weight: 2
description: >
EmbeddingModels represent services that transform text into vector embeddings for semantic search.
---
EmbeddingModels represent services that generate vector representations of text
data. In the MCP Toolbox, these models enable **Semantic Queries**,
allowing [Tools](../tools/) to automatically convert human-readable text into
numerical vectors before using them in a query.
This is primarily used in two scenarios:
- **Vector Ingestion**: Converting a text parameter into a vector string during
an `INSERT` operation.
- **Semantic Search**: Converting a natural language query into a vector to
perform similarity searches.
## Example
The following configuration defines an embedding model and applies it to
specific tool parameters.
{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your API keys into the configuration file.
{{< /notice >}}
### Step 1 - Define an Embedding Model
Define an embedding model in the `embeddingModels` section:
```yaml
embeddingModels:
gemini-model: # Name of the embedding model
kind: gemini
model: gemini-embedding-001
apiKey: ${GOOGLE_API_KEY}
dimension: 768
```
### Step 2 - Embed Tool Parameters
To use the defined embedding model, reference it in a parameter's
`embeddedBy` field. Only string-typed
parameters can be embedded:
```yaml
tools:
# Vector ingestion tool
insert_embedding:
kind: postgres-sql
source: my-pg-instance
statement: |
INSERT INTO documents (content, embedding)
VALUES ($1, $2);
parameters:
- name: content
type: string
- name: vector_string
type: string
description: The text to be vectorized and stored.
embeddedBy: gemini-model # refers to the name of a defined embedding model
# Semantic search tool
search_embedding:
kind: postgres-sql
source: my-pg-instance
statement: |
SELECT id, content, embedding <-> $1 AS distance
FROM documents
ORDER BY distance LIMIT 1
parameters:
- name: semantic_search_string
type: string
description: The search query that will be converted to a vector.
embeddedBy: gemini-model # refers to the name of a defined embedding model
```
## Kinds of Embedding Models

View File

@@ -0,0 +1,73 @@
---
title: "Gemini Embedding"
type: docs
weight: 1
description: >
Use Google's Gemini models to generate high-performance text embeddings for vector databases.
---
## About
Google Gemini provides state-of-the-art embedding models that convert text into
high-dimensional vectors.
### Authentication
Toolbox uses your [Application Default Credentials
(ADC)][adc] to authorize with the
Gemini API client.
Optionally, you can use an [API key][api-key], which you can obtain from
[Google AI Studio][ai-studio].
We recommend using an API key for testing and using application default
credentials for production.
[adc]: https://cloud.google.com/docs/authentication#adc
[api-key]: https://ai.google.dev/gemini-api/docs/api-key#api-keys
[ai-studio]: https://aistudio.google.com/app/apikey
## Behavior
### Automatic Vectorization
When a tool parameter is configured with `embeddedBy: <your-gemini-model-name>`,
the Toolbox intercepts the raw text input from the client and sends it to the
Gemini API. The resulting numerical array is then formatted before being passed
to your database source.
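As a minimal sketch of that flow (the tool, source, and table names are illustrative; `gemini-model` refers to the model defined in the example below):
```yaml
tools:
  search_documents:
    kind: postgres-sql
    source: my-pg-instance
    statement: |
      SELECT id, content FROM documents
      ORDER BY embedding <-> $1 LIMIT 5;
    parameters:
      - name: query_text
        type: string               # only string parameters can be embedded
        embeddedBy: gemini-model   # raw client text is vectorized before the query runs
```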
### Dimension Matching
The `dimension` field must match the expected size of your database column
(e.g., a `vector(768)` column in PostgreSQL). Only newer models (released since
2024) support this setting; you cannot set it when using the earlier model
(`models/embedding-001`). See the [available Gemini models][modellist] for more
information.
[modellist]:
https://docs.cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#supported-models
## Example
```yaml
embeddingModels:
gemini-model:
kind: gemini
model: gemini-embedding-001
apiKey: ${GOOGLE_API_KEY}
dimension: 768
```
{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}
## Reference
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|--------------------------------------------------------------|
| kind | string | true | Must be `gemini`. |
| model | string | true | The Gemini model ID to use (e.g., `gemini-embedding-001`). |
| apiKey | string | false | Your API Key from Google AI Studio. |
| dimension | integer | false | The number of dimensions in the output vector (e.g., `768`). |

View File

@@ -77,6 +77,9 @@ cluster][alloydb-free-trial].
- [`postgres-get-column-cardinality`](../tools/postgres/postgres-get-column-cardinality.md)
List cardinality of columns in a table in a PostgreSQL database.
- [`postgres-list-table-stats`](../tools/postgres/postgres-list-table-stats.md)
List statistics of a table in a PostgreSQL database.
- [`postgres-list-publication-tables`](../tools/postgres/postgres-list-publication-tables.md)
List publication tables in a PostgreSQL database.
@@ -91,7 +94,10 @@ cluster][alloydb-free-trial].
instance.
- [`postgres-list-roles`](../tools/postgres/postgres-list-roles.md)
Lists all the user-created roles in PostgreSQL database..
Lists all the user-created roles in a PostgreSQL database.
- [`postgres-list-stored-procedure`](../tools/postgres/postgres-list-stored-procedure.md)
Lists all the stored procedures in a PostgreSQL database.
### Pre-built Configurations

View File

@@ -94,6 +94,13 @@ intend to run. Common roles include `roles/bigquery.user` (which includes
permissions to run jobs and read data) or `roles/bigquery.dataViewer`.
Follow this [guide][set-adc] to set up your ADC.
If you are running on Google Compute Engine (GCE) or Google Kubernetes Engine
(GKE), you might need to explicitly set the access scopes for the service
account. While you can configure scopes when creating the VM or node pool, you
can also specify them in the source configuration using the `scopes` field.
Common scopes include `https://www.googleapis.com/auth/bigquery` or
`https://www.googleapis.com/auth/cloud-platform`.
### Authentication via User's OAuth Access Token
If the `useClientOAuth` parameter is set to `true`, Toolbox will instead use the
@@ -124,6 +131,9 @@ sources:
# - "my_dataset_1"
# - "other_project.my_dataset_2"
# impersonateServiceAccount: "service-account@project-id.iam.gserviceaccount.com" # Optional: Service account to impersonate
# scopes: # Optional: List of OAuth scopes to request.
# - "https://www.googleapis.com/auth/bigquery"
# - "https://www.googleapis.com/auth/drive.readonly"
```
Initialize a BigQuery source that uses the client's access token:
@@ -140,6 +150,9 @@ sources:
# - "my_dataset_1"
# - "other_project.my_dataset_2"
# impersonateServiceAccount: "service-account@project-id.iam.gserviceaccount.com" # Optional: Service account to impersonate
# scopes: # Optional: List of OAuth scopes to request.
# - "https://www.googleapis.com/auth/bigquery"
# - "https://www.googleapis.com/auth/drive.readonly"
```
## Reference
@@ -152,4 +165,5 @@ sources:
| writeMode | string | false | Controls the write behavior for tools. `allowed` (default): All queries are permitted. `blocked`: Only `SELECT` statements are allowed for the `bigquery-execute-sql` tool. `protected`: Enables session-based execution where all tools associated with this source instance share the same [BigQuery session](https://cloud.google.com/bigquery/docs/sessions-intro). This allows for stateful operations using temporary tables (e.g., `CREATE TEMP TABLE`). For `bigquery-execute-sql`, `SELECT` statements can be used on all tables, but write operations are restricted to the session's temporary dataset. For tools like `bigquery-sql`, `bigquery-forecast`, and `bigquery-analyze-contribution`, the `writeMode` restrictions do not apply, but they will operate within the shared session. **Note:** The `protected` mode cannot be used with `useClientOAuth: true`. It is also not recommended for multi-user server environments, as all users would share the same session. A session is terminated automatically after 24 hours of inactivity or after 7 days, whichever comes first. A new session is created on the next request, and any temporary data from the previous session will be lost. |
| allowedDatasets | []string | false | An optional list of dataset IDs that tools using this source are allowed to access. If provided, any tool operation attempting to access a dataset not in this list will be rejected. To enforce this, two types of operations are also disallowed: 1) Dataset-level operations (e.g., `CREATE SCHEMA`), and 2) operations where table access cannot be statically analyzed (e.g., `EXECUTE IMMEDIATE`, `CREATE PROCEDURE`). If a single dataset is provided, it will be treated as the default for prebuilt tools. |
| useClientOAuth | bool | false | If true, forwards the client's OAuth access token from the "Authorization" header to downstream queries. **Note:** This cannot be used with `writeMode: protected`. |
| scopes | []string | false | A list of OAuth 2.0 scopes to use for the credentials. If not provided, default scopes are used. |
| impersonateServiceAccount | string | false | Service account email to impersonate when making BigQuery and Dataplex API calls. The authenticated principal must have the `roles/iam.serviceAccountTokenCreator` role on the target service account. [Learn More](https://cloud.google.com/iam/docs/service-account-impersonation) |

View File

@@ -0,0 +1,40 @@
---
title: "Gemini Data Analytics"
type: docs
weight: 1
description: >
A "cloud-gemini-data-analytics" source provides a client for the Gemini Data Analytics API.
aliases:
- /resources/sources/cloud-gemini-data-analytics
---
## About
The `cloud-gemini-data-analytics` source provides a client to interact with the [Gemini Data Analytics API](https://docs.cloud.google.com/gemini/docs/conversational-analytics-api/reference/rest). This allows tools to send natural language queries to the API.
Authentication can be handled in two ways:
1. **Application Default Credentials (ADC) (Recommended):** By default, the source uses ADC to authenticate with the API. The Toolbox server will fetch the credentials from its running environment (server-side authentication). This is the recommended method.
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source expects the authentication token to be provided by the caller when making a request to the Toolbox server (typically via an HTTP Bearer token). The Toolbox server will then forward this token to the underlying Gemini Data Analytics API calls.
## Example
```yaml
sources:
my-gda-source:
kind: cloud-gemini-data-analytics
projectId: my-project-id
my-oauth-gda-source:
kind: cloud-gemini-data-analytics
projectId: my-project-id
useClientOAuth: true
```
## Reference
| **field** | **type** | **required** | **description** |
| -------------- | :------: | :----------: | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| kind | string | true | Must be "cloud-gemini-data-analytics". |
| projectId | string | true | The Google Cloud Project ID where the API is enabled. |
| useClientOAuth | boolean | false | If true, the source uses the token provided by the caller (forwarded to the API). Otherwise, it uses server-side Application Default Credentials (ADC). Defaults to `false`. |

View File

@@ -31,6 +31,9 @@ to a database by following these instructions][csql-mysql-quickstart].
- [`mysql-list-active-queries`](../tools/mysql/mysql-list-active-queries.md)
List active queries in Cloud SQL for MySQL.
- [`mysql-get-query-plan`](../tools/mysql/mysql-get-query-plan.md)
Provide information about how MySQL executes a SQL statement (EXPLAIN).
- [`mysql-list-tables`](../tools/mysql/mysql-list-tables.md)
List tables in a Cloud SQL for MySQL database.
@@ -88,13 +91,40 @@ mTLS.
[public-ip]: https://cloud.google.com/sql/docs/mysql/configure-ip
[conn-overview]: https://cloud.google.com/sql/docs/mysql/connect-overview
### Database User
### Authentication
Currently, this source only uses standard authentication. You will need to [create
a MySQL user][cloud-sql-users] to login to the database with.
This source supports both password-based authentication and IAM
authentication (using your [Application Default Credentials][adc]).
#### Standard Authentication
To connect using user/password, [create
a MySQL user][cloud-sql-users] and input your credentials in the `user` and
`password` fields.
```yaml
user: ${USER_NAME}
password: ${PASSWORD}
```
[cloud-sql-users]: https://cloud.google.com/sql/docs/mysql/create-manage-users
#### IAM Authentication
To connect using IAM authentication:
1. Prepare your database instance and user following this [guide][iam-guide].
2. You can choose one of two ways to log in:
- Specify your IAM email as the `user`.
- Leave your `user` field blank. Toolbox will fetch the [ADC][adc]
automatically and log in using the email associated with it.
3. Leave the `password` field blank.
[iam-guide]: https://cloud.google.com/sql/docs/mysql/iam-logins
[cloudsql-users]: https://cloud.google.com/sql/docs/mysql/create-manage-users
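As a sketch, an IAM-based configuration can omit both credential fields entirely (the `kind` and `project` field names here are assumptions based on the standard Cloud SQL source layout; see the reference table below for the remaining fields):
```yaml
sources:
  my-iam-mysql-source:
    kind: cloud-sql-mysql   # assumed kind name for this source
    project: my-project-id  # assumed field name
    region: us-central1
    instance: my-instance
    database: my_db
    # `user` and `password` are omitted: Toolbox falls back to IAM
    # authentication using the email associated with your ADC.
```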
## Example
```yaml
@@ -124,6 +154,6 @@ instead of hardcoding your secrets into the configuration file.
| region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). |
| instance | string | true | Name of the Cloud SQL instance within the cluster (e.g. "my-instance"). |
| database | string | true | Name of the MySQL database to connect to (e.g. "my_db"). |
| user | string | true | Name of the MySQL user to connect as (e.g. "my-pg-user"). |
| password | string | true | Password of the MySQL user (e.g. "my-password"). |
| user | string | false | Name of the MySQL user to connect as (e.g. "my-mysql-user"). Defaults to IAM auth using the [ADC][adc] email if unspecified. |
| password | string | false | Password of the MySQL user (e.g. "my-password"). Defaults to attempting IAM authentication if unspecified. |
| ipType | string | false | IP Type of the Cloud SQL instance, must be either `public`, `private`, or `psc`. Default: `public`. |

View File

@@ -58,6 +58,7 @@ to a database by following these instructions][csql-pg-quickstart].
- [`postgres-list-sequences`](../tools/postgres/postgres-list-sequences.md)
List sequences in a PostgreSQL database.
- [`postgres-long-running-transactions`](../tools/postgres/postgres-long-running-transactions.md)
List long running transactions in a PostgreSQL database.
@@ -73,6 +74,9 @@ to a database by following these instructions][csql-pg-quickstart].
- [`postgres-get-column-cardinality`](../tools/postgres/postgres-get-column-cardinality.md)
List cardinality of columns in a table in a PostgreSQL database.
- [`postgres-list-table-stats`](../tools/postgres/postgres-list-table-stats.md)
List statistics of a table in a PostgreSQL database.
- [`postgres-list-publication-tables`](../tools/postgres/postgres-list-publication-tables.md)
List publication tables in a PostgreSQL database.
@@ -87,7 +91,10 @@ to a database by following these instructions][csql-pg-quickstart].
instance.
- [`postgres-list-roles`](../tools/postgres/postgres-list-roles.md)
Lists all the user-created roles in PostgreSQL database..
Lists all the user-created roles in a PostgreSQL database.
- [`postgres-list-stored-procedure`](../tools/postgres/postgres-list-stored-procedure.md)
Lists all the stored procedures in a PostgreSQL database.
### Pre-built Configurations

View File

@@ -229,22 +229,38 @@ Finds resources that were created within, before, or after a given date or time.
### Aspect Search
To search for entries based on their attached aspects, use the following query syntax.
aspect:x Matches x as a substring of the full path to the aspect type of an aspect that is attached to the entry, in the format projectid.location.ASPECT_TYPE_ID
aspect=x Matches x as the full path to the aspect type of an aspect that is attached to the entry, in the format projectid.location.ASPECT_TYPE_ID
aspect:xOPERATORvalue
Searches for aspect field values. Matches x as a substring of the full path to the aspect type and field name of an aspect that is attached to the entry, in the format projectid.location.ASPECT_TYPE_ID.FIELD_NAME
`has:x`
Matches `x` as a substring of the full path to the aspect type of an aspect that is attached to the entry, in the format `projectid.location.ASPECT_TYPE_ID`
The list of supported {OPERATOR}s depends on the type of field in the aspect, as follows:
- String: = (exact match) and : (substring)
- All number types: =, :, <, >, <=, >=, =>, =<
- Enum: =
- Datetime: same as for numbers, but the values to compare are treated as datetimes instead of numbers
- Boolean: =
`has=x`
Matches `x` as the full path to the aspect type of an aspect that is attached to the entry, in the format `projectid.location.ASPECT_TYPE_ID`
Only top-level fields of the aspect are searchable. For example, all of the following queries match entries where the value of the is-enrolled field in the employee-info aspect type is true. Other entries that match on the substring are also returned.
- aspect:example-project.us-central1.employee-info.is-enrolled=true
- aspect:example-project.us-central1.employee=true
- aspect:employee=true
`xOPERATORvalue`
Searches for aspect field values. Matches x as a substring of the full path to the aspect type and field name of an aspect that is attached to the entry, in the format `projectid.location.ASPECT_TYPE_ID.FIELD_NAME`
The list of supported operators depends on the type of field in the aspect, as follows:
* **String**: `=` (exact match)
* **All number types**: `=`, `:`, `<`, `>`, `<=`, `>=`, `=>`, `=<`
* **Enum**: `=` (exact match only)
* **Datetime**: same as for numbers, but the values to compare are treated as datetimes instead of numbers
* **Boolean**: `=`
Only top-level fields of the aspect are searchable.
* Syntax for system aspect types:
* `ASPECT_TYPE_ID.FIELD_NAME`
* `dataplex-types.ASPECT_TYPE_ID.FIELD_NAME`
* `dataplex-types.LOCATION.ASPECT_TYPE_ID.FIELD_NAME`
For example, the following queries match entries where the value of the `type` field in the `bigquery-dataset` aspect is `default`:
* `bigquery-dataset.type=default`
* `dataplex-types.bigquery-dataset.type=default`
* `dataplex-types.global.bigquery-dataset.type=default`
* Syntax for custom aspect types:
* If the aspect is created in the global region: `PROJECT_ID.ASPECT_TYPE_ID.FIELD_NAME`
* If the aspect is created in a specific region: `PROJECT_ID.REGION.ASPECT_TYPE_ID.FIELD_NAME`
For example, the following queries match entries where the value of the `is-enrolled` field in the `employee-info` aspect is `true`.
* `example-project.us-central1.employee-info.is-enrolled=true`
* `example-project.employee-info.is-enrolled=true`
Example:
You can use the following filters:
@@ -258,6 +274,25 @@ Logical AND and logical OR are supported. For example, foo OR bar.
You can negate a predicate with a - (hyphen) or NOT prefix. For example, -name:foo returns resources with names that don't match the predicate foo.
Logical operators are case-sensitive. `OR` and `AND` are acceptable whereas `or` and `and` are not.
### Abbreviated syntax
An abbreviated search syntax is also available, using `|` (vertical bar) for `OR` operators and `,` (comma) for `AND` operators.
For example, to search for entries inside one of many projects using the `OR` operator, you can use the following abbreviated syntax:
`projectid:(id1|id2|id3|id4)`
The same search without using abbreviated syntax looks like the following:
`projectid:id1 OR projectid:id2 OR projectid:id3 OR projectid:id4`
To search for entries with matching column names, use the following:
* **AND**: `column:(name1,name2,name3)`
* **OR**: `column:(name1|name2|name3)`
This abbreviated syntax works for all qualified predicates except `label` in keyword search.
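For instance, a hypothetical query `projectid:(proj-a|proj-b) AND column:(user_id|order_id)` matches entries in either project that have a column named `user_id` or `order_id`.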
### Request
1. Always try to rewrite the prompt using search syntax.

View File

@@ -91,18 +91,17 @@ instead of hardcoding your secrets into the configuration file.
## Reference
| **field** | **type** | **required** | **description** |
|----------------------|:--------:|:------------:|-------------------------------------------------------------------------------------------|
| kind | string | true | Must be "looker". |
| base_url | string | true | The URL of your Looker server with no trailing /. |
| client_id | string | false | The client id assigned by Looker. |
| client_secret | string | false | The client secret assigned by Looker. |
| verify_ssl | string | false | Whether to check the ssl certificate of the server. |
| project | string | false | The project id to use in Google Cloud. |
| location | string | false | The location to use in Google Cloud. (default: us) |
| timeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, 120s is applied. |
| use_client_oauth | string | false | Use OAuth tokens instead of client_id and client_secret. (default: false) If a header |
| | | | name is provided, it will be used instead of "Authorization". |
| show_hidden_models | string | false | Show or hide hidden models. (default: true) |
| show_hidden_explores | string | false | Show or hide hidden explores. (default: true) |
| show_hidden_fields | string | false | Show or hide hidden fields. (default: true) |
| **field** | **type** | **required** | **description** |
|----------------------|:--------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "looker". |
| base_url | string | true | The URL of your Looker server with no trailing /. |
| client_id | string | false | The client id assigned by Looker. |
| client_secret | string | false | The client secret assigned by Looker. |
| verify_ssl | string | false | Whether to check the ssl certificate of the server. |
| project | string | false | The project id to use in Google Cloud. |
| location | string | false | The location to use in Google Cloud. (default: us) |
| timeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, 120s is applied. |
| use_client_oauth | string | false | Use OAuth tokens instead of client_id and client_secret. (default: false) If a header name is provided, it will be used instead of "Authorization". |
| show_hidden_models | string | false | Show or hide hidden models. (default: true) |
| show_hidden_explores | string | false | Show or hide hidden explores. (default: true) |
| show_hidden_fields | string | false | Show or hide hidden fields. (default: true) |

View File

@@ -25,6 +25,9 @@ reliability, performance, and ease of use.
- [`mysql-list-active-queries`](../tools/mysql/mysql-list-active-queries.md)
List active queries in MySQL.
- [`mysql-get-query-plan`](../tools/mysql/mysql-get-query-plan.md)
Provide information about how MySQL executes a SQL statement (EXPLAIN).
- [`mysql-list-tables`](../tools/mysql/mysql-list-tables.md)
List tables in a MySQL database.

View File

@@ -18,10 +18,10 @@ DW) database workloads.
## Available Tools
- [`oracle-sql`](../tools/oracle/oracle-sql.md)
Execute pre-defined prepared SQL queries in Oracle.
Execute pre-defined prepared SQL queries in Oracle.
- [`oracle-execute-sql`](../tools/oracle/oracle-execute-sql.md)
Run parameterized SQL queries in Oracle.
Run parameterized SQL queries in Oracle.
## Requirements
@@ -33,6 +33,25 @@ user][oracle-users] to log in to the database with the necessary permissions.
[oracle-users]:
https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/CREATE-USER.html
### Oracle Driver Requirement (Conditional)
The Oracle source offers two connection drivers:
1. **Pure Go Driver (`useOCI: false`, default):** Uses the `go-ora` library.
This driver is simpler and does not require any local Oracle software
installation, but it **lacks support for advanced features** like Oracle
Wallets or Kerberos authentication.
2. **OCI-Based Driver (`useOCI: true`):** Uses the `godror` library, which
provides access to **advanced Oracle features** like Digital Wallet support.
If you set `useOCI: true`, you **must** install the **Oracle Instant Client**
libraries on the machine where this tool runs.
You can download the Instant Client from the official Oracle website: [Oracle
Instant Client
Downloads](https://www.oracle.com/database/technologies/instant-client/downloads.html)
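A minimal sketch enabling the OCI-based driver (the connection details are illustrative):
```yaml
sources:
  my-oci-oracle:
    kind: oracle
    connectionString: "127.0.0.1:1521/XEPDB1"
    user: ${USER_NAME}
    password: ${PASSWORD}
    useOCI: true   # requires the Oracle Instant Client libraries on this machine
```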
## Connection Methods
You can configure the connection to your Oracle database using one of the
@@ -66,12 +85,15 @@ using a TNS (Transparent Network Substrate) alias.
containing it. This setting will override the `TNS_ADMIN` environment
variable.
## Example
## Examples
This example demonstrates the four connection methods you can choose from:
```yaml
sources:
my-oracle-source:
kind: oracle
# --- Choose one connection method ---
# 1. Host, Port, and Service Name
host: 127.0.0.1
@@ -88,6 +110,43 @@ sources:
user: ${USER_NAME}
password: ${PASSWORD}
# Optional: Set to true to use the OCI-based driver for advanced features (Requires Oracle Instant Client)
```
### Using an Oracle Wallet
Oracle Wallet lets you store the credentials used for a database connection. The wallet configuration differs depending on whether you are using the OCI-based driver.
#### Pure Go Driver (`useOCI: false`) - Oracle Wallet
The `go-ora` driver uses the `walletLocation` field to connect to a database secured with an Oracle Wallet, without requiring a standard username and password.
```yaml
sources:
pure-go-wallet:
kind: oracle
connectionString: "127.0.0.1:1521/XEPDB1"
user: ${USER_NAME}
password: ${PASSWORD}
# The TNS Alias is often required to connect to a service registered in tnsnames.ora
tnsAlias: "SECURE_DB_ALIAS"
walletLocation: "/path/to/my/wallet/directory"
```
#### OCI-Based Driver (`useOCI: true`) - Oracle Wallet
For the OCI-based driver, wallet authentication is triggered by setting `tnsAdmin` to the wallet directory and connecting via a `tnsAlias`.
```yaml
sources:
oci-wallet:
kind: oracle
connectionString: "127.0.0.1:1521/XEPDB1"
user: ${USER_NAME}
password: ${PASSWORD}
tnsAlias: "WALLET_DB_ALIAS"
tnsAdmin: "/opt/oracle/wallet" # Directory containing tnsnames.ora, sqlnet.ora, and wallet files
useOCI: true
```
{{< notice tip >}}
@@ -97,14 +156,15 @@ instead of hardcoding your secrets into the configuration file.
## Reference
| **field** | **type** | **required** | **description** |
|------------------|:--------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "oracle". |
| user | string | true | Name of the Oracle user to connect as (e.g. "my-oracle-user"). |
| password | string | true | Password of the Oracle user (e.g. "my-password"). |
| host | string | false | IP address or hostname to connect to (e.g. "127.0.0.1"). Required if not using `connectionString` or `tnsAlias`. |
| port | integer | false | Port to connect to (e.g. "1521"). Required if not using `connectionString` or `tnsAlias`. |
| serviceName | string | false | The Oracle service name of the database to connect to. Required if not using `connectionString` or `tnsAlias`. |
| connectionString | string | false | A direct connection string (e.g. "hostname:port/servicename"). Use as an alternative to `host`, `port`, and `serviceName`. |
| tnsAlias | string | false | A TNS alias from a `tnsnames.ora` file. Use as an alternative to `host`/`port` or `connectionString`. |
| tnsAdmin | string | false | Path to the directory containing the `tnsnames.ora` file. This overrides the `TNS_ADMIN` environment variable if it is set. |
| **field** | **type** | **required** | **description** |
|------------------|:--------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "oracle". |
| user | string | true | Name of the Oracle user to connect as (e.g. "my-oracle-user"). |
| password | string | true | Password of the Oracle user (e.g. "my-password"). |
| host | string | false | IP address or hostname to connect to (e.g. "127.0.0.1"). Required if not using `connectionString` or `tnsAlias`. |
| port | integer | false | Port to connect to (e.g. "1521"). Required if not using `connectionString` or `tnsAlias`. |
| serviceName | string | false | The Oracle service name of the database to connect to. Required if not using `connectionString` or `tnsAlias`. |
| connectionString | string | false | A direct connection string (e.g. "hostname:port/servicename"). Use as an alternative to `host`, `port`, and `serviceName`. |
| tnsAlias | string | false | A TNS alias from a `tnsnames.ora` file. Use as an alternative to `host`/`port` or `connectionString`. |
| tnsAdmin | string | false | Path to the directory containing the `tnsnames.ora` file. This overrides the `TNS_ADMIN` environment variable if it is set. |
| useOCI | bool | false | If true, uses the OCI-based driver (godror) which supports Oracle Wallet/Kerberos but requires the Oracle Instant Client libraries to be installed. Defaults to false (pure Go driver). |

View File

@@ -68,6 +68,9 @@ reputation for reliability, feature robustness, and performance.
- [`postgres-get-column-cardinality`](../tools/postgres/postgres-get-column-cardinality.md)
List cardinality of columns in a table in a PostgreSQL database.
- [`postgres-list-table-stats`](../tools/postgres/postgres-list-table-stats.md)
List statistics of a table in a PostgreSQL database.
- [`postgres-list-publication-tables`](../tools/postgres/postgres-list-publication-tables.md)
List publication tables in a PostgreSQL database.
@@ -82,7 +85,10 @@ reputation for reliability, feature robustness, and performance.
server.
- [`postgres-list-roles`](../tools/postgres/postgres-list-roles.md)
Lists all the user-created roles in PostgreSQL database..
Lists all the user-created roles in a PostgreSQL database.
- [`postgres-list-stored-procedure`](../tools/postgres/postgres-list-stored-procedure.md)
Lists all the stored procedures in a PostgreSQL database.
### Pre-built Configurations

View File

@@ -0,0 +1,7 @@
---
title: "Gemini Data Analytics"
type: docs
weight: 1
description: >
Tools for Gemini Data Analytics.
---

View File

@@ -0,0 +1,92 @@
---
title: "Gemini Data Analytics QueryData"
type: docs
weight: 1
description: >
A tool to convert natural language queries into SQL statements using the Gemini Data Analytics QueryData API.
aliases:
- /resources/tools/cloud-gemini-data-analytics-query
---
## About
The `cloud-gemini-data-analytics-query` tool allows you to send natural language questions to the Gemini Data Analytics API and receive structured responses containing SQL queries, natural language answers, and explanations. For details on defining data agent context for database data sources, see the official [documentation](https://docs.cloud.google.com/gemini/docs/conversational-analytics-api/data-agent-authored-context-databases).
## Example
```yaml
tools:
my-gda-query-tool:
kind: cloud-gemini-data-analytics-query
source: my-gda-source
description: "Use this tool to send natural language queries to the Gemini Data Analytics API and receive SQL, natural language answers, and explanations."
location: ${your_database_location}
context:
datasourceReferences:
cloudSqlReference:
databaseReference:
projectId: "${your_project_id}"
region: "${your_database_instance_region}"
instanceId: "${your_database_instance_id}"
databaseId: "${your_database_name}"
engine: "POSTGRESQL"
agentContextReference:
contextSetId: "${your_context_set_id}" # E.g. projects/${project_id}/locations/${context_set_location}/contextSets/${context_set_id}
generationOptions:
generateQueryResult: true
generateNaturalLanguageAnswer: true
generateExplanation: true
generateDisambiguationQuestion: true
```
### Usage Flow
When using this tool, a `prompt` parameter containing a natural language query is provided to the tool (typically by an agent). The tool then interacts with the Gemini Data Analytics API using the context defined in your configuration.
The structure of the response depends on the `generationOptions` configured in your tool definition (e.g., enabling `generateQueryResult` will include the SQL query results).
See the [Data Analytics API REST documentation](https://docs.cloud.google.com/gemini/docs/conversational-analytics-api/reference/rest) for details.
**Example Input Prompt:**
```text
How many accounts who have region in Prague are eligible for loans? A3 contains the data of region.
```
**Example API Response:**
```json
{
"generatedQuery": "SELECT COUNT(T1.account_id) FROM account AS T1 INNER JOIN loan AS T2 ON T1.account_id = T2.account_id INNER JOIN district AS T3 ON T1.district_id = T3.district_id WHERE T3.A3 = 'Prague'",
"intentExplanation": "I found a template that matches the user's question. The template asks about the number of accounts who have region in a given city and are eligible for loans. The question asks about the number of accounts who have region in Prague and are eligible for loans. The template's parameterized SQL is 'SELECT COUNT(T1.account_id) FROM account AS T1 INNER JOIN loan AS T2 ON T1.account_id = T2.account_id INNER JOIN district AS T3 ON T1.district_id = T3.district_id WHERE T3.A3 = ?'. I will replace the named parameter '?' with 'Prague'.",
"naturalLanguageAnswer": "There are 84 accounts from the Prague region that are eligible for loans.",
"queryResult": {
"columns": [
{
"type": "INT64"
}
],
"rows": [
{
"values": [
{
"value": "84"
}
]
}
],
"totalRowCount": "1"
}
}
```
## Reference
| **field** | **type** | **required** | **description** |
| ----------------- | :------: | :----------: | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| kind | string | true | Must be "cloud-gemini-data-analytics-query". |
| source | string | true | The name of the `cloud-gemini-data-analytics` source to use. |
| description | string | true | A description of the tool's purpose. |
| location | string | true | The Google Cloud location of the target database resource (e.g., "us-central1"). This is used to construct the parent resource name in the API call. |
| context | object | true | The context for the query, including datasource references. See [QueryDataContext](https://github.com/googleapis/googleapis/blob/b32495a713a68dd0dff90cf0b24021debfca048a/google/cloud/geminidataanalytics/v1beta/data_chat_service.proto#L156) for details. |
| generationOptions | object | false | Options for generating the response. See [GenerationOptions](https://github.com/googleapis/googleapis/blob/b32495a713a68dd0dff90cf0b24021debfca048a/google/cloud/geminidataanalytics/v1beta/data_chat_service.proto#L135) for details. |

View File

@@ -10,27 +10,18 @@ aliases:
## About
The `looker-add-dashboard-element` creates a dashboard element
in the given dashboard.
The `looker-add-dashboard-element` tool creates a new tile (element) within an existing Looker dashboard.
Tiles are added in the order this tool is called for a given `dashboard_id`.
CRITICAL ORDER OF OPERATIONS:
1. Create the dashboard using `make_dashboard`.
2. Add any dashboard-level filters using `add_dashboard_filter`.
3. Then, add elements (tiles) using this tool.
It's compatible with the following sources:
- [looker](../../sources/looker.md)
`looker-add-dashboard-element` takes eleven parameters:
1. the `model`
2. the `explore`
3. the `fields` list
4. an optional set of `filters`
5. an optional set of `pivots`
6. an optional set of `sorts`
7. an optional `limit`
8. an optional `tz`
9. an optional `vis_config`
10. the `title`
11. the `dashboard_id`
## Example
```yaml
@@ -39,24 +30,37 @@ tools:
kind: looker-add-dashboard-element
source: looker-source
description: |
add_dashboard_element Tool
This tool creates a new tile (element) within an existing Looker dashboard.
Tiles are added in the order this tool is called for a given `dashboard_id`.
This tool creates a new tile in a Looker dashboard using
the query parameters and the vis_config specified.
CRITICAL ORDER OF OPERATIONS:
1. Create the dashboard using `make_dashboard`.
2. Add any dashboard-level filters using `add_dashboard_filter`.
3. Then, add elements (tiles) using this tool.
Most of the parameters are the same as the query_url
tool. In addition, there is a title that may be provided.
The dashboard_id must be specified. That is obtained
from calling make_dashboard.
Required Parameters:
- dashboard_id: The ID of the target dashboard, obtained from `make_dashboard`.
- model_name, explore_name, fields: These query parameters are inherited
from the `query` tool and are required to define the data for the tile.
This tool can be called many times for one dashboard_id
and the resulting tiles will be added in order.
Optional Parameters:
- title: An optional title for the dashboard tile.
- pivots, filters, sorts, limit, query_timezone: These query parameters are
inherited from the `query` tool and can be used to customize the tile's query.
- vis_config: A JSON object defining the visualization settings for this tile.
The structure and options are the same as for the `query_url` tool's `vis_config`.
Connecting to Dashboard Filters:
A dashboard element can be connected to one or more dashboard filters (created with
`add_dashboard_filter`). To do this, specify the `name` of the dashboard filter
and the `field` from the element's query that the filter should apply to.
The format for specifying the field is `view_name.field_name`.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| kind | string | true | Must be "looker-add-dashboard-element" |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
|:------------|:--------:|:------------:|----------------------------------------------------|
| kind | string | true | Must be "looker-add-dashboard-element". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -0,0 +1,75 @@
---
title: "looker-add-dashboard-filter"
type: docs
weight: 1
description: >
The "looker-add-dashboard-filter" tool adds a filter to a specified dashboard.
aliases:
- /resources/tools/looker-add-dashboard-filter
---
## About
The `looker-add-dashboard-filter` tool adds a filter to a specified Looker dashboard.
CRITICAL ORDER OF OPERATIONS:
1. Create a dashboard using `make_dashboard`.
2. Add all desired filters using this tool (`add_dashboard_filter`).
3. Finally, add dashboard elements (tiles) using `add_dashboard_element`.
It's compatible with the following sources:
- [looker](../../sources/looker.md)
## Parameters
| **parameter** | **type** | **required** | **default** | **description** |
|:----------------------|:--------:|:-----------------:|:--------------:|-------------------------------------------------------------------------------------------------------------------------------|
| dashboard_id | string | true | none | The ID of the dashboard to add the filter to, obtained from `make_dashboard`. |
| name | string | true | none | A unique internal identifier for the filter. This name is used later in `add_dashboard_element` to bind tiles to this filter. |
| title | string | true | none | The label displayed to users in the Looker UI. |
| filter_type           | string   | true              | `field_filter` | The type of filter. Can be `date_filter`, `number_filter`, `string_filter`, or `field_filter`.                                  |
| default_value | string | false | none | The initial value for the filter. |
| model | string | if `field_filter` | none | The name of the LookML model, obtained from `get_models`. |
| explore | string | if `field_filter` | none | The name of the explore within the model, obtained from `get_explores`. |
| dimension | string | if `field_filter` | none | The name of the field (e.g., `view_name.field_name`) to base the filter on, obtained from `get_dimensions`. |
| allow_multiple_values | boolean  | false             | true           | Whether the dashboard filter should allow multiple values.                                                                      |
| required              | boolean  | false             | false          | Whether the filter is required to run the dashboard.                                                                            |
## Example
```yaml
tools:
add_dashboard_filter:
kind: looker-add-dashboard-filter
source: looker-source
description: |
This tool adds a filter to a Looker dashboard.
CRITICAL ORDER OF OPERATIONS:
1. Create a dashboard using `make_dashboard`.
2. Add all desired filters using this tool (`add_dashboard_filter`).
3. Finally, add dashboard elements (tiles) using `add_dashboard_element`.
Parameters:
- dashboard_id (required): The ID from `make_dashboard`.
- name (required): A unique internal identifier for the filter. You will use this `name` later in `add_dashboard_element` to bind tiles to this filter.
- title (required): The label displayed to users in the UI.
- filter_type (required): One of `date_filter`, `number_filter`, `string_filter`, or `field_filter`.
- default_value (optional): The initial value for the filter.
Field Filters (`filter_type: field_filter`):
If creating a field filter, you must also provide:
- model
- explore
- dimension
The filter will inherit suggestions and type information from this LookML field.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| kind | string | true | Must be "looker-add-dashboard-filter". |
| source | string | true | Name of the source the SQL should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -34,9 +34,10 @@ tools:
kind: looker-conversational-analytics
source: looker-source
description: |
Use this tool to perform data analysis, get insights,
or answer complex questions about the contents of specific
Looker explores.
Use this tool to ask questions about your data using the Looker Conversational
Analytics API. You must provide a natural language query and a list of
1 to 5 model and explore combinations (e.g. [{'model': 'the_model', 'explore': 'the_explore'}]).
Use the 'get_models' and 'get_explores' tools to discover available models and explores.
```
## Reference

View File

@@ -27,13 +27,18 @@ tools:
kind: looker-create-project-file
source: looker-source
description: |
create_project_file Tool
This tool creates a new LookML file within a specified project, populating
it with the provided content.
Given a project_id and a file path within the project, as well as the content
of a LookML file, this tool will create a new file within the project.
Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first.
This tool must be called after the dev_mode tool has changed the session to
dev mode.
Parameters:
- project_id (required): The unique ID of the LookML project.
- file_path (required): The desired path and filename for the new file within the project.
- content (required): The full LookML content to write into the new file.
Output:
A confirmation message upon successful file creation.
```
## Reference

View File

@@ -26,13 +26,17 @@ tools:
kind: looker-delete-project-file
source: looker-source
description: |
delete_project_file Tool
This tool permanently deletes a specified LookML file from within a project.
Use with caution, as this action cannot be undone through the API.
Given a project_id and a file path within the project, this tool will delete
the file from the project.
Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first.
This tool must be called after the dev_mode tool has changed the session to
dev mode.
Parameters:
- project_id (required): The unique ID of the LookML project.
- file_path (required): The exact path to the LookML file to delete within the project.
Output:
A confirmation message upon successful file deletion.
```
## Reference

View File

@@ -27,10 +27,13 @@ tools:
kind: looker-dev-mode
source: looker-source
description: |
dev_mode Tool
This tool allows toggling the Looker IDE session between Development Mode and Production Mode.
Development Mode enables making and testing changes to LookML projects.
Passing true to this tool switches the session to dev mode. Passing false to this tool switches the
session to production mode.
Parameters:
- enable (required): A boolean value.
- `true`: Switches the current session to Development Mode.
- `false`: Switches the current session to Production Mode.
```
## Reference

View File

@@ -36,11 +36,17 @@ tools:
kind: looker-generate-embed-url
source: looker-source
description: |
generate_embed_url Tool
This tool generates a signed, private embed URL for specific Looker content,
allowing users to access it directly.
This tool generates an embeddable URL for Looker content.
You need to provide the type of content (e.g., 'dashboards', 'looks', 'query-visualization')
and the ID of the content.
Parameters:
- type (required): The type of content to embed. Common values include:
- `dashboards`
- `looks`
- `explore`
- id (required): The unique identifier for the content.
- For dashboards and looks, use the numeric ID (e.g., "123").
- For explores, use the format "model_name/explore_name".
```
## Reference

View File

@@ -26,10 +26,16 @@ tools:
kind: looker-get-connection-databases
source: looker-source
description: |
get_connection_databases Tool
This tool retrieves a list of databases available through a specified Looker connection.
This is only applicable for connections that support multiple databases.
Use `get_connections` to check if a connection supports multiple databases.
This tool will list the databases available from a connection if the connection
supports multiple databases.
Parameters:
- connection_name (required): The name of the database connection, obtained from `get_connections`.
Output:
A JSON array of strings, where each string is the name of an available database.
If the connection does not support multiple databases, an empty list or an error will be returned.
```
## Reference

View File

@@ -26,10 +26,16 @@ tools:
kind: looker-get-connection-schemas
source: looker-source
description: |
get_connection_schemas Tool
This tool retrieves a list of database schemas available through a specified
Looker connection.
This tool will list the schemas available from a connection, filtered by
an optional database name.
Parameters:
- connection_name (required): The name of the database connection, obtained from `get_connections`.
- database (optional): An optional database name to filter the schemas.
Only applicable for connections that support multiple databases.
Output:
A JSON array of strings, where each string is the name of an available schema.
```
## Reference

View File

@@ -26,11 +26,20 @@ tools:
kind: looker-get-connection-table-columns
source: looker-source
description: |
get_connection_table_columns Tool
This tool retrieves a list of columns for one or more specified tables within a
given database schema and connection.
This tool will list the columns available from a connection, for all the tables
given in a comma separated list of table names, filtered by the
schema name and optional database name.
Parameters:
- connection_name (required): The name of the database connection, obtained from `get_connections`.
- schema (required): The name of the schema where the tables reside, obtained from `get_connection_schemas`.
- tables (required): A comma-separated string of table names for which to retrieve columns
(e.g., "users,orders,products"), obtained from `get_connection_tables`.
- database (optional): The name of the database to filter by. Only applicable for connections
that support multiple databases (check with `get_connections`).
Output:
A JSON array of objects, where each object represents a column and contains details
such as `table_name`, `column_name`, `data_type`, and `is_nullable`.
```
## Reference

View File

@@ -27,10 +27,17 @@ tools:
kind: looker-get-connection-tables
source: looker-source
description: |
This tool retrieves a list of tables available within a specified database schema
through a Looker connection.
Parameters:
- connection_name (required): The name of the database connection, obtained from `get_connections`.
- schema (required): The name of the schema to list tables from, obtained from `get_connection_schemas`.
- database (optional): The name of the database to filter by. Only applicable for connections
that support multiple databases (check with `get_connections`).
Output:
A JSON array of strings, where each string is the name of an available table.
```
## Reference

View File

@@ -26,11 +26,18 @@ tools:
kind: looker-get-connections
source: looker-source
description: |
This tool retrieves a list of all database connections configured in the Looker system.
Parameters:
This tool takes no parameters.
Output:
A JSON array of objects, each representing a database connection and including details such as:
- `name`: The connection's unique identifier.
- `dialect`: The database dialect (e.g., "mysql", "postgresql", "bigquery").
- `default_schema`: The default schema for the connection.
- `database`: The associated database name (if applicable).
- `supports_multiple_databases`: A boolean indicating if the connection can access multiple databases.
```
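A hypothetical entry in the returned array might look like this (the keys match the list above; all values are invented):
```yaml
# Hypothetical connection entry (values invented):
- name: "ecommerce_db"
  dialect: "bigquery"
  default_schema: "analytics"
  database: "prod"
  supports_multiple_databases: false
```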
## Reference

View File

@@ -29,25 +29,29 @@ default to 100 and 0.
```yaml
tools:
get_dashboards:
kind: looker-get-dashboards
source: looker-source
description: |
This tool searches for saved dashboards in a Looker instance. It returns a list of JSON objects, each representing a dashboard.
Search Parameters:
- title (optional): Filter by dashboard title (supports wildcards).
- folder_id (optional): Filter by the ID of the folder where the dashboard is saved.
- user_id (optional): Filter by the ID of the user who created the dashboard.
- description (optional): Filter by description content (supports wildcards).
- id (optional): Filter by specific dashboard ID.
- limit (optional): Maximum number of results to return. Defaults to a system limit.
- offset (optional): Starting point for pagination.
String Search Behavior:
- Case-insensitive matching.
- Supports SQL LIKE pattern match wildcards:
- `%`: Matches any sequence of zero or more characters. (e.g., `"finan%"` matches "financial", "finance")
- `_`: Matches any single character. (e.g., `"s_les"` matches "sales")
- Special expressions for null checks:
- `"IS NULL"`: Matches dashboards where the field is null.
- `"NOT NULL"`: Excludes dashboards where the field is null.
```
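As a sketch of the wildcard and pagination behavior, a hypothetical search might pass parameters like these (all values invented):
```yaml
# Hypothetical search parameters (values invented):
title: "sales%"   # case-insensitive; matches "Sales Overview" and "sales pipeline"
folder_id: "42"
limit: 25
offset: 0
```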
## Reference

View File

@@ -28,16 +28,20 @@ tools:
kind: looker-get-dimensions
source: looker-source
description: |
This tool retrieves a list of dimensions defined within a specific Looker explore.
Dimensions are non-aggregatable attributes or characteristics of your data
(e.g., product name, order date, customer city) that can be used for grouping,
filtering, or segmenting query results.
Parameters:
- model_name (required): The name of the LookML model, obtained from `get_models`.
- explore_name (required): The name of the explore within the model, obtained from `get_explores`.
Output Details:
- If a dimension includes a `suggestions` field, its contents are valid values
that can be used directly as filters for that dimension.
- If a `suggest_explore` and `suggest_dimension` are provided, you can query
that specified explore and dimension to retrieve a list of valid filter values.
```
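For illustration, a returned dimension might carry the suggestion fields described above; the field names `suggestions`, `suggest_explore`, and `suggest_dimension` come from the description, while the dimension names and values are invented:
```yaml
# Hypothetical dimension entries (names and values invented):
- name: "products.category"
  suggestions: ["Apparel", "Electronics", "Home"]   # usable directly as filter values
- name: "orders.status"
  suggest_explore: "order_status"           # query this explore...
  suggest_dimension: "order_status.status"  # ...and this dimension for valid filter values
```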

View File

@@ -40,10 +40,13 @@ tools:
kind: looker-get-explores
source: looker-source
description: |
This tool retrieves a list of explores defined within a specific LookML model.
Explores represent a curated view of your data, typically joining several
tables together to allow for focused analysis on a particular subject area.
The output provides details like the explore's `name` and `label`.
Parameters:
- model_name (required): The name of the LookML model, obtained from `get_models`.
```
## Reference

View File

@@ -24,15 +24,22 @@ It's compatible with the following sources:
```yaml
tools:
get_filters:
kind: looker-get-filters
source: looker-source
description: |
This tool retrieves a list of "filter-only fields" defined within a specific
Looker explore. These are special fields defined in LookML specifically to
create user-facing filter controls that do not directly affect the `GROUP BY`
clause of the SQL query. They are often used in conjunction with liquid templating
to create dynamic queries.
Note: Regular dimensions and measures can also be used as filters in a query.
This tool *only* returns fields explicitly defined as `filter:` in LookML.
Parameters:
- model_name (required): The name of the LookML model, obtained from `get_models`.
- explore_name (required): The name of the explore within the model, obtained from `get_explores`.
```
The response is a JSON array with the following elements:

View File

@@ -34,21 +34,26 @@ tools:
kind: looker-get-looks
source: looker-source
description: |
This tool searches for saved Looks (pre-defined queries and visualizations)
in a Looker instance. It returns a list of JSON objects, each representing a Look.
Search Parameters:
- title (optional): Filter by Look title (supports wildcards).
- folder_id (optional): Filter by the ID of the folder where the Look is saved.
- user_id (optional): Filter by the ID of the user who created the Look.
- description (optional): Filter by description content (supports wildcards).
- id (optional): Filter by specific Look ID.
- limit (optional): Maximum number of results to return. Defaults to a system limit.
- offset (optional): Starting point for pagination.
String Search Behavior:
- Case-insensitive matching.
- Supports SQL LIKE pattern match wildcards:
- `%`: Matches any sequence of zero or more characters. (e.g., `"dan%"` matches "danger", "Danzig")
- `_`: Matches any single character. (e.g., `"D_m%"` matches "Damage", "dump")
- Special expressions for null checks:
- `"IS NULL"`: Matches Looks where the field is null.
- `"NOT NULL"`: Excludes Looks where the field is null.
```
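As a sketch of the special null expressions, a hypothetical search might combine them with a wildcard like so (semantics assumed from the description above; values invented):
```yaml
# Hypothetical search parameters (values invented):
title: "D_m%"             # matches "Damage", "dump"
description: "NOT NULL"   # only Looks that have a description
```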
## Reference

View File

@@ -28,16 +28,19 @@ tools:
kind: looker-get-measures
source: looker-source
description: |
This tool retrieves a list of measures defined within a specific Looker explore.
Measures are aggregatable metrics (e.g., total sales, average price, count of users)
that are used for calculations and quantitative analysis in your queries.
Parameters:
- model_name (required): The name of the LookML model, obtained from `get_models`.
- explore_name (required): The name of the explore within the model, obtained from `get_explores`.
Output Details:
- If a measure includes a `suggestions` field, its contents are valid values
that can be used directly as filters for that measure.
- If a `suggest_explore` and `suggest_dimension` are provided, you can query
that specified explore and dimension to retrieve a list of valid filter values.
```

View File

@@ -26,9 +26,12 @@ tools:
kind: looker-get-models
source: looker-source
description: |
This tool retrieves a list of available LookML models in the Looker instance.
LookML models define the data structure and relationships that users can query.
The output includes details like the model's `name` and `label`, which are
essential for subsequent calls to tools like `get_explores` or `query`.
This tool takes no parameters.
```
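A hypothetical response might look like the following (model names invented):
```yaml
# Hypothetical output (names invented):
- name: "thelook"
  label: "The Look"
- name: "ecommerce"
  label: "E-Commerce"
```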
## Reference

View File

@@ -28,11 +28,15 @@ tools:
kind: looker-get-parameters
source: looker-source
description: |
This tool retrieves a list of parameters defined within a specific Looker explore.
LookML parameters are dynamic input fields that allow users to influence query
behavior without directly modifying the underlying LookML. They are often used
with `liquid` templating to create flexible dashboards and reports, enabling
users to choose dimensions, measures, or other query components at runtime.
Parameters:
- model_name (required): The name of the LookML model, obtained from `get_models`.
- explore_name (required): The name of the explore within the model, obtained from `get_explores`.
```
The response is a JSON array with the following elements:

View File

@@ -26,10 +26,15 @@ tools:
kind: looker-get-project-file
source: looker-source
description: |
This tool retrieves the raw content of a specific LookML file from within a project.
Parameters:
- project_id (required): The unique ID of the LookML project, obtained from `get_projects`.
- file_path (required): The path to the LookML file within the project,
typically obtained from `get_project_files`.
Output:
The raw text content of the specified LookML file.
```
## Reference

View File

@@ -26,10 +26,15 @@ tools:
kind: looker-get-project-files
source: looker-source
description: |
This tool retrieves a list of all LookML files within a specified project,
providing details about each file.
Parameters:
- project_id (required): The unique ID of the LookML project, obtained from `get_projects`.
Output:
A JSON array of objects, each representing a LookML file and containing
details such as `path`, `id`, `type`, and `git_status`.
```
## Reference

View File

@@ -26,10 +26,16 @@ tools:
kind: looker-get-projects
source: looker-source
description: |
This tool retrieves a list of all LookML projects available on the Looker instance.
It is useful for identifying projects before performing actions like retrieving
project files or making modifications.
Parameters:
This tool takes no parameters.
Output:
A JSON array of objects, each containing the `project_id` and `project_name`
for a LookML project.
```
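A hypothetical response might look like this (all values invented):
```yaml
# Hypothetical output (values invented):
- project_id: "thelook"
  project_name: "The Look"
- project_id: "marketing"
  project_name: "Marketing Analytics"
```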
## Reference

View File

@@ -42,17 +42,18 @@ tools:
kind: looker-health-analyze
source: looker-source
description: |
This tool calculates the usage statistics for Looker projects, models, and explores.
Parameters:
- action (required): The type of resource to analyze. Can be `"projects"`, `"models"`, or `"explores"`.
- project (optional): The specific project ID to analyze.
- model (optional): The specific model name to analyze. Requires `project` if used without `explore`.
- explore (optional): The specific explore name to analyze. Requires `model` if used.
- timeframe (optional): The lookback period in days for usage data. Defaults to `90` days.
- min_queries (optional): The minimum number of queries for a resource to be considered active. Defaults to `1`.
Output:
The result is a JSON object containing usage metrics for the specified resources.
```
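As a sketch, a hypothetical invocation narrowing the analysis to one model over a shorter window might pass (all values invented):
```yaml
# Hypothetical invocation parameters (values invented):
action: "explores"
project: "thelook"
model: "ecommerce"
timeframe: 30    # look back 30 days instead of the default 90
min_queries: 5   # require at least 5 queries to count as active
```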
## Reference

View File

@@ -49,20 +49,22 @@ tools:
kind: looker-health-pulse
source: looker-source
description: |
This tool performs various health checks on a Looker instance.
Parameters:
- action (required): Specifies the type of health check to perform.
Choose one of the following:
- `check_db_connections`: Verifies database connectivity.
- `check_dashboard_performance`: Assesses dashboard loading performance.
- `check_dashboard_errors`: Identifies errors within dashboards.
- `check_explore_performance`: Evaluates explore query performance.
- `check_schedule_failures`: Reports on failed scheduled deliveries.
- `check_legacy_features`: Checks for the usage of legacy features.
Note on `check_legacy_features`:
This action is exclusively available in Looker Core instances. If invoked
on a non-Looker Core instance, it will return a notice rather than an error.
This notice should be considered normal behavior and not an indication of an issue.
```
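A minimal hypothetical invocation selects a single check:
```yaml
# Hypothetical invocation parameter:
action: "check_db_connections"
# Any of the other five actions listed above can be substituted here.
```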
## Reference

View File

@@ -39,20 +39,19 @@ tools:
kind: looker-health-vacuum
source: looker-source
description: |
This tool identifies and suggests LookML models or explores that can be
safely removed due to inactivity or low usage.
Parameters:
- action (required): The type of resource to analyze for removal candidates. Can be `"models"` or `"explores"`.
- project (optional): The specific project ID to consider.
- model (optional): The specific model name to consider. Requires `project` if used without `explore`.
- explore (optional): The specific explore name to consider. Requires `model` if used.
- timeframe (optional): The lookback period in days to assess usage. Defaults to `90` days.
- min_queries (optional): The minimum number of queries for a resource to be considered active. Defaults to `1`.
Output:
A JSON array of objects, each representing a model or explore that is a candidate for deletion due to low usage.
```
| **field** | **type** | **required** | **description** |

View File

@@ -18,9 +18,11 @@ It's compatible with the following sources:
- [looker](../../sources/looker.md)
`looker-make-dashboard` takes three parameters:
1. the `title`
2. the `description`
3. an optional `folder` id. If not provided, the user's default folder will be used.
## Example
@@ -30,18 +32,19 @@ tools:
kind: looker-make-dashboard
source: looker-source
description: |
This tool creates a new, empty dashboard in Looker. Dashboards are stored
in the user's personal folder, and the dashboard name must be unique.
After creation, use `add_dashboard_filter` to add filters and
`add_dashboard_element` to add content tiles.
Required Parameters:
- title (required): A unique title for the new dashboard.
- description (required): A brief description of the dashboard's purpose.
Output:
A JSON object containing a link (`url`) to the newly created dashboard and
its unique `id`. This `dashboard_id` is crucial for subsequent calls to
`add_dashboard_filter` and `add_dashboard_element`.
```
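For illustration, a hypothetical call and the shape of its output might look like the following; the title, ID, and URL are all invented:
```yaml
# Hypothetical input (values invented):
title: "Weekly Sales Review"
description: "Tracks weekly revenue and order volume"
# Hypothetical output shape (values invented):
# id: "456"
# url: "https://looker.example.com/dashboards/456"
```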
## Reference

View File

@@ -18,7 +18,7 @@ It's compatible with the following sources:
- [looker](../../sources/looker.md)
`looker-make-look` takes twelve parameters:
1. the `model`
2. the `explore`
@@ -31,6 +31,7 @@ It's compatible with the following sources:
9. an optional `vis_config`
10. the `title`
11. an optional `description`
12. an optional `folder` id. If not provided, the user's default folder will be used.
## Example
@@ -40,20 +41,24 @@ tools:
kind: looker-make-look
source: looker-source
description: |
This tool creates a new Look (saved query with visualization) in Looker.
The Look will be saved in the user's personal folder, and its name must be unique.
Required Parameters:
- title: A unique title for the new Look.
- description: A brief description of the Look's purpose.
- model_name: The name of the LookML model (from `get_models`).
- explore_name: The name of the explore (from `get_explores`).
- fields: A list of field names (dimensions, measures, filters, or parameters) to include in the query.
Optional Parameters:
- pivots, filters, sorts, limit, query_timezone: These parameters are identical
to those described for the `query` tool.
- vis_config: A JSON object defining the visualization settings for the Look.
The structure and options are the same as for the `query_url` tool's `vis_config`.
Output:
A JSON object containing a link (`url`) to the newly created Look, along with its `id` and `slug`.
```
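As a sketch of a full call, a hypothetical Look definition might look like this (all names and values are invented, and `looker_line` is an assumed chart type):
```yaml
# Hypothetical parameters for creating a Look (all values invented):
title: "Weekly Orders"
description: "Order volume by week for the last 90 days"
model_name: "thelook"
explore_name: "order_items"
fields: ["order_items.created_week", "order_items.count"]
filters:
  order_items.created_date: "90 days"
sorts: ["order_items.created_week desc"]
vis_config:
  type: "looker_line"   # assumed chart type name
```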
## Reference

View File

@@ -41,38 +41,17 @@ tools:
kind: looker-query-sql
source: looker-source
description: |
This tool generates the underlying SQL query that Looker would execute
against the database for a given set of parameters. It is useful for
understanding how Looker translates a request into SQL.
Parameters:
All parameters for this tool are identical to those of the `query` tool.
This includes `model_name`, `explore_name`, `fields` (required),
and optional parameters like `pivots`, `filters`, `sorts`, `limit`, and `query_timezone`.
Output:
The result of this tool is the raw SQL text.
```
## Reference

View File

@@ -37,17 +37,21 @@ tools:
kind: looker-query-url
source: looker-source
description: |
This tool generates a shareable URL for a Looker query, allowing users to
explore the query further within the Looker UI. It returns the generated URL,
along with the `query_id` and `slug`.
Parameters:
All query parameters (e.g., `model_name`, `explore_name`, `fields`, `pivots`,
`filters`, `sorts`, `limit`, `query_timezone`) are the same as the `query` tool.
Additionally, it accepts an optional `vis_config` parameter:
- vis_config (optional): A JSON object that controls the default visualization
settings for the generated query.
vis_config Details:
The `vis_config` object supports a wide range of properties for various chart types.
Here are some notes on making visualizations.
### Cartesian Charts (Area, Bar, Column, Line, Scatter)

View File

@@ -41,38 +41,24 @@ tools:
kind: looker-query
source: looker-source
description: |
This tool runs a query against a LookML model and returns the results in JSON format.
Required Parameters:
- model_name: The name of the LookML model (from `get_models`).
- explore_name: The name of the explore (from `get_explores`).
- fields: A list of field names (dimensions, measures, filters, or parameters) to include in the query.
Optional Parameters:
- pivots: A list of fields to pivot the results by. These fields must also be included in the `fields` list.
- filters: A map of filter expressions, e.g., `{"view.field": "value", "view.date": "7 days"}`.
- Do not quote field names.
- Use `not null` instead of `-NULL`.
- If a value contains a comma, enclose it in single quotes (e.g., "'New York, NY'").
- sorts: A list of fields to sort by, optionally including direction (e.g., `["view.field desc"]`).
- limit: Row limit (default 500). Use "-1" for unlimited.
- query_timezone: specific timezone for the query (e.g. `America/Los_Angeles`).
Note: Use `get_dimensions`, `get_measures`, `get_filters`, and `get_parameters` to find valid fields.
The result of the query tool is JSON.
```
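To tie the filter and sort rules together, a hypothetical query payload might look like the sketch below (the model, explore, and field names are invented):
```yaml
# Hypothetical query parameters (names and values invented):
model_name: "thelook"
explore_name: "order_items"
fields: ["users.city", "order_items.count"]
filters:
  users.city: "'New York, NY'"        # value contains a comma, so single-quote it
  order_items.created_date: "7 days"
  users.state: "not null"             # instead of -NULL
sorts: ["order_items.count desc"]
limit: "500"
query_timezone: "America/Los_Angeles"
```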

View File

@@ -27,11 +27,15 @@ tools:
kind: looker-run-dashboard
source: looker-source
description: |
This tool executes the queries associated with each tile in a specified dashboard
and returns the aggregated data in a JSON structure.
Parameters:
- dashboard_id (required): The unique identifier of the dashboard to run,
typically obtained from the `get_dashboards` tool.
Output:
The data from all dashboard tiles is returned as a JSON object.
```
## Reference

View File

@@ -27,11 +27,15 @@ tools:
kind: looker-run-look
source: looker-source
description: |
This tool executes the query associated with a saved Look and
returns the resulting data in a JSON structure.
Parameters:
- look_id (required): The unique identifier of the Look to run,
typically obtained from the `get_looks` tool.
Output:
The query results are returned as a JSON object.
```
## Reference

View File

@@ -27,13 +27,17 @@ tools:
kind: looker-update-project-file
source: looker-source
description: |
This tool modifies the content of an existing LookML file within a specified project.
Prerequisite: The Looker session must be in Development Mode. Use `dev_mode: true` first.
Parameters:
- project_id (required): The unique ID of the LookML project.
- file_path (required): The exact path to the LookML file to modify within the project.
- content (required): The new, complete LookML content to overwrite the existing file.
Output:
A confirmation message upon successful file modification.
```
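For illustration, a hypothetical call might overwrite one view file (the project, path, and LookML content are invented):
```yaml
# Hypothetical parameters (values invented):
project_id: "thelook"
file_path: "views/orders.view.lkml"
content: |
  view: orders {
    sql_table_name: analytics.orders ;;
  }
```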
## Reference

Some files were not shown because too many files have changed in this diff.