Compare commits


2 Commits

Author | SHA1 | Message | Date
Yuan Teoh | 89c85729d1 | resolve comments | 2025-11-24 09:59:01 -08:00
Yuan Teoh | 93f638650d | docs: update configure instructions | 2025-11-24 09:59:01 -08:00
519 changed files with 12348 additions and 30266 deletions

View File

@@ -305,4 +305,4 @@ substitutions:
_AR_HOSTNAME: ${_REGION}-docker.pkg.dev
_AR_REPO_NAME: toolbox-dev
_BUCKET_NAME: genai-toolbox-dev
_DOCKER_URI: ${_AR_HOSTNAME}/${PROJECT_ID}/${_AR_REPO_NAME}/toolbox
_DOCKER_URI: ${_AR_HOSTNAME}/${PROJECT_ID}/${_AR_REPO_NAME}/toolbox

View File

@@ -212,26 +212,6 @@ steps:
bigquery \
bigquery
- id: "cloud-gda"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "CLOUD_GDA_PROJECT=$PROJECT_ID"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Cloud Gemini Data Analytics" \
cloudgda \
cloudgda
- id: "dataplex"
name: golang:1
waitFor: ["compile-test-binary"]
@@ -293,7 +273,7 @@ steps:
.ci/test_with_coverage.sh \
"Cloud Healthcare API" \
cloudhealthcare \
cloudhealthcare || echo "Integration tests failed."
cloudhealthcare
- id: "postgres"
name: golang:1
@@ -338,7 +318,7 @@ steps:
.ci/test_with_coverage.sh \
"Spanner" \
spanner \
spanner || echo "Integration tests failed." # ignore test failures
spanner
- id: "neo4j"
name: golang:1
@@ -405,7 +385,7 @@ steps:
.ci/test_with_coverage.sh \
"Cloud SQL MySQL" \
cloudsqlmysql \
mysql
mysql || echo "Integration tests failed." # ignore test failures
- id: "mysql"
name: golang:1
@@ -427,7 +407,7 @@ steps:
.ci/test_with_coverage.sh \
"MySQL" \
mysql \
mysql
mysql || echo "Integration tests failed." # ignore test failures
- id: "mssql"
name: golang:1
@@ -609,26 +589,6 @@ steps:
firestore \
firestore
- id: "mongodb"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "MONGODB_DATABASE=$_DATABASE_NAME"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["MONGODB_URI", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"MongoDB" \
mongodb \
mongodb
- id: "looker"
name: golang:1
waitFor: ["compile-test-binary"]
@@ -825,27 +785,7 @@ steps:
elasticsearch \
elasticsearch
- id: "snowflake"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
- "SNOWFLAKE_DATABASE=$_SNOWFLAKE_DATABASE"
- "SNOWFLAKE_SCHEMA=$_SNOWFLAKE_SCHEMA"
secretEnv: ["CLIENT_ID", "SNOWFLAKE_USER", "SNOWFLAKE_PASS", "SNOWFLAKE_ACCOUNT"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"Snowflake" \
snowflake \
snowflake
- id: "cassandra"
name: golang:1
waitFor: ["compile-test-binary"]
@@ -866,8 +806,8 @@ steps:
cassandra
- id: "oracle"
name: ghcr.io/oracle/oraclelinux9-instantclient:23
waitFor: ["install-dependencies"]
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
@@ -880,25 +820,10 @@ steps:
args:
- -c
- |
# Install the C compiler and Oracle SDK headers needed for cgo
dnf install -y gcc oracle-instantclient-devel
# Install Go
curl -L -o go.tar.gz "https://go.dev/dl/go1.25.1.linux-amd64.tar.gz"
tar -C /usr/local -xzf go.tar.gz
export PATH="/usr/local/go/bin:$$PATH"
go test -v ./internal/sources/oracle/... \
-coverprofile=oracle_coverage.out \
-coverpkg=./internal/sources/oracle/...,./internal/tools/oracle/...
# Coverage check
total_coverage=$(go tool cover -func=oracle_coverage.out | grep "total:" | awk '{print $3}')
echo "Oracle total coverage: $total_coverage"
coverage_numeric=$(echo "$total_coverage" | sed 's/%//')
if awk -v cov="$coverage_numeric" 'BEGIN {exit !(cov < 20)}'; then
echo "Coverage failure: $total_coverage is below 20%."
exit 1
fi
.ci/test_with_coverage.sh \
"Oracle" \
oracle \
oracle
- id: "serverless-spark"
name: golang:1
@@ -942,26 +867,6 @@ steps:
singlestore \
singlestore
- id: "mariadb"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
- "MARIADB_DATABASE=$_MARIADB_DATABASE"
- "MARIADB_PORT=$_MARIADB_PORT"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["MARIADB_USER", "MARIADB_PASS", "MARIADB_HOST", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
# skip coverage check as it re-uses current MySQL implementation
go test ./tests/mariadb
availableSecrets:
secretManager:
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_user/versions/latest
@@ -1058,12 +963,6 @@ availableSecrets:
env: ELASTICSEARCH_USER
- versionName: projects/$PROJECT_ID/secrets/elastic_search_pass/versions/latest
env: ELASTICSEARCH_PASS
- versionName: projects/$PROJECT_ID/secrets/snowflake_account/versions/latest
env: SNOWFLAKE_ACCOUNT
- versionName: projects/$PROJECT_ID/secrets/snowflake_user/versions/latest
env: SNOWFLAKE_USER
- versionName: projects/$PROJECT_ID/secrets/snowflake_pass/versions/latest
env: SNOWFLAKE_PASS
- versionName: projects/$PROJECT_ID/secrets/cassandra_user/versions/latest
env: CASSANDRA_USER
- versionName: projects/$PROJECT_ID/secrets/cassandra_pass/versions/latest
@@ -1080,14 +979,6 @@ availableSecrets:
env: SINGLESTORE_PASSWORD
- versionName: projects/$PROJECT_ID/secrets/singlestore_host/versions/latest
env: SINGLESTORE_HOST
- versionName: projects/$PROJECT_ID/secrets/mariadb_user/versions/latest
env: MARIADB_USER
- versionName: projects/$PROJECT_ID/secrets/mariadb_pass/versions/latest
env: MARIADB_PASS
- versionName: projects/$PROJECT_ID/secrets/mariadb_host/versions/latest
env: MARIADB_HOST
- versionName: projects/$PROJECT_ID/secrets/mongodb_uri/versions/latest
env: MONGODB_URI
options:
logging: CLOUD_LOGGING_ONLY
@@ -1148,7 +1039,3 @@ substitutions:
_SINGLESTORE_PORT: "3308"
_SINGLESTORE_DATABASE: "singlestore"
_SINGLESTORE_USER: "root"
_MARIADB_PORT: "3307"
_MARIADB_DATABASE: test_database
_SNOWFLAKE_DATABASE: "test"
_SNOWFLAKE_SCHEMA: "PUBLIC"

View File

@@ -13,7 +13,7 @@
# limitations under the License.
steps:
- name: 'node:22'
- name: 'node:20'
id: 'js-quickstart-test'
entrypoint: 'bash'
args:
@@ -44,4 +44,4 @@ availableSecrets:
timeout: 1000s
options:
logging: CLOUD_LOGGING_ONLY
logging: CLOUD_LOGGING_ONLY

View File

@@ -1,18 +0,0 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ignore_patterns:
- "package-lock.json"
- "go.sum"
- "requirements.txt"

View File

@@ -15,6 +15,7 @@
assign_issues:
- Yuan325
- duwenxin99
- averikitsch
- anubhav756
- twishabansal
- dishaprakash
@@ -61,7 +62,8 @@ assign_issues_by:
- 'googleapis/toolbox-spanner'
assign_prs:
- Yuan325
- duwenxin99
- duwenxin99
- averikitsch
assign_prs_by:
- labels:
- 'product: alloydb'

View File

@@ -24,23 +24,5 @@
],
pinDigests: true,
},
{
groupName: 'Go',
matchManagers: [
'gomod',
],
},
{
groupName: 'Node',
matchManagers: [
'npm',
],
},
{
groupName: 'Pip',
matchManagers: [
'pip_requirements',
],
},
],
}

View File

@@ -1,27 +0,0 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Trigger presubmit tests for trusted contributors
# https://github.com/googleapis/repo-automation-bots/tree/main/packages/trusted-contribution
# Install: https://github.com/apps/trusted-contributions-gcf
trustedContributors:
- "dependabot[bot]"
- "renovate-bot"
annotations:
# Trigger Cloud Build tests
- type: comment
text: "/gcbrun"
- type: label
text: "tests: run"

View File

@@ -40,7 +40,7 @@ jobs:
group: docs-deployment
cancel-in-progress: false
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
with:
fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod
@@ -51,12 +51,12 @@ jobs:
extended: true
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6
with:
node-version: "22"
- name: Cache dependencies
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}

View File

@@ -30,14 +30,14 @@ jobs:
steps:
- name: Checkout main branch (for latest templates and theme)
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
with:
ref: 'main'
submodules: 'recursive'
fetch-depth: 0
- name: Checkout old content from tag into a temporary directory
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
with:
ref: ${{ github.event.inputs.version_tag }}
path: 'old_version_source' # Checkout into a temp subdir
@@ -57,7 +57,7 @@ jobs:
with:
hugo-version: "0.145.0"
extended: true
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6
with:
node-version: "22"

View File

@@ -30,7 +30,7 @@ jobs:
cancel-in-progress: false
steps:
- name: Checkout Code at Tag
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
with:
ref: ${{ github.event.release.tag_name }}
@@ -44,7 +44,7 @@ jobs:
extended: true
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6
with:
node-version: "22"

View File

@@ -34,7 +34,7 @@ jobs:
group: "preview-${{ github.event.number }}"
cancel-in-progress: true
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
with:
ref: versioned-gh-pages

View File

@@ -49,7 +49,7 @@ jobs:
group: "preview-${{ github.event.number }}"
cancel-in-progress: true
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
with:
# Checkout the PR's HEAD commit (supports forks).
ref: ${{ github.event.pull_request.head.sha }}
@@ -62,12 +62,12 @@ jobs:
extended: true
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6
with:
node-version: "22"
- name: Cache dependencies
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}

View File

@@ -1,59 +0,0 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: Link Checker
on:
pull_request:
jobs:
link-check:
runs-on: ubuntu-latest
steps:
- name: Checkout Repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
- name: Restore lychee cache
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5
with:
path: .lycheecache
key: cache-lychee-${{ github.sha }}
restore-keys: cache-lychee-
- name: Link Checker
uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 # v2
with:
args: >
--verbose
--no-progress
--cache
--max-cache-age 1d
README.md
docs/
output: /tmp/foo.txt
fail: true
jobSummary: true
debug: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# This step only runs if the 'lychee_check' step fails, ensuring the
# context note only appears when the developer needs to troubleshoot.
- name: Display Link Context Note on Failure
if: ${{ failure() }}
run: |
echo "## Link Resolution Note" >> $GITHUB_STEP_SUMMARY
echo "Local links and directory changes work differently on GitHub than on the docsite." >> $GITHUB_STEP_SUMMARY
echo "You must ensure fixes pass the **GitHub check** and also work with **\`hugo server\`**." >> $GITHUB_STEP_SUMMARY
echo "---" >> $GITHUB_STEP_SUMMARY

View File

@@ -51,11 +51,11 @@ jobs:
console.log('Failed to remove label. Another job may have already removed it!');
}
- name: Setup Go
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
with:
go-version: "1.25"
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
with:
ref: ${{ github.event.pull_request.head.sha }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
@@ -66,7 +66,7 @@ jobs:
run: |
go mod tidy && git diff --exit-code
- name: golangci-lint
uses: golangci/golangci-lint-action@1e7e51e771db61008b38414a730f564565cf7c20 # v9.2.0
uses: golangci/golangci-lint-action@4afd733a84b1f43292c63897423277bb7f4313a9 # v8.0.0
with:
version: latest
args: --timeout 10m

View File

@@ -29,7 +29,7 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
- name: Wait for image in Artifact Registry
shell: bash

View File

@@ -29,7 +29,7 @@ jobs:
issues: 'write'
pull-requests: 'write'
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- uses: micnncim/action-label-syncer@3abd5ab72fda571e69fffd97bd4e0033dd5f495c # v1.3.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -57,12 +57,12 @@ jobs:
}
- name: Setup Go
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
with:
go-version: "1.24"
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
with:
ref: ${{ github.event.pull_request.head.sha }}
repository: ${{ github.event.pull_request.head.repo.full_name }}

.gitignore
View File

@@ -20,4 +20,4 @@ node_modules
# executable
genai-toolbox
toolbox
toolbox

View File

@@ -1,9 +1 @@
@import 'td/code-dark';
// Make tabs scrollable horizontally instead of wrapping
.nav-tabs {
flex-wrap: nowrap;
white-space: nowrap;
overflow-x: auto;
overflow-y: hidden;
}
@import 'td/code-dark';

View File

@@ -51,22 +51,6 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick
# Add a new version block here before every release
# The order of versions in this file is mirrored into the dropdown
[[params.versions]]
version = "v0.25.0"
url = "https://googleapis.github.io/genai-toolbox/v0.25.0/"
[[params.versions]]
version = "v0.24.0"
url = "https://googleapis.github.io/genai-toolbox/v0.24.0/"
[[params.versions]]
version = "v0.23.0"
url = "https://googleapis.github.io/genai-toolbox/v0.23.0/"
[[params.versions]]
version = "v0.22.0"
url = "https://googleapis.github.io/genai-toolbox/v0.22.0/"
[[params.versions]]
version = "v0.21.0"
url = "https://googleapis.github.io/genai-toolbox/v0.21.0/"

View File

@@ -1,45 +0,0 @@
# Ignore documentation placeholders and generic example domains
^https?://([a-zA-Z0-9-]+\.)?example\.com(:\d+)?(/.*)?$
^http://example\.net
# Shields.io badges often trigger rate limits or intermittent 503s
^https://img\.shields\.io/.*
# PDF files are ignored as lychee cannot reliably parse internal PDF links
\.pdf$
# Standard mailto: protocol is not a web URL
^mailto:
# Ignore local development endpoints that won't resolve in CI/CD environments
^https?://(127\.0\.0\.1|localhost)(:\d+)?(/.*)?$
# Placeholder for Google Cloud Run service discovery
https://cloud-run-url.app/
# DGraph Cloud and private instance endpoints
https://xxx.cloud.dgraph.io/
https://cloud.dgraph.io/login
https://dgraph.io/docs
# MySQL Community downloads and main site (often protected by bot mitigation)
https://dev.mysql.com/downloads/installer/
https://www.mysql.com/
# Claude desktop download link
https://claude.ai/download
# Google Cloud Run product page
https://cloud.google.com/run
# These specific deep links are known to cause redirect loops or 403s in automated scrapers
https://dev.mysql.com/doc/refman/8.4/en/sql-prepared-statements.html
https://dev.mysql.com/doc/refman/8.4/en/user-names.html
# npmjs links can occasionally trigger rate limiting during high-frequency CI builds
https://www.npmjs.com/package/@toolbox-sdk/core
https://www.npmjs.com/package/@toolbox-sdk/adk
# Ignore social media and blog profiles to reduce external request overhead
https://medium.com/@mcp_toolbox

View File

@@ -1,109 +1,5 @@
# Changelog
## [0.25.0](https://github.com/googleapis/genai-toolbox/compare/v0.24.0...v0.25.0) (2026-01-08)
### Features
* Add `embeddingModel` support ([#2121](https://github.com/googleapis/genai-toolbox/issues/2121)) ([9c62f31](https://github.com/googleapis/genai-toolbox/commit/9c62f313ff5edf0a3b5b8a3e996eba078fba4095))
* Add `allowed-hosts` flag ([#2254](https://github.com/googleapis/genai-toolbox/issues/2254)) ([17b41f6](https://github.com/googleapis/genai-toolbox/commit/17b41f64531b8fe417c28ada45d1992ba430dc1b))
* Add parameter default value to manifest ([#2264](https://github.com/googleapis/genai-toolbox/issues/2264)) ([9d1feca](https://github.com/googleapis/genai-toolbox/commit/9d1feca10810fa42cb4c94a409252f1bd373ee36))
* **snowflake:** Add Snowflake Source and Tools ([#858](https://github.com/googleapis/genai-toolbox/issues/858)) ([b706b5b](https://github.com/googleapis/genai-toolbox/commit/b706b5bc685aeda277f277868bae77d38d5fd7b6))
* **prebuilt/cloud-sql-mysql:** Update CSQL MySQL prebuilt tools to use IAM ([#2202](https://github.com/googleapis/genai-toolbox/issues/2202)) ([731a32e](https://github.com/googleapis/genai-toolbox/commit/731a32e5360b4d6862d81fcb27d7127c655679a8))
* **sources/bigquery:** Make credentials scope configurable ([#2210](https://github.com/googleapis/genai-toolbox/issues/2210)) ([a450600](https://github.com/googleapis/genai-toolbox/commit/a4506009b93771b77fb05ae97044f914967e67ed))
* **sources/trino:** Add ssl verification options and fix docs example ([#2155](https://github.com/googleapis/genai-toolbox/issues/2155)) ([4a4cf1e](https://github.com/googleapis/genai-toolbox/commit/4a4cf1e712b671853678dba99c4dc49dd4fc16a2))
* **tools/looker:** Add ability to set destination folder with `make_look` and `make_dashboard`. ([#2245](https://github.com/googleapis/genai-toolbox/issues/2245)) ([eb79339](https://github.com/googleapis/genai-toolbox/commit/eb793398cd1cc4006d9808ccda5dc7aea5e92bd5))
* **tools/postgressql:** Add tool to list store procedure ([#2156](https://github.com/googleapis/genai-toolbox/issues/2156)) ([cf0fc51](https://github.com/googleapis/genai-toolbox/commit/cf0fc515b57d9b84770076f3c0c5597c4597ef62))
* **tools/postgressql:** Add Parameter `embeddedBy` config support ([#2151](https://github.com/googleapis/genai-toolbox/issues/2151)) ([17b70cc](https://github.com/googleapis/genai-toolbox/commit/17b70ccaa754d15bcc33a1a3ecb7e652520fa600))
### Bug Fixes
* **server:** Add `embeddingModel` config initialization ([#2281](https://github.com/googleapis/genai-toolbox/issues/2281)) ([a779975](https://github.com/googleapis/genai-toolbox/commit/a7799757c9345f99b6d2717841fbf792d364e1a2))
* **sources/cloudgda:** Add import for cloudgda source ([#2217](https://github.com/googleapis/genai-toolbox/issues/2217)) ([7daa411](https://github.com/googleapis/genai-toolbox/commit/7daa4111f4ebfb0a35319fd67a8f7b9f0f99efcf))
* **tools/alloydb-wait-for-operation:** Fix connection message generation ([#2228](https://github.com/googleapis/genai-toolbox/issues/2228)) ([7053fbb](https://github.com/googleapis/genai-toolbox/commit/7053fbb1953653143d39a8510916ea97a91022a6))
* **tools/alloydbainl:** Only add psv when NL Config Param is defined ([#2265](https://github.com/googleapis/genai-toolbox/issues/2265)) ([ef8f3b0](https://github.com/googleapis/genai-toolbox/commit/ef8f3b02f2f38ce94a6ba9acf35d08b9469bef4e))
* **tools/looker:** Looker client OAuth nil pointer error ([#2231](https://github.com/googleapis/genai-toolbox/issues/2231)) ([268700b](https://github.com/googleapis/genai-toolbox/commit/268700bdbf8281de0318d60ca613ed3672990b20))
## [0.24.0](https://github.com/googleapis/genai-toolbox/compare/v0.23.0...v0.24.0) (2025-12-19)
### Features
* **sources/cloud-gemini-data-analytics:** Add the Gemini Data Analytics (GDA) integration for DB NL2SQL conversion to Toolbox ([#2181](https://github.com/googleapis/genai-toolbox/issues/2181)) ([aa270b2](https://github.com/googleapis/genai-toolbox/commit/aa270b2630da2e3d618db804ca95550445367dbc))
* **source/cloudsqlmysql:** Add support for IAM authentication in Cloud SQL MySQL source ([#2050](https://github.com/googleapis/genai-toolbox/issues/2050)) ([af3d3c5](https://github.com/googleapis/genai-toolbox/commit/af3d3c52044bea17781b89ce4ab71ff0f874ac20))
* **sources/oracle:** Add Oracle OCI and Wallet support ([#1945](https://github.com/googleapis/genai-toolbox/issues/1945)) ([8ea39ec](https://github.com/googleapis/genai-toolbox/commit/8ea39ec32fbbaa97939c626fec8c5d86040ed464))
* Support combining prebuilt and custom tool configurations ([#2188](https://github.com/googleapis/genai-toolbox/issues/2188)) ([5788605](https://github.com/googleapis/genai-toolbox/commit/57886058188aa5d2a51d5846a98bc6d8a650edd1))
* **tools/mysql-get-query-plan:** Add new `mysql-get-query-plan` tool for MySQL source ([#2123](https://github.com/googleapis/genai-toolbox/issues/2123)) ([0641da0](https://github.com/googleapis/genai-toolbox/commit/0641da0353857317113b2169e547ca69603ddfde))
### Bug Fixes
* **spanner:** Move list graphs validation to runtime ([#2154](https://github.com/googleapis/genai-toolbox/issues/2154)) ([914b3ee](https://github.com/googleapis/genai-toolbox/commit/914b3eefda40a650efe552d245369e007277dab5))
## [0.23.0](https://github.com/googleapis/genai-toolbox/compare/v0.22.0...v0.23.0) (2025-12-11)
### ⚠ BREAKING CHANGES
* **serverless-spark:** add URLs to create batch tool outputs
* **serverless-spark:** add URLs to list_batches output
* **serverless-spark:** add Cloud Console and Logging URLs to get_batch
* **tools/postgres:** Add additional filter params for existing postgres tools ([#2033](https://github.com/googleapis/genai-toolbox/issues/2033))
### Features
* **tools/postgres:** Add list-table-stats-tool to list table statistics. ([#2055](https://github.com/googleapis/genai-toolbox/issues/2055)) ([78b02f0](https://github.com/googleapis/genai-toolbox/commit/78b02f08c3cc3062943bb2f91cf60d5149c8d28d))
* **looker/tools:** Enhance dashboard creation with dashboard filters ([#2133](https://github.com/googleapis/genai-toolbox/issues/2133)) ([285aa46](https://github.com/googleapis/genai-toolbox/commit/285aa46b887d9acb2da8766e107bbf1ab75b8812))
* **serverless-spark:** Add Cloud Console and Logging URLs to get_batch ([e29c061](https://github.com/googleapis/genai-toolbox/commit/e29c0616d6b9ecda2badcaf7b69614e511ac031b))
* **serverless-spark:** Add URLs to create batch tool outputs ([c6ccf4b](https://github.com/googleapis/genai-toolbox/commit/c6ccf4bd87026484143a2d0f5527b2edab03b54a))
* **serverless-spark:** Add URLs to list_batches output ([5605eab](https://github.com/googleapis/genai-toolbox/commit/5605eabd696696ade07f52431a28ef65c0fb1f77))
* **sources/mariadb:** Add MariaDB source and MySQL tools integration ([#1908](https://github.com/googleapis/genai-toolbox/issues/1908)) ([3b40fea](https://github.com/googleapis/genai-toolbox/commit/3b40fea25edae607e02c1e8fc2b0c957fa2c8e9a))
* **tools/postgres:** Add additional filter params for existing postgres tools ([#2033](https://github.com/googleapis/genai-toolbox/issues/2033)) ([489117d](https://github.com/googleapis/genai-toolbox/commit/489117d74711ac9260e7547163ca463eb45eeaa2))
* **tools/postgres:** Add list_pg_settings, list_database_stats tools for postgres ([#2030](https://github.com/googleapis/genai-toolbox/issues/2030)) ([32367a4](https://github.com/googleapis/genai-toolbox/commit/32367a472fae9653fed7f126428eba0252978bd5))
* **tools/postgres:** Add new postgres-list-roles tool ([#2038](https://github.com/googleapis/genai-toolbox/issues/2038)) ([bea9705](https://github.com/googleapis/genai-toolbox/commit/bea97054502cfa236aa10e2ebc8ff58eb00ad035))
### Bug Fixes
* List tables tools null fix ([#2107](https://github.com/googleapis/genai-toolbox/issues/2107)) ([2b45266](https://github.com/googleapis/genai-toolbox/commit/2b452665983154041d4cd0ed7d82532e4af682eb))
* **tools/mongodb:** Removed sortPayload and sortParams ([#1238](https://github.com/googleapis/genai-toolbox/issues/1238)) ([c5a6daa](https://github.com/googleapis/genai-toolbox/commit/c5a6daa7683d2f9be654300d977692c368e55e31))
### Miscellaneous Chores
* **looker:** Upgrade to latest go sdk ([#2159](https://github.com/googleapis/genai-toolbox/issues/2159)) ([78e015d](https://github.com/googleapis/genai-toolbox/commit/78e015d7dfd9cce7e2b444ed934da17eb355bc86))
## [0.22.0](https://github.com/googleapis/genai-toolbox/compare/v0.21.0...v0.22.0) (2025-12-04)
### Features
* **tools/postgres:** Add allowed-origins flag ([#1984](https://github.com/googleapis/genai-toolbox/issues/1984)) ([862868f](https://github.com/googleapis/genai-toolbox/commit/862868f28476ea981575ce412faa7d6a03138f31))
* **tools/postgres:** Add list-query-stats and get-column-cardinality functions ([#1976](https://github.com/googleapis/genai-toolbox/issues/1976)) ([9f76026](https://github.com/googleapis/genai-toolbox/commit/9f760269253a8cc92a357e995c6993ccc4a0fb7b))
* **tools/spanner:** Add spanner list graphs to prebuiltconfigs ([#2056](https://github.com/googleapis/genai-toolbox/issues/2056)) ([0e7fbf4](https://github.com/googleapis/genai-toolbox/commit/0e7fbf465c488397aa9d8cab2e55165fff4eb53c))
* **prebuilt/cloud-sql:** Add clone instance tool for cloud sql ([#1845](https://github.com/googleapis/genai-toolbox/issues/1845)) ([5e43630](https://github.com/googleapis/genai-toolbox/commit/5e43630907aa2d7bc6818142483a33272eab060b))
* **serverless-spark:** Add create_pyspark_batch tool ([1bf0b51](https://github.com/googleapis/genai-toolbox/commit/1bf0b51f033c956790be1577bf5310d0b17e9c12))
* **serverless-spark:** Add create_spark_batch tool ([17a9792](https://github.com/googleapis/genai-toolbox/commit/17a979207dbc4fe70acd0ebda164d1a8d34c1ed3))
* Support alternate accessToken header name ([#1968](https://github.com/googleapis/genai-toolbox/issues/1968)) ([18017d6](https://github.com/googleapis/genai-toolbox/commit/18017d6545335a6fc1c472617101c35254d9a597))
* Support for annotations ([#2007](https://github.com/googleapis/genai-toolbox/issues/2007)) ([ac21335](https://github.com/googleapis/genai-toolbox/commit/ac21335f4e88ca52d954d7f8143a551a35661b94))
* **tool/mssql:** Set default host and port for MSSQL source ([#1943](https://github.com/googleapis/genai-toolbox/issues/1943)) ([7a9cc63](https://github.com/googleapis/genai-toolbox/commit/7a9cc633768d9ae9a7ff8230002da69d6a36ca86))
* **tools/cloudsqlpg:** Add CloudSQL PostgreSQL pre-check tool ([#1722](https://github.com/googleapis/genai-toolbox/issues/1722)) ([8752e05](https://github.com/googleapis/genai-toolbox/commit/8752e05ab6e98812d95673a6f1ff67e9a6ae48d2))
* **tools/postgres-list-publication-tables:** Add new postgres-list-publication-tables tool ([#1919](https://github.com/googleapis/genai-toolbox/issues/1919)) ([f4b1f0a](https://github.com/googleapis/genai-toolbox/commit/f4b1f0a68000ca2fc0325f55a1905705417c38a2))
* **tools/postgres-list-tablespaces:** Add new postgres-list-tablespaces tool ([#1934](https://github.com/googleapis/genai-toolbox/issues/1934)) ([5ad7c61](https://github.com/googleapis/genai-toolbox/commit/5ad7c6127b3e47504fc4afda0b7f3de1dff78b8b))
* **tools/spanner-list-graph:** Tool impl + docs + tests ([#1923](https://github.com/googleapis/genai-toolbox/issues/1923)) ([a0f44d3](https://github.com/googleapis/genai-toolbox/commit/a0f44d34ea3f044dd08501be616f70ddfd63ab45))
### Bug Fixes
* Add import for firebirdsql ([#2045](https://github.com/googleapis/genai-toolbox/issues/2045)) ([fb7aae9](https://github.com/googleapis/genai-toolbox/commit/fb7aae9d35b760d3471d8379642f835a0d84ec41))
* Correct FAQ to mention HTTP tools ([#2036](https://github.com/googleapis/genai-toolbox/issues/2036)) ([7b44237](https://github.com/googleapis/genai-toolbox/commit/7b44237d4a21bfbf8d3cebe4d32a15affa29584d))
* Format BigQuery numeric output as decimal strings ([#2084](https://github.com/googleapis/genai-toolbox/issues/2084)) ([155bff8](https://github.com/googleapis/genai-toolbox/commit/155bff80c1da4fae1e169e425fd82e1dc3373041))
* Set default annotations for tools in code if annotation not provided in yaml ([#2049](https://github.com/googleapis/genai-toolbox/issues/2049)) ([565460c](https://github.com/googleapis/genai-toolbox/commit/565460c4ea8953dbe80070a8e469f957c0f7a70c))
* **tools/alloydb-postgres-list-tables:** Exclude google_ml schema from list_tables ([#2046](https://github.com/googleapis/genai-toolbox/issues/2046)) ([a03984c](https://github.com/googleapis/genai-toolbox/commit/a03984cc15254c928f30085f8fa509ded6a79a0c))
* **tools/alloydbcreateuser:** Remove duplication of project praram ([#2028](https://github.com/googleapis/genai-toolbox/issues/2028)) ([730ac6d](https://github.com/googleapis/genai-toolbox/commit/730ac6d22805fd50b4a675b74c1865f4e7689e7c))
* **tools/mongodb:** Remove `required` tag from the `canonical` field ([#2099](https://github.com/googleapis/genai-toolbox/issues/2099)) ([744214e](https://github.com/googleapis/genai-toolbox/commit/744214e04cd12b11d166e6eb7da8ce4714904abc))
## [0.21.0](https://github.com/googleapis/genai-toolbox/compare/v0.20.0...v0.21.0) (2025-11-19)

View File

@@ -48,17 +48,6 @@ squashed when merged.
author for more than 10 days, maintainers may mark that PR as Draft. PRs that
are inactive for more than 30 days may be closed.
### Automated Code Reviews
This repository uses **Gemini Code Assist** to provide automated code reviews on Pull Requests. While this does not replace human review, it provides immediate feedback on code quality and potential issues.
You can manually trigger the bot by commenting on your Pull Request:
* `/gemini`: Manually invokes Gemini Code Assist in comments
* `/gemini review`: Posts a code review of the changes in the pull request
* `/gemini summary`: Posts a summary of the changes in the pull request.
* `/gemini help`: Overview of the available commands
## Adding a New Database Source or Tool
Please create an
@@ -167,15 +156,15 @@ tools.
[integration.cloudbuild.yaml](.ci/integration.cloudbuild.yaml).
[tool-get]:
https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L41
https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L31
[tool-call]:
https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L229
<https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L79>
[mcp-call]:
https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L789
https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L554
[execute-sql]:
https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L609
<https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L431>
[temp-param]:
https://github.com/googleapis/genai-toolbox/blob/v0.23.0/tests/tool.go#L454
<https://github.com/googleapis/genai-toolbox/blob/fd300dc606d88bf9f7bba689e2cee4e3565537dd/tests/tool.go#L297>
[temp-param-doc]:
https://googleapis.github.io/genai-toolbox/resources/tools/#template-parameters
@@ -244,4 +233,4 @@ resources.
* **PR Description:** PR description should **always** be included. It should
include a concise description of the changes, its impact, along with a
summary of the solution. If the PR is related to a specific issue, the issue
number should be mentioned in the PR description (e.g. `Fixes #1`).
number should be mentioned in the PR description (e.g. `Fixes #1`).

View File

@@ -109,7 +109,7 @@ golangci-lint run --fix
Execute unit tests locally:
```bash
go test -race -v ./cmd/... ./internal/...
go test -race -v ./...
```
### Integration Tests
@@ -207,30 +207,6 @@ variables for each source.
* SQLite - setup in the integration test, where we create a temporary database
file
### Link Checking and Fixing with Lychee
We use **[lychee](https://github.com/lycheeverse/lychee-action)** for repository link checks.
* To run the checker **locally**, see the [command-line usage guide](https://github.com/lycheeverse/lychee?tab=readme-ov-file#commandline-usage).
#### Fixing Broken Links
1. **Update the Link:** Correct the broken URL or update the content where it is used.
2. **Ignore the Link:** If you can't fix the link (e.g., due to **external rate-limits** or if it's a **local-only URL**), tell Lychee to **ignore** it.
* List **regular expressions** or **direct links** in the **[.lycheeignore](https://github.com/googleapis/genai-toolbox/blob/main/.lycheeignore)** file, one entry per line.
* **Always add a comment** explaining **why** the link is being skipped to prevent link rot. **Example `.lycheeignore`:**
```text
# These are email addresses, not standard web URLs, and usually cause check failures.
^mailto:.*
```
> [!NOTE]
> To avoid build failures in GitHub Actions, follow the linking pattern demonstrated here: <br>
> **Avoid:** (Works in Hugo, breaks Link Checker): `[Read more](docs/setup)` or `[Read more](docs/setup/)` <br>
> **Reason:** The link checker cannot find a file named "setup" or a directory with that name containing an index. <br>
> **Preferred:** `[Read more](docs/setup.md)` <br>
> **Reason:** The GitHub Action finds the physical file. Hugo then uses its internal logic (or render hooks) to resolve this to the correct `/docs/setup/` web URL. <br>
### Other GitHub Checks
* License header check (`.github/header-checker-lint.yml`) - Ensures files have
@@ -304,7 +280,6 @@ There are 3 GHA workflows we use to achieve document versioning:
Request a repo owner to run the preview deployment workflow on your PR. A
preview link will be automatically added as a comment to your PR.
#### Maintainers
1. **Inspect Changes:** Review the proposed changes in the PR to ensure they are
@@ -379,23 +354,6 @@ to approve PRs for main. TeamSync is used to create this team from the MDB
Group `toolbox-contributors`. Googlers who are developing for MCP-Toolbox
but aren't part of the core team should join this group.
### Issue/PR Triage and SLO
After an issue is created, maintainers will assign the following labels:
* `Priority` (defaulted to P0)
* `Type` (if applicable)
* `Product` (if applicable)
All incoming issues and PRs will follow the following SLO:
| Type | Priority | Objective |
|-----------------|----------|------------------------------------------------------------------------|
| Feature Request | P0 | Must respond within **5 days** |
| Process | P0 | Must respond within **5 days** |
| Bugs | P0 | Must respond within **5 days**, and resolve/closure within **14 days** |
| Bugs | P1 | Must respond within **7 days**, and resolve/closure within **90 days** |
| Bugs | P2 | Must respond within **30 days** |
_Types that are not listed in the table do not adhere to any SLO._
### Releasing
Toolbox has two types of releases: versioned and continuous. It uses Google

View File

@@ -105,21 +105,6 @@ redeploying your application.
## Getting Started
### (Non-production) Running Toolbox
You can run Toolbox directly with a [configuration file](#configuration):
```sh
npx @toolbox-sdk/server --tools-file tools.yaml
```
This runs the latest version of the toolbox server with your configuration file.
> [!NOTE]
> This method should only be used for non-production use cases such as
> experimentation. For any production use-cases, please consider [Installing the
> server](#installing-the-server) and then [running it](#running-the-server).
### Installing the server
For the latest version, check the [releases page][releases] and use the
@@ -140,7 +125,7 @@ To install Toolbox as a binary:
>
> ```sh
> # see releases page for other versions
> export VERSION=0.25.0
> export VERSION=0.21.0
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
> chmod +x toolbox
> ```
@@ -153,7 +138,7 @@ To install Toolbox as a binary:
>
> ```sh
> # see releases page for other versions
> export VERSION=0.25.0
> export VERSION=0.21.0
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
> chmod +x toolbox
> ```
@@ -166,33 +151,21 @@ To install Toolbox as a binary:
>
> ```sh
> # see releases page for other versions
> export VERSION=0.25.0
> export VERSION=0.21.0
> curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
> chmod +x toolbox
> ```
>
> </details>
> <details>
> <summary>Windows (Command Prompt)</summary>
> <summary>Windows (AMD64)</summary>
>
> To install Toolbox as a binary on Windows (Command Prompt):
>
> ```cmd
> :: see releases page for other versions
> set VERSION=0.25.0
> curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
> ```
>
> </details>
> <details>
> <summary>Windows (PowerShell)</summary>
>
> To install Toolbox as a binary on Windows (PowerShell):
> To install Toolbox as a binary on Windows (AMD64):
>
> ```powershell
> # see releases page for other versions
> $VERSION = "0.25.0"
> curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
> :: see releases page for other versions
> set VERSION=0.21.0
> curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
> ```
>
> </details>
@@ -204,7 +177,7 @@ You can also install Toolbox as a container:
```sh
# see releases page for other versions
export VERSION=0.25.0
export VERSION=0.21.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```
@@ -228,7 +201,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:
```sh
go install github.com/googleapis/genai-toolbox@v0.25.0
go install github.com/googleapis/genai-toolbox@v0.21.0
```
<!-- {x-release-please-end} -->
@@ -318,16 +291,6 @@ toolbox --tools-file "tools.yaml"
</details>
<details>
<summary>NPM</summary>
To run Toolbox directly without manually downloading the binary (requires Node.js):
```sh
npx @toolbox-sdk/server --tools-file tools.yaml
```
</details>
<details>
<summary>Gemini CLI</summary>
@@ -552,36 +515,6 @@ For more detailed instructions on using the Toolbox Core SDK, see the
```
</details>
<details>
<summary>ADK</summary>
1. Install [Toolbox ADK SDK][toolbox-adk-js]:
```bash
npm install @toolbox-sdk/adk
```
2. Load tools:
```javascript
import { ToolboxClient } from '@toolbox-sdk/adk';
// update the url to point to your server
const URL = 'http://127.0.0.1:5000';
let client = new ToolboxClient(URL);
// these tools can be passed to your application!
const tools = await client.loadToolset('toolsetName');
```
For more detailed instructions on using the Toolbox ADK SDK, see the
[project's README][toolbox-adk-js-readme].
[toolbox-adk-js]: https://www.npmjs.com/package/@toolbox-sdk/adk
[toolbox-adk-js-readme]:
https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-adk/README.md
</details>
</details>
</blockquote>
<details>
@@ -1035,12 +968,12 @@ The version will be incremented as follows:
### Post-1.0.0 Versioning
Once the project reaches a stable `1.0.0` release, the version number
**`MAJOR.MINOR.PATCH`** will follow the more common convention:
Once the project reaches a stable `1.0.0` release, the versioning will follow
the more common convention:
- **`MAJOR`**: Incremented for incompatible API changes.
- **`MINOR`**: Incremented for new, backward-compatible functionality.
- **`PATCH`**: Incremented for backward-compatible bug fixes.
- **`MAJOR.MINOR.PATCH`**: Incremented for incompatible API changes.
- **`MAJOR.MINOR.PATCH`**: Incremented for new, backward-compatible functionality.
- **`MAJOR.MINOR.PATCH`**: Incremented for backward-compatible bug fixes.
The public API that this applies to is the CLI associated with Toolbox, the
interactions with official SDKs, and the definitions in the `tools.yaml` file.

View File

@@ -33,7 +33,6 @@ import (
"github.com/fsnotify/fsnotify"
yaml "github.com/goccy/go-yaml"
"github.com/googleapis/genai-toolbox/internal/auth"
"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
"github.com/googleapis/genai-toolbox/internal/log"
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
"github.com/googleapis/genai-toolbox/internal/prompts"
@@ -74,7 +73,6 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselistdatabases"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhousesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudgda"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirfetchpage"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatienteverything"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatientsearch"
@@ -91,7 +89,6 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomseries"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomstudies"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcloneinstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatedatabase"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreateusers"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances"
@@ -122,7 +119,6 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules"
_ "github.com/googleapis/genai-toolbox/internal/tools/http"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardelement"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardfilter"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerconversationalanalytics"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookercreateprojectfile"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdeleteprojectfile"
@@ -170,7 +166,6 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqllisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlgetqueryplan"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllistactivequeries"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttablefragmentation"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttables"
@@ -185,23 +180,14 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/oracle/oraclesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresdatabaseoverview"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresgetcolumncardinality"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistactivequeries"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistavailableextensions"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistdatabasestats"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistindexes"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistinstalledextensions"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistlocks"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpgsettings"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpublicationtables"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistquerystats"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistroles"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistschemas"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistsequences"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresliststoredprocedure"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablespaces"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablestats"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttriggers"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistviews"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslongrunningtransactions"
@@ -209,16 +195,11 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
_ "github.com/googleapis/genai-toolbox/internal/tools/redis"
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcancelbatch"
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatepysparkbatch"
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatesparkbatch"
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkgetbatch"
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparklistbatches"
_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoreexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoresql"
_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakeexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlistgraphs"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannersql"
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqliteexecutesql"
@@ -239,7 +220,6 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/sources/bigtable"
_ "github.com/googleapis/genai-toolbox/internal/sources/cassandra"
_ "github.com/googleapis/genai-toolbox/internal/sources/clickhouse"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudgda"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudhealthcare"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudmonitoring"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqladmin"
@@ -265,7 +245,6 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/sources/redis"
_ "github.com/googleapis/genai-toolbox/internal/sources/serverlessspark"
_ "github.com/googleapis/genai-toolbox/internal/sources/singlestore"
_ "github.com/googleapis/genai-toolbox/internal/sources/snowflake"
_ "github.com/googleapis/genai-toolbox/internal/sources/spanner"
_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
_ "github.com/googleapis/genai-toolbox/internal/sources/tidb"
@@ -313,15 +292,15 @@ func Execute() {
type Command struct {
*cobra.Command
cfg server.ServerConfig
logger log.Logger
tools_file string
tools_files []string
tools_folder string
prebuiltConfigs []string
inStream io.Reader
outStream io.Writer
errStream io.Writer
cfg server.ServerConfig
logger log.Logger
tools_file string
tools_files []string
tools_folder string
prebuiltConfig string
inStream io.Reader
outStream io.Writer
errStream io.Writer
}
// NewCommand returns a Command object representing an invocation of the CLI.
@@ -361,12 +340,12 @@ func NewCommand(opts ...Option) *Command {
flags.StringVarP(&cmd.cfg.Address, "address", "a", "127.0.0.1", "Address of the interface the server will listen on.")
flags.IntVarP(&cmd.cfg.Port, "port", "p", 5000, "Port the server will listen on.")
flags.StringVar(&cmd.tools_file, "tools_file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.")
flags.StringVar(&cmd.tools_file, "tools_file", "", "File path specifying the tool configuration. Cannot be used with --prebuilt.")
// deprecate tools_file
_ = flags.MarkDeprecated("tools_file", "please use --tools-file instead")
flags.StringVar(&cmd.tools_file, "tools-file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.")
flags.StringSliceVar(&cmd.tools_files, "tools-files", []string{}, "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file, or --tools-folder.")
flags.StringVar(&cmd.tools_folder, "tools-folder", "", "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file, or --tools-files.")
flags.StringVar(&cmd.tools_file, "tools-file", "", "File path specifying the tool configuration. Cannot be used with --prebuilt, --tools-files, or --tools-folder.")
flags.StringSliceVar(&cmd.tools_files, "tools-files", []string{}, "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --prebuilt, --tools-file, or --tools-folder.")
flags.StringVar(&cmd.tools_folder, "tools-folder", "", "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --prebuilt, --tools-file, or --tools-files.")
flags.Var(&cmd.cfg.LogLevel, "log-level", "Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'.")
flags.Var(&cmd.cfg.LoggingFormat, "logging-format", "Specify logging format to use. Allowed: 'standard' or 'JSON'.")
flags.BoolVar(&cmd.cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.")
@@ -374,16 +353,13 @@ func NewCommand(opts ...Option) *Command {
flags.StringVar(&cmd.cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
// Fetch prebuilt tools sources to customize the help description
prebuiltHelp := fmt.Sprintf(
"Use a prebuilt tool configuration by source type. Allowed: '%s'. Can be specified multiple times.",
"Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. Allowed: '%s'.",
strings.Join(prebuiltconfigs.GetPrebuiltSources(), "', '"),
)
flags.StringSliceVar(&cmd.prebuiltConfigs, "prebuilt", []string{}, prebuiltHelp)
flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", prebuiltHelp)
flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
flags.BoolVar(&cmd.cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
// TODO: Insecure by default. Might consider updating this for v1.0.0
flags.StringSliceVar(&cmd.cfg.AllowedOrigins, "allowed-origins", []string{"*"}, "Specifies a list of origins permitted to access this server. Defaults to '*'.")
flags.StringSliceVar(&cmd.cfg.AllowedHosts, "allowed-hosts", []string{"*"}, "Specifies a list of hosts permitted to access this server. Defaults to '*'.")
// wrap RunE command so that we have access to original Command object
cmd.RunE = func(*cobra.Command, []string) error { return run(cmd) }
@@ -392,13 +368,12 @@ func NewCommand(opts ...Option) *Command {
}
type ToolsFile struct {
Sources server.SourceConfigs `yaml:"sources"`
AuthSources server.AuthServiceConfigs `yaml:"authSources"` // Deprecated: Kept for compatibility.
AuthServices server.AuthServiceConfigs `yaml:"authServices"`
EmbeddingModels server.EmbeddingModelConfigs `yaml:"embeddingModels"`
Tools server.ToolConfigs `yaml:"tools"`
Toolsets server.ToolsetConfigs `yaml:"toolsets"`
Prompts server.PromptConfigs `yaml:"prompts"`
Sources server.SourceConfigs `yaml:"sources"`
AuthSources server.AuthServiceConfigs `yaml:"authSources"` // Deprecated: Kept for compatibility.
AuthServices server.AuthServiceConfigs `yaml:"authServices"`
Tools server.ToolConfigs `yaml:"tools"`
Toolsets server.ToolsetConfigs `yaml:"toolsets"`
Prompts server.PromptConfigs `yaml:"prompts"`
}
// parseEnv replaces environment variables ${ENV_NAME} with their values.
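Taken together, the struct diff above and this comment describe the load path: substitute `${ENV_NAME}` placeholders, then unmarshal the YAML. Below is a minimal, standalone sketch of that flow using the `goccy/go-yaml` package the file already imports; the regexp, the field subset, and the sample config are illustrative assumptions, not the actual implementation.

```go
package main

import (
	"fmt"
	"os"
	"regexp"

	yaml "github.com/goccy/go-yaml"
)

// Matches ${ENV_NAME} placeholders (illustrative pattern only).
var envPattern = regexp.MustCompile(`\$\{(\w+)\}`)

// expandEnv replaces each ${ENV_NAME} with its environment value,
// leaving the placeholder untouched when the variable is unset.
func expandEnv(raw []byte) []byte {
	return envPattern.ReplaceAllFunc(raw, func(m []byte) []byte {
		if v, ok := os.LookupEnv(string(envPattern.FindSubmatch(m)[1])); ok {
			return []byte(v)
		}
		return m
	})
}

// toolsFile mirrors a small subset of the ToolsFile fields shown above.
type toolsFile struct {
	Sources map[string]map[string]any `yaml:"sources"`
	Tools   map[string]map[string]any `yaml:"tools"`
}

func main() {
	os.Setenv("PG_DB", "orders")
	raw := []byte("sources:\n  my-pg:\n    kind: postgres\n    database: ${PG_DB}\ntools: {}\n")
	var f toolsFile
	if err := yaml.Unmarshal(expandEnv(raw), &f); err != nil {
		panic(err)
	}
	fmt.Println(f.Sources["my-pg"]["database"]) // orders
}
```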
@@ -447,12 +422,11 @@ func parseToolsFile(ctx context.Context, raw []byte) (ToolsFile, error) {
// All resource names (sources, authServices, tools, toolsets) must be unique across all files.
func mergeToolsFiles(files ...ToolsFile) (ToolsFile, error) {
merged := ToolsFile{
Sources: make(server.SourceConfigs),
AuthServices: make(server.AuthServiceConfigs),
EmbeddingModels: make(server.EmbeddingModelConfigs),
Tools: make(server.ToolConfigs),
Toolsets: make(server.ToolsetConfigs),
Prompts: make(server.PromptConfigs),
Sources: make(server.SourceConfigs),
AuthServices: make(server.AuthServiceConfigs),
Tools: make(server.ToolConfigs),
Toolsets: make(server.ToolsetConfigs),
Prompts: make(server.PromptConfigs),
}
var conflicts []string
@@ -472,9 +446,6 @@ func mergeToolsFiles(files ...ToolsFile) (ToolsFile, error) {
if _, exists := merged.AuthSources[name]; exists {
conflicts = append(conflicts, fmt.Sprintf("authSource '%s' (file #%d)", name, fileIndex+1))
} else {
if merged.AuthSources == nil {
merged.AuthSources = make(server.AuthServiceConfigs)
}
merged.AuthSources[name] = authSource
}
}
@@ -488,15 +459,6 @@ func mergeToolsFiles(files ...ToolsFile) (ToolsFile, error) {
}
}
// Check for conflicts and merge embeddingModels
for name, em := range file.EmbeddingModels {
if _, exists := merged.EmbeddingModels[name]; exists {
conflicts = append(conflicts, fmt.Sprintf("embedding model '%s' (file #%d)", name, fileIndex+1))
} else {
merged.EmbeddingModels[name] = em
}
}
// Check for conflicts and merge tools
for name, tool := range file.Tools {
if _, exists := merged.Tools[name]; exists {
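The merge pattern visible in this hunk (first definition of a name wins, later duplicates are collected into a conflicts list and reported together) can be shown in isolation. A simplified sketch for a single resource kind, not the multi-map merge the file actually performs:

```go
package main

import (
	"fmt"
	"strings"
)

// mergeUnique merges several name->config maps, requiring every name to be
// unique across all inputs; duplicates are collected and reported together.
func mergeUnique(files ...map[string]string) (map[string]string, error) {
	merged := map[string]string{}
	var conflicts []string
	for i, file := range files {
		for name, cfg := range file {
			if _, exists := merged[name]; exists {
				conflicts = append(conflicts, fmt.Sprintf("tool '%s' (file #%d)", name, i+1))
				continue
			}
			merged[name] = cfg
		}
	}
	if len(conflicts) > 0 {
		return nil, fmt.Errorf("duplicate resource names: %s", strings.Join(conflicts, ", "))
	}
	return merged, nil
}

func main() {
	a := map[string]string{"search": "postgres"}
	b := map[string]string{"search": "mysql"}
	if _, err := mergeUnique(a, b); err != nil {
		fmt.Println(err) // duplicate resource names: tool 'search' (file #2)
	}
}
```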
@@ -601,14 +563,14 @@ func handleDynamicReload(ctx context.Context, toolsFile ToolsFile, s *server.Ser
panic(err)
}
sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := validateReloadEdits(ctx, toolsFile)
sourcesMap, authServicesMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := validateReloadEdits(ctx, toolsFile)
if err != nil {
errMsg := fmt.Errorf("unable to validate reloaded edits: %w", err)
logger.WarnContext(ctx, errMsg.Error())
return err
}
s.ResourceMgr.SetResources(sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap)
s.ResourceMgr.SetResources(sourcesMap, authServicesMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap)
return nil
}
@@ -616,7 +578,7 @@ func handleDynamicReload(ctx context.Context, toolsFile ToolsFile, s *server.Ser
// validateReloadEdits checks that the reloaded tools file configs can be initialized without failing
func validateReloadEdits(
ctx context.Context, toolsFile ToolsFile,
) (map[string]sources.Source, map[string]auth.AuthService, map[string]embeddingmodels.EmbeddingModel, map[string]tools.Tool, map[string]tools.Toolset, map[string]prompts.Prompt, map[string]prompts.Promptset, error,
) (map[string]sources.Source, map[string]auth.AuthService, map[string]tools.Tool, map[string]tools.Toolset, map[string]prompts.Prompt, map[string]prompts.Promptset, error,
) {
logger, err := util.LoggerFromContext(ctx)
if err != nil {
@@ -634,23 +596,22 @@ func validateReloadEdits(
defer span.End()
reloadedConfig := server.ServerConfig{
Version: versionString,
SourceConfigs: toolsFile.Sources,
AuthServiceConfigs: toolsFile.AuthServices,
EmbeddingModelConfigs: toolsFile.EmbeddingModels,
ToolConfigs: toolsFile.Tools,
ToolsetConfigs: toolsFile.Toolsets,
PromptConfigs: toolsFile.Prompts,
Version: versionString,
SourceConfigs: toolsFile.Sources,
AuthServiceConfigs: toolsFile.AuthServices,
ToolConfigs: toolsFile.Tools,
ToolsetConfigs: toolsFile.Toolsets,
PromptConfigs: toolsFile.Prompts,
}
sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, reloadedConfig)
sourcesMap, authServicesMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, reloadedConfig)
if err != nil {
errMsg := fmt.Errorf("unable to initialize reloaded configs: %w", err)
logger.WarnContext(ctx, errMsg.Error())
return nil, nil, nil, nil, nil, nil, nil, err
return nil, nil, nil, nil, nil, nil, err
}
return sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, nil
return sourcesMap, authServicesMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, nil
}
// watchChanges checks for changes in the provided yaml tools file(s) or folder.
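A sketch of such a watch loop using `github.com/fsnotify/fsnotify`; whether the source actually uses fsnotify, and how it filters or debounces events, is an assumption here:

```go
package main

import (
	"log"
	"strings"

	"github.com/fsnotify/fsnotify"
)

func main() {
	w, err := fsnotify.NewWatcher()
	if err != nil {
		log.Fatal(err)
	}
	defer w.Close()
	// Watch the directory containing the tools file(s); watching dirs
	// survives editors that replace files on save.
	if err := w.Add("."); err != nil {
		log.Fatal(err)
	}
	for { // runs until the process exits, like a reload goroutine would
		select {
		case ev := <-w.Events:
			if ev.Op&fsnotify.Write != 0 && strings.HasSuffix(ev.Name, ".yaml") {
				log.Printf("%s changed; would re-parse and hot-reload here", ev.Name)
			}
		case err := <-w.Errors:
			log.Println("watch error:", err)
		}
	}
}
```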
@@ -861,119 +822,92 @@ func run(cmd *Command) error {
}
}()
var allToolsFiles []ToolsFile
var toolsFile ToolsFile
// Load Prebuilt Configuration
if len(cmd.prebuiltConfigs) > 0 {
slices.Sort(cmd.prebuiltConfigs)
sourcesList := strings.Join(cmd.prebuiltConfigs, ", ")
logMsg := fmt.Sprintf("Using prebuilt tool configurations for: %s", sourcesList)
cmd.logger.InfoContext(ctx, logMsg)
for _, configName := range cmd.prebuiltConfigs {
buf, err := prebuiltconfigs.Get(configName)
if err != nil {
cmd.logger.ErrorContext(ctx, err.Error())
return err
}
// Update version string
cmd.cfg.Version += "+prebuilt." + configName
// Parse into ToolsFile struct
parsed, err := parseToolsFile(ctx, buf)
if err != nil {
errMsg := fmt.Errorf("unable to parse prebuilt tool configuration for '%s': %w", configName, err)
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
allToolsFiles = append(allToolsFiles, parsed)
if cmd.prebuiltConfig != "" {
// Make sure --prebuilt and --tools-file/--tools-files/--tools-folder flags are mutually exclusive
if cmd.tools_file != "" || len(cmd.tools_files) > 0 || cmd.tools_folder != "" {
errMsg := fmt.Errorf("--prebuilt and --tools-file/--tools-files/--tools-folder flags cannot be used simultaneously")
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
}
// Use prebuilt tools
buf, err := prebuiltconfigs.Get(cmd.prebuiltConfig)
if err != nil {
cmd.logger.ErrorContext(ctx, err.Error())
return err
}
logMsg := fmt.Sprint("Using prebuilt tool configuration for ", cmd.prebuiltConfig)
cmd.logger.InfoContext(ctx, logMsg)
// Append prebuilt.source to Version string for the User Agent
cmd.cfg.Version += "+prebuilt." + cmd.prebuiltConfig
// Determine if Custom Files should be loaded
// Check for explicit custom flags
isCustomConfigured := cmd.tools_file != "" || len(cmd.tools_files) > 0 || cmd.tools_folder != ""
// Determine if default 'tools.yaml' should be used (No prebuilt AND No custom flags)
useDefaultToolsFile := len(cmd.prebuiltConfigs) == 0 && !isCustomConfigured
if useDefaultToolsFile {
cmd.tools_file = "tools.yaml"
isCustomConfigured = true
}
// Load Custom Configurations
if isCustomConfigured {
// Enforce exclusivity among custom flags (tools-file vs tools-files vs tools-folder)
if (cmd.tools_file != "" && len(cmd.tools_files) > 0) ||
(cmd.tools_file != "" && cmd.tools_folder != "") ||
(len(cmd.tools_files) > 0 && cmd.tools_folder != "") {
toolsFile, err = parseToolsFile(ctx, buf)
if err != nil {
errMsg := fmt.Errorf("unable to parse prebuilt tool configuration: %w", err)
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
} else if len(cmd.tools_files) > 0 {
// Make sure --tools-file, --tools-files, and --tools-folder flags are mutually exclusive
if cmd.tools_file != "" || cmd.tools_folder != "" {
errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously")
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
var customTools ToolsFile
// Use multiple tools files
cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging %d tool configuration files", len(cmd.tools_files)))
var err error
if len(cmd.tools_files) > 0 {
// Use tools-files
cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging %d tool configuration files", len(cmd.tools_files)))
customTools, err = loadAndMergeToolsFiles(ctx, cmd.tools_files)
} else if cmd.tools_folder != "" {
// Use tools-folder
cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging all YAML files from directory: %s", cmd.tools_folder))
customTools, err = loadAndMergeToolsFolder(ctx, cmd.tools_folder)
} else {
// Use single file (tools-file or default `tools.yaml`)
buf, readFileErr := os.ReadFile(cmd.tools_file)
if readFileErr != nil {
errMsg := fmt.Errorf("unable to read tool file at %q: %w", cmd.tools_file, readFileErr)
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
customTools, err = parseToolsFile(ctx, buf)
if err != nil {
err = fmt.Errorf("unable to parse tool file at %q: %w", cmd.tools_file, err)
}
}
toolsFile, err = loadAndMergeToolsFiles(ctx, cmd.tools_files)
if err != nil {
cmd.logger.ErrorContext(ctx, err.Error())
return err
}
allToolsFiles = append(allToolsFiles, customTools)
} else if cmd.tools_folder != "" {
// Make sure --tools-folder and other flags are mutually exclusive
if cmd.tools_file != "" || len(cmd.tools_files) > 0 {
errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously")
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
// Use tools folder
cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging all YAML files from directory: %s", cmd.tools_folder))
var err error
toolsFile, err = loadAndMergeToolsFolder(ctx, cmd.tools_folder)
if err != nil {
cmd.logger.ErrorContext(ctx, err.Error())
return err
}
} else {
// Set default value of tools-file flag to tools.yaml
if cmd.tools_file == "" {
cmd.tools_file = "tools.yaml"
}
// Read single tool file contents
buf, err := os.ReadFile(cmd.tools_file)
if err != nil {
errMsg := fmt.Errorf("unable to read tool file at %q: %w", cmd.tools_file, err)
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
toolsFile, err = parseToolsFile(ctx, buf)
if err != nil {
errMsg := fmt.Errorf("unable to parse tool file at %q: %w", cmd.tools_file, err)
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
}
// Merge Everything
// This will error if custom tools collide with prebuilt tools
finalToolsFile, err := mergeToolsFiles(allToolsFiles...)
if err != nil {
cmd.logger.ErrorContext(ctx, err.Error())
return err
}
cmd.cfg.SourceConfigs, cmd.cfg.AuthServiceConfigs, cmd.cfg.ToolConfigs, cmd.cfg.ToolsetConfigs, cmd.cfg.PromptConfigs = toolsFile.Sources, toolsFile.AuthServices, toolsFile.Tools, toolsFile.Toolsets, toolsFile.Prompts
cmd.cfg.SourceConfigs = finalToolsFile.Sources
cmd.cfg.AuthServiceConfigs = finalToolsFile.AuthServices
cmd.cfg.EmbeddingModelConfigs = finalToolsFile.EmbeddingModels
cmd.cfg.ToolConfigs = finalToolsFile.Tools
cmd.cfg.ToolsetConfigs = finalToolsFile.Toolsets
cmd.cfg.PromptConfigs = finalToolsFile.Prompts
authSourceConfigs := finalToolsFile.AuthSources
authSourceConfigs := toolsFile.AuthSources
if authSourceConfigs != nil {
cmd.logger.WarnContext(ctx, "`authSources` is deprecated, use `authServices` instead")
for k, v := range authSourceConfigs {
if _, exists := cmd.cfg.AuthServiceConfigs[k]; exists {
errMsg := fmt.Errorf("resource conflict detected: authSource '%s' has the same name as an existing authService. Please rename your authSource", k)
cmd.logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
cmd.cfg.AuthServiceConfigs[k] = v
}
cmd.cfg.AuthServiceConfigs = authSourceConfigs
}
instrumentation, err := telemetry.CreateTelemetryInstrumentation(versionString)
@@ -1024,8 +958,9 @@ func run(cmd *Command) error {
}()
}
if isCustomConfigured && !cmd.cfg.DisableReload {
watchDirs, watchedFiles := resolveWatcherInputs(cmd.tools_file, cmd.tools_files, cmd.tools_folder)
watchDirs, watchedFiles := resolveWatcherInputs(cmd.tools_file, cmd.tools_files, cmd.tools_folder)
if !cmd.cfg.DisableReload {
// start watching the file(s) or folder for changes to trigger dynamic reloading
go watchChanges(ctx, watchDirs, watchedFiles, s)
}
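`resolveWatcherInputs` itself is not shown in this diff, only its call site. A hypothetical reconstruction of its contract, with guessed types, mapping the three flag inputs to the directories to watch plus the specific files to react to:

```go
package main

import (
	"fmt"
	"path/filepath"
)

// Hypothetical: the signature and map types below are guesses, since the
// diff only shows the two return values being passed to watchChanges.
func resolveWatcherInputs(toolsFile string, toolsFiles []string, toolsFolder string) (map[string]struct{}, map[string]struct{}) {
	watchDirs := make(map[string]struct{})
	watchedFiles := make(map[string]struct{})
	switch {
	case toolsFolder != "":
		// Folder mode: watch the whole directory, no per-file filter.
		watchDirs[toolsFolder] = struct{}{}
	case len(toolsFiles) > 0:
		for _, f := range toolsFiles {
			watchDirs[filepath.Dir(f)] = struct{}{}
			watchedFiles[f] = struct{}{}
		}
	default:
		watchDirs[filepath.Dir(toolsFile)] = struct{}{}
		watchedFiles[toolsFile] = struct{}{}
	}
	return watchDirs, watchedFiles
}

func main() {
	dirs, files := resolveWatcherInputs("tools.yaml", nil, "")
	fmt.Println(dirs, files) // map[.:{}] map[tools.yaml:{}]
}
```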

View File

@@ -32,7 +32,6 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/googleapis/genai-toolbox/internal/auth/google"
"github.com/googleapis/genai-toolbox/internal/embeddingmodels/gemini"
"github.com/googleapis/genai-toolbox/internal/log"
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
"github.com/googleapis/genai-toolbox/internal/prompts"
@@ -64,12 +63,6 @@ func withDefaults(c server.ServerConfig) server.ServerConfig {
if c.TelemetryServiceName == "" {
c.TelemetryServiceName = "toolbox"
}
if c.AllowedOrigins == nil {
c.AllowedOrigins = []string{"*"}
}
if c.AllowedHosts == nil {
c.AllowedHosts = []string{"*"}
}
return c
}
@@ -96,21 +89,6 @@ func invokeCommand(args []string) (*Command, string, error) {
return c, buf.String(), err
}
// invokeCommandWithContext executes the command with a context and returns the captured output.
func invokeCommandWithContext(ctx context.Context, args []string) (*Command, string, error) {
// Capture output using a buffer
buf := new(bytes.Buffer)
c := NewCommand(WithStreams(buf, buf))
c.SetArgs(args)
c.SilenceUsage = true
c.SilenceErrors = true
c.SetContext(ctx)
err := c.Execute()
return c, buf.String(), err
}
func TestVersion(t *testing.T) {
data, err := os.ReadFile("version.txt")
if err != nil {
@@ -216,20 +194,6 @@ func TestServerConfigFlags(t *testing.T) {
DisableReload: true,
}),
},
{
desc: "allowed origin",
args: []string{"--allowed-origins", "http://foo.com,http://bar.com"},
want: withDefaults(server.ServerConfig{
AllowedOrigins: []string{"http://foo.com", "http://bar.com"},
}),
},
{
desc: "allowed hosts",
args: []string{"--allowed-hosts", "http://foo.com,http://bar.com"},
want: withDefaults(server.ServerConfig{
AllowedHosts: []string{"http://foo.com", "http://bar.com"},
}),
},
}
for _, tc := range tcs {
t.Run(tc.desc, func(t *testing.T) {
@@ -420,27 +384,17 @@ func TestPrebuiltFlag(t *testing.T) {
tcs := []struct {
desc string
args []string
want []string
want string
}{
{
desc: "default value",
args: []string{},
want: []string{},
want: "",
},
{
desc: "single prebuilt flag",
args: []string{"--prebuilt", "alloydb"},
want: []string{"alloydb"},
},
{
desc: "multiple prebuilt flags",
args: []string{"--prebuilt", "alloydb", "--prebuilt", "bigquery"},
want: []string{"alloydb", "bigquery"},
},
{
desc: "comma separated prebuilt flags",
args: []string{"--prebuilt", "alloydb,bigquery"},
want: []string{"alloydb", "bigquery"},
desc: "custom pre built flag",
args: []string{"--tools-file", "alloydb"},
want: "alloydb",
},
}
for _, tc := range tcs {
@@ -449,8 +403,8 @@ func TestPrebuiltFlag(t *testing.T) {
if err != nil {
t.Fatalf("unexpected error invoking command: %s", err)
}
if diff := cmp.Diff(c.prebuiltConfigs, tc.want); diff != "" {
t.Fatalf("got %v, want %v, diff %s", c.prebuiltConfigs, tc.want, diff)
if c.tools_file != tc.want {
t.Fatalf("got %v, want %v", c.cfg, tc.want)
}
})
}
@@ -1372,7 +1326,6 @@ func TestPrebuiltTools(t *testing.T) {
cloudsqlmssqlobsvconfig, _ := prebuiltconfigs.Get("cloud-sql-mssql-observability")
serverless_spark_config, _ := prebuiltconfigs.Get("serverless-spark")
cloudhealthcare_config, _ := prebuiltconfigs.Get("cloud-healthcare")
snowflake_config, _ := prebuiltconfigs.Get("snowflake")
// Set environment variables
t.Setenv("API_KEY", "your_api_key")
@@ -1470,14 +1423,6 @@ func TestPrebuiltTools(t *testing.T) {
t.Setenv("CLOUD_HEALTHCARE_REGION", "your_gcp_region")
t.Setenv("CLOUD_HEALTHCARE_DATASET", "your_healthcare_dataset")
t.Setenv("SNOWFLAKE_ACCOUNT", "your_account")
t.Setenv("SNOWFLAKE_USER", "your_username")
t.Setenv("SNOWFLAKE_PASSWORD", "your_pass")
t.Setenv("SNOWFLAKE_DATABASE", "your_db")
t.Setenv("SNOWFLAKE_SCHEMA", "your_schema")
t.Setenv("SNOWFLAKE_WAREHOUSE", "your_wh")
t.Setenv("SNOWFLAKE_ROLE", "your_role")
ctx, err := testutils.ContextWithNewLogger()
if err != nil {
t.Fatalf("unexpected error: %s", err)
@@ -1503,7 +1448,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"cloud_sql_postgres_admin_tools": tools.ToolsetConfig{
Name: "cloud_sql_postgres_admin_tools",
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck", "clone_instance"},
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck"},
},
},
},
@@ -1513,7 +1458,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"cloud_sql_mysql_admin_tools": tools.ToolsetConfig{
Name: "cloud_sql_mysql_admin_tools",
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance"},
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation"},
},
},
},
@@ -1523,7 +1468,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"cloud_sql_mssql_admin_tools": tools.ToolsetConfig{
Name: "cloud_sql_mssql_admin_tools",
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance"},
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation"},
},
},
},
@@ -1533,7 +1478,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"alloydb_postgres_database_tools": tools.ToolsetConfig{
Name: "alloydb_postgres_database_tools",
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats", "list_stored_procedure"},
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats"},
},
},
},
@@ -1563,7 +1508,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"cloud_sql_postgres_database_tools": tools.ToolsetConfig{
Name: "cloud_sql_postgres_database_tools",
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats", "list_stored_procedure"},
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats"},
},
},
},
@@ -1603,7 +1548,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"serverless_spark_tools": tools.ToolsetConfig{
Name: "serverless_spark_tools",
ToolNames: []string{"list_batches", "get_batch", "cancel_batch", "create_pyspark_batch", "create_spark_batch"},
ToolNames: []string{"list_batches", "get_batch", "cancel_batch"},
},
},
},
@@ -1643,7 +1588,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"looker_tools": tools.ToolsetConfig{
Name: "looker_tools",
ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look", "make_look", "get_dashboards", "run_dashboard", "make_dashboard", "add_dashboard_element", "add_dashboard_filter", "generate_embed_url", "health_pulse", "health_analyze", "health_vacuum", "dev_mode", "get_projects", "get_project_files", "get_project_file", "create_project_file", "update_project_file", "delete_project_file", "get_connections", "get_connection_schemas", "get_connection_databases", "get_connection_tables", "get_connection_table_columns"},
ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look", "make_look", "get_dashboards", "run_dashboard", "make_dashboard", "add_dashboard_element", "health_pulse", "health_analyze", "health_vacuum", "dev_mode", "get_projects", "get_project_files", "get_project_file", "create_project_file", "update_project_file", "delete_project_file", "get_connections", "get_connection_schemas", "get_connection_databases", "get_connection_tables", "get_connection_table_columns"},
},
},
},
@@ -1663,7 +1608,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"postgres_database_tools": tools.ToolsetConfig{
Name: "postgres_database_tools",
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats", "list_query_stats", "get_column_cardinality", "list_publication_tables", "list_tablespaces", "list_pg_settings", "list_database_stats", "list_roles", "list_table_stats", "list_stored_procedure"},
ToolNames: []string{"execute_sql", "list_tables", "list_active_queries", "list_available_extensions", "list_installed_extensions", "list_autovacuum_configurations", "list_memory_configurations", "list_top_bloated_tables", "list_replication_slots", "list_invalid_indexes", "get_query_plan", "list_views", "list_schemas", "database_overview", "list_triggers", "list_indexes", "list_sequences", "long_running_transactions", "list_locks", "replication_stats"},
},
},
},
@@ -1673,7 +1618,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"spanner-database-tools": tools.ToolsetConfig{
Name: "spanner-database-tools",
ToolNames: []string{"execute_sql", "execute_sql_dql", "list_tables", "list_graphs"},
ToolNames: []string{"execute_sql", "execute_sql_dql", "list_tables"},
},
},
},
@@ -1775,16 +1720,6 @@ func TestPrebuiltTools(t *testing.T) {
},
},
},
{
name: "Snowflake prebuilt tool",
in: snowflake_config,
wantToolset: server.ToolsetConfigs{
"snowflake_tools": tools.ToolsetConfig{
Name: "snowflake_tools",
ToolNames: []string{"execute_sql", "list_tables"},
},
},
},
}
for _, tc := range tcs {
@@ -1810,6 +1745,11 @@ func TestMutuallyExclusiveFlags(t *testing.T) {
args []string
errString string
}{
{
desc: "--prebuilt and --tools-file",
args: []string{"--prebuilt", "alloydb", "--tools-file", "my.yaml"},
errString: "--prebuilt and --tools-file/--tools-files/--tools-folder flags cannot be used simultaneously",
},
{
desc: "--tools-file and --tools-files",
args: []string{"--tools-file", "my.yaml", "--tools-files", "a.yaml,b.yaml"},
@@ -1870,10 +1810,9 @@ func TestFileLoadingErrors(t *testing.T) {
func TestMergeToolsFiles(t *testing.T) {
file1 := ToolsFile{
Sources: server.SourceConfigs{"source1": httpsrc.Config{Name: "source1"}},
Tools: server.ToolConfigs{"tool1": http.Config{Name: "tool1"}},
Toolsets: server.ToolsetConfigs{"set1": tools.ToolsetConfig{Name: "set1"}},
EmbeddingModels: server.EmbeddingModelConfigs{"model1": gemini.Config{Name: "gemini-text"}},
Sources: server.SourceConfigs{"source1": httpsrc.Config{Name: "source1"}},
Tools: server.ToolConfigs{"tool1": http.Config{Name: "tool1"}},
Toolsets: server.ToolsetConfigs{"set1": tools.ToolsetConfig{Name: "set1"}},
}
file2 := ToolsFile{
AuthServices: server.AuthServiceConfigs{"auth1": google.Config{Name: "auth1"}},
@@ -1895,12 +1834,11 @@ func TestMergeToolsFiles(t *testing.T) {
name: "merge two distinct files",
files: []ToolsFile{file1, file2},
want: ToolsFile{
Sources: server.SourceConfigs{"source1": httpsrc.Config{Name: "source1"}},
AuthServices: server.AuthServiceConfigs{"auth1": google.Config{Name: "auth1"}},
Tools: server.ToolConfigs{"tool1": http.Config{Name: "tool1"}, "tool2": http.Config{Name: "tool2"}},
Toolsets: server.ToolsetConfigs{"set1": tools.ToolsetConfig{Name: "set1"}, "set2": tools.ToolsetConfig{Name: "set2"}},
Prompts: server.PromptConfigs{},
EmbeddingModels: server.EmbeddingModelConfigs{"model1": gemini.Config{Name: "gemini-text"}},
Sources: server.SourceConfigs{"source1": httpsrc.Config{Name: "source1"}},
AuthServices: server.AuthServiceConfigs{"auth1": google.Config{Name: "auth1"}},
Tools: server.ToolConfigs{"tool1": http.Config{Name: "tool1"}, "tool2": http.Config{Name: "tool2"}},
Toolsets: server.ToolsetConfigs{"set1": tools.ToolsetConfig{Name: "set1"}, "set2": tools.ToolsetConfig{Name: "set2"}},
Prompts: server.PromptConfigs{},
},
wantErr: false,
},
@@ -1913,24 +1851,22 @@ func TestMergeToolsFiles(t *testing.T) {
name: "merge single file",
files: []ToolsFile{file1},
want: ToolsFile{
Sources: file1.Sources,
AuthServices: make(server.AuthServiceConfigs),
EmbeddingModels: server.EmbeddingModelConfigs{"model1": gemini.Config{Name: "gemini-text"}},
Tools: file1.Tools,
Toolsets: file1.Toolsets,
Prompts: server.PromptConfigs{},
Sources: file1.Sources,
AuthServices: make(server.AuthServiceConfigs),
Tools: file1.Tools,
Toolsets: file1.Toolsets,
Prompts: server.PromptConfigs{},
},
},
{
name: "merge empty list",
files: []ToolsFile{},
want: ToolsFile{
Sources: make(server.SourceConfigs),
AuthServices: make(server.AuthServiceConfigs),
EmbeddingModels: make(server.EmbeddingModelConfigs),
Tools: make(server.ToolConfigs),
Toolsets: make(server.ToolsetConfigs),
Prompts: server.PromptConfigs{},
Sources: make(server.SourceConfigs),
AuthServices: make(server.AuthServiceConfigs),
Tools: make(server.ToolConfigs),
Toolsets: make(server.ToolsetConfigs),
Prompts: server.PromptConfigs{},
},
},
}
@@ -1956,234 +1892,3 @@ func TestMergeToolsFiles(t *testing.T) {
})
}
}
func TestPrebuiltAndCustomTools(t *testing.T) {
t.Setenv("SQLITE_DATABASE", "test.db")
// Setup custom tools file
customContent := `
tools:
custom_tool:
kind: http
source: my-http
method: GET
path: /
description: "A custom tool for testing"
sources:
my-http:
kind: http
baseUrl: http://example.com
`
customFile := filepath.Join(t.TempDir(), "custom.yaml")
if err := os.WriteFile(customFile, []byte(customContent), 0644); err != nil {
t.Fatal(err)
}
// Tool Conflict File
// SQLite prebuilt has a tool named 'list_tables'
toolConflictContent := `
tools:
list_tables:
kind: http
source: my-http
method: GET
path: /
description: "Conflicting tool"
sources:
my-http:
kind: http
baseUrl: http://example.com
`
toolConflictFile := filepath.Join(t.TempDir(), "tool_conflict.yaml")
if err := os.WriteFile(toolConflictFile, []byte(toolConflictContent), 0644); err != nil {
t.Fatal(err)
}
// Source Conflict File
// SQLite prebuilt has a source named 'sqlite-source'
sourceConflictContent := `
sources:
sqlite-source:
kind: http
baseUrl: http://example.com
tools:
dummy_tool:
kind: http
source: sqlite-source
method: GET
path: /
description: "Dummy"
`
sourceConflictFile := filepath.Join(t.TempDir(), "source_conflict.yaml")
if err := os.WriteFile(sourceConflictFile, []byte(sourceConflictContent), 0644); err != nil {
t.Fatal(err)
}
// Toolset Conflict File
// SQLite prebuilt has a toolset named 'sqlite_database_tools'
toolsetConflictContent := `
sources:
dummy-src:
kind: http
baseUrl: http://example.com
tools:
dummy_tool:
kind: http
source: dummy-src
method: GET
path: /
description: "Dummy"
toolsets:
sqlite_database_tools:
- dummy_tool
`
toolsetConflictFile := filepath.Join(t.TempDir(), "toolset_conflict.yaml")
if err := os.WriteFile(toolsetConflictFile, []byte(toolsetConflictContent), 0644); err != nil {
t.Fatal(err)
}
// Legacy Auth File
authContent := `
authSources:
legacy-auth:
kind: google
clientId: "test-client-id"
`
authFile := filepath.Join(t.TempDir(), "auth.yaml")
if err := os.WriteFile(authFile, []byte(authContent), 0644); err != nil {
t.Fatal(err)
}
testCases := []struct {
desc string
args []string
wantErr bool
errString string
cfgCheck func(server.ServerConfig) error
}{
{
desc: "success mixed",
args: []string{"--prebuilt", "sqlite", "--tools-file", customFile},
wantErr: false,
cfgCheck: func(cfg server.ServerConfig) error {
if _, ok := cfg.ToolConfigs["custom_tool"]; !ok {
return fmt.Errorf("custom tool not found")
}
if _, ok := cfg.ToolConfigs["list_tables"]; !ok {
return fmt.Errorf("prebuilt tool 'list_tables' not found")
}
return nil
},
},
{
desc: "sqlite called twice error",
args: []string{"--prebuilt", "sqlite", "--prebuilt", "sqlite"},
wantErr: true,
errString: "resource conflicts detected",
},
{
desc: "tool conflict error",
args: []string{"--prebuilt", "sqlite", "--tools-file", toolConflictFile},
wantErr: true,
errString: "resource conflicts detected",
},
{
desc: "source conflict error",
args: []string{"--prebuilt", "sqlite", "--tools-file", sourceConflictFile},
wantErr: true,
errString: "resource conflicts detected",
},
{
desc: "toolset conflict error",
args: []string{"--prebuilt", "sqlite", "--tools-file", toolsetConflictFile},
wantErr: true,
errString: "resource conflicts detected",
},
{
desc: "legacy auth additive",
args: []string{"--prebuilt", "sqlite", "--tools-file", authFile},
wantErr: false,
cfgCheck: func(cfg server.ServerConfig) error {
if _, ok := cfg.AuthServiceConfigs["legacy-auth"]; !ok {
return fmt.Errorf("legacy auth source not merged into auth services")
}
return nil
},
},
}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond)
defer cancel()
cmd, output, err := invokeCommandWithContext(ctx, tc.args)
if tc.wantErr {
if err == nil {
t.Fatalf("expected an error but got none")
}
if !strings.Contains(err.Error(), tc.errString) {
t.Errorf("expected error message to contain %q, but got %q", tc.errString, err.Error())
}
} else {
if err != nil && err != context.DeadlineExceeded && err != context.Canceled {
t.Fatalf("unexpected error: %v", err)
}
if !strings.Contains(output, "Server ready to serve!") {
t.Errorf("server did not start successfully (no ready message found). Output:\n%s", output)
}
if tc.cfgCheck != nil {
if err := tc.cfgCheck(cmd.cfg); err != nil {
t.Errorf("config check failed: %v", err)
}
}
}
})
}
}
func TestDefaultToolsFileBehavior(t *testing.T) {
t.Setenv("SQLITE_DATABASE", "test.db")
testCases := []struct {
desc string
args []string
expectRun bool
errString string
}{
{
desc: "no flags (defaults to tools.yaml)",
args: []string{},
expectRun: false,
errString: "tools.yaml", // Expect error because tools.yaml doesn't exist in test env
},
{
desc: "prebuilt only (skips tools.yaml)",
args: []string{"--prebuilt", "sqlite"},
expectRun: true,
},
}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond)
defer cancel()
_, output, err := invokeCommandWithContext(ctx, tc.args)
if tc.expectRun {
if err != nil && err != context.DeadlineExceeded && err != context.Canceled {
t.Fatalf("expected server start, got error: %v", err)
}
// Verify it actually started
if !strings.Contains(output, "Server ready to serve!") {
t.Errorf("server did not start successfully (no ready message found). Output:\n%s", output)
}
} else {
if err == nil {
t.Fatalf("expected error reading default file, got nil")
}
if !strings.Contains(err.Error(), tc.errString) {
t.Errorf("expected error message to contain %q, but got %q", tc.errString, err.Error())
}
}
})
}
}

View File

@@ -1 +1 @@
0.25.0
0.21.0

View File

@@ -8,25 +8,54 @@ An editor configured to use the AlloyDB MCP server can use its AI capabilities t
* **Provision & Manage Infrastructure**: Create and manage AlloyDB clusters, instances, and users
To connect to the database to explore and query data, search the MCP store for the AlloyDB for PostgreSQL MCP Server.
## Installation and Setup
## Prerequisites
### Prerequisites
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
1. **Download the Toolbox binary**:
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
<!-- {x-release-please-start-version} -->
```
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
```
<!-- {x-release-please-end} -->
2. **Make it executable**:
```bash
chmod +x toolbox
```
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
```bash
sudo mv toolbox /usr/local/bin/
# sudo mv toolbox /usr/bin/
```
**On Windows, move binary to the `WindowsApps\` folder**:
```
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
```
**Tip:** Ensure the destination folder for your binary is included in
your system's PATH environment variable. To check `PATH`, use `echo
$PATH` (or `echo %PATH%` on Windows).
**Note:** You may need to restart Antigravity for changes to take effect.
* [Node.js](https://nodejs.org/) installed.
* A Google Cloud project with the **AlloyDB API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
* AlloyDB Admin (`roles/alloydb.admin`) (for managing infrastructure)
* Service Usage Consumer (`roles/serviceusage.serviceUsageConsumer`)
## Install & Configuration
### Configuration
In the Antigravity MCP Store, click the "Install" button.
You'll now be able to see all enabled tools in the "Tools" tab.
> [!NOTE]
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "AlloyDB for PostgreSQL Admin", and click "Install".
## Usage
@@ -53,20 +82,6 @@ The AlloyDB MCP server provides the following tools:
| `list_users` | List users in a given project and location. |
| `wait_for_operation` | Poll the operations API until the operation is done. |
## Custom MCP Server Configuration
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
```json
{
"mcpServers": {
"alloydb-admin": {
"command": "npx",
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "alloydb-postgres-admin", "--stdio"]
}
}
}
```
## Documentation

View File

@@ -11,29 +11,66 @@ An editor configured to use the AlloyDB MCP server can use its AI capabilities t
- **Monitor Performance** - View active queries, query plans, and other performance metrics (via observability tools)
- **Manage Extensions** - List available and installed PostgreSQL extensions
For AlloyDB infrastructure management, search the MCP store for the AlloyDB for PostgreSQL Admin MCP Server.
## Installation and Setup
## Prerequisites
### Prerequisites
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
1. **Download the Toolbox binary**:
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
<!-- {x-release-please-start-version} -->
```
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
```
<!-- {x-release-please-end} -->
2. **Make it executable**:
```bash
chmod +x toolbox
```
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
```bash
sudo mv toolbox /usr/local/bin/
# sudo mv toolbox /usr/bin/
```
**On Windows, move binary to the `WindowsApps\` folder**:
```
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
```
**Tip:** Ensure the destination folder for your binary is included in
your system's PATH environment variable. To check `PATH`, use `echo
$PATH` (or `echo %PATH%` on Windows).
**Note:** You may need to restart Antigravity for changes to take effect.
* [Node.js](https://nodejs.org/) installed.
* A Google Cloud project with the **AlloyDB API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
* AlloyDB Client (`roles/alloydb.client`) (for connecting and querying)
* Service Usage Consumer (`roles/serviceusage.serviceUsageConsumer`)
> **Note:** If your AlloyDB instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
### Configuration
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
2. Add the required inputs for your [cluster](https://docs.cloud.google.com/alloydb/docs/cluster-list) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "AlloyDB for PostgreSQL", and click "Install".
3. **Configuration**: The following configuration is needed for the server:
* AlloyDB Project ID: The GCP project ID.
* AlloyDB Region: The region of your AlloyDB instance.
* AlloyDB Cluster ID: The ID of your AlloyDB cluster.
* AlloyDB Instance ID: The ID of your AlloyDB instance.
* AlloyDB Database Name: The name of the database.
* AlloyDB Database User: (Optional) The database username. Defaults to IAM authentication if unspecified.
* AlloyDB Database Password: (Optional) The password for the database user. Defaults to IAM authentication if unspecified.
* AlloyDB IP Type: (Optional) The IP type, i.e., "Public" or "Private". Defaults to "Public" if unspecified.
> [!NOTE]
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
You'll now be able to see all enabled tools in the "Tools" tab.
> If your AlloyDB instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
## Usage
@@ -61,34 +98,6 @@ The AlloyDB MCP server provides the following tools:
| `list_replication_slots` | List replication slots. |
| `list_invalid_indexes` | List invalid indexes. |
## Custom MCP Server Configuration
The AlloyDB MCP server is configured using environment variables.
```bash
export ALLOYDB_POSTGRES_PROJECT="<your-gcp-project-id>"
export ALLOYDB_POSTGRES_REGION="<your-alloydb-region>"
export ALLOYDB_POSTGRES_CLUSTER="<your-alloydb-cluster-id>"
export ALLOYDB_POSTGRES_INSTANCE="<your-alloydb-instance-id>"
export ALLOYDB_POSTGRES_DATABASE="<your-database-name>"
export ALLOYDB_POSTGRES_USER="<your-database-user>" # Optional
export ALLOYDB_POSTGRES_PASSWORD="<your-database-password>" # Optional
export ALLOYDB_POSTGRES_IP_TYPE="PUBLIC" # Optional: `PUBLIC`, `PRIVATE`, `PSC`. Defaults to `PUBLIC`.
```
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
```json
{
"mcpServers": {
"alloydb-postgres": {
"command": "npx",
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "alloydb-postgres", "--stdio"]
}
}
}
```
## Documentation
For more information, visit the [AlloyDB for PostgreSQL documentation](https://cloud.google.com/alloydb/docs).

View File

@@ -10,25 +10,56 @@ An editor configured to use the BigQuery MCP server can use its AI capabilities
- **Seamless Workflow:** Stay within your CLI, eliminating the need to constantly switch to the GCP console for generating analytical insights.
- **Run Advanced Analytics:** Generate forecasts and perform contribution analysis using built-in advanced tools.
## Prerequisites
## Installation and Setup
### Prerequisites
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
1. **Download the Toolbox binary**:
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
<!-- {x-release-please-start-version} -->
```
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
```
<!-- {x-release-please-end} -->
2. **Make it executable**:
```bash
chmod +x toolbox
```
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
```bash
sudo mv toolbox /usr/local/bin/
# sudo mv toolbox /usr/bin/
```
**On Windows, move binary to the `WindowsApps\` folder**:
```
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
```
**Tip:** Ensure the destination folder for your binary is included in
your system's PATH environment variable. To check `PATH`, use `echo
$PATH` (or `echo %PATH%` on Windows).
**Note:** You may need to restart Antigravity for changes to take effect.
* [Node.js](https://nodejs.org/) installed.
* A Google Cloud project with the **BigQuery API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
* BigQuery User (`roles/bigquery.user`)
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
> [!NOTE]
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
You'll now be able to see all enabled tools in the "Tools" tab.
### Configuration
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "BigQuery", and click "Install".
3. **Configuration**: The following configuration is needed for the server:
* BigQuery Project ID: The GCP project ID.
* BigQuery Location: (Optional) The location of your BigQuery dataset (e.g. "US", "EU").
### Usage
@@ -60,30 +91,6 @@ The BigQuery MCP server provides the following tools:
| `analyze_contribution` | Perform contribution analysis, also called key driver analysis. |
| `search_catalog` | Search for tables based on the provided query. |
## Custom MCP Server Configuration
The BigQuery MCP server is configured using environment variables.
```bash
export BIGQUERY_PROJECT="<your-gcp-project-id>"
export BIGQUERY_LOCATION="<your-dataset-location>" # Optional
export BIGQUERY_USE_CLIENT_OAUTH="true" # Optional
export BIGQUERY_SCOPES="<comma-separated-scopes>" # Optional
```
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
```json
{
"mcpServers": {
"bigquery": {
"command": "npx",
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "bigquery", "--stdio"]
}
}
}
```
## Documentation
For more information, visit the [BigQuery documentation](https://cloud.google.com/bigquery/docs).

View File

@@ -8,25 +8,54 @@ An editor configured to use the Cloud SQL for SQL Server MCP server can use its
- **Provision & Manage Infrastructure** - Create and manage Cloud SQL instances and users
To connect to the database to explore and query data, search the MCP store for the Cloud SQL for SQL Server MCP Server.
## Installation and Setup
## Prerequisites
### Prerequisites
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
1. **Download the Toolbox binary**:
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
<!-- {x-release-please-start-version} -->
```
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
```
<!-- {x-release-please-end} -->
2. **Make it executable**:
```bash
chmod +x toolbox
```
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
```bash
sudo mv toolbox /usr/local/bin/
# sudo mv toolbox /usr/bin/
```
**On Windows, move binary to the `WindowsApps\` folder**:
```
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
```
**Tip:** Ensure the destination folder for your binary is included in
your system's PATH environment variable. To check `PATH`, use `echo
$PATH` (or `echo %PATH%` on Windows).
**Note:** You may need to restart Antigravity for changes to take effect.
* [Node.js](https://nodejs.org/) installed.
* A Google Cloud project with the **Cloud SQL Admin API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
* Cloud SQL Viewer (`roles/cloudsql.viewer`)
* Cloud SQL Admin (`roles/cloudsql.admin`)
## Install & Configuration
### Configuration
In the Antigravity MCP Store, click the "Install" button.
You'll now be able to see all enabled tools in the "Tools" tab.
> [!NOTE]
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "Cloud SQL for SQL Server Admin", and click "Install".
## Usage
@@ -49,22 +78,6 @@ The Cloud SQL for SQL Server MCP server provides the following tools:
| `list_users` | List users in a given project and location. |
| `wait_for_operation` | Poll the operations API until the operation is done. |
## Custom MCP Server Configuration
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
```json
{
"mcpServers": {
"cloud-sql-sqlserver-admin": {
"command": "npx",
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "cloud-sql-mssql-admin", "--stdio"]
}
}
}
```
## Documentation
For more information, visit the [Cloud SQL for SQL Server documentation](https://cloud.google.com/sql/docs/sqlserver).

View File

@@ -9,28 +9,65 @@ An editor configured to use the Cloud SQL for SQL Server MCP server can use its
- **Query Data** - Execute SQL queries
- **Explore Schema** - List tables and view schema details
For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL for SQL Server Admin MCP Server.
## Installation and Setup
## Prerequisites
### Prerequisites
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
1. **Download the Toolbox binary**:
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
<!-- {x-release-please-start-version} -->
```
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
```
<!-- {x-release-please-end} -->
2. **Make it executable**:
```bash
chmod +x toolbox
```
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
```bash
sudo mv toolbox /usr/local/bin/
# sudo mv toolbox /usr/bin/
```
**On Windows, move binary to the `WindowsApps\` folder**:
```
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
```
**Tip:** Ensure the destination folder for your binary is included in
your system's PATH environment variable. To check `PATH`, use `echo
$PATH` (or `echo %PATH%` on Windows).
**Note:** You may need to restart Antigravity for changes to take effect.
* [Node.js](https://nodejs.org/) installed.
* A Google Cloud project with the **Cloud SQL Admin API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
* Cloud SQL Client (`roles/cloudsql.client`)
> **Note:** If your instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
### Configuration
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/sqlserver/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
You'll now be able to see all enabled tools in the "Tools" tab.
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "Cloud SQL for SQL Server", and click "Install".
3. **Configuration**: The following configuration is needed for the server:
* Cloud SQL Project ID: The GCP project ID.
* Cloud SQL Region: The region of your Cloud SQL instance.
* Cloud SQL Instance ID: The ID of your Cloud SQL instance.
* Cloud SQL Database Name: The name of the database.
* Cloud SQL Database User: The database username.
* Cloud SQL Database Password: The password for the database user.
* Cloud SQL IP Type: (Optional) The IP type, i.e., "Public" or "Private". Defaults to "Public" if unspecified.
> [!NOTE]
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
> If your instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
## Usage
@@ -48,42 +85,6 @@ The Cloud SQL for SQL Server MCP server provides the following tools:
| `execute_sql` | Use this tool to execute SQL. |
| `list_tables` | Lists detailed schema information for user-created tables. |
## Custom MCP Server Configuration
The MCP server is configured using environment variables.
```bash
export CLOUD_SQL_MSSQL_PROJECT="<your-gcp-project-id>"
export CLOUD_SQL_MSSQL_REGION="<your-cloud-sql-region>"
export CLOUD_SQL_MSSQL_INSTANCE="<your-cloud-sql-instance-id>"
export CLOUD_SQL_MSSQL_DATABASE="<your-database-name>"
export CLOUD_SQL_MSSQL_USER="<your-database-user>" # Optional
export CLOUD_SQL_MSSQL_PASSWORD="<your-database-password>" # Optional
export CLOUD_SQL_MSSQL_IP_TYPE="PUBLIC" # Optional: `PUBLIC`, `PRIVATE`, `PSC`. Defaults to `PUBLIC`.
```
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
```json
{
"mcpServers": {
"cloud-sql-mssql": {
"command": "npx",
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "cloud-sql-mssql", "--stdio"],
"env": {
"CLOUD_SQL_MSSQL_PROJECT": "your-project-id",
"CLOUD_SQL_MSSQL_REGION": "your-region",
"CLOUD_SQL_MSSQL_INSTANCE": "your-instance-id",
"CLOUD_SQL_MSSQL_DATABASE": "your-database-name",
"CLOUD_SQL_MSSQL_USER": "your-username",
"CLOUD_SQL_MSSQL_PASSWORD": "your-password"
}
}
}
}
```
## Documentation
For more information, visit the [Cloud SQL for SQL Server documentation](https://cloud.google.com/sql/docs/sqlserver).

View File

@@ -8,25 +8,54 @@ An editor configured to use the Cloud SQL for MySQL MCP server can use its AI ca
- **Provision & Manage Infrastructure** - Create and manage Cloud SQL instances and users
To connect to the database to explore and query data, search the MCP store for the Cloud SQL for MySQL MCP Server.
## Installation and Setup
## Prerequisites
### Prerequisites
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
1. **Download the Toolbox binary**:
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
<!-- {x-release-please-start-version} -->
```
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
```
<!-- {x-release-please-end} -->
2. **Make it executable**:
```bash
chmod +x toolbox
```
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
```bash
sudo mv toolbox /usr/local/bin/
# sudo mv toolbox /usr/bin/
```
**On Windows, move binary to the `WindowsApps\` folder**:
```
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
```
**Tip:** Ensure the destination folder for your binary is included in
your system's PATH environment variable. To check `PATH`, use `echo
$PATH` (or `echo %PATH%` on Windows).
**Note:** You may need to restart Antigravity for changes to take effect.
* [Node.js](https://nodejs.org/) installed.
* A Google Cloud project with the **Cloud SQL Admin API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
* Cloud SQL Viewer (`roles/cloudsql.viewer`)
* Cloud SQL Admin (`roles/cloudsql.admin`)
## Install & Configuration
### Configuration
In the Antigravity MCP Store, click the "Install" button.
You'll now be able to see all enabled tools in the "Tools" tab.
> [!NOTE]
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "Cloud SQL for MySQL Admin", and click "Install".
## Usage
@@ -49,21 +78,6 @@ The Cloud SQL for MySQL MCP server provides the following tools:
| `list_users` | List users in a given project and location. |
| `wait_for_operation` | Poll the operations API until the operation is done. |
## Custom MCP Server Configuration
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
```json
{
"mcpServers": {
"cloud-sql-mysql-admin": {
"command": "npx",
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "cloud-sql-mysql-admin", "--stdio"]
}
}
}
```
## Documentation
For more information, visit the [Cloud SQL for MySQL documentation](https://cloud.google.com/sql/docs/mysql).

View File

@@ -11,28 +11,64 @@ An editor configured to use the Cloud SQL for MySQL MCP server can use its AI ca
- **Database Maintenance** - Check for fragmentation and missing indexes
- **Monitor Performance** - View active queries
For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL for MySQL Admin MCP Server.
## Installation and Setup
## Prerequisites
### Prerequisites
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
1. **Download the Toolbox binary**:
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
<!-- {x-release-please-start-version} -->
```
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
```
<!-- {x-release-please-end} -->
2. **Make it executable**:
```bash
chmod +x toolbox
```
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
```bash
sudo mv toolbox /usr/local/bin/
# sudo mv toolbox /usr/bin/
```
**On Windows, move binary to the `WindowsApps\` folder**:
```
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
```
**Tip:** Ensure the destination folder for your binary is included in
your system's PATH environment variable. To check `PATH`, use `echo
$PATH` (or `echo %PATH%` on Windows).
**Note:** You may need to restart Antigravity for changes to take effect.
* [Node.js](https://nodejs.org/) installed.
* A Google Cloud project with the **Cloud SQL Admin API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
* Cloud SQL Client (`roles/cloudsql.client`)
> **Note:** If your instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
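If Application Default Credentials are not yet set up on your machine, one common way to create them (assuming the `gcloud` CLI is installed) is:
```bash
# Writes ADC to the well-known location that Google client libraries check.
gcloud auth application-default login
```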
### Configuration
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/mysql/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
You'll now be able to see all enabled tools in the "Tools" tab.
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "Cloud SQL for MySQL", and click "Install".
3. **Configuration**: The following configuration is needed for the server:
* Cloud SQL Project ID: The GCP project ID.
* Cloud SQL Region: The region of your Cloud SQL instance.
* Cloud SQL Instance ID: The ID of your Cloud SQL instance.
* Cloud SQL Database Name: The name of the database.
* Cloud SQL Database User: The database username.
* Cloud SQL Database Password: The password for the database user.
* Cloud SQL IP Type: (Optional) The IP type, i.e. "Public" or "Private". Defaults to "Public" if unspecified.
> [!NOTE]
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
> If your instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
## Usage
@@ -55,41 +91,6 @@ The Cloud SQL for MySQL MCP server provides the following tools:
| `list_tables_missing_unique_indexes` | Find tables that do not have a primary or unique key constraint. |
| `list_table_fragmentation` | List table fragmentation in MySQL. |
## Custom MCP Server Configuration
The MCP server is configured using environment variables.
```bash
export CLOUD_SQL_MYSQL_PROJECT="<your-gcp-project-id>"
export CLOUD_SQL_MYSQL_REGION="<your-cloud-sql-region>"
export CLOUD_SQL_MYSQL_INSTANCE="<your-cloud-sql-instance-id>"
export CLOUD_SQL_MYSQL_DATABASE="<your-database-name>"
export CLOUD_SQL_MYSQL_USER="<your-database-user>" # Optional
export CLOUD_SQL_MYSQL_PASSWORD="<your-database-password>" # Optional
export CLOUD_SQL_MYSQL_IP_TYPE="PUBLIC" # Optional: `PUBLIC`, `PRIVATE`, `PSC`. Defaults to `PUBLIC`.
```
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
```json
{
"mcpServers": {
"cloud-sql-mysql": {
"command": "npx",
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "cloud-sql-mysql", "--stdio"],
"env": {
"CLOUD_SQL_MYSQL_PROJECT": "your-project-id",
"CLOUD_SQL_MYSQL_REGION": "your-region",
"CLOUD_SQL_MYSQL_INSTANCE": "your-instance-id",
"CLOUD_SQL_MYSQL_DATABASE": "your-database-name",
"CLOUD_SQL_MYSQL_USER": "your-username",
"CLOUD_SQL_MYSQL_PASSWORD": "your-password"
}
}
}
}
```
## Documentation
For more information, visit the [Cloud SQL for MySQL documentation](https://cloud.google.com/sql/docs/mysql).

View File

@@ -8,25 +8,54 @@ An editor configured to use the Cloud SQL for PostgreSQL MCP server can use its
- **Provision & Manage Infrastructure** - Create and manage Cloud SQL instances and users
To connect to the database to explore and query data, search the MCP store for the Cloud SQL for PostgreSQL MCP Server.
## Installation and Setup
## Prerequisites
### Prerequisites
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
1. **Download the Toolbox binary**:
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
<!-- {x-release-please-start-version} -->
```
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
```
<!-- {x-release-please-end} -->
2. **Make it executable**:
```bash
chmod +x toolbox
```
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
```bash
sudo mv toolbox /usr/local/bin/
# sudo mv toolbox /usr/bin/
```
**On Windows, move binary to the `WindowsApps\` folder**:
```
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
```
**Tip:** Ensure the destination folder for your binary is included in
your system's PATH environment variable. To check `PATH`, use `echo
$PATH` (or `echo %PATH%` on Windows).
**Note:** You may need to restart Antigravity for changes to take effect.
* [Node.js](https://nodejs.org/) installed.
* A Google Cloud project with the **Cloud SQL Admin API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
* Cloud SQL Viewer (`roles/cloudsql.viewer`)
* Cloud SQL Admin (`roles/cloudsql.admin`)
## Install & Configuration
### Configuration
In the Antigravity MCP Store, click the "Install" button.
You'll now be able to see all enabled tools in the "Tools" tab.
> [!NOTE]
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "Cloud SQL for PostgreSQL Admin", and click "Install".
## Usage
@@ -49,21 +78,6 @@ The Cloud SQL for PostgreSQL MCP server provides the following tools:
| `list_users` | List users in a given project and location. |
| `wait_for_operation` | Poll the operations API until the operation is done. |
## Custom MCP Server Configuration
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
```json
{
"mcpServers": {
"cloud-sql-postgres-admin": {
"command": "npx",
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "cloud-sql-postgres-admin", "--stdio"]
}
}
}
```
## Documentation
For more information, visit the [Cloud SQL for PostgreSQL documentation](https://cloud.google.com/sql/docs/postgres).

View File

@@ -11,28 +11,65 @@ An editor configured to use the Cloud SQL for PostgreSQL MCP server can use its
- **Monitor Performance** - View active queries, bloat, and memory configurations
- **Manage Extensions** - List available and installed extensions
For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL for PostgreSQL Admin MCP Server.
## Installation and Setup
## Prerequisites
### Prerequisites
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
1. **Download the Toolbox binary**:
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
<!-- {x-release-please-start-version} -->
```
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
```
<!-- {x-release-please-end} -->
2. **Make it executable**:
```bash
chmod +x toolbox
```
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
```bash
sudo mv toolbox /usr/local/bin/
# sudo mv toolbox /usr/bin/
```
**On Windows, move binary to the `WindowsApps\` folder**:
```
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
```
**Tip:** Ensure the destination folder for your binary is included in
your system's PATH environment variable. To check `PATH`, use `echo
$PATH` (or `echo %PATH%` on Windows).
**Note:** You may need to restart Antigravity for changes to take effect.
* [Node.js](https://nodejs.org/) installed.
* A Google Cloud project with the **Cloud SQL Admin API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
* Cloud SQL Client (`roles/cloudsql.client`)
> **Note:** If your instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
### Configuration
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/postgres/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
You'll now be able to see all enabled tools in the "Tools" tab.
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "Cloud SQL for PostgreSQL", and click "Install".
3. **Configuration**: The following configuration is needed for the server:
* Cloud SQL Project ID: The GCP project ID.
* Cloud SQL Region: The region of your Cloud SQL instance.
* Cloud SQL Instance ID: The ID of your Cloud SQL instance.
* Cloud SQL Database Name: The name of the database.
* Cloud SQL Database User: (Optional) The database username. Defaults to IAM authentication if unspecified.
* Cloud SQL Database Password: (Optional) The password for the database user. Defaults to IAM authentication if unspecified.
* Cloud SQL IP Type: (Optional) The IP type, i.e. "Public" or "Private". Defaults to "Public" if unspecified.
> [!NOTE]
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
> If your instance uses private IPs, you must run the MCP server in the same Virtual Private Cloud (VPC) network.
## Usage
@@ -66,42 +103,6 @@ The Cloud SQL for PostgreSQL MCP server provides the following tools:
| `list_indexes` | Lists available user indexes in the database. |
| `list_sequences` | Lists sequences in the database. |
## Custom MCP Server Configuration
The MCP server is configured using environment variables.
```bash
export CLOUD_SQL_POSTGRES_PROJECT="<your-gcp-project-id>"
export CLOUD_SQL_POSTGRES_REGION="<your-cloud-sql-region>"
export CLOUD_SQL_POSTGRES_INSTANCE="<your-cloud-sql-instance-id>"
export CLOUD_SQL_POSTGRES_DATABASE="<your-database-name>"
export CLOUD_SQL_POSTGRES_USER="<your-database-user>" # Optional
export CLOUD_SQL_POSTGRES_PASSWORD="<your-database-password>" # Optional
export CLOUD_SQL_POSTGRES_IP_TYPE="PUBLIC" # Optional: `PUBLIC`, `PRIVATE`, `PSC`. Defaults to `PUBLIC`.
```
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
```json
{
"mcpServers": {
"cloud-sql-postgres": {
"command": "npx",
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "cloud-sql-postgres", "--stdio"],
"env": {
"CLOUD_SQL_POSTGRES_PROJECT": "your-project-id",
"CLOUD_SQL_POSTGRES_REGION": "your-region",
"CLOUD_SQL_POSTGRES_INSTANCE": "your-instance-id",
"CLOUD_SQL_POSTGRES_DATABASE": "your-database-name",
"CLOUD_SQL_POSTGRES_USER": "your-username",
"CLOUD_SQL_POSTGRES_PASSWORD": "your-password"
}
}
}
}
```
## Documentation
For more information, visit the [Cloud SQL for PostgreSQL documentation](https://cloud.google.com/sql/docs/postgres).

View File

@@ -9,24 +9,55 @@ An editor configured to use the Dataplex MCP server can use its AI capabilities
- **Search Catalog** - Search for entries in Dataplex Catalog
- **Explore Metadata** - Lookup specific entries and search aspect types
## Prerequisites
## Installation and Setup
### Prerequisites
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
1. **Download the Toolbox binary**:
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
<!-- {x-release-please-start-version} -->
```
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
```
<!-- {x-release-please-end} -->
2. **Make it executable**:
```bash
chmod +x toolbox
```
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
```bash
sudo mv toolbox /usr/local/bin/
# sudo mv toolbox /usr/bin/
```
**On Windows, move binary to the `WindowsApps\` folder**:
```
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
```
**Tip:** Ensure the destination folder for your binary is included in
your system's PATH environment variable. To check `PATH`, use `echo
$PATH` (or `echo %PATH%` on Windows).
**Note:** You may need to restart Antigravity for changes to take effect.
* [Node.js](https://nodejs.org/) installed.
* A Google Cloud project with the **Dataplex API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
* Dataplex Viewer (`roles/dataplex.viewer`) or equivalent permissions to read catalog entries.
## Install & Configuration
### Configuration
1. In the Antigravity MCP Store, click the "Install" button.
2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
You'll now be able to see all enabled tools in the "Tools" tab.
> [!NOTE]
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "Dataplex", and click "Install".
3. **Configuration**: The following configuration is needed for the server:
* Dataplex Project ID: The GCP project ID.
## Usage
@@ -45,30 +76,6 @@ The Dataplex MCP server provides the following tools:
| `lookup_entry` | Retrieve a specific entry from Dataplex Catalog. |
| `search_aspect_types` | Find aspect types relevant to the query. |
## Custom MCP Server Configuration
The MCP server is configured using environment variables.
```bash
export DATAPLEX_PROJECT="<your-gcp-project-id>"
```
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
```json
{
"mcpServers": {
"dataplex": {
"command": "npx",
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "dataplex", "--stdio"],
"env": {
"DATAPLEX_PROJECT": "your-project-id"
}
}
}
}
```
## Documentation
For more information, visit the [Dataplex documentation](https://cloud.google.com/dataplex/docs).

View File

@@ -11,23 +11,62 @@ An editor configured to use the Looker MCP server can use its AI capabilities to
- **Manage Dashboards** - Create, run, and modify dashboards
- **Manage Looks** - Search for and run saved looks
- **Health Checks** - Analyze instance health and performance
- **Developer Tools** - Manage project files and toggle dev mode
## Prerequisites
## Installation and Setup
### Prerequisites
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
1. **Download the Toolbox binary**:
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
<!-- {x-release-please-start-version} -->
```
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
```
<!-- {x-release-please-end} -->
2. **Make it executable**:
```bash
chmod +x toolbox
```
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
```bash
sudo mv toolbox /usr/local/bin/
# sudo mv toolbox /usr/bin/
```
**On Windows, move binary to the `WindowsApps\` folder**:
```
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
```
**Tip:** Ensure the destination folder for your binary is included in
your system's PATH environment variable. To check `PATH`, use `echo
$PATH` (or `echo %PATH%` on Windows).
**Note:** You may need to restart Antigravity for changes to take effect.
* [Node.js](https://nodejs.org/) installed.
* Access to a Looker instance.
* API Credentials (`Client ID` and `Client Secret`) or OAuth configuration.
## Install & Configuration
### Configuration
1. In the Antigravity MCP Store, click the "Install" button.
2. Add the required inputs for your [instance](https://docs.cloud.google.com/looker/docs/set-up-and-administer-looker) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
You'll now be able to see all enabled tools in the "Tools" tab.
> [!NOTE]
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "Looker", and click "Install".
3. **Configuration**: The following configuration is needed for the server:
* Looker Base URL: The URL of your Looker instance.
* Looker Client ID: The client ID for the Looker API.
* Looker Client Secret: The client secret for the Looker API.
* Looker Verify SSL: Whether to verify SSL certificates.
* Looker Use Client OAuth: Whether to use OAuth for authentication.
* Looker Show Hidden Models: Whether to show hidden models.
* Looker Show Hidden Explores: Whether to show hidden explores.
* Looker Show Hidden Fields: Whether to show hidden fields.
## Usage
@@ -55,38 +94,8 @@ The Looker MCP server provides a wide range of tools. Here are some of the key c
| `dev_mode` | Toggles development mode. |
| `get_projects` | Lists LookML projects. |
## Custom MCP Server Configuration
The MCP server is configured using environment variables.
```bash
export LOOKER_BASE_URL="<your-looker-instance-url>" # e.g. `https://looker.example.com`. You may need to add the port, e.g. `:19999`.
export LOOKER_CLIENT_ID="<your-looker-client-id>"
export LOOKER_CLIENT_SECRET="<your-looker-client-secret>"
export LOOKER_VERIFY_SSL="true" # Optional, defaults to true
export LOOKER_SHOW_HIDDEN_MODELS="true" # Optional, defaults to true
export LOOKER_SHOW_HIDDEN_EXPLORES="true" # Optional, defaults to true
export LOOKER_SHOW_HIDDEN_FIELDS="true" # Optional, defaults to true
```
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
```json
{
"mcpServers": {
"looker": {
"command": "npx",
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "looker", "--stdio"],
"env": {
"LOOKER_BASE_URL": "https://your.looker.instance.com",
"LOOKER_CLIENT_ID": "your-client-id",
"LOOKER_CLIENT_SECRET": "your-client-secret"
}
}
}
}
```
*(See the full list of tools in the extension)*
## Documentation
For more information, visit the [Looker documentation](https://cloud.google.com/looker).
For more information, visit the [Looker documentation](https://cloud.google.com/looker/docs).

View File

@@ -9,25 +9,59 @@ An editor configured to use the Cloud Spanner MCP server can use its AI capabili
- **Query Data** - Execute DML and DQL SQL queries
- **Explore Schema** - List tables and view schema details
## Prerequisites
## Installation and Setup
### Prerequisites
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
1. **Download the Toolbox binary**:
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
<!-- {x-release-please-start-version} -->
```
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
```
<!-- {x-release-please-end} -->
2. **Make it executable**:
```bash
chmod +x toolbox
```
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
```bash
sudo mv toolbox /usr/local/bin/
# sudo mv toolbox /usr/bin/
```
**On Windows, move binary to the `WindowsApps\` folder**:
```
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
```
**Tip:** Ensure the destination folder for your binary is included in
your system's PATH environment variable. To check `PATH`, use `echo
$PATH` (or `echo %PATH%` on Windows).
**Note:** You may need to restart Antigravity for changes to take effect.
* [Node.js](https://nodejs.org/) installed.
* A Google Cloud project with the **Cloud Spanner API** enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* IAM Permissions:
* Cloud Spanner Database User (`roles/spanner.databaseUser`) (for data access)
* Cloud Spanner Viewer (`roles/spanner.viewer`) (for schema access)
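If the Cloud Spanner API is not yet enabled on the project, it can be turned on with `gcloud` (a sketch; the project ID is a placeholder):
```bash
gcloud services enable spanner.googleapis.com --project=my-project-id
```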
## Install & Configuration
### Configuration
1. In the Antigravity MCP Store, click the "Install" button.
2. Add the required inputs for your [instance](https://docs.cloud.google.com/spanner/docs/instances) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
You'll now be able to see all enabled tools in the "Tools" tab.
> [!NOTE]
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "Spanner", and click "Install".
3. **Configuration**: The following configuration is needed for the server:
* Spanner Project ID: The GCP project ID.
* Spanner Instance ID: The Spanner instance ID.
* Spanner Database ID: The Spanner database ID.
* Spanner Dialect: (Optional) The database dialect, which can be "googlesql" or "postgresql". Defaults to "googlesql" if unspecified.
## Usage
@@ -41,42 +75,11 @@ Once configured, the MCP server will automatically provide Cloud Spanner capabil
The Cloud Spanner MCP server provides the following tools:
| Tool Name | Description |
|:------------------|:-----------------------------------------------------------------|
| `execute_sql` | Use this tool to execute DML SQL. |
| `execute_sql_dql` | Use this tool to execute DQL SQL. |
| `list_tables` | Lists detailed schema information for user-created tables. |
| `list_graphs` | Lists detailed graph schema information for user-created graphs. |
## Custom MCP Server Configuration
The MCP server is configured using environment variables.
```bash
export SPANNER_PROJECT="<your-gcp-project-id>"
export SPANNER_INSTANCE="<your-spanner-instance-id>"
export SPANNER_DATABASE="<your-spanner-database-id>"
export SPANNER_DIALECT="googlesql" # Optional: "googlesql" or "postgresql". Defaults to "googlesql".
```
Add the following configuration to your MCP client (e.g., `settings.json` for Gemini CLI, `mcp_config.json` for Antigravity):
```json
{
"mcpServers": {
"spanner": {
"command": "npx",
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "spanner", "--stdio"],
"env": {
"SPANNER_PROJECT": "your-project-id",
"SPANNER_INSTANCE": "your-instance-id",
"SPANNER_DATABASE": "your-database-name",
"SPANNER_DIALECT": "googlesql"
}
}
}
}
```
| Tool Name | Description |
|:------------------|:-----------------------------------------------------------|
| `execute_sql` | Use this tool to execute DML SQL. |
| `execute_sql_dql` | Use this tool to execute DQL SQL. |
| `list_tables` | Lists detailed schema information for user-created tables. |
## Documentation

View File

@@ -2,43 +2,59 @@
The MCP Toolbox for Databases Server gives AI-powered development tools the ability to work with your custom tools. It is designed to simplify and secure the development of tools for interacting with databases.
## Installation and Setup
## Prerequisites
### Prerequisites
* [Node.js](https://nodejs.org/) installed.
* A Google Cloud project with relevant APIs enabled.
* Ensure [Application Default Credentials](https://cloud.google.com/docs/authentication/gcloud) are available in your environment.
* Download and install [MCP Toolbox](https://github.com/googleapis/genai-toolbox):
1. **Download the Toolbox binary**:
Download the latest binary for your operating system and architecture from the storage bucket. Check the [releases page](https://github.com/googleapis/genai-toolbox/releases) for OS and CPU architecture support:
`https://storage.googleapis.com/genai-toolbox/v0.21.0/<os>/<arch>/toolbox`
* Replace `<os>` with `linux`, `darwin` (macOS), or `windows`.
* Replace `<arch>` with `amd64` (Intel) or `arm64` (Apple Silicon).
<!-- {x-release-please-start-version} -->
```
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
```
<!-- {x-release-please-end} -->
2. **Make it executable**:
```bash
chmod +x toolbox
```
## Install & Configuration
3. **Move binary to `/usr/local/bin/` or `/usr/bin/`**:
```bash
sudo mv toolbox /usr/local/bin/
# sudo mv toolbox /usr/bin/
```
1. In the Antigravity MCP Store, click the **Install** button. A configuration window will appear.
**On Windows, move binary to the `WindowsApps\` folder**:
```
move "C:\Users\<path-to-binary>\toolbox.exe" "C:\Users\<username>\AppData\Local\Microsoft\WindowsApps\"
```
**Tip:** Ensure the destination folder for your binary is included in
your system's PATH environment variable. To check `PATH`, use `echo
$PATH` (or `echo %PATH%` on Windows).
2. Create your [`tools.yaml` configuration file](https://googleapis.github.io/genai-toolbox/getting-started/configure/).
**Note:** You may need to restart Antigravity for changes to take effect.
3. In the configuration window, enter the full absolute path to your `tools.yaml` file and click **Save**.
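As a starting point, here is a minimal `tools.yaml` sketch written via a shell heredoc (the source name, credentials, and statement are placeholders; see the configuration docs linked above for the full schema):
```bash
# Write a single-source, single-tool configuration to tools.yaml.
cat > tools.yaml <<'EOF'
sources:
  my-pg-source:
    kind: postgres
    host: 127.0.0.1
    port: 5432
    database: toolbox_db
    user: toolbox_user
    password: my-password
tools:
  say_hello:
    kind: postgres-sql
    source: my-pg-source
    description: Return a constant greeting row.
    statement: SELECT 'hello' AS greeting;
EOF
```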
* Any required APIs and permissions for connecting to your database.
> [!NOTE]
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
### Configuration
1. **Access the Store**: Open the MCP Store panel within the "..." dropdown at the top of the editor's side panel.
2. **Browse and Install**: Search for "MCP Toolbox for Databases", and click "Install".
3. **Configuration**: The following configuration is needed for the server:
* Add your [`tools.yaml` configuration file](https://googleapis.github.io/genai-toolbox/getting-started/configure/) to the directory from which you run Antigravity
## Usage
Interact with your custom tools using natural language.
## Custom MCP Server Configuration
```json
{
"mcpServers": {
"mcp-toolbox": {
"command": "npx",
"args": ["-y", "@toolbox-sdk/server", "--tools-file", "your-tool-file.yaml"],
"env": {
"ENV_VAR_NAME": "ENV_VAR_VALUE",
}
}
}
}
```
## Documentation

View File

@@ -44,11 +44,15 @@ most popular issues, so make sure to +1 ones you are the most interested in.
## Can Toolbox be used for non-database tools?
**Yes!** While Toolbox is primarily focused on databases, it also supports generic
**HTTP tools** (`kind: http`). These allow you to connect your agents to REST APIs
and other web services, enabling workflows that extend beyond database interactions.
Currently, Toolbox is primarily focused on making it easier to create and
develop tools focused on interacting with Databases. We believe that there are a
lot of unique problems when interacting with Databases for Gen AI use cases, and
want to prioritize solving those first.
For configuration details, see the [HTTP Tools documentation](../resources/tools/http/http.md).
However, we've also received feedback that supporting more generic HTTP or
GRPC tools might be helpful in assisting with migrating to Toolbox or in
accomplishing more complicated workflows. We're looking into what that might
best look like in Toolbox.
## Can I use _$BAR_ orchestration framework to use tools from Toolbox?

View File

@@ -1,18 +0,0 @@
---
title: "Featured Articles"
weight: 3
description: Toolbox Medium Blogs
manualLink: "https://medium.com/@mcp_toolbox"
manualLinkTarget: _blank
---
<html>
<head>
<title>Redirecting to Featured Articles</title>
<link rel="canonical" href="https://medium.com/@mcp_toolbox"/>
<meta http-equiv="refresh" content="0;url=https://medium.com/@mcp_toolbox"/>
</head>
<body>
<p>If you are not automatically redirected, please <a href="https://medium.com/@mcp_toolbox">follow this link to our articles</a>.</p>
</body>
</html>

View File

@@ -183,11 +183,11 @@ Protocol (OTLP). If you would like to use a collector, please refer to this
The following flags are used to determine Toolbox's telemetry configuration:
| **flag** | **type** | **description** |
|----------------------------|----------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `--telemetry-gcp` | bool | Enable exporting directly to Google Cloud Monitoring. Default is `false`. |
| `--telemetry-otlp` | string | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. "127.0.0.1:4318"). To pass an insecure endpoint here, set environment variable `OTEL_EXPORTER_OTLP_INSECURE=true`. |
| `--telemetry-service-name` | string | Sets the value of the `service.name` resource attribute. Default is `toolbox`. |
| **flag** | **type** | **description** |
|----------------------------|----------|------------------------------------------------------------------------------------------------------------------|
| `--telemetry-gcp` | bool | Enable exporting directly to Google Cloud Monitoring. Default is `false`. |
| `--telemetry-otlp` | string | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. "<http://127.0.0.1:4318>"). |
| `--telemetry-service-name` | string | Sets the value of the `service.name` resource attribute. Default is `toolbox`. |
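The flags can be combined; for example, a sketch that exports directly to Cloud Monitoring under a custom `service.name` (the name is a placeholder):
```bash
./toolbox --telemetry-gcp --telemetry-service-name="my-toolbox"
```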
In addition to the flags noted above, you can also make additional configuration
for OpenTelemetry via the [General SDK Configuration][sdk-configuration] through
@@ -207,5 +207,5 @@ To enable Google Cloud Exporter:
To enable OTLP Exporter, provide Collector endpoint:
```bash
./toolbox --telemetry-otlp="127.0.0.1:4553"
./toolbox --telemetry-otlp="http://127.0.0.1:4553"
```
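If the collector endpoint is plain HTTP rather than TLS, the flag description above mentions setting `OTEL_EXPORTER_OTLP_INSECURE=true`; a sketch:
```bash
export OTEL_EXPORTER_OTLP_INSECURE=true
./toolbox --telemetry-otlp="127.0.0.1:4553"
```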

View File

@@ -234,7 +234,7 @@
},
"outputs": [],
"source": [
"version = \"0.25.0\" # x-release-please-version\n",
"version = \"0.21.0\" # x-release-please-version\n",
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
"\n",
"# Make the binary executable\n",

View File

@@ -71,22 +71,6 @@ redeploying your application.
## Getting Started
### (Non-production) Running Toolbox
You can run Toolbox directly with a [configuration file](../configure.md):
```sh
npx @toolbox-sdk/server --tools-file tools.yaml
```
This runs the latest version of the toolbox server with your configuration file.
{{< notice note >}}
This method should only be used for non-production use cases such as
experimentation. For any production use cases, please consider [Installing the
server](#installing-the-server) and then [running it](#running-the-server).
{{< /notice >}}
### Installing the server
For the latest version, check the [releases page][releases] and use the
@@ -103,7 +87,7 @@ To install Toolbox as a binary on Linux (AMD64):
```sh
# see releases page for other versions
export VERSION=0.25.0
export VERSION=0.21.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```
@@ -114,7 +98,7 @@ To install Toolbox as a binary on macOS (Apple Silicon):
```sh
# see releases page for other versions
export VERSION=0.25.0
export VERSION=0.21.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/arm64/toolbox
chmod +x toolbox
```
@@ -125,29 +109,19 @@ To install Toolbox as a binary on macOS (Intel):
```sh
# see releases page for other versions
export VERSION=0.25.0
export VERSION=0.21.0
curl -L -o toolbox https://storage.googleapis.com/genai-toolbox/v$VERSION/darwin/amd64/toolbox
chmod +x toolbox
```
{{% /tab %}}
{{% tab header="Windows (Command Prompt)" lang="en" %}}
To install Toolbox as a binary on Windows (Command Prompt):
```cmd
:: see releases page for other versions
set VERSION=0.25.0
curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
```
{{% /tab %}}
{{% tab header="Windows (PowerShell)" lang="en" %}}
To install Toolbox as a binary on Windows (PowerShell):
{{% tab header="Windows (AMD64)" lang="en" %}}
To install Toolbox as a binary on Windows (AMD64):
```powershell
# see releases page for other versions
$VERSION = "0.25.0"
curl.exe -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v$VERSION/windows/amd64/toolbox.exe"
:: see releases page for other versions
set VERSION=0.21.0
curl -o toolbox.exe "https://storage.googleapis.com/genai-toolbox/v%VERSION%/windows/amd64/toolbox.exe"
```
{{% /tab %}}
@@ -158,7 +132,7 @@ You can also install Toolbox as a container:
```sh
# see releases page for other versions
export VERSION=0.25.0
export VERSION=0.21.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```
@@ -177,7 +151,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:
```sh
go install github.com/googleapis/genai-toolbox@v0.25.0
go install github.com/googleapis/genai-toolbox@v0.21.0
```
{{% /tab %}}
@@ -320,10 +294,6 @@ let client = new ToolboxClient(URL);
const toolboxTools = await client.loadToolset('toolsetName');
{{< /highlight >}}
For more detailed instructions on using the Toolbox Core SDK, see the
[project's
README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
{{% /tab %}}
{{% tab header="LangChain/Langraph" lang="en" %}}
@@ -348,10 +318,6 @@ const getTool = (toolboxTool) => tool(currTool, {
const tools = toolboxTools.map(getTool);
{{< /highlight >}}
For more detailed instructions on using the Toolbox Core SDK, see the
[project's
README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
{{% /tab %}}
{{% tab header="Genkit" lang="en" %}}
@@ -387,10 +353,6 @@ const getTool = (toolboxTool) => ai.defineTool({
const tools = toolboxTools.map(getTool);
{{< /highlight >}}
For more detailed instructions on using the Toolbox Core SDK, see the
[project's
README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
{{% /tab %}}
{{% tab header="LlamaIndex" lang="en" %}}
@@ -418,32 +380,12 @@ const tools = toolboxTools.map(getTool);
{{< /highlight >}}
For more detailed instructions on using the Toolbox Core SDK, see the
[project's
README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
{{% /tab %}}
{{% tab header="ADK TS" lang="en" %}}
{{< highlight javascript >}}
import { ToolboxClient } from '@toolbox-sdk/adk';
// Replace with the actual URL where your Toolbox service is running
const URL = 'http://127.0.0.1:5000';
let client = new ToolboxClient(URL);
const tools = await client.loadToolset();
// Use the client and tools as per requirement
{{< /highlight >}}
For detailed samples on using the Toolbox JS SDK with ADK JS, see the [project's
README.](https://github.com/googleapis/mcp-toolbox-sdk-js/tree/main/packages/toolbox-adk/README.md)
{{% /tab %}}
{{< /tabpane >}}
For more detailed instructions on using the Toolbox Core SDK, see the
[project's
README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
#### Go

View File

@@ -101,62 +101,44 @@ pip install google-genai
{{< /tab >}}
{{< /tabpane >}}
1. Create the agent:
{{< tabpane persist=header >}}
{{% tab header="ADK" text=true %}}
1. Create a new file named `hotel_agent.py` and copy the following
code to create an agent:
{{< tabpane persist=header >}}
{{< tab header="ADK" lang="python" >}}
1. Create a new agent project. This will create a new directory named `my_agent`
with a file `agent.py`.
{{< include "quickstart/python/adk/quickstart.py" >}}
```bash
adk create my_agent
```
<br/>
{{< /tab >}}
{{< tab header="LangChain" lang="python" >}}
1. Update `my_agent/agent.py` with the following content to connect to Toolbox:
```py
{{< include "quickstart/python/adk/quickstart.py" >}}
```
<br/>
1. Create a `.env` file with your Google API key:
```bash
echo 'GOOGLE_API_KEY="YOUR_API_KEY"' > my_agent/.env
```
{{% /tab %}}
{{% tab header="LangChain" text=true %}}
Create a new file named `agent.py` and copy the following code:
```py
{{< include "quickstart/python/langchain/quickstart.py" >}}
```
{{% /tab %}}
{{% tab header="LlamaIndex" text=true %}}
Create a new file named `agent.py` and copy the following code:
```py
{{< /tab >}}
{{< tab header="LlamaIndex" lang="python" >}}
{{< include "quickstart/python/llamaindex/quickstart.py" >}}
```
{{% /tab %}}
{{% tab header="Core" text=true %}}
Create a new file named `agent.py` and copy the following code:
```py
{{< /tab >}}
{{< tab header="Core" lang="python" >}}
{{< include "quickstart/python/core/quickstart.py" >}}
```
{{% /tab %}}
{{< /tab >}}
{{< /tabpane >}}
{{< tabpane text=true persist=header >}}
{{% tab header="ADK" lang="en" %}}
To learn more about Agent Development Kit, check out the [ADK
Documentation](https://google.github.io/adk-docs/get-started/python/).
documentation.](https://google.github.io/adk-docs/)
{{% /tab %}}
{{% tab header="Langchain" lang="en" %}}
To learn more about Agents in LangChain, check out the [LangGraph Agent
Documentation](https://langchain-ai.github.io/langgraph/reference/prebuilt/#langgraph.prebuilt.chat_agent_executor.create_react_agent).
documentation.](https://langchain-ai.github.io/langgraph/reference/prebuilt/#langgraph.prebuilt.chat_agent_executor.create_react_agent)
{{% /tab %}}
{{% tab header="LlamaIndex" lang="en" %}}
To learn more about Agents in LlamaIndex, check out the [LlamaIndex
AgentWorkflow
Documentation](https://docs.llamaindex.ai/en/stable/examples/agent/agent_workflow_basic/).
documentation.](https://docs.llamaindex.ai/en/stable/examples/agent/agent_workflow_basic/)
{{% /tab %}}
{{% tab header="Core" lang="en" %}}
To learn more about tool calling with Google GenAI, check out the
@@ -165,37 +147,11 @@ Documentation](https://github.com/googleapis/python-genai?tab=readme-ov-file#man
{{% /tab %}}
{{< /tabpane >}}
4. Run your agent, and observe the results:
1. Run your agent, and observe the results:
{{< tabpane persist=header >}}
{{% tab header="ADK" text=true %}}
Run your agent locally for testing:
```sh
adk run my_agent
```
<br/>
Alternatively, serve it via a web interface:
```sh
adk web --port 8000
```
<br/>
For more information, refer to the ADK documentation on [Running
Agents](https://google.github.io/adk-docs/get-started/python/#run-your-agent)
and [Deploying to Cloud](https://google.github.io/adk-docs/deploy/).
{{% /tab %}}
{{< tab header="Langchain" lang="bash" >}}
python agent.py
{{< /tab >}}
{{< tab header="LlamaIndex" lang="bash" >}}
python agent.py
{{< /tab >}}
{{< tab header="Core" lang="bash" >}}
python agent.py
{{< /tab >}}
{{< /tabpane >}}
```sh
python hotel_agent.py
```
{{< notice info >}}
For more information, visit the [Python SDK

View File

@@ -40,24 +40,11 @@ from Toolbox.
```
1. In a new terminal, install the
SDK package.
{{< tabpane persist=header >}}
{{< tab header="LangChain" lang="bash" >}}
npm install @toolbox-sdk/core
{{< /tab >}}
{{< tab header="GenkitJS" lang="bash" >}}
npm install @toolbox-sdk/core
{{< /tab >}}
{{< tab header="LlamaIndex" lang="bash" >}}
npm install @toolbox-sdk/core
{{< /tab >}}
{{< tab header="GoogleGenAI" lang="bash" >}}
npm install @toolbox-sdk/core
{{< /tab >}}
{{< tab header="ADK" lang="bash" >}}
npm install @toolbox-sdk/adk
{{< /tab >}}
{{< /tabpane >}}
[SDK](https://www.npmjs.com/package/@toolbox-sdk/core).
```bash
npm install @toolbox-sdk/core
```
1. Install other required dependencies
@@ -74,9 +61,6 @@ npm install llamaindex @llamaindex/google @llamaindex/workflow
{{< tab header="GoogleGenAI" lang="bash" >}}
npm install @google/genai
{{< /tab >}}
{{< tab header="ADK" lang="bash" >}}
npm install @google/adk
{{< /tab >}}
{{< /tabpane >}}
1. Create a new file named `hotelAgent.js` and copy the following code to create
@@ -107,12 +91,6 @@ npm install @google/adk
{{< /tab >}}
{{< tab header="ADK" lang="js" >}}
{{< include "quickstart/js/adk/quickstart.js" >}}
{{< /tab >}}
{{< /tabpane >}}
1. Run your agent, and observe the results:

View File

@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

View File

@@ -1,245 +0,0 @@
---
title: "Prompts using Gemini CLI"
type: docs
weight: 5
description: >
How to get started using Toolbox prompts locally with PostgreSQL and [Gemini CLI](https://pypi.org/project/gemini-cli/).
---
## Before you begin
This guide assumes you have already done the following:
1. Installed [PostgreSQL 16+ and the `psql` client][install-postgres].
[install-postgres]: https://www.postgresql.org/download/
## Step 1: Set up your database
In this section, we will create a database, insert some data that needs to be
accessed by our agent, and create a database user for Toolbox to connect with.
1. Connect to postgres using the `psql` command:
```bash
psql -h 127.0.0.1 -U postgres
```
Here, `postgres` denotes the default postgres superuser.
{{< notice info >}}
#### **Having trouble connecting?**
* **Password Prompt:** If you are prompted for a password for the `postgres`
user and do not know it (or a blank password doesn't work), your PostgreSQL
installation might require a password or a different authentication method.
* **`FATAL: role "postgres" does not exist`:** This error means the default
`postgres` superuser role isn't available under that name on your system.
* **`Connection refused`:** Ensure your PostgreSQL server is actually running.
You can typically check with `sudo systemctl status postgresql` and start it
with `sudo systemctl start postgresql` on Linux systems.
<br/>
#### **Common Solution**
For password issues or if the `postgres` role seems inaccessible directly, try
switching to the `postgres` operating system user first. This user often has
permission to connect without a password for local connections (this is called
peer authentication).
```bash
sudo -i -u postgres
psql -h 127.0.0.1
```
Once you are in the `psql` shell using this method, you can proceed with the
database creation steps below. Afterwards, type `\q` to exit `psql`, and then
`exit` to return to your normal user shell.
If desired, once connected to `psql` as the `postgres` OS user, you can set a
password for the `postgres` *database* user using: `ALTER USER postgres WITH
PASSWORD 'your_chosen_password';`. This would allow direct connection with `-U
postgres` and a password next time.
{{< /notice >}}
1. Create a new database and a new user:
{{< notice tip >}}
For a real application, it's best to follow the principle of least permission
and only grant the privileges your application needs.
{{< /notice >}}
```sql
CREATE USER toolbox_user WITH PASSWORD 'my-password';
CREATE DATABASE toolbox_db;
GRANT ALL PRIVILEGES ON DATABASE toolbox_db TO toolbox_user;
ALTER DATABASE toolbox_db OWNER TO toolbox_user;
```
1. End the database session:
```bash
\q
```
(If you used `sudo -i -u postgres` and then `psql`, remember you might also
need to type `exit` after `\q` to leave the `postgres` user's shell
session.)
1. Connect to your database with your new user:
```bash
psql -h 127.0.0.1 -U toolbox_user -d toolbox_db
```
1. Create the required tables using the following commands:
```sql
CREATE TABLE users (
id SERIAL PRIMARY KEY,
username VARCHAR(50) NOT NULL,
email VARCHAR(100) UNIQUE NOT NULL,
created_at TIMESTAMPTZ DEFAULT NOW()
);
CREATE TABLE restaurants (
id SERIAL PRIMARY KEY,
name VARCHAR(100) NOT NULL,
location VARCHAR(100)
);
CREATE TABLE reviews (
id SERIAL PRIMARY KEY,
user_id INT REFERENCES users(id),
restaurant_id INT REFERENCES restaurants(id),
rating INT CHECK (rating >= 1 AND rating <= 5),
review_text TEXT,
is_published BOOLEAN DEFAULT false,
moderation_status VARCHAR(50) DEFAULT 'pending_manual_review',
created_at TIMESTAMPTZ DEFAULT NOW()
);
```
1. Insert dummy data into the tables.
```sql
INSERT INTO users (id, username, email) VALUES
(123, 'jane_d', 'jane.d@example.com'),
(124, 'john_s', 'john.s@example.com'),
(125, 'sam_b', 'sam.b@example.com');
INSERT INTO restaurants (id, name, location) VALUES
(455, 'Pizza Palace', '123 Main St'),
(456, 'The Corner Bistro', '456 Oak Ave'),
(457, 'Sushi Spot', '789 Pine Ln');
INSERT INTO reviews (user_id, restaurant_id, rating, review_text, is_published, moderation_status) VALUES
(124, 455, 5, 'Best pizza in town! The crust was perfect.', true, 'approved'),
(125, 457, 4, 'Great sushi, very fresh. A bit pricey but worth it.', true, 'approved'),
(123, 457, 5, 'Absolutely loved the dragon roll. Will be back!', true, 'approved'),
(123, 456, 4, 'The atmosphere was lovely and the food was great. My photo upload might have been weird though.', false, 'pending_manual_review'),
(125, 456, 1, 'This review contains inappropriate language.', false, 'rejected');
```
1. End the database session:
```bash
\q
```
## Step 2: Configure Toolbox
Create a file named `tools.yaml`. This file defines the database connection, the
SQL tools available, and the prompts the agents will use.
```yaml
sources:
my-foodiefind-db:
kind: postgres
host: 127.0.0.1
port: 5432
database: toolbox_db
user: toolbox_user
password: my-password
tools:
find_user_by_email:
kind: postgres-sql
source: my-foodiefind-db
description: Find a user's ID by their email address.
parameters:
- name: email
type: string
description: The email address of the user to find.
statement: SELECT id FROM users WHERE email = $1;
find_restaurant_by_name:
kind: postgres-sql
source: my-foodiefind-db
description: Find a restaurant's ID by its exact name.
parameters:
- name: name
type: string
description: The name of the restaurant to find.
statement: SELECT id FROM restaurants WHERE name = $1;
find_review_by_user_and_restaurant:
kind: postgres-sql
source: my-foodiefind-db
description: Find the full record for a specific review using the user's ID and the restaurant's ID.
parameters:
- name: user_id
type: integer
description: The numerical ID of the user.
- name: restaurant_id
type: integer
description: The numerical ID of the restaurant.
statement: SELECT * FROM reviews WHERE user_id = $1 AND restaurant_id = $2;
prompts:
investigate_missing_review:
description: "Investigates a user's missing review by finding the user, restaurant, and the review itself, then analyzing its status."
arguments:
- name: "user_email"
description: "The email of the user who wrote the review."
- name: "restaurant_name"
description: "The name of the restaurant being reviewed."
messages:
- content: >-
**Goal:** Find the review written by the user with email '{{.user_email}}' for the restaurant named '{{.restaurant_name}}' and understand its status.
**Workflow:**
1. Use the `find_user_by_email` tool with the email '{{.user_email}}' to get the `user_id`.
2. Use the `find_restaurant_by_name` tool with the name '{{.restaurant_name}}' to get the `restaurant_id`.
3. Use the `find_review_by_user_and_restaurant` tool with the `user_id` and `restaurant_id` you just found.
4. Analyze the results from the final tool call. Examine the `is_published` and `moderation_status` fields and explain the review's status to the user in a clear, human-readable sentence.
```
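With the file in place, start Toolbox so the tools and prompt are served (a sketch, assuming the `toolbox` binary is in the current directory; by default it listens on port 5000, which matches the endpoint used in the next step):
```bash
./toolbox --tools-file tools.yaml
```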
## Step 3: Connect to Gemini CLI
Configure the Gemini CLI to talk to your local Toolbox MCP server.
1. Open or create your Gemini settings file: `~/.gemini/settings.json`.
2. Add the following configuration to the file:
```json
{
"mcpServers": {
"MCPToolbox": {
"httpUrl": "http://localhost:5000/mcp"
}
},
"mcp": {
"allowed": ["MCPToolbox"]
}
}
```
3. Start Gemini CLI using
```sh
gemini
```
If Gemini CLI is already running, use `/mcp refresh` to refresh the MCP server.
4. Use gemini slash commands to run your prompt:
```sh
/investigate_missing_review --user_email="jane.d@example.com" --restaurant_name="The Corner Bistro"
```

View File

@@ -5,7 +5,7 @@ go 1.24.4
require (
github.com/googleapis/mcp-toolbox-sdk-go v0.4.0
google.golang.org/adk v0.1.0
google.golang.org/genai v1.36.0
google.golang.org/genai v1.35.0
)
require (
@@ -28,11 +28,11 @@ require (
go.opentelemetry.io/otel/metric v1.38.0 // indirect
go.opentelemetry.io/otel/sdk v1.38.0 // indirect
go.opentelemetry.io/otel/trace v1.38.0 // indirect
golang.org/x/crypto v0.45.0 // indirect
golang.org/x/net v0.47.0 // indirect
golang.org/x/crypto v0.43.0 // indirect
golang.org/x/net v0.46.0 // indirect
golang.org/x/oauth2 v0.32.0 // indirect
golang.org/x/sys v0.38.0 // indirect
golang.org/x/text v0.31.0 // indirect
golang.org/x/sys v0.37.0 // indirect
golang.org/x/text v0.30.0 // indirect
google.golang.org/api v0.255.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20251029180050-ab9386a59fda // indirect
google.golang.org/grpc v1.76.0 // indirect

View File

@@ -88,18 +88,18 @@ go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJr
go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=
golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=
golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210=
golang.org/x/oauth2 v0.32.0 h1:jsCblLleRMDrxMN29H3z/k1KliIvpLgCkE6R8FXXNgY=
golang.org/x/oauth2 v0.32.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
@@ -108,8 +108,8 @@ google.golang.org/adk v0.1.0 h1:+w/fHuqRVolotOATlujRA+2DKUuDrFH2poRdEX2QjB8=
google.golang.org/adk v0.1.0/go.mod h1:NvtSLoNx7UzZIiUAI1KoJQLMmt9sG3oCgiCx1TLqKFw=
google.golang.org/api v0.255.0 h1:OaF+IbRwOottVCYV2wZan7KUq7UeNUQn1BcPc4K7lE4=
google.golang.org/api v0.255.0/go.mod h1:d1/EtvCLdtiWEV4rAEHDHGh2bCnqsWhw+M8y2ECN4a8=
google.golang.org/genai v1.36.0 h1:sJCIjqTAmwrtAIaemtTiKkg2TO1RxnYEusTmEQ3nGxM=
google.golang.org/genai v1.36.0/go.mod h1:A3kkl0nyBjyFlNjgxIwKq70julKbIxpSxqKO5gw/gmk=
google.golang.org/genai v1.35.0 h1:Jo6g25CzVqFzGrX5mhWyBgQqXAUzxcx5jeK7U74zv9c=
google.golang.org/genai v1.35.0/go.mod h1:A3kkl0nyBjyFlNjgxIwKq70julKbIxpSxqKO5gw/gmk=
google.golang.org/genproto v0.0.0-20251014184007-4626949a642f h1:vLd1CJuJOUgV6qijD7KT5Y2ZtC97ll4dxjTUappMnbo=
google.golang.org/genproto v0.0.0-20251014184007-4626949a642f/go.mod h1:PI3KrSadr00yqfv6UDvgZGFsmLqeRIwt8x4p5Oo7CdM=
google.golang.org/genproto/googleapis/api v0.0.0-20251014184007-4626949a642f h1:OiFuztEyBivVKDvguQJYWq1yDcfAHIID/FVrPR4oiI0=

View File

@@ -4,7 +4,7 @@ go 1.24.6
require (
github.com/googleapis/mcp-toolbox-sdk-go v0.4.0
google.golang.org/genai v1.36.0
google.golang.org/genai v1.35.0
)
require (
@@ -25,11 +25,11 @@ require (
go.opentelemetry.io/otel v1.38.0 // indirect
go.opentelemetry.io/otel/metric v1.38.0 // indirect
go.opentelemetry.io/otel/trace v1.38.0 // indirect
golang.org/x/crypto v0.45.0 // indirect
golang.org/x/net v0.47.0 // indirect
golang.org/x/crypto v0.43.0 // indirect
golang.org/x/net v0.46.0 // indirect
golang.org/x/oauth2 v0.32.0 // indirect
golang.org/x/sys v0.38.0 // indirect
golang.org/x/text v0.31.0 // indirect
golang.org/x/sys v0.37.0 // indirect
golang.org/x/text v0.30.0 // indirect
google.golang.org/api v0.255.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20251029180050-ab9386a59fda // indirect
google.golang.org/grpc v1.76.0 // indirect

View File

@@ -84,26 +84,26 @@ go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6
go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA=
go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE=
go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=
golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=
golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210=
golang.org/x/oauth2 v0.32.0 h1:jsCblLleRMDrxMN29H3z/k1KliIvpLgCkE6R8FXXNgY=
golang.org/x/oauth2 v0.32.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
google.golang.org/api v0.255.0 h1:OaF+IbRwOottVCYV2wZan7KUq7UeNUQn1BcPc4K7lE4=
google.golang.org/api v0.255.0/go.mod h1:d1/EtvCLdtiWEV4rAEHDHGh2bCnqsWhw+M8y2ECN4a8=
google.golang.org/genai v1.36.0 h1:sJCIjqTAmwrtAIaemtTiKkg2TO1RxnYEusTmEQ3nGxM=
google.golang.org/genai v1.36.0/go.mod h1:A3kkl0nyBjyFlNjgxIwKq70julKbIxpSxqKO5gw/gmk=
google.golang.org/genai v1.35.0 h1:Jo6g25CzVqFzGrX5mhWyBgQqXAUzxcx5jeK7U74zv9c=
google.golang.org/genai v1.35.0/go.mod h1:A3kkl0nyBjyFlNjgxIwKq70julKbIxpSxqKO5gw/gmk=
google.golang.org/genproto v0.0.0-20251014184007-4626949a642f h1:vLd1CJuJOUgV6qijD7KT5Y2ZtC97ll4dxjTUappMnbo=
google.golang.org/genproto v0.0.0-20251014184007-4626949a642f/go.mod h1:PI3KrSadr00yqfv6UDvgZGFsmLqeRIwt8x4p5Oo7CdM=
google.golang.org/genproto/googleapis/api v0.0.0-20251014184007-4626949a642f h1:OiFuztEyBivVKDvguQJYWq1yDcfAHIID/FVrPR4oiI0=

View File

@@ -39,11 +39,11 @@ require (
go.opentelemetry.io/otel/metric v1.38.0 // indirect
go.opentelemetry.io/otel/sdk v1.38.0 // indirect
go.opentelemetry.io/otel/trace v1.38.0 // indirect
golang.org/x/crypto v0.45.0 // indirect
golang.org/x/net v0.47.0 // indirect
golang.org/x/crypto v0.43.0 // indirect
golang.org/x/net v0.46.0 // indirect
golang.org/x/oauth2 v0.32.0 // indirect
golang.org/x/sys v0.38.0 // indirect
golang.org/x/text v0.31.0 // indirect
golang.org/x/sys v0.37.0 // indirect
golang.org/x/text v0.30.0 // indirect
google.golang.org/api v0.255.0 // indirect
google.golang.org/genai v1.34.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20251029180050-ab9386a59fda // indirect

View File

@@ -123,18 +123,18 @@ go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJr
go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=
golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=
golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210=
golang.org/x/oauth2 v0.32.0 h1:jsCblLleRMDrxMN29H3z/k1KliIvpLgCkE6R8FXXNgY=
golang.org/x/oauth2 v0.32.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=

View File

@@ -33,12 +33,12 @@ require (
go.opentelemetry.io/otel v1.38.0 // indirect
go.opentelemetry.io/otel/metric v1.38.0 // indirect
go.opentelemetry.io/otel/trace v1.38.0 // indirect
golang.org/x/crypto v0.45.0 // indirect
golang.org/x/net v0.47.0 // indirect
golang.org/x/crypto v0.43.0 // indirect
golang.org/x/net v0.46.0 // indirect
golang.org/x/oauth2 v0.32.0 // indirect
golang.org/x/sync v0.18.0 // indirect
golang.org/x/sys v0.38.0 // indirect
golang.org/x/text v0.31.0 // indirect
golang.org/x/sync v0.17.0 // indirect
golang.org/x/sys v0.37.0 // indirect
golang.org/x/text v0.30.0 // indirect
golang.org/x/time v0.14.0 // indirect
google.golang.org/api v0.255.0 // indirect
google.golang.org/genproto v0.0.0-20251014184007-4626949a642f // indirect

View File

@@ -100,18 +100,18 @@ go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6
go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA=
go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE=
go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=
golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=
golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210=
golang.org/x/oauth2 v0.32.0 h1:jsCblLleRMDrxMN29H3z/k1KliIvpLgCkE6R8FXXNgY=
golang.org/x/oauth2 v0.32.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=

View File

@@ -26,11 +26,11 @@ require (
go.opentelemetry.io/otel v1.38.0 // indirect
go.opentelemetry.io/otel/metric v1.38.0 // indirect
go.opentelemetry.io/otel/trace v1.38.0 // indirect
golang.org/x/crypto v0.45.0 // indirect
golang.org/x/net v0.47.0 // indirect
golang.org/x/crypto v0.43.0 // indirect
golang.org/x/net v0.46.0 // indirect
golang.org/x/oauth2 v0.32.0 // indirect
golang.org/x/sys v0.38.0 // indirect
golang.org/x/text v0.31.0 // indirect
golang.org/x/sys v0.37.0 // indirect
golang.org/x/text v0.30.0 // indirect
google.golang.org/api v0.255.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20251029180050-ab9386a59fda // indirect
google.golang.org/grpc v1.76.0 // indirect

View File

@@ -94,18 +94,18 @@ go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6
go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA=
go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE=
go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=
golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=
golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210=
golang.org/x/oauth2 v0.32.0 h1:jsCblLleRMDrxMN29H3z/k1KliIvpLgCkE6R8FXXNgY=
golang.org/x/oauth2 v0.32.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=

File diff suppressed because it is too large.

View File

@@ -1,17 +0,0 @@
{
"name": "adk",
"version": "1.0.0",
"description": "",
"main": "quickstart.js",
"type": "module",
"scripts": {
"test": "node --test"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"@google/adk": "^0.1.3",
"@toolbox-sdk/adk": "^0.1.5"
}
}

View File

@@ -1,56 +0,0 @@
import { InMemoryRunner, LlmAgent, LogLevel } from '@google/adk';
import { ToolboxClient } from '@toolbox-sdk/adk';
const prompt = `
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
`;
const queries = [
"Find hotels with Basel in its name.",
"Can you book the Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
];
process.env.GOOGLE_GENAI_API_KEY = process.env.GOOGLE_API_KEY || 'your-api-key'; // Replace it with your API key
export async function main() {
const userId = 'test_user';
const client = new ToolboxClient('http://127.0.0.1:5000');
const tools = await client.loadToolset("my-toolset");
const rootAgent = new LlmAgent({
name: 'hotel_agent',
model: 'gemini-2.5-flash',
description: 'Agent for hotel bookings and administration.',
instruction: prompt,
tools: tools,
});
const appName = rootAgent.name;
const runner = new InMemoryRunner({ agent: rootAgent, appName, logLevel: LogLevel.ERROR, });
const session = await runner.sessionService.createSession({ appName, userId });
for (const query of queries) {
await runPrompt(runner, userId, session.id, query);
}
}
async function runPrompt(runner, userId, sessionId, prompt) {
const content = { role: 'user', parts: [{ text: prompt }] };
const stream = runner.runAsync({ userId, sessionId, newMessage: content });
const responses = await Array.fromAsync(stream);
const accumulatedResponse = responses
.flatMap((e) => e.content?.parts?.map((p) => p.text) ?? [])
.join('');
console.log(`\nMODEL RESPONSE: ${accumulatedResponse}\n`);
}
main();
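
For reference, a minimal sketch of how this removed quickstart was run, assuming a Toolbox server listening on http://127.0.0.1:5000 and `GOOGLE_API_KEY` exported in the shell (the file name follows the `main: quickstart.js` entry in the package.json above):

```bash
# Install @google/adk and @toolbox-sdk/adk from package.json, then run the agent
npm install
export GOOGLE_API_KEY="your-api-key"
node quickstart.js
```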

View File

@@ -569,12 +569,11 @@
}
},
"node_modules/jws": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
"integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
"license": "MIT",
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
"integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
"dependencies": {
"jwa": "^2.0.1",
"jwa": "^2.0.0",
"safe-buffer": "^5.0.1"
}
},

View File

@@ -3376,23 +3376,22 @@
}
},
"node_modules/body-parser": {
"version": "1.20.4",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.4.tgz",
"integrity": "sha512-ZTgYYLMOXY9qKU/57FAo8F+HA2dGX7bqGc71txDRC1rS4frdFI5R7NhluHxH6M0YItAP0sHB4uqAOcYKxO6uGA==",
"license": "MIT",
"version": "1.20.3",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz",
"integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==",
"dependencies": {
"bytes": "~3.1.2",
"bytes": "3.1.2",
"content-type": "~1.0.5",
"debug": "2.6.9",
"depd": "2.0.0",
"destroy": "~1.2.0",
"http-errors": "~2.0.1",
"iconv-lite": "~0.4.24",
"on-finished": "~2.4.1",
"qs": "~6.14.0",
"raw-body": "~2.5.3",
"destroy": "1.2.0",
"http-errors": "2.0.0",
"iconv-lite": "0.4.24",
"on-finished": "2.4.1",
"qs": "6.13.0",
"raw-body": "2.5.2",
"type-is": "~1.6.18",
"unpipe": "~1.0.0"
"unpipe": "1.0.0"
},
"engines": {
"node": ">= 0.8",
@@ -3407,40 +3406,11 @@
"ms": "2.0.0"
}
},
"node_modules/body-parser/node_modules/http-errors": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz",
"integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==",
"license": "MIT",
"dependencies": {
"depd": "~2.0.0",
"inherits": "~2.0.4",
"setprototypeof": "~1.2.0",
"statuses": "~2.0.2",
"toidentifier": "~1.0.1"
},
"engines": {
"node": ">= 0.8"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/body-parser/node_modules/ms": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="
},
"node_modules/body-parser/node_modules/statuses": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz",
"integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==",
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
},
"node_modules/buffer-equal-constant-time": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
@@ -3464,7 +3434,6 @@
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
"integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==",
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
@@ -3861,39 +3830,38 @@
}
},
"node_modules/express": {
"version": "4.22.1",
"resolved": "https://registry.npmjs.org/express/-/express-4.22.1.tgz",
"integrity": "sha512-F2X8g9P1X7uCPZMA3MVf9wcTqlyNp7IhH5qPCI0izhaOIYXaW9L535tGA3qmjRzpH+bZczqq7hVKxTR4NWnu+g==",
"license": "MIT",
"version": "4.21.2",
"resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz",
"integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==",
"dependencies": {
"accepts": "~1.3.8",
"array-flatten": "1.1.1",
"body-parser": "~1.20.3",
"content-disposition": "~0.5.4",
"body-parser": "1.20.3",
"content-disposition": "0.5.4",
"content-type": "~1.0.4",
"cookie": "~0.7.1",
"cookie-signature": "~1.0.6",
"cookie": "0.7.1",
"cookie-signature": "1.0.6",
"debug": "2.6.9",
"depd": "2.0.0",
"encodeurl": "~2.0.0",
"escape-html": "~1.0.3",
"etag": "~1.8.1",
"finalhandler": "~1.3.1",
"fresh": "~0.5.2",
"http-errors": "~2.0.0",
"finalhandler": "1.3.1",
"fresh": "0.5.2",
"http-errors": "2.0.0",
"merge-descriptors": "1.0.3",
"methods": "~1.1.2",
"on-finished": "~2.4.1",
"on-finished": "2.4.1",
"parseurl": "~1.3.3",
"path-to-regexp": "~0.1.12",
"path-to-regexp": "0.1.12",
"proxy-addr": "~2.0.7",
"qs": "~6.14.0",
"qs": "6.13.0",
"range-parser": "~1.2.1",
"safe-buffer": "5.2.1",
"send": "~0.19.0",
"serve-static": "~1.16.2",
"send": "0.19.0",
"serve-static": "1.16.2",
"setprototypeof": "1.2.0",
"statuses": "~2.0.1",
"statuses": "2.0.1",
"type-is": "~1.6.18",
"utils-merge": "1.0.1",
"vary": "~1.1.2"
@@ -4936,7 +4904,6 @@
"version": "0.4.24",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
"integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
"license": "MIT",
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3"
},
@@ -5694,12 +5661,11 @@
}
},
"node_modules/qs": {
"version": "6.14.1",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.14.1.tgz",
"integrity": "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==",
"license": "BSD-3-Clause",
"version": "6.13.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz",
"integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==",
"dependencies": {
"side-channel": "^1.1.0"
"side-channel": "^1.0.6"
},
"engines": {
"node": ">=0.6"
@@ -5717,49 +5683,19 @@
}
},
"node_modules/raw-body": {
"version": "2.5.3",
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.3.tgz",
"integrity": "sha512-s4VSOf6yN0rvbRZGxs8Om5CWj6seneMwK3oDb4lWDH0UPhWcxwOWw5+qk24bxq87szX1ydrwylIOp2uG1ojUpA==",
"license": "MIT",
"version": "2.5.2",
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz",
"integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==",
"dependencies": {
"bytes": "~3.1.2",
"http-errors": "~2.0.1",
"iconv-lite": "~0.4.24",
"unpipe": "~1.0.0"
"bytes": "3.1.2",
"http-errors": "2.0.0",
"iconv-lite": "0.4.24",
"unpipe": "1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/raw-body/node_modules/http-errors": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz",
"integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==",
"license": "MIT",
"dependencies": {
"depd": "~2.0.0",
"inherits": "~2.0.4",
"setprototypeof": "~1.2.0",
"statuses": "~2.0.2",
"toidentifier": "~1.0.1"
},
"engines": {
"node": ">= 0.8"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/raw-body/node_modules/statuses": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz",
"integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==",
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
},
"node_modules/readable-stream": {
"version": "3.6.2",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
@@ -5877,8 +5813,7 @@
"node_modules/safer-buffer": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
"license": "MIT"
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"node_modules/semver": {
"version": "7.7.2",

View File

@@ -9,7 +9,7 @@
"version": "1.0.0",
"license": "ISC",
"dependencies": {
"@langchain/google-genai": "^2.0.0",
"@langchain/google-genai": "^1.0.0",
"@langchain/langgraph": "^1.0.0",
"@toolbox-sdk/core": "^0.1.2",
"langchain": "^1.0.0"
@@ -18,7 +18,8 @@
"node_modules/@cfworker/json-schema": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/@cfworker/json-schema/-/json-schema-4.1.1.tgz",
"integrity": "sha512-gAmrUZSGtKc3AiBL71iNWxDsyUC5uMaKKGdvzYsBoTW/xi42JQHl7eKV2OYzCUqvc+D2RCcf7EXY2iCyFIk6og=="
"integrity": "sha512-gAmrUZSGtKc3AiBL71iNWxDsyUC5uMaKKGdvzYsBoTW/xi42JQHl7eKV2OYzCUqvc+D2RCcf7EXY2iCyFIk6og==",
"peer": true
},
"node_modules/@google/generative-ai": {
"version": "0.24.1",
@@ -45,10 +46,9 @@
}
},
"node_modules/@langchain/core": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/@langchain/core/-/core-1.1.8.tgz",
"integrity": "sha512-kIUidOgc0ZdyXo4Ahn9Zas+OayqOfk4ZoKPi7XaDipNSWSApc2+QK5BVcjvwtzxstsNOrmXJiJWEN6WPF/MvAw==",
"license": "MIT",
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@langchain/core/-/core-1.0.2.tgz",
"integrity": "sha512-6mOn4bZyO6XT0GGrEijRtMVrmYJGZ8y1BcwyTPDptFz38lP0CEzrKEYB++h+u3TEcAd3eO25l1aGw/zVlVgw2Q==",
"peer": true,
"dependencies": {
"@cfworker/json-schema": "^4.0.2",
@@ -56,9 +56,10 @@
"camelcase": "6",
"decamelize": "1.2.0",
"js-tiktoken": "^1.0.12",
"langsmith": ">=0.4.0 <1.0.0",
"langsmith": "^0.3.64",
"mustache": "^4.2.0",
"p-queue": "^6.6.2",
"p-retry": "4",
"uuid": "^10.0.0",
"zod": "^3.25.76 || ^4"
},
@@ -67,10 +68,9 @@
}
},
"node_modules/@langchain/google-genai": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/@langchain/google-genai/-/google-genai-2.1.3.tgz",
"integrity": "sha512-ZdlFK/N10GyU6ATzkM01Sk1rlHBoy36Q/MawGD1SyXdD2lQxZxuQZjFWewj6uzWQ2Nnjj70EvU/kmmHVPn6sfQ==",
"license": "MIT",
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@langchain/google-genai/-/google-genai-1.0.0.tgz",
"integrity": "sha512-ICUBZl/46nG6+Yhe5v7kp/2TQBGOzqEkpfKPLDeNyJ4x9OOL46xsW3ZZrHJjhGMQuR6/JMmQMTU9kLoYgsd1Tg==",
"dependencies": {
"@google/generative-ai": "^0.24.0",
"uuid": "^11.1.0"
@@ -79,7 +79,7 @@
"node": ">=20"
},
"peerDependencies": {
"@langchain/core": "1.1.8"
"@langchain/core": "^1.0.0"
}
},
"node_modules/@langchain/google-genai/node_modules/uuid": {
@@ -90,7 +90,6 @@
"https://github.com/sponsors/broofa",
"https://github.com/sponsors/ctavan"
],
"license": "MIT",
"bin": {
"uuid": "dist/esm/bin/uuid"
}
@@ -225,6 +224,7 @@
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz",
"integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==",
"peer": true,
"engines": {
"node": ">=10"
},
@@ -308,6 +308,7 @@
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz",
"integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==",
"peer": true,
"engines": {
"node": ">=10"
},
@@ -420,6 +421,7 @@
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
"integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==",
"peer": true,
"engines": {
"node": ">=0.10.0"
}
@@ -821,6 +823,7 @@
"version": "1.0.21",
"resolved": "https://registry.npmjs.org/js-tiktoken/-/js-tiktoken-1.0.21.tgz",
"integrity": "sha512-biOj/6M5qdgx5TKjDnFT1ymSpM5tbd3ylwDtrQvFQSu0Z7bBYko2dF+W/aUkXUPuk6IVpRxk/3Q2sHOzGlS36g==",
"peer": true,
"dependencies": {
"base64-js": "^1.5.1"
}
@@ -844,24 +847,22 @@
}
},
"node_modules/jws": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
"integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
"license": "MIT",
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
"integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
"dependencies": {
"jwa": "^2.0.1",
"jwa": "^2.0.0",
"safe-buffer": "^5.0.1"
}
},
"node_modules/langchain": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/langchain/-/langchain-1.2.3.tgz",
"integrity": "sha512-3k986xJuqg4az53JxV5LnGlOzIXF1d9Kq6Y9s7XjitvzhpsbFuTDV5/kiF4cx3pkNGyw0mUXC4tLz9RxucO0hw==",
"license": "MIT",
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/langchain/-/langchain-1.0.2.tgz",
"integrity": "sha512-He/xvjVl8DHESvdaW6Dpyba72OaLCAfS2CyOm1aWrlJ4C38dKXyTIxphtld8hiii6MWX7qMSmu2EaUwWBx2STg==",
"dependencies": {
"@langchain/langgraph": "^1.0.0",
"@langchain/langgraph-checkpoint": "^1.0.0",
"langsmith": ">=0.4.0 <1.0.0",
"langsmith": "~0.3.74",
"uuid": "^10.0.0",
"zod": "^3.25.76 || ^4"
},
@@ -869,19 +870,19 @@
"node": ">=20"
},
"peerDependencies": {
"@langchain/core": "1.1.8"
"@langchain/core": "^1.0.0"
}
},
"node_modules/langsmith": {
"version": "0.4.3",
"resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.4.3.tgz",
"integrity": "sha512-vuBAagBZulXj0rpZhUTxmHhrYIBk53z8e2Q8ty4OHVkahN4ul7Im3OZxD9jsXZB0EuncK1xRYtY8J3BW4vj1zw==",
"license": "MIT",
"version": "0.3.77",
"resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.3.77.tgz",
"integrity": "sha512-wbS/9IX/hOAsOEOtPj8kCS8H0tFHaelwQ97gTONRtIfoPPLd9MMUmhk0KQB5DdsGAI5abg966+f0dZ/B+YRRzg==",
"dependencies": {
"@types/uuid": "^10.0.0",
"chalk": "^4.1.2",
"console-table-printer": "^2.12.1",
"p-queue": "^6.6.2",
"p-retry": "4",
"semver": "^7.6.3",
"uuid": "^10.0.0"
},
@@ -969,6 +970,7 @@
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz",
"integrity": "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==",
"peer": true,
"bin": {
"mustache": "bin/mustache"
}
@@ -1407,7 +1409,6 @@
"version": "3.25.76",
"resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
"peer": true,
"funding": {
"url": "https://github.com/sponsors/colinhacks"
}

View File

@@ -11,7 +11,7 @@
"author": "",
"license": "ISC",
"dependencies": {
"@langchain/google-genai": "^2.0.0",
"@langchain/google-genai": "^1.0.0",
"@langchain/langgraph": "^1.0.0",
"@toolbox-sdk/core": "^0.1.2",
"langchain": "^1.0.0"

View File

@@ -882,12 +882,11 @@
}
},
"node_modules/jws": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
"integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
"license": "MIT",
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
"integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
"dependencies": {
"jwa": "^2.0.1",
"jwa": "^2.0.0",
"safe-buffer": "^5.0.1"
}
},

View File

@@ -1,15 +1,71 @@
from google.adk import Agent
from google.adk.apps import App
from google.adk.agents import Agent
from google.adk.runners import Runner
from google.adk.sessions import InMemorySessionService
from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService
from google.genai import types
from toolbox_core import ToolboxSyncClient
# TODO(developer): update the TOOLBOX_URL to your toolbox endpoint
client = ToolboxSyncClient("http://127.0.0.1:5000")
import asyncio
import os
root_agent = Agent(
name='root_agent',
model='gemini-2.5-flash',
instruction="You are a helpful AI assistant designed to provide accurate and useful information.",
tools=client.load_toolset(),
)
# TODO(developer): replace this with your Google API key
app = App(root_agent=root_agent, name="my_agent")
api_key = os.environ.get("GOOGLE_API_KEY") or "your-api-key" # Set your API key here
os.environ["GOOGLE_API_KEY"] = api_key
async def main():
with ToolboxSyncClient("http://127.0.0.1:5000") as toolbox_client:
prompt = """
You're a helpful hotel assistant. You handle hotel searching, booking, and
cancellations. When the user searches for a hotel, mention its name, id,
location and price tier. Always mention hotel ids while performing any
searches. This is very important for any operations. For any bookings or
cancellations, please provide the appropriate confirmation. Be sure to
update checkin or checkout dates if mentioned by the user.
Don't ask for confirmations from the user.
"""
root_agent = Agent(
model='gemini-2.0-flash-001',
name='hotel_agent',
description='A helpful AI assistant.',
instruction=prompt,
tools=toolbox_client.load_toolset("my-toolset"),
)
session_service = InMemorySessionService()
artifacts_service = InMemoryArtifactService()
session = await session_service.create_session(
state={}, app_name='hotel_agent', user_id='123'
)
runner = Runner(
app_name='hotel_agent',
agent=root_agent,
artifact_service=artifacts_service,
session_service=session_service,
)
queries = [
"Find hotels in Basel with Basel in its name.",
"Can you book the Hilton Basel for me?",
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
"My check in dates would be from April 10, 2024 to April 19, 2024.",
]
for query in queries:
content = types.Content(role='user', parts=[types.Part(text=query)])
events = runner.run(session_id=session.id,
user_id='123', new_message=content)
responses = (
part.text
for event in events
for part in event.content.parts
if part.text is not None
)
for text in responses:
print(text)
asyncio.run(main())
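
A minimal sketch of running the updated quickstart above, assuming the script is saved as `quickstart.py` (an illustrative name), a Toolbox server is listening on http://127.0.0.1:5000, and `GOOGLE_API_KEY` is set:

```bash
# Install the pinned dependencies, then run the hotel agent script
pip install google-adk toolbox-core
export GOOGLE_API_KEY="your-api-key"
python quickstart.py
```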

View File

@@ -1,3 +1,3 @@
google-adk==1.21.0
toolbox-core==0.5.4
pytest==9.0.2
google-adk==1.19.0
toolbox-core==0.5.3
pytest==9.0.1

View File

@@ -1,3 +1,3 @@
google-genai==1.57.0
toolbox-core==0.5.4
pytest==9.0.2
google-genai==1.52.0
toolbox-core==0.5.3
pytest==9.0.1

View File

@@ -1,5 +1,5 @@
langchain==1.2.2
langchain-google-vertexai==3.2.1
langgraph==1.0.5
toolbox-langchain==0.5.4
pytest==9.0.2
langchain==1.0.8
langchain-google-vertexai==3.0.3
langgraph==1.0.3
toolbox-langchain==0.5.3
pytest==9.0.1

View File

@@ -1,4 +1,4 @@
llama-index==0.14.12
llama-index-llms-google-genai==0.8.3
toolbox-llamaindex==0.5.4
pytest==9.0.2
llama-index==0.14.8
llama-index-llms-google-genai==0.7.3
toolbox-llamaindex==0.5.3
pytest==9.0.1

View File

@@ -44,28 +44,15 @@ class TestExecution:
@pytest.fixture(scope="function")
def script_output(self, capsys):
"""Run the quickstart function and return its output."""
# TODO: Add better validation for ADK once we have a way to capture its
# output.
if ORCH_NAME == "adk":
return quickstart.app.root_agent.name
else:
asyncio.run(quickstart.main())
asyncio.run(quickstart.main())
return capsys.readouterr()
def test_script_runs_without_errors(self, script_output):
"""Test that the script runs and produces no stderr."""
if ORCH_NAME == "adk":
return
assert script_output.err == "", f"Script produced stderr: {script_output.err}"
def test_keywords_in_output(self, script_output, golden_keywords):
"""Test that expected keywords are present in the script's output."""
if ORCH_NAME == "adk":
assert script_output == "root_agent"
return
output = script_output.out
missing_keywords = [kw for kw in golden_keywords if kw not in output]
assert not missing_keywords, f"Missing keywords in output: {missing_keywords}"

View File

@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
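
Once downloaded, a minimal sketch of starting the server, assuming a `tools.yaml` in the current directory:

```bash
# Make the binary executable and start Toolbox with a local tools file
chmod +x toolbox
./toolbox --tools-file "tools.yaml"
```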

View File

@@ -52,7 +52,6 @@ instance, database and users:
* All `editor` and `viewer` tools
* `create_instance`
* `create_user`
* `clone_instance`
## Install MCP Toolbox
@@ -298,7 +297,6 @@ instances and interacting with your database:
* **list_databases**: Lists all databases for a Cloud SQL instance.
* **create_user**: Creates a new user in a Cloud SQL instance.
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
* **clone_instance**: Creates a clone of an existing Cloud SQL for SQL Server instance.
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs

View File

@@ -52,7 +52,6 @@ database and users:
* All `editor` and `viewer` tools
* `create_instance`
* `create_user`
* `clone_instance`
## Install MCP Toolbox
@@ -298,7 +297,6 @@ instances and interacting with your database:
* **list_databases**: Lists all databases for a Cloud SQL instance.
* **create_user**: Creates a new user in a Cloud SQL instance.
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
* **clone_instance**: Creates a clone of an existing Cloud SQL for MySQL instance.
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs

View File

@@ -52,7 +52,6 @@ instance, database and users:
* All `editor` and `viewer` tools
* `create_instance`
* `create_user`
* `clone_instance`
## Install MCP Toolbox
@@ -298,7 +297,6 @@ instances and interacting with your database:
* **list_databases**: Lists all databases for a Cloud SQL instance.
* **create_user**: Creates a new user in a Cloud SQL instance.
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
* **clone_instance**: Creates a clone of an existing Cloud SQL for PostgreSQL instance.
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs

View File

@@ -18,7 +18,6 @@ to expose your developer assistant tools to a Looker instance:
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Antigravity][antigravity]
[toolbox]: https://github.com/googleapis/genai-toolbox
[gemini-cli]: #configure-your-mcp-client
@@ -28,7 +27,6 @@ to expose your developer assistant tools to a Looker instance:
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[antigravity]: #connect-with-antigravity
## Set up Looker
@@ -40,55 +38,6 @@ to expose your developer assistant tools to a Looker instance:
listening at a different port, and you will need to use
`https://looker.example.com:19999` instead.
## Connect with Antigravity
You can connect Looker to Antigravity in the following ways:
* Using the MCP Store
* Using a custom configuration
{{< notice note >}}
You don't need to download the MCP Toolbox binary to use these methods.
{{< /notice >}}
{{< tabpane text=true >}}
{{% tab header="MCP Store" lang="en" %}}
The most straightforward way to connect to Looker in Antigravity is by using the built-in MCP Store.
1. Open Antigravity and open the editor's agent panel.
1. Click the **"..."** icon at the top of the panel and select **MCP Servers**.
1. Locate **Looker** in the list of available servers and click Install.
1. Follow the on-screen prompts to securely link your accounts where applicable.
After you install Looker in the MCP Store, resources and tools from the server are automatically available to the editor.
{{% /tab %}}
{{% tab header="Custom config" lang="en" %}}
To connect to a custom MCP server, follow these steps:
1. Open Antigravity and navigate to the MCP store using the **"..."** drop-down at the top of the editor's agent panel.
1. To open the **mcp_config.json** file, click **MCP Servers** and then click **Manage MCP Servers > View raw config**.
1. Add the following configuration, replace the environment variables with your values, and save.
```json
{
"mcpServers": {
"looker": {
"command": "npx",
"args": ["-y", "@toolbox-sdk/server", "--prebuilt", "looker", "--stdio"],
"env": {
"LOOKER_BASE_URL": "https://looker.example.com",
"LOOKER_CLIENT_ID": "your-client-id",
"LOOKER_CLIENT_SECRET": "your-client-secret"
}
}
}
}
```
{{% /tab %}}
{{< /tabpane >}}
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct
@@ -100,19 +49,19 @@ After you install Looker in the MCP Store, resources and tools from the server a
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
@@ -341,7 +290,7 @@ assistant to list models, explores, dimensions, and measures. Run a
query, retrieve the SQL for a query, and run a saved Look.
The full tool list is available in the [Prebuilt Tools
Reference](../../reference/prebuilt-tools.md/#looker).
Reference](../../reference/prebuilt-tools/#looker).
The following tools are available to the LLM:
@@ -374,8 +323,6 @@ instance and create new saved content.
data
1. **make_dashboard**: Create a saved dashboard in Looker and return the URL
1. **add_dashboard_element**: Add a tile to a dashboard
1. **add_dashboard_filter**: Add a filter to a dashboard
1. **generate_embed_url**: Generate an embed url for content
### Looker Instance Health Tools

View File

@@ -45,19 +45,19 @@ instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

View File

@@ -43,19 +43,19 @@ expose your developer assistant tools to a MySQL instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

View File

@@ -44,19 +44,19 @@ expose your developer assistant tools to a Neo4j instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

View File

@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

View File

@@ -43,19 +43,19 @@ to expose your developer assistant tools to a SQLite instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.25.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.21.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

View File

@@ -1,147 +0,0 @@
---
title: "Deploy ADK Agent and MCP Toolbox"
type: docs
weight: 4
description: >
How to deploy your ADK Agent to Vertex AI Agent Engine and connect it to an MCP Toolbox deployed on Cloud Run.
---
## Before you begin
This guide assumes you have already done the following:
1. Completed the [Python Quickstart
(Local)](../getting-started/local_quickstart.md) and have a working ADK
agent running locally.
2. Installed the [Google Cloud CLI](https://cloud.google.com/sdk/docs/install).
3. A Google Cloud project with billing enabled.
## Step 1: Deploy MCP Toolbox to Cloud Run
Before deploying your agent, your MCP Toolbox server needs to be accessible from
the cloud. We will deploy MCP Toolbox to Cloud Run.
Follow the [Deploy to Cloud Run](deploy_toolbox.md) guide to deploy your MCP
Toolbox instance.
{{% alert title="Important" %}}
After deployment, note down the Service URL of your MCP Toolbox Cloud Run
service. You will need this to configure your agent.
{{% /alert %}}
## Step 2: Prepare your Agent for Deployment
We will use the `agent-starter-pack` tool to enhance your local agent project
with the necessary configuration for deployment to Vertex AI Agent Engine.
1. Open a terminal and navigate to the **parent directory** of your agent
project (the directory containing the `my_agent` folder).
2. Run the following command to enhance your project:
```bash
uvx agent-starter-pack enhance --adk -d agent_engine
```
3. Follow the interactive prompts to configure your deployment settings. This
process will generate deployment configuration files (like a `Makefile` and
`Dockerfile`) in your project directory.
4. Add `toolbox-core` as a dependency to the new project:
```bash
uv add toolbox-core
```
## Step 3: Configure Google Cloud Authentication
Ensure your local environment is authenticated with Google Cloud to perform the
deployment.
1. Login with Application Default Credentials (ADC):
```bash
gcloud auth application-default login
```
2. Set your active project:
```bash
gcloud config set project <YOUR_PROJECT_ID>
```
## Step 4: Connect Agent to Deployed MCP Toolbox
You need to update your agent's code to connect to the Cloud Run URL of your MCP
Toolbox instead of the local address.
1. Recall that you can find the Cloud Run deployment URL of the MCP Toolbox
server using the following command:
```bash
gcloud run services describe toolbox --format 'value(status.url)'
```
2. Open your agent file (`my_agent/agent.py`).
3. Update the `ToolboxSyncClient` initialization to use your Cloud Run URL.
{{% alert color="info" %}}
Since Cloud Run services are secured by default, you also need to provide an
authentication token.
{{% /alert %}}
Replace your existing client initialization code with the following:
```python
from google.adk import Agent
from google.adk.apps import App
from toolbox_core import ToolboxSyncClient, auth_methods
# TODO(developer): Replace with your Toolbox Cloud Run Service URL
TOOLBOX_URL = "https://your-toolbox-service-xyz.a.run.app"
# Initialize the client with the Cloud Run URL and Auth headers
client = ToolboxSyncClient(
TOOLBOX_URL,
client_headers={"Authorization": auth_methods.get_google_id_token(TOOLBOX_URL)}
)
root_agent = Agent(
name='root_agent',
model='gemini-2.5-flash',
instruction="You are a helpful AI assistant designed to provide accurate and useful information.",
tools=client.load_toolset(),
)
app = App(root_agent=root_agent, name="my_agent")
```
{{% alert title="Important" %}}
Ensure that the `name` parameter in the `App` initialization matches the name of
your agent's parent directory (e.g., `my_agent`).
```python
...
app = App(root_agent=root_agent, name="my_agent")
```
{{% /alert %}}
## Step 5: Deploy to Agent Engine
Run the deployment command:
```bash
make backend
```
This command will build your agent's container image and deploy it to Vertex AI.
## Step 6: Test your Deployment
Once the deployment command (`make backend`) completes, it will output the URL
for the Agent Engine Playground. You can click on this URL to open the
Playground in your browser and start chatting with your agent to test the tools.
For additional test scenarios, refer to the [Test deployed
agent](https://google.github.io/adk-docs/deploy/agent-engine/#test-deployment)
section in the ADK documentation.

View File

@@ -67,18 +67,6 @@ networks:
```
{{< notice tip >}}
To prevent DNS rebinding attacks, use the `--allowed-hosts` flag to specify a
list of hosts for validation. E.g. `command: [ "toolbox",
"--tools-file", "/config/tools.yaml", "--address", "0.0.0.0",
"--allowed-hosts", "localhost:5000"]`
To implement CORS, use the `--allowed-origins` flag to specify a
list of origins permitted to access the server. E.g. `command: [ "toolbox",
"--tools-file", "/config/tools.yaml", "--address", "0.0.0.0",
"--allowed-origins", "https://foo.bar"]`
{{< /notice >}}
1. Run the following command to bring up the Toolbox and Postgres instance
```bash

View File

@@ -188,16 +188,6 @@ description: >
path: tools.yaml
```
{{< notice tip >}}
To prevent DNS rebinding attacks, use the `--allowed-hosts` flag to specify a
list of hosts permitted to access the server. E.g. `args: ["--address",
"0.0.0.0", "--allowed-hosts", "foo.bar:5000"]`
To implement CORS, use the `--allowed-origins` flag to specify a
list of origins permitted to access the server. E.g. `args: ["--address",
"0.0.0.0", "--allowed-origins", "https://foo.bar"]`
{{< /notice >}}
1. Create the deployment.
```bash

View File

@@ -104,7 +104,7 @@ section.
export IMAGE=us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:latest
```
{{< notice note >}}
{{< notice note >}}
**The `$PORT` Environment Variable**
Google Cloud Run dictates the port your application must listen on by setting
the `$PORT` environment variable inside your container. This value defaults to
@@ -140,49 +140,6 @@ deployment will time out.
# --allow-unauthenticated # https://cloud.google.com/run/docs/authenticating/public#gcloud
```
### Update deployed server to be secure
To prevent DNS rebinding attacks, use the `--allowed-hosts` flag to specify a
list of hosts. To do that, you will
have to re-deploy the Cloud Run service with the new flag.
To implement CORS checks, use the `--allowed-origins` flag to specify a list of
origins permitted to access the server.
1. Set an environment variable to the cloud run url:
```bash
export URL=<cloud run url>
export HOST=<cloud run host>
```
2. Redeploy Toolbox:
```bash
gcloud run deploy toolbox \
--image $IMAGE \
--service-account toolbox-identity \
--region us-central1 \
--set-secrets "/app/tools.yaml=tools:latest" \
--args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL","--allowed-hosts=$HOST"
# --allow-unauthenticated # https://cloud.google.com/run/docs/authenticating/public#gcloud
```
If you are using a VPC network, use the command below:
```bash
gcloud run deploy toolbox \
--image $IMAGE \
--service-account toolbox-identity \
--region us-central1 \
--set-secrets "/app/tools.yaml=tools:latest" \
--args="--tools-file=/app/tools.yaml","--address=0.0.0.0","--port=8080","--allowed-origins=$URL","--allowed-hosts=$HOST" \
# TODO(dev): update the following to match your VPC if necessary
--network default \
--subnet default
# --allow-unauthenticated # https://cloud.google.com/run/docs/authenticating/public#gcloud
```
## Connecting with Toolbox Client SDK
You can connect to Toolbox Cloud Run instances directly through the SDK.
@@ -205,23 +162,18 @@ You can connect to Toolbox Cloud Run instances directly through the SDK.
{{< tabpane persist=header >}}
{{< tab header="Python" lang="python" >}}
import asyncio
from toolbox_core import ToolboxClient, auth_methods
# Replace with the Cloud Run service URL generated in the previous step
URL = "https://cloud-run-url.app"
auth_token_provider = auth_methods.aget_google_id_token(URL) # can also use sync method
async def main():
async with ToolboxClient(
URL,
client_headers={"Authorization": auth_token_provider},
) as toolbox:
toolset = await toolbox.load_toolset()
# ...
asyncio.run(main())
async with ToolboxClient(
URL,
client_headers={"Authorization": auth_token_provider},
) as toolbox:
{{< /tab >}}
{{< tab header="Javascript" lang="javascript" >}}
import { ToolboxClient } from '@toolbox-sdk/core';

View File

@@ -79,16 +79,12 @@ There are a couple of steps to run and use a Collector.
```
1. Run toolbox with the `--telemetry-otlp` flag. Configure it to send telemetry to
`127.0.0.1:4553` (for HTTP) or the Collector's URL.
`http://127.0.0.1:4553` (for HTTP) or the Collector's URL.
```bash
./toolbox --telemetry-otlp=127.0.0.1:4553
./toolbox --telemetry-otlp=http://127.0.0.1:4553
```
{{< notice tip >}}
To pass an insecure endpoint, set environment variable `OTEL_EXPORTER_OTLP_INSECURE=true`.
{{< /notice >}}
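
A sketch combining the flag and the tip above, assuming a local Collector on the HTTP port used in this guide:

```bash
# Allow an insecure (non-TLS) OTLP endpoint, then point Toolbox at the Collector
export OTEL_EXPORTER_OTLP_INSECURE=true
./toolbox --telemetry-otlp=http://127.0.0.1:4553
```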
1. Once telemetry data is collected, you can view it in your telemetry
backend. If you are using GCP exporters, telemetry will be visible in the GCP
dashboard at [Metrics Explorer][metrics-explorer] and [Trace

View File

@@ -8,26 +8,24 @@ description: >
## Reference
| Flag (Short) | Flag (Long) | Description | Default |
|--------------|----------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------|
| `-a` | `--address` | Address of the interface the server will listen on. | `127.0.0.1` |
| | `--disable-reload` | Disables dynamic reloading of tools file. | |
| `-h` | `--help` | help for toolbox | |
| | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
| | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
| `-p` | `--port` | Port the server will listen on. | `5000` |
| | `--prebuilt` | Use a prebuilt tool configuration by source type. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
| | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
| | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
| | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
| | `--telemetry-service-name` | Sets the value of the service.name resource attribute for telemetry data. | `toolbox` |
| | `--tools-file` | File path specifying the tool configuration. Cannot be used with --tools-files or --tools-folder. | |
| | `--tools-files` | Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file or --tools-folder. | |
| | `--tools-folder` | Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file or --tools-files. | |
| | `--ui` | Launches the Toolbox UI web server. | |
| | `--allowed-origins` | Specifies a list of origins permitted to access this server for CORS access. | `*` |
| | `--allowed-hosts` | Specifies a list of hosts permitted to access this server to prevent DNS rebinding attacks. | `*` |
| `-v` | `--version` | version for toolbox | |
| Flag (Short) | Flag (Long) | Description | Default |
|--------------|----------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------|
| `-a` | `--address` | Address of the interface the server will listen on. | `127.0.0.1` |
| | `--disable-reload` | Disables dynamic reloading of tools file. | |
| `-h` | `--help` | help for toolbox | |
| | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
| | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
| `-p` | `--port` | Port the server will listen on. | `5000` |
| | `--prebuilt` | Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
| | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
| | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
| | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
| | `--telemetry-service-name` | Sets the value of the service.name resource attribute for telemetry data. | `toolbox` |
| | `--tools-file` | File path specifying the tool configuration. Cannot be used with --prebuilt, --tools-files, or --tools-folder. | |
| | `--tools-files` | Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --prebuilt, --tools-file, or --tools-folder. | |
| | `--tools-folder` | Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --prebuilt, --tools-file, or --tools-files. | |
| | `--ui` | Launches the Toolbox UI web server. | |
| `-v` | `--version` | version for toolbox | |
## Examples
@@ -47,9 +45,6 @@ description: >
```bash
# Basic server with custom port configuration
./toolbox --tools-file "tools.yaml" --port 8080
# Server with prebuilt + custom tools configurations
./toolbox --tools-file tools.yaml --prebuilt alloydb-postgres
```
### Tool Configuration Sources
@@ -76,8 +71,8 @@ The CLI supports multiple mutually exclusive ways to specify tool configurations
{{< notice tip >}}
The CLI enforces mutual exclusivity between configuration source flags,
preventing simultaneous use of the file-based options, ensuring only one of
`--tools-file`, `--tools-files`, or `--tools-folder` is
preventing simultaneous use of `--prebuilt` with file-based options, and
ensuring only one of `--tools-file`, `--tools-files`, or `--tools-folder` is
used at a time.
{{< /notice >}}
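For instance, under these rules the following invocations illustrate what is and is not accepted (paths and the prebuilt value are placeholders):

```bash
# Valid: exactly one configuration source
./toolbox --tools-folder ./my-configs

# Invalid: mixes --prebuilt with a file-based option
./toolbox --prebuilt alloydb-postgres --tools-file tools.yaml

# Invalid: mixes two file-based options
./toolbox --tools-file tools.yaml --tools-folder ./my-configs
```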

View File

@@ -13,12 +13,6 @@ allowing developers to interact with and take action on databases.
See the guide, [Connect from your IDE](../how-to/connect-ide/_index.md), for
details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
{{< notice tip >}}
You can now use `--prebuilt` alongside `--tools-file`, `--tools-files`, or
`--tools-folder` to combine prebuilt configs with custom tools.
See [Usage Examples](../reference/cli.md#examples).
{{< /notice >}}
## AlloyDB Postgres
* `--prebuilt` value: `alloydb-postgres`
@@ -56,12 +50,6 @@ See [Usage Examples](../reference/cli.md#examples).
* `list_triggers`: Lists triggers in the database.
* `list_indexes`: List available user indexes in a PostgreSQL database.
* `list_sequences`: List sequences in a PostgreSQL database.
* `list_publication_tables`: List publication tables in a PostgreSQL database.
* `list_tablespaces`: Lists tablespaces in the database.
* `list_pg_settings`: List configuration parameters for the PostgreSQL server.
* `list_database_stats`: Lists the key performance and activity statistics for
each database in the AlloyDB instance.
* `list_roles`: Lists all the user-created roles in a PostgreSQL database.
## AlloyDB Postgres Admin
@@ -105,8 +93,6 @@ See [Usage Examples](../reference/cli.md#examples).
* `BIGQUERY_LOCATION`: (Optional) The dataset location.
* `BIGQUERY_USE_CLIENT_OAUTH`: (Optional) If `true`, forwards the client's
OAuth access token for authentication. Defaults to `false`.
* `BIGQUERY_SCOPES`: (Optional) A comma-separated list of OAuth scopes to
use for authentication.
* **Permissions:**
* **BigQuery User** (`roles/bigquery.user`) to execute queries and view
metadata.
@@ -192,8 +178,6 @@ See [Usage Examples](../reference/cli.md#examples).
* All `editor` and `viewer` tools
* `create_instance`
* `create_user`
* `clone_instance`
* **Tools:**
* `create_instance`: Creates a new Cloud SQL for MySQL instance.
* `get_instance`: Gets information about a Cloud SQL instance.
@@ -202,7 +186,6 @@ See [Usage Examples](../reference/cli.md#examples).
* `list_databases`: Lists all databases for a Cloud SQL instance.
* `create_user`: Creates a new user in a Cloud SQL instance.
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
* `clone_instance`: Creates a clone for an existing Cloud SQL for MySQL instance.
## Cloud SQL for PostgreSQL
@@ -241,12 +224,6 @@ See [Usage Examples](../reference/cli.md#examples).
* `list_triggers`: Lists triggers in the database.
* `list_indexes`: List available user indexes in a PostgreSQL database.
* `list_sequences`: List sequences in a PostgreSQL database.
* `list_publication_tables`: List publication tables in a PostgreSQL database.
* `list_tablespaces`: Lists tablespaces in the database.
* `list_pg_settings`: List configuration parameters for the PostgreSQL server.
* `list_database_stats`: Lists the key performance and activity statistics for
each database in the PostgreSQL instance.
* `list_roles`: Lists all the user-created roles in a PostgreSQL database.
## Cloud SQL for PostgreSQL Observability
@@ -280,7 +257,6 @@ See [Usage Examples](../reference/cli.md#examples).
* All `editor` and `viewer` tools
* `create_instance`
* `create_user`
* `clone_instance`
* **Tools:**
* `create_instance`: Creates a new Cloud SQL for PostgreSQL instance.
* `get_instance`: Gets information about a Cloud SQL instance.
@@ -289,7 +265,6 @@ See [Usage Examples](../reference/cli.md#examples).
* `list_databases`: Lists all databases for a Cloud SQL instance.
* `create_user`: Creates a new user in a Cloud SQL instance.
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
* `clone_instance`: Creates a clone for an existing Cloud SQL for PostgreSQL instance.
## Cloud SQL for SQL Server
@@ -341,7 +316,6 @@ See [Usage Examples](../reference/cli.md#examples).
* All `editor` and `viewer` tools
* `create_instance`
* `create_user`
* `clone_instance`
* **Tools:**
* `create_instance`: Creates a new Cloud SQL for SQL Server instance.
* `get_instance`: Gets information about a Cloud SQL instance.
@@ -350,7 +324,6 @@ See [Usage Examples](../reference/cli.md#examples).
* `list_databases`: Lists all databases for a Cloud SQL instance.
* `create_user`: Creates a new user in a Cloud SQL instance.
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
* `clone_instance`: Creates a clone for an existing Cloud SQL for SQL Server instance.
## Dataplex
@@ -424,8 +397,6 @@ See [Usage Examples](../reference/cli.md#examples).
* `run_dashboard`: Runs the queries associated with a dashboard.
* `make_dashboard`: Creates a new dashboard.
* `add_dashboard_element`: Adds a tile to a dashboard.
* `add_dashboard_filter`: Adds a filter to a dashboard.
* `generate_embed_url`: Generate an embed url for content.
* `health_pulse`: Test the health of a Looker instance.
* `health_analyze`: Analyze the LookML usage of a Looker instance.
* `health_vacuum`: Suggest LookML elements that can be removed.
@@ -472,8 +443,8 @@ See [Usage Examples](../reference/cli.md#examples).
* `--prebuilt` value: `mssql`
* **Environment Variables:**
* `MSSQL_HOST`: (Optional) The hostname or IP address of the SQL Server instance.
* `MSSQL_PORT`: (Optional) The port number for the SQL Server instance.
* `MSSQL_HOST`: The hostname or IP address of the SQL Server instance.
* `MSSQL_PORT`: The port number for the SQL Server instance.
* `MSSQL_DATABASE`: The name of the database to connect to.
* `MSSQL_USER`: The database username.
* `MSSQL_PASSWORD`: The password for the database user.
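A minimal sketch of launching the prebuilt configuration with these variables (all values are placeholders):

```bash
export MSSQL_HOST=127.0.0.1
export MSSQL_PORT=1433
export MSSQL_DATABASE=my_db
export MSSQL_USER=my-user
export MSSQL_PASSWORD=my-password
./toolbox --prebuilt mssql
```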
@@ -554,12 +525,6 @@ See [Usage Examples](../reference/cli.md#examples).
* `list_triggers`: Lists triggers in the database.
* `list_indexes`: List available user indexes in a PostgreSQL database.
* `list_sequences`: List sequences in a PostgreSQL database.
* `list_publication_tables`: List publication tables in a PostgreSQL database.
* `list_tablespaces`: Lists tablespaces in the database.
* `list_pg_settings`: List configuration parameters for the PostgreSQL server.
* `list_database_stats`: Lists the key performance and activity statistics for
each database in the PostgreSQL server.
* `list_roles`: Lists all the user-created roles in a PostgreSQL database.
## Google Cloud Serverless for Apache Spark
@@ -591,7 +556,6 @@ See [Usage Examples](../reference/cli.md#examples).
* `execute_sql`: Executes a DML SQL query.
* `execute_sql_dql`: Executes a DQL SQL query.
* `list_tables`: Lists tables in the database.
* `list_graphs`: Lists graphs in the database.
## Spanner (PostgreSQL dialect)

View File

@@ -1,84 +0,0 @@
---
title: "EmbeddingModels"
type: docs
weight: 2
description: >
EmbeddingModels represent services that transform text into vector embeddings for semantic search.
---
EmbeddingModels represent services that generate vector representations of text
data. In the MCP Toolbox, these models enable **Semantic Queries**,
allowing [Tools](../tools/) to automatically convert human-readable text into
numerical vectors before using them in a query.
This is primarily used in two scenarios:
- **Vector Ingestion**: Converting a text parameter into a vector string during
an `INSERT` operation.
- **Semantic Search**: Converting a natural language query into a vector to
perform similarity searches.
## Example
The following configuration defines an embedding model and applies it to
specific tool parameters.
{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your API keys into the configuration file.
{{< /notice >}}
### Step 1 - Define an Embedding Model
Define an embedding model in the `embeddingModels` section:
```yaml
embeddingModels:
gemini-model: # Name of the embedding model
kind: gemini
model: gemini-embedding-001
apiKey: ${GOOGLE_API_KEY}
dimension: 768
```
### Step 2 - Embed Tool Parameters
Using the defined embedding model, embed your query parameters with the
`embeddedBy` field. Only string-typed
parameters can be embedded:
```yaml
tools:
# Vector ingestion tool
insert_embedding:
kind: postgres-sql
source: my-pg-instance
statement: |
INSERT INTO documents (content, embedding)
VALUES ($1, $2);
parameters:
- name: content
type: string
- name: vector_string
type: string
description: The text to be vectorized and stored.
embeddedBy: gemini-model # refers to the name of a defined embedding model
# Semantic search tool
search_embedding:
kind: postgres-sql
source: my-pg-instance
statement: |
SELECT id, content, embedding <-> $1 AS distance
FROM documents
ORDER BY distance LIMIT 1
parameters:
- name: semantic_search_string
type: string
description: The search query that will be converted to a vector.
embeddedBy: gemini-model # refers to the name of a defined embedding model
```
## Kinds of Embedding Models

View File

@@ -1,73 +0,0 @@
---
title: "Gemini Embedding"
type: docs
weight: 1
description: >
Use Google's Gemini models to generate high-performance text embeddings for vector databases.
---
## About
Google Gemini provides state-of-the-art embedding models that convert text into
high-dimensional vectors.
### Authentication
Toolbox uses your [Application Default Credentials
(ADC)][adc] to authenticate with the
Gemini API client.
Optionally, you can use an [API key][api-key]. You can obtain an API
key from [Google AI Studio][ai-studio].
We recommend using an API key for testing and Application Default
Credentials for production.
[adc]: https://cloud.google.com/docs/authentication#adc
[api-key]: https://ai.google.dev/gemini-api/docs/api-key#api-keys
[ai-studio]: https://aistudio.google.com/app/apikey
## Behavior
### Automatic Vectorization
When a tool parameter is configured with `embeddedBy: <your-gemini-model-name>`,
the Toolbox intercepts the raw text input from the client and sends it to the
Gemini API. The resulting numerical array is then formatted before being passed
to your database source.
### Dimension Matching
The `dimension` field must match the expected size of your database column
(e.g., a `vector(768)` column in PostgreSQL). This setting is supported by newer
models (2024 and later) only. You cannot set this value when using the earlier model
(`models/embedding-001`). See [available Gemini models][modellist] for more
information.
[modellist]:
https://docs.cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#supported-models
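As an illustration, here is a hedged SQL sketch of a matching column, assuming the pgvector extension and a hypothetical `documents` table:

```sql
-- The vector width (768) must equal the model's `dimension` field
CREATE TABLE documents (
  id SERIAL PRIMARY KEY,
  content TEXT,
  embedding vector(768)
);
```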
## Example
```yaml
embeddingModels:
gemini-model:
kind: gemini
model: gemini-embedding-001
apiKey: ${GOOGLE_API_KEY}
dimension: 768
```
{{< notice tip >}}
Use environment variable replacement with the format ${ENV_NAME}
instead of hardcoding your secrets into the configuration file.
{{< /notice >}}
## Reference
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|--------------------------------------------------------------|
| kind | string | true | Must be `gemini`. |
| model | string | true | The Gemini model ID to use (e.g., `gemini-embedding-001`). |
| apiKey | string | false | Your API Key from Google AI Studio. |
| dimension | integer | false | The number of dimensions in the output vector (e.g., `768`). |

View File

@@ -71,34 +71,6 @@ cluster][alloydb-free-trial].
- [`postgres-replication-stats`](../tools/postgres/postgres-replication-stats.md)
List replication stats in a PostgreSQL database.
- [`postgres-list-query-stats`](../tools/postgres/postgres-list-query-stats.md)
List query statistics in a PostgreSQL database.
- [`postgres-get-column-cardinality`](../tools/postgres/postgres-get-column-cardinality.md)
List cardinality of columns in a table in a PostgreSQL database.
- [`postgres-list-table-stats`](../tools/postgres/postgres-list-table-stats.md)
List statistics of a table in a PostgreSQL database.
- [`postgres-list-publication-tables`](../tools/postgres/postgres-list-publication-tables.md)
List publication tables in a PostgreSQL database.
- [`postgres-list-tablespaces`](../tools/postgres/postgres-list-tablespaces.md)
List tablespaces in an AlloyDB for PostgreSQL database.
- [`postgres-list-pg-settings`](../tools/postgres/postgres-list-pg-settings.md)
List configuration parameters for the PostgreSQL server.
- [`postgres-list-database-stats`](../tools/postgres/postgres-list-database-stats.md)
Lists the key performance and activity statistics for each database in the AlloyDB
instance.
- [`postgres-list-roles`](../tools/postgres/postgres-list-roles.md)
Lists all the user-created roles in a PostgreSQL database.
- [`postgres-list-stored-procedure`](../tools/postgres/postgres-list-stored-procedure.md)
Lists all the stored procedures in a PostgreSQL database.
### Pre-built Configurations
- [AlloyDB using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/alloydb_pg_mcp/)

View File

@@ -94,13 +94,6 @@ intend to run. Common roles include `roles/bigquery.user` (which includes
permissions to run jobs and read data) or `roles/bigquery.dataViewer`.
Follow this [guide][set-adc] to set up your ADC.
If you are running on Google Compute Engine (GCE) or Google Kubernetes Engine
(GKE), you might need to explicitly set the access scopes for the service
account. While you can configure scopes when creating the VM or node pool, you
can also specify them in the source configuration using the `scopes` field.
Common scopes include `https://www.googleapis.com/auth/bigquery` or
`https://www.googleapis.com/auth/cloud-platform`.
### Authentication via User's OAuth Access Token
If the `useClientOAuth` parameter is set to `true`, Toolbox will instead use the
@@ -131,10 +124,6 @@ sources:
# - "my_dataset_1"
# - "other_project.my_dataset_2"
# impersonateServiceAccount: "service-account@project-id.iam.gserviceaccount.com" # Optional: Service account to impersonate
# scopes: # Optional: List of OAuth scopes to request.
# - "https://www.googleapis.com/auth/bigquery"
# - "https://www.googleapis.com/auth/drive.readonly"
# maxQueryResultRows: 50 # Optional: Limits the number of rows returned by queries. Defaults to 50.
```
Initialize a BigQuery source that uses the client's access token:
@@ -151,10 +140,6 @@ sources:
# - "my_dataset_1"
# - "other_project.my_dataset_2"
# impersonateServiceAccount: "service-account@project-id.iam.gserviceaccount.com" # Optional: Service account to impersonate
# scopes: # Optional: List of OAuth scopes to request.
# - "https://www.googleapis.com/auth/bigquery"
# - "https://www.googleapis.com/auth/drive.readonly"
# maxQueryResultRows: 50 # Optional: Limits the number of rows returned by queries. Defaults to 50.
```
## Reference
@@ -167,6 +152,4 @@ sources:
| writeMode | string | false | Controls the write behavior for tools. `allowed` (default): All queries are permitted. `blocked`: Only `SELECT` statements are allowed for the `bigquery-execute-sql` tool. `protected`: Enables session-based execution where all tools associated with this source instance share the same [BigQuery session](https://cloud.google.com/bigquery/docs/sessions-intro). This allows for stateful operations using temporary tables (e.g., `CREATE TEMP TABLE`). For `bigquery-execute-sql`, `SELECT` statements can be used on all tables, but write operations are restricted to the session's temporary dataset. For tools like `bigquery-sql`, `bigquery-forecast`, and `bigquery-analyze-contribution`, the `writeMode` restrictions do not apply, but they will operate within the shared session. **Note:** The `protected` mode cannot be used with `useClientOAuth: true`. It is also not recommended for multi-user server environments, as all users would share the same session. A session is terminated automatically after 24 hours of inactivity or after 7 days, whichever comes first. A new session is created on the next request, and any temporary data from the previous session will be lost. |
| allowedDatasets | []string | false | An optional list of dataset IDs that tools using this source are allowed to access. If provided, any tool operation attempting to access a dataset not in this list will be rejected. To enforce this, two types of operations are also disallowed: 1) Dataset-level operations (e.g., `CREATE SCHEMA`), and 2) operations where table access cannot be statically analyzed (e.g., `EXECUTE IMMEDIATE`, `CREATE PROCEDURE`). If a single dataset is provided, it will be treated as the default for prebuilt tools. |
| useClientOAuth | bool | false | If true, forwards the client's OAuth access token from the "Authorization" header to downstream queries. **Note:** This cannot be used with `writeMode: protected`. |
| scopes | []string | false | A list of OAuth 2.0 scopes to use for the credentials. If not provided, default scopes are used. |
| impersonateServiceAccount | string | false | Service account email to impersonate when making BigQuery and Dataplex API calls. The authenticated principal must have the `roles/iam.serviceAccountTokenCreator` role on the target service account. [Learn More](https://cloud.google.com/iam/docs/service-account-impersonation) |
| maxQueryResultRows | int | false | The maximum number of rows to return from a query. Defaults to 50. |
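For instance, a minimal sketch of a source using `writeMode: protected` (the project ID is a placeholder):

```yaml
sources:
  my-bigquery-source:
    kind: bigquery
    project: my-project
    writeMode: protected  # tools share one session; temp tables persist across calls
```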

View File

@@ -1,40 +0,0 @@
---
title: "Gemini Data Analytics"
type: docs
weight: 1
description: >
A "cloud-gemini-data-analytics" source provides a client for the Gemini Data Analytics API.
aliases:
- /resources/sources/cloud-gemini-data-analytics
---
## About
The `cloud-gemini-data-analytics` source provides a client to interact with the [Gemini Data Analytics API](https://docs.cloud.google.com/gemini/docs/conversational-analytics-api/reference/rest). This allows tools to send natural language queries to the API.
Authentication can be handled in two ways:
1. **Application Default Credentials (ADC) (Recommended):** By default, the source uses ADC to authenticate with the API. The Toolbox server will fetch the credentials from its running environment (server-side authentication). This is the recommended method.
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source expects the authentication token to be provided by the caller when making a request to the Toolbox server (typically via an HTTP Bearer token). The Toolbox server will then forward this token to the underlying Gemini Data Analytics API calls.
## Example
```yaml
sources:
my-gda-source:
kind: cloud-gemini-data-analytics
projectId: my-project-id
my-oauth-gda-source:
kind: cloud-gemini-data-analytics
projectId: my-project-id
useClientOAuth: true
```
## Reference
| **field** | **type** | **required** | **description** |
| -------------- | :------: | :----------: | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| kind | string | true | Must be "cloud-gemini-data-analytics". |
| projectId | string | true | The Google Cloud Project ID where the API is enabled. |
| useClientOAuth | boolean | false | If true, the source uses the token provided by the caller (forwarded to the API). Otherwise, it uses server-side Application Default Credentials (ADC). Defaults to `false`. |

View File

@@ -31,9 +31,6 @@ to a database by following these instructions][csql-mysql-quickstart].
- [`mysql-list-active-queries`](../tools/mysql/mysql-list-active-queries.md)
List active queries in Cloud SQL for MySQL.
- [`mysql-get-query-plan`](../tools/mysql/mysql-get-query-plan.md)
Provide information about how MySQL executes a SQL statement (EXPLAIN).
- [`mysql-list-tables`](../tools/mysql/mysql-list-tables.md)
List tables in a Cloud SQL for MySQL database.
@@ -91,40 +88,13 @@ mTLS.
[public-ip]: https://cloud.google.com/sql/docs/mysql/configure-ip
[conn-overview]: https://cloud.google.com/sql/docs/mysql/connect-overview
### Authentication
### Database User
This source supports both password-based authentication and IAM
authentication (using your [Application Default Credentials][adc]).
#### Standard Authentication
To connect using user/password, [create
a MySQL user][cloud-sql-users] and input your credentials in the `user` and
`password` fields.
```yaml
user: ${USER_NAME}
password: ${PASSWORD}
```
Currently, this source only uses standard authentication. You will need to [create
a MySQL user][cloud-sql-users] to log in to the database.
[cloud-sql-users]: https://cloud.google.com/sql/docs/mysql/create-manage-users
#### IAM Authentication
To connect using IAM authentication:
1. Prepare your database instance and user following this [guide][iam-guide].
2. Choose one of two ways to log in (see the config sketch below):
- Specify your IAM email as the `user`.
- Leave your `user` field blank. Toolbox will fetch the [ADC][adc]
automatically and log in using the email associated with it.
3. Leave the `password` field blank.
[iam-guide]: https://cloud.google.com/sql/docs/mysql/iam-logins
[cloudsql-users]: https://cloud.google.com/sql/docs/mysql/create-manage-users
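A minimal sketch of the ADC-based option, with placeholder project and instance names:

```yaml
sources:
  my-cloud-sql-mysql:
    kind: cloud-sql-mysql
    project: my-project
    region: us-central1
    instance: my-instance
    database: my_db
    # user and password omitted: Toolbox logs in with the email from ADC
```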
## Example
```yaml
@@ -154,6 +124,6 @@ instead of hardcoding your secrets into the configuration file.
| region | string | true | Name of the GCP region that the cluster was created in (e.g. "us-central1"). |
| instance | string | true | Name of the Cloud SQL instance within the cluster (e.g. "my-instance"). |
| database | string | true | Name of the MySQL database to connect to (e.g. "my_db"). |
| user | string | false | Name of the MySQL user to connect as (e.g "my-mysql-user"). Defaults to IAM auth using [ADC][adc] email if unspecified. |
| password | string | false | Password of the MySQL user (e.g. "my-password"). Defaults to attempting IAM authentication if unspecified. |
| user | string | true | Name of the MySQL user to connect as (e.g. "my-mysql-user"). |
| password | string | true | Password of the MySQL user (e.g. "my-password"). |
| ipType | string | false | IP Type of the Cloud SQL instance, must be either `public`, `private`, or `psc`. Default: `public`. |

View File

@@ -58,7 +58,6 @@ to a database by following these instructions][csql-pg-quickstart].
- [`postgres-list-sequences`](../tools/postgres/postgres-list-sequences.md)
List sequences in a PostgreSQL database.
- [`postgres-long-running-transactions`](../tools/postgres/postgres-long-running-transactions.md)
List long running transactions in a PostgreSQL database.
@@ -68,34 +67,6 @@ to a database by following these instructions][csql-pg-quickstart].
- [`postgres-replication-stats`](../tools/postgres/postgres-replication-stats.md)
List replication stats in a PostgreSQL database.
- [`postgres-list-query-stats`](../tools/postgres/postgres-list-query-stats.md)
List query statistics in a PostgreSQL database.
- [`postgres-get-column-cardinality`](../tools/postgres/postgres-get-column-cardinality.md)
List cardinality of columns in a table in a PostgreSQL database.
- [`postgres-list-table-stats`](../tools/postgres/postgres-list-table-stats.md)
List statistics of a table in a PostgreSQL database.
- [`postgres-list-publication-tables`](../tools/postgres/postgres-list-publication-tables.md)
List publication tables in a PostgreSQL database.
- [`postgres-list-tablespaces`](../tools/postgres/postgres-list-tablespaces.md)
List tablespaces in a PostgreSQL database.
- [`postgres-list-pg-settings`](../tools/postgres/postgres-list-pg-settings.md)
List configuration parameters for the PostgreSQL server.
- [`postgres-list-database-stats`](../tools/postgres/postgres-list-database-stats.md)
Lists the key performance and activity statistics for each database in the PostgreSQL
instance.
- [`postgres-list-roles`](../tools/postgres/postgres-list-roles.md)
Lists all the user-created roles in a PostgreSQL database.
- [`postgres-list-stored-procedure`](../tools/postgres/postgres-list-stored-procedure.md)
Lists all the stored procedures in a PostgreSQL database.
### Pre-built Configurations
- [Cloud SQL for Postgres using

View File

@@ -229,38 +229,22 @@ Finds resources that were created within, before, or after a given date or time.
### Aspect Search
To search for entries based on their attached aspects, use the following query syntax.
`has:x`
Matches `x` as a substring of the full path to the aspect type of an aspect that is attached to the entry, in the format `projectid.location.ASPECT_TYPE_ID`
`aspect:x`
Matches `x` as a substring of the full path to the aspect type of an aspect that is attached to the entry, in the format `projectid.location.ASPECT_TYPE_ID`
`aspect=x`
Matches `x` as the full path to the aspect type of an aspect that is attached to the entry, in the format `projectid.location.ASPECT_TYPE_ID`
`aspect:xOPERATORvalue`
Searches for aspect field values. Matches `x` as a substring of the full path to the aspect type and field name of an aspect that is attached to the entry, in the format `projectid.location.ASPECT_TYPE_ID.FIELD_NAME`
`has=x`
Matches `x` as the full path to the aspect type of an aspect that is attached to the entry, in the format `projectid.location.ASPECT_TYPE_ID`
The list of supported `{OPERATOR}`s depends on the type of field in the aspect, as follows:
- **String**: `=` (exact match) and `:` (substring)
- **All number types**: `=`, `:`, `<`, `>`, `<=`, `>=`, `=>`, `=<`
- **Enum**: `=`
- **Datetime**: same as for numbers, but the values to compare are treated as datetimes instead of numbers
- **Boolean**: `=`
`xOPERATORvalue`
Searches for aspect field values. Matches x as a substring of the full path to the aspect type and field name of an aspect that is attached to the entry, in the format `projectid.location.ASPECT_TYPE_ID.FIELD_NAME`
The list of supported operators depends on the type of field in the aspect, as follows:
* **String**: `=` (exact match)
* **All number types**: `=`, `:`, `<`, `>`, `<=`, `>=`, `=>`, `=<`
* **Enum**: `=` (exact match only)
* **Datetime**: same as for numbers, but the values to compare are treated as datetimes instead of numbers
* **Boolean**: `=`
Only top-level fields of the aspect are searchable.
* Syntax for system aspect types:
* `ASPECT_TYPE_ID.FIELD_NAME`
* `dataplex-types.ASPECT_TYPE_ID.FIELD_NAME`
* `dataplex-types.LOCATION.ASPECT_TYPE_ID.FIELD_NAME`
For example, the following queries match entries where the value of the `type` field in the `bigquery-dataset` aspect is `default`:
* `bigquery-dataset.type=default`
* `dataplex-types.bigquery-dataset.type=default`
* `dataplex-types.global.bigquery-dataset.type=default`
* Syntax for custom aspect types:
* If the aspect is created in the global region: `PROJECT_ID.ASPECT_TYPE_ID.FIELD_NAME`
* If the aspect is created in a specific region: `PROJECT_ID.REGION.ASPECT_TYPE_ID.FIELD_NAME`
For example, the following queries match entries where the value of the `is-enrolled` field in the `employee-info` aspect is `true`.
* `example-project.us-central1.employee-info.is-enrolled=true`
* `example-project.employee-info.is-enrolled=true`
Only top-level fields of the aspect are searchable. For example, all of the following queries match entries where the value of the `is-enrolled` field in the `employee-info` aspect type is `true`. Other entries that match on the substring are also returned.
- `aspect:example-project.us-central1.employee-info.is-enrolled=true`
- `aspect:example-project.us-central1.employee=true`
- `aspect:employee=true`
Example:
You can use the following filters:
@@ -274,25 +258,6 @@ Logical AND and logical OR are supported. For example, foo OR bar.
You can negate a predicate with a `-` (hyphen) or `NOT` prefix. For example, `-name:foo` returns resources with names that don't match the predicate `foo`.
Logical operators are case-sensitive. `OR` and `AND` are acceptable whereas `or` and `and` are not.
### Abbreviated syntax
An abbreviated search syntax is also available, using `|` (vertical bar) for `OR` operators and `,` (comma) for `AND` operators.
For example, to search for entries inside one of many projects using the `OR` operator, you can use the following abbreviated syntax:
`projectid:(id1|id2|id3|id4)`
The same search without using abbreviated syntax looks like the following:
`projectid:id1 OR projectid:id2 OR projectid:id3 OR projectid:id4`
To search for entries with matching column names, use the following:
* **AND**: `column:(name1,name2,name3)`
* **OR**: `column:(name1|name2|name3)`
This abbreviated syntax works for the qualified predicates except for `label` in keyword search.
### Request
1. Always try to rewrite the prompt using search syntax.

View File

@@ -91,17 +91,18 @@ instead of hardcoding your secrets into the configuration file.
## Reference
| **field** | **type** | **required** | **description** |
|----------------------|:--------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "looker". |
| base_url | string | true | The URL of your Looker server with no trailing /. |
| client_id | string | false | The client id assigned by Looker. |
| client_secret | string | false | The client secret assigned by Looker. |
| verify_ssl | string | false | Whether to check the ssl certificate of the server. |
| project | string | false | The project id to use in Google Cloud. |
| location | string | false | The location to use in Google Cloud. (default: us) |
| timeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, 120s is applied. |
| use_client_oauth | string | false | Use OAuth tokens instead of client_id and client_secret. (default: false) If a header name is provided, it will be used instead of "Authorization". |
| show_hidden_models | string | false | Show or hide hidden models. (default: true) |
| show_hidden_explores | string | false | Show or hide hidden explores. (default: true) |
| show_hidden_fields | string | false | Show or hide hidden fields. (default: true) |
| **field** | **type** | **required** | **description** |
|----------------------|:--------:|:------------:|-------------------------------------------------------------------------------------------|
| kind | string | true | Must be "looker". |
| base_url | string | true | The URL of your Looker server with no trailing /. |
| client_id | string | false | The client id assigned by Looker. |
| client_secret | string | false | The client secret assigned by Looker. |
| verify_ssl | string | false | Whether to check the ssl certificate of the server. |
| project | string | false | The project id to use in Google Cloud. |
| location | string | false | The location to use in Google Cloud. (default: us) |
| timeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, 120s is applied. |
| use_client_oauth     | string   |    false     | Use OAuth tokens instead of client_id and client_secret. (default: false) If a header name is provided, it will be used instead of "Authorization". |
| show_hidden_models | string | false | Show or hide hidden models. (default: true) |
| show_hidden_explores | string | false | Show or hide hidden explores. (default: true) |
| show_hidden_fields | string | false | Show or hide hidden fields. (default: true) |
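A minimal sketch of a corresponding source entry (the URL and credential variable names are placeholders):

```yaml
sources:
  my-looker-source:
    kind: looker
    base_url: https://looker.example.com
    client_id: ${LOOKER_CLIENT_ID}
    client_secret: ${LOOKER_CLIENT_SECRET}
    verify_ssl: true
    timeout: 120s
```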

Some files were not shown because too many files have changed in this diff.