Merge branch 'sample-testing-strategy-js-sh' of https://github.com/googleapis/genai-toolbox into sample-testing-strategy-js-poc

Harsh Jha
2025-09-23 17:46:44 +05:30
206 changed files with 7772 additions and 2031 deletions

View File

@@ -18,7 +18,7 @@ steps:
script: |
#!/usr/bin/env bash
docker buildx create --name container-builder --driver docker-container --bootstrap --use
docker buildx build --platform linux/amd64,linux/arm64 --build-arg COMMIT_SHA=$(git rev-parse HEAD) -t ${_DOCKER_URI}:$REF_NAME --push .
docker buildx build --platform linux/amd64,linux/arm64 --build-arg COMMIT_SHA=$(git rev-parse --short HEAD) -t ${_DOCKER_URI}:$REF_NAME --push .
- id: "install-dependencies"
name: golang:1
@@ -43,7 +43,7 @@ steps:
script: |
#!/usr/bin/env bash
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.linux.amd64
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.linux.amd64
- id: "store-linux-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"
@@ -65,7 +65,7 @@ steps:
script: |
#!/usr/bin/env bash
CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.arm64
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.darwin.arm64
- id: "store-darwin-arm64"
name: "gcr.io/cloud-builders/gcloud:latest"
@@ -87,7 +87,7 @@ steps:
script: |
#!/usr/bin/env bash
CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.amd64
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.darwin.amd64
- id: "store-darwin-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"
@@ -109,7 +109,7 @@ steps:
script: |
#!/usr/bin/env bash
CGO_ENABLED=0 GOOS=windows GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.windows.amd64
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.windows.amd64
- id: "store-windows-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"

View File

@@ -304,7 +304,7 @@ steps:
.ci/test_with_coverage.sh \
"Cloud SQL MySQL" \
cloudsqlmysql \
mysql
mysql || echo "Integration tests failed." # ignore test failures
- id: "mysql"
name: golang:1
@@ -326,7 +326,7 @@ steps:
.ci/test_with_coverage.sh \
"MySQL" \
mysql \
mysql
mysql || echo "Integration tests failed." # ignore test failures
- id: "mssql"
name: golang:1
@@ -475,7 +475,8 @@ steps:
- "OCEANBASE_PORT=$_OCEANBASE_PORT"
- "OCEANBASE_DATABASE=$_OCEANBASE_DATABASE"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["CLIENT_ID", "OCEANBASE_HOST", "OCEANBASE_USER", "OCEANBASE_PASSWORD"]
secretEnv:
["CLIENT_ID", "OCEANBASE_HOST", "OCEANBASE_USER", "OCEANBASE_PASSWORD"]
volumes:
- name: "go"
path: "/gopath"
@@ -597,7 +598,7 @@ steps:
firebirdsql firebirdexecutesql
- id: "clickhouse"
name : golang:1
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
@@ -651,7 +652,8 @@ steps:
- "YUGABYTEDB_PORT=$_YUGABYTEDB_PORT"
- "YUGABYTEDB_LOADBALANCE=$_YUGABYTEDB_LOADBALANCE"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["YUGABYTEDB_USER", "YUGABYTEDB_PASS", "YUGABYTEDB_HOST", "CLIENT_ID"]
secretEnv:
["YUGABYTEDB_USER", "YUGABYTEDB_PASS", "YUGABYTEDB_HOST", "CLIENT_ID"]
volumes:
- name: "go"
path: "/gopath"
@@ -745,7 +747,6 @@ availableSecrets:
- versionName: projects/$PROJECT_ID/secrets/yugabytedb_pass/versions/latest
env: YUGABYTEDB_PASS
options:
logging: CLOUD_LOGGING_ONLY
automapSubstitutions: true
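The `|| echo ... # ignore test failures` suffix added to the MySQL steps keeps a failing integration-test run from failing the whole build: the step's exit status becomes that of the `echo`, which is always 0. A minimal sketch of the pattern (the function name is illustrative):

```sh
#!/usr/bin/env bash
# Stand-in for the coverage script invocation; here it always fails.
run_integration_tests() {
  return 1
}

# Without `||`, the non-zero exit would abort the build step.
# With it, the echo runs instead and the compound command exits 0.
run_integration_tests || echo "Integration tests failed."  # ignore test failures
echo "exit status seen by the build step: $?"               # prints 0
```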

View File

@@ -1,10 +0,0 @@
{
"js": {
"postgresql-client": "15+248+deb12u1",
"wget": "1.21.3-1+deb12u1",
"gettext-base": "0.21-12",
"jq": "1.6-2.1",
"netcat-openbsd": "1.219-1"
},
"cloud_sql_proxy": "v2.10.0"
}

View File

@@ -23,8 +23,8 @@ steps:
- |
set -ex
export VERSION=$(cat ./cmd/version.txt)
chmod +x .ci/run_js_quickstart_tests.sh
.ci/run_js_quickstart_tests.sh
chmod +x .ci/quickstart_test/run_js_tests.sh
.ci/quickstart_test/run_js_tests.sh
env:
- 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}'
- 'GCP_PROJECT=${_GCP_PROJECT}'

View File

@@ -0,0 +1,47 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
steps:
- name: 'gcr.io/google.com/cloudsdktool/cloud-sdk:537.0.0'
id: 'python-quickstart-test'
entrypoint: 'bash'
args:
# The '-c' flag tells bash to execute the following string as a command.
# The 'set -ex' enables debug output and exits on error for easier troubleshooting.
- -c
- |
set -ex
export VERSION=$(cat ./cmd/version.txt)
chmod +x .ci/quickstart_test/run_py_tests.sh
.ci/quickstart_test/run_py_tests.sh
env:
- 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}'
- 'GCP_PROJECT=${_GCP_PROJECT}'
- 'DATABASE_NAME=${_DATABASE_NAME}'
- 'DB_USER=${_DB_USER}'
secretEnv: ['TOOLS_YAML_CONTENT', 'GOOGLE_API_KEY', 'DB_PASSWORD']
availableSecrets:
secretManager:
- versionName: projects/${_GCP_PROJECT}/secrets/${_TOOLS_YAML_SECRET}/versions/5
env: 'TOOLS_YAML_CONTENT'
- versionName: projects/${_GCP_PROJECT_NUMBER}/secrets/${_API_KEY_SECRET}/versions/latest
env: 'GOOGLE_API_KEY'
- versionName: projects/${_GCP_PROJECT}/secrets/${_DB_PASS_SECRET}/versions/latest
env: 'DB_PASSWORD'
timeout: 1000s
options:
logging: CLOUD_LOGGING_ONLY
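A hedged sketch of how this config could be run by hand with `gcloud builds submit`; the config path and every substitution value below are placeholders rather than values taken from this commit:

```sh
# Placeholder path and values for illustration only; the secret-related
# substitutions (_GCP_PROJECT_NUMBER, _TOOLS_YAML_SECRET, _API_KEY_SECRET,
# _DB_PASS_SECRET) must also be supplied for the build to succeed.
gcloud builds submit --project my-project \
  --config .ci/quickstart_test/python.cloudbuild.yaml \
  --substitutions _GCP_PROJECT=my-project,_CLOUD_SQL_INSTANCE=my-project:us-central1:quickstart,_DATABASE_NAME=quickstart_db,_DB_USER=quickstart_user
```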

View File

@@ -18,37 +18,22 @@ set -e
TABLE_NAME="hotels_js"
QUICKSTART_JS_DIR="docs/en/getting-started/quickstart/js"
SQL_FILE=".ci/setup_hotels_sample.sql"
DEPS_FILE=".ci/quickstart_dependencies.json"
SQL_FILE=".ci/quickstart_test/setup_hotels_sample.sql"
# Initialize process IDs to empty at the top of the script
PROXY_PID=""
TOOLBOX_PID=""
install_system_packages() {
apt-get update
apt-get install -y jq
# Define the jq filter
jq_filter='
.js
| to_entries
| .[]
| select(.key != "jq" and .value != null)
| "\(.key)=\(.value)"
'
# Process the file with the filter and load the results into an array
mapfile -t install_list < <(jq -r "$jq_filter" "$DEPS_FILE")
if (( ${#install_list[@]} > 0 )); then
apt-get install -y "${install_list[@]}"
fi
apt-get update && apt-get install -y \
postgresql-client \
wget \
gettext-base \
netcat-openbsd
}
start_cloud_sql_proxy() {
CLOUD_SQL_PROXY_VERSION=$(jq -r '.cloud_sql_proxy' "$DEPS_FILE")
wget "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/${CLOUD_SQL_PROXY_VERSION}/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy
wget "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy
chmod +x /usr/local/bin/cloud-sql-proxy
cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" &
PROXY_PID=$!
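For reference, the removed jq filter expanded the now-deleted `.ci/quickstart_dependencies.json` into pinned `package=version` arguments for `apt-get`; roughly:

```sh
# With the old quickstart_dependencies.json shown earlier in this commit,
# this printed lines such as:
#   postgresql-client=15+248+deb12u1
#   wget=1.21.3-1+deb12u1
# (jq itself and entries with null versions were skipped.)
jq -r '.js | to_entries | .[]
       | select(.key != "jq" and .value != null)
       | "\(.key)=\(.value)"' .ci/quickstart_dependencies.json
```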

View File

@@ -0,0 +1,115 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/bin/bash
set -e
TABLE_NAME="hotels_python"
QUICKSTART_PYTHON_DIR="docs/en/getting-started/quickstart/python"
SQL_FILE=".ci/quickstart_test/setup_hotels_sample.sql"
PROXY_PID=""
TOOLBOX_PID=""
install_system_packages() {
apt-get update && apt-get install -y \
postgresql-client \
python3-venv \
wget \
gettext-base \
netcat-openbsd
}
start_cloud_sql_proxy() {
wget "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy
chmod +x /usr/local/bin/cloud-sql-proxy
cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" &
PROXY_PID=$!
for i in {1..30}; do
if nc -z 127.0.0.1 5432; then
echo "Cloud SQL Proxy is up and running."
return
fi
sleep 1
done
echo "Cloud SQL Proxy failed to start within the timeout period."
exit 1
}
setup_toolbox() {
TOOLBOX_YAML="/tools.yaml"
echo "${TOOLS_YAML_CONTENT}" > "$TOOLBOX_YAML"
if [ ! -f "$TOOLBOX_YAML" ]; then echo "Failed to create tools.yaml"; exit 1; fi
wget "https://storage.googleapis.com/genai-toolbox/v${VERSION}/linux/amd64/toolbox" -O "/toolbox"
chmod +x "/toolbox"
/toolbox --tools-file "$TOOLBOX_YAML" &
TOOLBOX_PID=$!
sleep 2
}
setup_orch_table() {
export TABLE_NAME
envsubst < "$SQL_FILE" | psql -h "$PGHOST" -p "$PGPORT" -U "$DB_USER" -d "$DATABASE_NAME"
}
run_orch_test() {
local orch_dir="$1"
local orch_name
orch_name=$(basename "$orch_dir")
(
set -e
setup_orch_table
cd "$orch_dir"
local VENV_DIR=".venv"
python3 -m venv "$VENV_DIR"
source "$VENV_DIR/bin/activate"
pip install -r requirements.txt
echo "--- Running tests for $orch_name ---"
cd ..
ORCH_NAME="$orch_name" pytest
rm -rf "$VENV_DIR"
)
}
cleanup_all() {
echo "--- Final cleanup: Shutting down processes and dropping table ---"
if [ -n "$TOOLBOX_PID" ]; then
kill $TOOLBOX_PID || true
fi
if [ -n "$PROXY_PID" ]; then
kill $PROXY_PID || true
fi
}
trap cleanup_all EXIT
# Main script execution
install_system_packages
start_cloud_sql_proxy
export PGHOST=127.0.0.1
export PGPORT=5432
export PGPASSWORD="$DB_PASSWORD"
export GOOGLE_API_KEY="$GOOGLE_API_KEY"
setup_toolbox
for ORCH_DIR in "$QUICKSTART_PYTHON_DIR"/*/; do
if [ ! -d "$ORCH_DIR" ]; then
continue
fi
run_orch_test "$ORCH_DIR"
done
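Roughly how the Cloud Build step earlier in this commit drives this script; the exported values below are placeholders for local experimentation, whereas in CI they arrive via substitutions and Secret Manager:

```sh
# Placeholder values for illustration only.
export VERSION="$(cat ./cmd/version.txt)"
export CLOUD_SQL_INSTANCE="my-project:us-central1:quickstart"
export GCP_PROJECT="my-project"
export DATABASE_NAME="quickstart_db"
export DB_USER="quickstart_user"
export DB_PASSWORD="change-me"
export GOOGLE_API_KEY="change-me"
export TOOLS_YAML_CONTENT="$(cat tools.yaml)"   # the YAML the script writes to /tools.yaml

chmod +x .ci/quickstart_test/run_py_tests.sh
.ci/quickstart_test/run_py_tests.sh
```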

View File

@@ -24,7 +24,7 @@ steps:
if [[ $_PUSH_LATEST == 'true' ]]; then
export TAGS="$TAGS -t ${_DOCKER_URI}:latest"
fi
docker buildx build --platform linux/amd64,linux/arm64 --build-arg BUILD_TYPE=container.release --build-arg COMMIT_SHA=$(git rev-parse HEAD) $TAGS --push .
docker buildx build --platform linux/amd64,linux/arm64 --build-arg BUILD_TYPE=container.release --build-arg COMMIT_SHA=$(git rev-parse --short HEAD) $TAGS --push .
- id: "install-dependencies"
name: golang:1
@@ -50,7 +50,7 @@ steps:
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.linux.amd64
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.linux.amd64
- id: "store-linux-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"
@@ -61,6 +61,30 @@ steps:
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.linux.amd64 gs://$_BUCKET_NAME/$VERSION/linux/amd64/toolbox
- id: "build-linux-amd64-geminicli"
name: golang:1
waitFor:
- "install-dependencies"
env:
- 'GOPATH=/gopath'
volumes:
- name: 'go'
path: '/gopath'
script: |
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=geminicli.binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.linux.amd64
- id: "store-linux-amd64-geminicli"
name: "gcr.io/cloud-builders/gcloud:latest"
waitFor:
- "build-linux-amd64-geminicli"
script: |
#!/usr/bin/env bash
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.geminicli.linux.amd64 gs://$_BUCKET_NAME/geminicli/$VERSION/linux/amd64/toolbox
- id: "build-darwin-arm64"
name: golang:1
waitFor:
@@ -74,7 +98,7 @@ steps:
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.arm64
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.darwin.arm64
- id: "store-darwin-arm64"
name: "gcr.io/cloud-builders/gcloud:latest"
@@ -85,6 +109,30 @@ steps:
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.darwin.arm64 gs://$_BUCKET_NAME/$VERSION/darwin/arm64/toolbox
- id: "build-darwin-arm64-geminicli"
name: golang:1
waitFor:
- "install-dependencies"
env:
- 'GOPATH=/gopath'
volumes:
- name: 'go'
path: '/gopath'
script: |
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=geminicli.binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.darwin.arm64
- id: "store-darwin-arm64-geminicli"
name: "gcr.io/cloud-builders/gcloud:latest"
waitFor:
- "build-darwin-arm64-geminicli"
script: |
#!/usr/bin/env bash
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.geminicli.darwin.arm64 gs://$_BUCKET_NAME/geminicli/$VERSION/darwin/arm64/toolbox
- id: "build-darwin-amd64"
name: golang:1
waitFor:
@@ -98,7 +146,7 @@ steps:
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.darwin.amd64
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.darwin.amd64
- id: "store-darwin-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"
@@ -109,6 +157,30 @@ steps:
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.darwin.amd64 gs://$_BUCKET_NAME/$VERSION/darwin/amd64/toolbox
- id: "build-darwin-amd64-geminicli"
name: golang:1
waitFor:
- "install-dependencies"
env:
- 'GOPATH=/gopath'
volumes:
- name: 'go'
path: '/gopath'
script: |
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=geminicli.binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.darwin.amd64
- id: "store-darwin-amd64-geminicli"
name: "gcr.io/cloud-builders/gcloud:latest"
waitFor:
- "build-darwin-amd64-geminicli"
script: |
#!/usr/bin/env bash
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.geminicli.darwin.amd64 gs://$_BUCKET_NAME/geminicli/$VERSION/darwin/amd64/toolbox
- id: "build-windows-amd64"
name: golang:1
waitFor:
@@ -122,7 +194,7 @@ steps:
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=0 GOOS=windows GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse HEAD)" -o toolbox.windows.amd64
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.windows.amd64
- id: "store-windows-amd64"
name: "gcr.io/cloud-builders/gcloud:latest"
@@ -133,6 +205,30 @@ steps:
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.windows.amd64 gs://$_BUCKET_NAME/$VERSION/windows/amd64/toolbox.exe
- id: "build-windows-amd64-geminicli"
name: golang:1
waitFor:
- "install-dependencies"
env:
- 'GOPATH=/gopath'
volumes:
- name: 'go'
path: '/gopath'
script: |
#!/usr/bin/env bash
export VERSION=$(cat ./cmd/version.txt)
CGO_ENABLED=0 GOOS=windows GOARCH=amd64 \
go build -ldflags "-X github.com/googleapis/genai-toolbox/cmd.buildType=geminicli.binary -X github.com/googleapis/genai-toolbox/cmd.commitSha=$(git rev-parse --short HEAD)" -o toolbox.geminicli.windows.amd64
- id: "store-windows-amd64-geminicli"
name: "gcr.io/cloud-builders/gcloud:latest"
waitFor:
- "build-windows-amd64-geminicli"
script: |
#!/usr/bin/env bash
export VERSION=v$(cat ./cmd/version.txt)
gcloud storage cp toolbox.geminicli.windows.amd64 gs://$_BUCKET_NAME/geminicli/$VERSION/windows/amd64/toolbox.exe
options:
automapSubstitutions: true
dynamicSubstitutions: true

View File

@@ -1,5 +1,77 @@
# Changelog
## [0.15.0](https://github.com/googleapis/genai-toolbox/compare/v0.14.0...v0.15.0) (2025-09-18)
### ⚠ BREAKING CHANGES
* **prebuilt:** update prebuilt tool names to use consistent guidance ([#1421](https://github.com/googleapis/genai-toolbox/issues/1421))
* **tools/alloydb-wait-for-operation:** Add `alloydb-admin` source to `alloydb-wait-for-operation` tool ([#1449](https://github.com/googleapis/genai-toolbox/issues/1449))
### Features
* Add AlloyDB admin source ([#1369](https://github.com/googleapis/genai-toolbox/issues/1369)) ([33beb71](https://github.com/googleapis/genai-toolbox/commit/33beb7187d2e0f968fc949a00c780073d1bc7cdd))
* Add Cloud monitoring source and tool ([#1311](https://github.com/googleapis/genai-toolbox/issues/1311)) ([d661f53](https://github.com/googleapis/genai-toolbox/commit/d661f5343f2ad28fbf0481db16440aec823eece6))
* Add YugabyteDB Source and Tool ([#732](https://github.com/googleapis/genai-toolbox/issues/732)) ([664711f](https://github.com/googleapis/genai-toolbox/commit/664711f4b35409bd1c57af92f625b70a0dc9a4e6))
* **prebuilt:** Update default values for prebuilt tools ([#1355](https://github.com/googleapis/genai-toolbox/issues/1355)) ([70e832b](https://github.com/googleapis/genai-toolbox/commit/70e832bd08a98a95b925e590f31c8d3f2d8b6aa0))
* **prebuilt/cloud-sql:** Add list instances tool for cloudsql ([#1310](https://github.com/googleapis/genai-toolbox/issues/1310)) ([0171228](https://github.com/googleapis/genai-toolbox/commit/01712284b480774ffa68930affae290ee2e3fcfd))
* **prebuilt/cloud-sql:** Add cloud sql create database tool. ([#1453](https://github.com/googleapis/genai-toolbox/issues/1453)) ([a1bc044](https://github.com/googleapis/genai-toolbox/commit/a1bc04477b0f822ffaab039098682f1776b8a472))
* **prebuilt/cloud-sql:** Add `cloud-sql-get-instances` tool ([#1383](https://github.com/googleapis/genai-toolbox/issues/1383)) ([77919c7](https://github.com/googleapis/genai-toolbox/commit/77919c7d8e4aac16eeb703c0cc61ca774dc4f94e))
* **prebuilt/cloud-sql:** Add create user tool for cloud sql ([#1406](https://github.com/googleapis/genai-toolbox/issues/1406)) ([3a6b517](https://github.com/googleapis/genai-toolbox/commit/3a6b51752f077b225b8c2e2e7308a69a68eec3c0))
* **prebuilt/cloud-sql:** Add list databases tool for cloud sql ([#1454](https://github.com/googleapis/genai-toolbox/issues/1454)) ([e6a6c61](https://github.com/googleapis/genai-toolbox/commit/e6a6c615d5480e8930ad173d44d243f5bd99eebc))
* **prebuilt/cloud-sql:** Package cloud sql tools ([#1455](https://github.com/googleapis/genai-toolbox/issues/1455)) ([bf6266b](https://github.com/googleapis/genai-toolbox/commit/bf6266ba1131bd1c5829ac112a8c45c8a5919fea))
* **prebuilt/cloud-sql-mssql:** Add create instance tool for mssql ([#1440](https://github.com/googleapis/genai-toolbox/issues/1440)) ([b176523](https://github.com/googleapis/genai-toolbox/commit/b17652309d8a02b1f20c6c576b1617b23c8e481f))
* **prebuilt/cloud-sql-mysql:** Add create instance tool for Cloud SQL MySQL ([#1434](https://github.com/googleapis/genai-toolbox/issues/1434)) ([15b628d](https://github.com/googleapis/genai-toolbox/commit/15b628d2d2feb2ecdd418394b9265a6c77c77f6d))
* **prebuilt/cloud-sql-mysql:** Add env var support for IP Type ([#1232](https://github.com/googleapis/genai-toolbox/issues/1232)) ([#1347](https://github.com/googleapis/genai-toolbox/issues/1347)) ([0cd3f16](https://github.com/googleapis/genai-toolbox/commit/0cd3f16f877f426b45e35625ba0af03789459591))
* **prebuilt/cloudsqlpg:** Add cloud sql pg create instance tool ([#1403](https://github.com/googleapis/genai-toolbox/issues/1403)) ([d302499](https://github.com/googleapis/genai-toolbox/commit/d30249961b5a2ddc2c3809b481085d1ca034ead0))
* **prebuilt/mysql:** Add a new tool to show query plan of a given query in MySQL ([#1474](https://github.com/googleapis/genai-toolbox/issues/1474)) ([1a42e05](https://github.com/googleapis/genai-toolbox/commit/1a42e05675645ac4f1b89edef7a71ac61b637a76))
* **prebuilt/mysql:** Add `queryParams` field in MySQL prebuilt config ([#1318](https://github.com/googleapis/genai-toolbox/issues/1318)) ([4b32c2a](https://github.com/googleapis/genai-toolbox/commit/4b32c2a7701ce5ccc56d019055283e73e7046372))
* **prebuilt/neo4j:** Add prebuiltconfig support for neo4j ([#1352](https://github.com/googleapis/genai-toolbox/issues/1352)) ([f819e26](https://github.com/googleapis/genai-toolbox/commit/f819e2644315a589ec283494f244c1b8407cae59))
* **prebuilt/observability:** Add cloud sql observability tools ([#1425](https://github.com/googleapis/genai-toolbox/issues/1425)) ([236be89](https://github.com/googleapis/genai-toolbox/commit/236be89961fe423c1ec992d3d1f699f77a6e5b29))
* **prebuilt/postgres:** Add postgres prebuilt tools ([#1473](https://github.com/googleapis/genai-toolbox/issues/1473)) ([edca9dc](https://github.com/googleapis/genai-toolbox/commit/edca9dc7d772baf1a234485020fa69d76f71bfcc))
* **prebuilt/sqlite:** Prebuilt tools for the sqlite. ([#1227](https://github.com/googleapis/genai-toolbox/issues/1227)) ([681c2b4](https://github.com/googleapis/genai-toolbox/commit/681c2b4f3a65837d972c138c623c08fb6b1f1785))
* **source/alloydb-admin:** Add user agent and attach alloydb api in `alloydb-admin` source ([#1448](https://github.com/googleapis/genai-toolbox/issues/1448)) ([9710014](https://github.com/googleapis/genai-toolbox/commit/971001400f25796784f8aeb3ec5cb1a2df2e4c69))
* **source/bigquery:** Add support for datasets selection ([#1313](https://github.com/googleapis/genai-toolbox/issues/1313)) ([aa39724](https://github.com/googleapis/genai-toolbox/commit/aa3972470fd0f6f5901c5d85dd05f1e2ae973e7b))
* **source/cloud-monitoring:** Add support for user agent in cloud monitoring source ([#1472](https://github.com/googleapis/genai-toolbox/issues/1472)) ([92680b1](https://github.com/googleapis/genai-toolbox/commit/92680b18d6159300ae66f80ddb4c6bf0547d45a1))
* **source/cloud-sql-admin:** Add User agent and attach sqldmin in `cloud-sql-admin` source. ([#1441](https://github.com/googleapis/genai-toolbox/issues/1441)) ([56b6574](https://github.com/googleapis/genai-toolbox/commit/56b6574fc2c506c7c7df7f2a25686e3e4aae0e8a))
* **source/cloudsqladmin:** Add cloud sql admin source ([#1408](https://github.com/googleapis/genai-toolbox/issues/1408)) ([4f46782](https://github.com/googleapis/genai-toolbox/commit/4f4678292762507494515ce61188cd0310805c40))
* **tool/cloudsql:** Add cloud sql wait for operation tool with exponential backoff ([#1306](https://github.com/googleapis/genai-toolbox/issues/1306)) ([3aef2bb](https://github.com/googleapis/genai-toolbox/commit/3aef2bb7be8274bb4718739faeaa5f97b50dbf19))
* **tools/alloydb-create-cluster:** Add custom tool kind for AlloyDB create cluster ([#1331](https://github.com/googleapis/genai-toolbox/issues/1331)) ([76bb876](https://github.com/googleapis/genai-toolbox/commit/76bb876d546780908c1a69ef3b1a92781af28a3b))
* **tools/alloydb-create-instance:** Add new custom tool kind for AlloyDB ([#1379](https://github.com/googleapis/genai-toolbox/issues/1379)) ([091cd9a](https://github.com/googleapis/genai-toolbox/commit/091cd9aa1aabe1cb3de2ce2be5c707a8f77ad647))
* **tools/alloydb-create-user:** Add new custom tool kind for AlloyDB create user ([#1380](https://github.com/googleapis/genai-toolbox/issues/1380)) ([ab3fd26](https://github.com/googleapis/genai-toolbox/commit/ab3fd261af373dcdaf4555292c63d7095d7a02df))
* **tools/alloydb-get-cluster:** Add new tool for AlloyDB ([#1420](https://github.com/googleapis/genai-toolbox/issues/1420)) ([c181dab](https://github.com/googleapis/genai-toolbox/commit/c181dabc91bdc1c24c89a3c7bba0049d9af4cf2b))
* **tools/alloydb-get-instance:** Add new tool for AlloyDB ([#1435](https://github.com/googleapis/genai-toolbox/issues/1435)) ([f2d9e3b](https://github.com/googleapis/genai-toolbox/commit/f2d9e3b57963082f0db70880d5c02b1cbe3eb75d))
* **tools/alloydb-get-user:** Add new tool for AlloyDB ([#1436](https://github.com/googleapis/genai-toolbox/issues/1436)) ([677254e](https://github.com/googleapis/genai-toolbox/commit/677254e6d9c532fa9f0fb0b0e4062446640ab75f))
* **tools/alloydb-list-cluster:** Add custom tool kind for AlloyDB ([#1319](https://github.com/googleapis/genai-toolbox/issues/1319)) ([d4a9eb0](https://github.com/googleapis/genai-toolbox/commit/d4a9eb0ce217c7969aff61868e53c7dc7757d28d))
* **tools/alloydb-list-instances:** Add custom tool kind for AlloyDB ([#1357](https://github.com/googleapis/genai-toolbox/issues/1357)) ([93c1b30](https://github.com/googleapis/genai-toolbox/commit/93c1b30fced113d6721ade9fdcfb92b5ed6c0ad6))
* **tools/alloydb-list-users:** Add new custom tool kind for AlloyDB ([#1377](https://github.com/googleapis/genai-toolbox/issues/1377)) ([3a8a65c](https://github.com/googleapis/genai-toolbox/commit/3a8a65ceaa92368563e237087c4a38ed7c0d3fd5))
* **tools/bigquery-analyze-contribution:** Add analyze contribution tool ([#1223](https://github.com/googleapis/genai-toolbox/issues/1223)) ([81d239b](https://github.com/googleapis/genai-toolbox/commit/81d239b053a6978250878a6809905dcc9424909e))
* **tools/bigquery-conversational-analytics:** Add allowed datasets support ([#1411](https://github.com/googleapis/genai-toolbox/issues/1411)) ([345bd6a](https://github.com/googleapis/genai-toolbox/commit/345bd6af520bb9ce8a43834951e68fea7bbe6a02))
* **tools/bigquery-search-catalog:** Add new tool to BigQuery ([#1382](https://github.com/googleapis/genai-toolbox/issues/1382)) ([bffb39d](https://github.com/googleapis/genai-toolbox/commit/bffb39dea3cc946a1e611e3523241443b1e4f047))
* **tools/bigquery:** Add `useClientOAuth` to BigQuery prebuilt source config ([#1431](https://github.com/googleapis/genai-toolbox/issues/1431)) ([fe2999a](https://github.com/googleapis/genai-toolbox/commit/fe2999a691ac92b2bf35cb7cfd504df2f3ce84b3))
* **tools/clickhouse-list-databases:** Add `list-databases` tool to clickhouse source ([#1274](https://github.com/googleapis/genai-toolbox/issues/1274)) ([e515d92](https://github.com/googleapis/genai-toolbox/commit/e515d9254f3b8e89f89322d490eb3cedce85d2bb))
* **tools/firestore-get-rules:** Add `databaseId` to the Firestore source and `firestore-get-rules` tool ([#1505](https://github.com/googleapis/genai-toolbox/issues/1505)) ([7450482](https://github.com/googleapis/genai-toolbox/commit/7450482bb2479eab7d1c8f0d40755a8d11aa3b26))
* **tools/firestore:** Add `firestore-query` tool ([#1305](https://github.com/googleapis/genai-toolbox/issues/1305)) ([cce602f](https://github.com/googleapis/genai-toolbox/commit/cce602f28097353f6a3017cec1fa5f75283f111d))
* **tools/looker:** Query tracking for MCP Toolbox in Looker System Activity views ([#1410](https://github.com/googleapis/genai-toolbox/issues/1410)) ([2036c8e](https://github.com/googleapis/genai-toolbox/commit/2036c8efd2fb9edc26df599629d3131c6c367f4b))
* **tools/mssql-list-tables:** Add new tool for sql server ([#1433](https://github.com/googleapis/genai-toolbox/issues/1433)) ([b036047](https://github.com/googleapis/genai-toolbox/commit/b036047a21f63265c9d9637ac1a671792c9c2e80))
* **tools/mysql-list-active-queries:** Add a new tool to list ongoing queries in a MySQL instance ([#1471](https://github.com/googleapis/genai-toolbox/issues/1471)) ([ed54cd6](https://github.com/googleapis/genai-toolbox/commit/ed54cd6cfd17a3bdd84025d4eb8264763da36a98))
* **tools/mysql-list-table-fragmentation:** Add a new tool to list table fragmentation in a MySQL instance ([#1479](https://github.com/googleapis/genai-toolbox/issues/1479)) ([fe651d8](https://github.com/googleapis/genai-toolbox/commit/fe651d822f88832833e869ec049c6c084eae7e51))
* **tools/mysql-list-tables-missing-index:** Add a new tool to list tables that do not have primary or unique keys in a MySQL instance ([#1493](https://github.com/googleapis/genai-toolbox/issues/1493)) ([9eb821a](https://github.com/googleapis/genai-toolbox/commit/9eb821a6dca408ba993f904aa42b5b4f70674ba7))
* **tools/mysql-list-tables:** Add new tool for MySQL ([#1287](https://github.com/googleapis/genai-toolbox/issues/1287)) ([6c8460b](https://github.com/googleapis/genai-toolbox/commit/6c8460b0e507315d407c91ba1c821f4820cc1620))
* **tools/postgres-list-active-queries:** Add new `postgres-list-active-queries` tool ([#1400](https://github.com/googleapis/genai-toolbox/issues/1400)) ([b2b06c7](https://github.com/googleapis/genai-toolbox/commit/b2b06c72c29fd99a0c7118b85e6f7bcf6853d173))
* **tools/postgres-list-tables:** Add new tool to postgres source ([#1284](https://github.com/googleapis/genai-toolbox/issues/1284)) ([71f360d](https://github.com/googleapis/genai-toolbox/commit/71f360d31522f429a646b705ce7d1d11dac4cf68))
* **tools/spanner-list-tables:** Add new tool `spanner-list-tables` ([#1404](https://github.com/googleapis/genai-toolbox/issues/1404)) ([7d384dc](https://github.com/googleapis/genai-toolbox/commit/7d384dc28f8c37dddc2f6cefc0bbeb4c201e3167))
### Bug Fixes
* **bigquery:** Add `Bearer` parsing to auth token ([#1386](https://github.com/googleapis/genai-toolbox/issues/1386)) ([b5f9780](https://github.com/googleapis/genai-toolbox/commit/b5f9780a59e15eca2591dee32f5da42435e03039))
* **source/alloydb-admin, source/cloudsql-admin:** Post append new user agent ([#1494](https://github.com/googleapis/genai-toolbox/issues/1494)) ([30f1d3a](https://github.com/googleapis/genai-toolbox/commit/30f1d3a983aa317f1e1a98f9fe753005b56c52bd))
* **tools/alloydb:** Update parameter names and set default description for AlloyDB control plane tools ([#1468](https://github.com/googleapis/genai-toolbox/issues/1468)) ([6c140d7](https://github.com/googleapis/genai-toolbox/commit/6c140d718a66b45c7ec2d5a267331adb7680f689))
* **tools/bigquery-conversational-analytics:** Fix authentication scope error in Cloud Run ([#1381](https://github.com/googleapis/genai-toolbox/issues/1381)) ([80b7488](https://github.com/googleapis/genai-toolbox/commit/80b7488ad248ab1d98ee6713e1f6737f67f6754b))
* **tools/mysql-list-tables:** Update `mysql-list-tables` table_names parameter with default value ([#1439](https://github.com/googleapis/genai-toolbox/issues/1439)) ([da24661](https://github.com/googleapis/genai-toolbox/commit/da246610e105df10a9dc1bce19fa35d408c039f3))
* **tools/neo4j:** Implement value conversion for Neo4j types to JSON-compatible ([#1428](https://github.com/googleapis/genai-toolbox/issues/1428)) ([4babc4e](https://github.com/googleapis/genai-toolbox/commit/4babc4e11b3b64db8d8c9d6b65e47744f5174f7f))
## [0.14.0](https://github.com/googleapis/genai-toolbox/compare/v0.13.0...v0.14.0) (2025-09-05)

View File

@@ -27,7 +27,6 @@ This project follows
> [!NOTE]
> New contributions should always include both unit and integration tests.
All submissions, including submissions by project members, require review. We
use GitHub pull requests for this purpose. Consult
@@ -37,14 +36,14 @@ information on using pull requests.
### Code reviews
* Within 2-5 days, a reviewer will review your PR. They may approve it, or request
changes.
changes.
* When requesting changes, reviewers should self-assign the PR to ensure
they are aware of any updates.
* If additional changes are needed, push additional commits to your PR branch -
this helps the reviewer know which parts of the PR have changed.
this helps the reviewer know which parts of the PR have changed.
* Commits will be
squashed when merged.
* Please follow up with changes promptly.
* Please follow up with changes promptly.
* If a PR is awaiting changes by the
author for more than 10 days, maintainers may mark that PR as Draft. PRs that
are inactive for more than 30 days may be closed.
@@ -53,12 +52,16 @@ are inactive for more than 30 days may be closed.
Please create an
[issue](https://github.com/googleapis/genai-toolbox/issues) before
implementation to ensure we can accept the contribution and no duplicated work. This issue
should include an overview of the API design. If you have any questions, reach out on our
[Discord](https://discord.gg/Dmm69peqjh) to chat directly with the team.
implementation to ensure we can accept the contribution and no duplicated work.
This issue should include an overview of the API design. If you have any
questions, reach out on our [Discord](https://discord.gg/Dmm69peqjh) to chat
directly with the team.
> [!NOTE]
> New tools can be added for [pre-existing data sources](https://github.com/googleapis/genai-toolbox/tree/main/internal/sources). However, any new database source should also include at least one new tool type.
> New tools can be added for [pre-existing data
> sources](https://github.com/googleapis/genai-toolbox/tree/main/internal/sources).
> However, any new database source should also include at least one new tool
> type.
### Adding a New Database Source
@@ -196,7 +199,7 @@ detailed description of your changes and any requests for long term testing
resources.
* **Title:** All pull request title should follow the formatting of
[Conventional
[Conventional
Commit](https://www.conventionalcommits.org/) guidelines: `<type>[optional
scope]: description`. For example, if you are adding a new field in postgres
source, the title should be `feat(source/postgres): add support for

View File

@@ -59,12 +59,14 @@ cancel_hotel: <- tool name
Tool name is the identifier used by a Large Language Model (LLM) to invoke a
specific tool.
* Custom tools: The user can define any name they want. The below guidelines
do not apply.
* Pre-built tools: The tool name is predefined and cannot be changed. It
should follow the guidelines.
The following guidelines apply to tool names:
* Should use underscores over hyphens (e.g., `list_collections` instead of
`list-collections`).
* Should not have the product name in the name (e.g., `list_collections` instead
@@ -79,6 +81,7 @@ The following guidelines apply to tool names:
Tool kind serves as a category or type that a user can assign to a tool.
The following guidelines apply to tool kinds:
* Should use hyphens over underscores (e.g. `firestore-list-collections` over
  `firestore_list_collections`).
* Should use product name in name (e.g. `firestore-list-collections` over

View File

@@ -117,7 +117,7 @@ To install Toolbox as a binary:
<!-- {x-release-please-start-version} -->
```sh
# see releases page for other versions
export VERSION=0.14.0
export VERSION=0.15.0
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```
@@ -130,7 +130,7 @@ You can also install Toolbox as a container:
```sh
# see releases page for other versions
export VERSION=0.14.0
export VERSION=0.15.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```
@@ -154,7 +154,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:
```sh
go install github.com/googleapis/genai-toolbox@v0.14.0
go install github.com/googleapis/genai-toolbox@v0.15.0
```
<!-- {x-release-please-end} -->
@@ -175,7 +175,8 @@ To run Toolbox from binary:
```
**NOTE:**
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
Toolbox enables dynamic reloading by default. To disable, use the
`--disable-reload` flag.
</details>
@@ -194,7 +195,8 @@ us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION \
```
**NOTE:**
The `-v` flag mounts your local `tools.yaml` into the container, and `-p` maps the container's port `5000` to your host's port `5000`.
The `-v` flag mounts your local `tools.yaml` into the container, and `-p` maps
the container's port `5000` to your host's port `5000`.
</details>
@@ -202,14 +204,18 @@ The `-v` flag mounts your local `tools.yaml` into the container, and `-p` maps t
<summary>Source</summary>
To run the server directly from source, navigate to the project root directory and run:
To run the server directly from source, navigate to the project root directory
and run:
```sh
go run .
```
**NOTE:**
This command runs the project from source, and is more suitable for development and testing. It does **not** compile a binary into your `$GOPATH`. If you want to compile a binary instead, refer the [Developer Documentation](./DEVELOPER.md#building-the-binary).
This command runs the project from source, and is more suitable for development
and testing. It does **not** compile a binary into your `$GOPATH`. If you want
to compile a binary instead, refer the [Developer
Documentation](./DEVELOPER.md#building-the-binary).
</details>
@@ -217,7 +223,9 @@ This command runs the project from source, and is more suitable for development
<summary>Homebrew</summary>
If you installed Toolbox using [Homebrew](https://brew.sh/), the `toolbox` binary is available in your system path. You can start the server with the same command:
If you installed Toolbox using [Homebrew](https://brew.sh/), the `toolbox`
binary is available in your system path. You can start the server with the same
command:
```sh
toolbox --tools-file "tools.yaml"
@@ -232,7 +240,6 @@ For more detailed documentation on deploying to different environments, check
out the resources in the [How-to
section](https://googleapis.github.io/genai-toolbox/how-to/)
### Integrating your application
Once your server is up and running, you can load the tools into your
@@ -777,6 +784,7 @@ Since the project is in a pre-release stage (version `0.x.y`), we follow the
standard conventions for initial development:
### Pre-1.0.0 Versioning
While the major version is `0`, the public API should be considered unstable.
The version will be incremented as follows:
@@ -786,6 +794,7 @@ The version will be incremented as follows:
backward-compatible bug fixes.
### Post-1.0.0 Versioning
Once the project reaches a stable `1.0.0` release, the versioning will follow
the more common convention:

View File

@@ -61,10 +61,12 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygettableinfo"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylistdatasetids"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylisttableids"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysearchcatalog"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigtable"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouseexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselistdatabases"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhousesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatedatabase"
@@ -121,7 +123,10 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqllisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllistactivequeries"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttablefragmentation"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttablesmissinguniqueindexes"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jcypher"
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jexecutecypher"

View File

@@ -1249,6 +1249,10 @@ func TestPrebuiltTools(t *testing.T) {
spannerpg_config, _ := prebuiltconfigs.Get("spanner-postgres")
sqlite_config, _ := prebuiltconfigs.Get("sqlite")
neo4jconfig, _ := prebuiltconfigs.Get("neo4j")
alloydbobsvconfig, _ := prebuiltconfigs.Get("alloydb-postgres-observability")
cloudsqlpgobsvconfig, _ := prebuiltconfigs.Get("cloud-sql-postgres-observability")
cloudsqlmysqlobsvconfig, _ := prebuiltconfigs.Get("cloud-sql-mysql-observability")
cloudsqlmssqlobsvconfig, _ := prebuiltconfigs.Get("cloud-sql-mssql-observability")
// Set environment variables
t.Setenv("API_KEY", "your_api_key")
@@ -1395,7 +1399,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"bigquery_database_tools": tools.ToolsetConfig{
Name: "bigquery_database_tools",
ToolNames: []string{"analyze_contribution", "ask_data_insights", "execute_sql", "forecast", "get_dataset_info", "get_table_info", "list_dataset_ids", "list_table_ids"},
ToolNames: []string{"analyze_contribution", "ask_data_insights", "execute_sql", "forecast", "get_dataset_info", "get_table_info", "list_dataset_ids", "list_table_ids", "search_catalog"},
},
},
},
@@ -1405,7 +1409,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"clickhouse_database_tools": tools.ToolsetConfig{
Name: "clickhouse_database_tools",
ToolNames: []string{"execute_sql", "list_databases"},
ToolNames: []string{"execute_sql", "list_databases", "list_tables"},
},
},
},
@@ -1425,7 +1429,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"cloud_sql_mysql_database_tools": tools.ToolsetConfig{
Name: "cloud_sql_mysql_database_tools",
ToolNames: []string{"execute_sql", "list_tables"},
ToolNames: []string{"execute_sql", "list_tables", "get_query_plan", "list_active_queries", "list_tables_missing_unique_indexes", "list_table_fragmentation"},
},
},
},
@@ -1465,7 +1469,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"mysql_database_tools": tools.ToolsetConfig{
Name: "mysql_database_tools",
ToolNames: []string{"execute_sql", "list_tables"},
ToolNames: []string{"execute_sql", "list_tables", "get_query_plan", "list_active_queries", "list_tables_missing_unique_indexes", "list_table_fragmentation"},
},
},
},
@@ -1539,6 +1543,46 @@ func TestPrebuiltTools(t *testing.T) {
},
},
},
{
name: "alloydb postgres observability prebuilt tools",
in: alloydbobsvconfig,
wantToolset: server.ToolsetConfigs{
"alloydb_postgres_cloud_monitoring_tools": tools.ToolsetConfig{
Name: "alloydb_postgres_cloud_monitoring_tools",
ToolNames: []string{"get_system_metrics", "get_query_metrics"},
},
},
},
{
name: "cloudsql postgres observability prebuilt tools",
in: cloudsqlpgobsvconfig,
wantToolset: server.ToolsetConfigs{
"cloud_sql_postgres_cloud_monitoring_tools": tools.ToolsetConfig{
Name: "cloud_sql_postgres_cloud_monitoring_tools",
ToolNames: []string{"get_system_metrics", "get_query_metrics"},
},
},
},
{
name: "cloudsql mysql observability prebuilt tools",
in: cloudsqlmysqlobsvconfig,
wantToolset: server.ToolsetConfigs{
"cloud_sql_mysql_cloud_monitoring_tools": tools.ToolsetConfig{
Name: "cloud_sql_mysql_cloud_monitoring_tools",
ToolNames: []string{"get_system_metrics", "get_query_metrics"},
},
},
},
{
name: "cloudsql mssql observability prebuilt tools",
in: cloudsqlmssqlobsvconfig,
wantToolset: server.ToolsetConfigs{
"cloud_sql_mssql_cloud_monitoring_tools": tools.ToolsetConfig{
Name: "cloud_sql_mssql_cloud_monitoring_tools",
ToolNames: []string{"get_system_metrics"},
},
},
},
}
for _, tc := range tcs {

View File

@@ -1 +1 @@
0.14.0
0.15.0

View File

@@ -234,7 +234,7 @@
},
"outputs": [],
"source": [
"version = \"0.14.0\" # x-release-please-version\n",
"version = \"0.15.0\" # x-release-please-version\n",
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
"\n",
"# Make the binary executable\n",

View File

@@ -22,6 +22,7 @@ etc., you could use environment variables instead with the format `${ENV_NAME}`.
user: ${USER_NAME}
password: ${PASSWORD}
```
A default value can be specified like `${ENV_NAME:default}`.
```yaml

View File

@@ -86,7 +86,7 @@ To install Toolbox as a binary:
```sh
# see releases page for other versions
export VERSION=0.14.0
export VERSION=0.15.0
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
chmod +x toolbox
```
@@ -97,7 +97,7 @@ You can also install Toolbox as a container:
```sh
# see releases page for other versions
export VERSION=0.14.0
export VERSION=0.15.0
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
```
@@ -108,6 +108,7 @@ To install Toolbox using Homebrew on macOS or Linux:
```sh
brew install mcp-toolbox
```
{{% /tab %}}
{{% tab header="Compile from source" lang="en" %}}
@@ -115,7 +116,7 @@ To install from source, ensure you have the latest version of
[Go installed](https://go.dev/doc/install), and then run the following command:
```sh
go install github.com/googleapis/genai-toolbox@v0.14.0
go install github.com/googleapis/genai-toolbox@v0.15.0
```
{{% /tab %}}
@@ -138,8 +139,9 @@ Toolbox enables dynamic reloading by default. To disable, use the
#### Launching Toolbox UI
To launch Toolbox's interactive UI, use the `--ui` flag. This allows you to test tools and toolsets
with features such as authorized parameters. To learn more, visit [Toolbox UI](../../how-to/toolbox-ui/index.md).
To launch Toolbox's interactive UI, use the `--ui` flag. This allows you to test
tools and toolsets with features such as authorized parameters. To learn more,
visit [Toolbox UI](../../how-to/toolbox-ui/index.md).
```sh
./toolbox --ui
@@ -147,7 +149,8 @@ with features such as authorized parameters. To learn more, visit [Toolbox UI](.
#### Homebrew Users
If you installed Toolbox using Homebrew, the `toolbox` binary is available in your system path. You can start the server with the same command:
If you installed Toolbox using Homebrew, the `toolbox` binary is available in
your system path. You can start the server with the same command:
```sh
toolbox --tools-file "tools.yaml"
@@ -185,7 +188,8 @@ async with ToolboxClient("http://127.0.0.1:5000") as client:
{{< /highlight >}}
For more detailed instructions on using the Toolbox Core SDK, see the
[project's README](https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/packages/toolbox-core/README.md).
[project's
README](https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/packages/toolbox-core/README.md).
{{% /tab %}}
{{% tab header="LangChain" lang="en" %}}
@@ -206,7 +210,8 @@ async with ToolboxClient("http://127.0.0.1:5000") as client:
{{< /highlight >}}
For more detailed instructions on using the Toolbox LangChain SDK, see the
[project's README](https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/packages/toolbox-langchain/README.md).
[project's
README](https://github.com/googleapis/mcp-toolbox-sdk-python/blob/main/packages/toolbox-langchain/README.md).
{{% /tab %}}
{{% tab header="Llamaindex" lang="en" %}}
@@ -228,7 +233,8 @@ async with ToolboxClient("http://127.0.0.1:5000") as client:
{{< /highlight >}}
For more detailed instructions on using the Toolbox Llamaindex SDK, see the
[project's README](https://github.com/googleapis/genai-toolbox-llamaindex-python/blob/main/README.md).
[project's
README](https://github.com/googleapis/genai-toolbox-llamaindex-python/blob/main/README.md).
{{% /tab %}}
{{< /tabpane >}}
@@ -343,7 +349,8 @@ const tools = toolboxTools.map(getTool);
{{< /tabpane >}}
For more detailed instructions on using the Toolbox Core SDK, see the
[project's README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
[project's
README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
#### Go
@@ -590,7 +597,8 @@ func main() {
{{< /tabpane >}}
For more detailed instructions on using the Toolbox Go SDK, see the
[project's README](https://github.com/googleapis/mcp-toolbox-sdk-go/blob/main/core/README.md).
[project's
README](https://github.com/googleapis/mcp-toolbox-sdk-go/blob/main/core/README.md).
For end-to-end samples on using the Toolbox Go SDK with orchestration
frameworks, see the [project's

View File

@@ -14,12 +14,14 @@ description: >
This guide assumes you have already done the following:
1. Installed [Python 3.9+][install-python] (including [pip][install-pip] and
your preferred virtual environment tool for managing dependencies e.g. [venv][install-venv]).
your preferred virtual environment tool for managing dependencies e.g.
[venv][install-venv]).
1. Installed [PostgreSQL 16+ and the `psql` client][install-postgres].
[install-python]: https://wiki.python.org/moin/BeginnersGuide/Download
[install-pip]: https://pip.pypa.io/en/stable/installation/
[install-venv]: https://packaging.python.org/en/latest/tutorials/installing-packages/#creating-virtual-environments
[install-venv]:
https://packaging.python.org/en/latest/tutorials/installing-packages/#creating-virtual-environments
[install-postgres]: https://www.postgresql.org/download/
### Cloud Setup (Optional)
@@ -36,9 +38,10 @@ This guide assumes you have already done the following:
In this section, we will write and run an agent that will load the Tools
from Toolbox.
{{< notice tip>}} If you prefer to experiment within a Google Colab environment,
you can connect to a
[local runtime](https://research.google.com/colaboratory/local-runtimes.html).
{{< notice tip>}}
If you prefer to experiment within a Google Colab environment, you can connect
to a [local
runtime](https://research.google.com/colaboratory/local-runtimes.html).
{{< /notice >}}
1. In a new terminal, install the SDK package.
@@ -148,5 +151,6 @@ Documentation](https://github.com/googleapis/python-genai?tab=readme-ov-file#man
```
{{< notice info >}}
For more information, visit the [Python SDK repo](https://github.com/googleapis/mcp-toolbox-sdk-python).
For more information, visit the [Python SDK
repo](https://github.com/googleapis/mcp-toolbox-sdk-python).
{{</ notice >}}

View File

@@ -17,12 +17,15 @@ This guide assumes you have already done the following:
[install-postgres]: https://www.postgresql.org/download/
### Cloud Setup (Optional)
{{< regionInclude "quickstart/shared/cloud_setup.md" "cloud_setup" >}}
## Step 1: Set up your database
{{< regionInclude "quickstart/shared/database_setup.md" "database_setup" >}}
## Step 2: Install and configure Toolbox
{{< regionInclude "quickstart/shared/configure_toolbox.md" "configure_toolbox" >}}
## Step 3: Connect your agent to Toolbox
@@ -51,14 +54,12 @@ from Toolbox.
{{< include "quickstart/go/langchain/quickstart.go" >}}
{{< /tab >}}
{{< tab header="Genkit Go" lang="go" >}}
{{< include "quickstart/go/genkit/quickstart.go" >}}
{{< /tab >}}
{{< tab header="Go GenAI" lang="go" >}}
@@ -71,7 +72,6 @@ from Toolbox.
{{< include "quickstart/go/openAI/quickstart.go" >}}
{{< /tab >}}
{{< /tabpane >}}

View File

@@ -17,12 +17,15 @@ This guide assumes you have already done the following:
[install-postgres]: https://www.postgresql.org/download/
### Cloud Setup (Optional)
{{< regionInclude "quickstart/shared/cloud_setup.md" "cloud_setup" >}}
## Step 1: Set up your database
{{< regionInclude "quickstart/shared/database_setup.md" "database_setup" >}}
## Step 2: Install and configure Toolbox
{{< regionInclude "quickstart/shared/configure_toolbox.md" "configure_toolbox" >}}
## Step 3: Connect your agent to Toolbox
@@ -36,7 +39,8 @@ from Toolbox.
npm init -y
```
1. In a new terminal, install the [SDK](https://www.npmjs.com/package/@toolbox-sdk/core).
1. In a new terminal, install the
[SDK](https://www.npmjs.com/package/@toolbox-sdk/core).
```bash
npm install @toolbox-sdk/core
@@ -59,7 +63,8 @@ npm install @google/genai
{{< /tab >}}
{{< /tabpane >}}
1. Create a new file named `hotelAgent.js` and copy the following code to create an agent:
1. Create a new file named `hotelAgent.js` and copy the following code to create
an agent:
{{< tabpane persist=header >}}
{{< tab header="LangChain" lang="js" >}}
@@ -95,5 +100,6 @@ npm install @google/genai
```
{{< notice info >}}
For more information, visit the [JS SDK repo](https://github.com/googleapis/mcp-toolbox-sdk-js).
For more information, visit the [JS SDK
repo](https://github.com/googleapis/mcp-toolbox-sdk-js).
{{</ notice >}}

View File

@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

View File

@@ -4,34 +4,34 @@ go 1.24.6
require (
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0
google.golang.org/genai v1.24.0
google.golang.org/genai v1.25.0
)
require (
cloud.google.com/go v0.121.1 // indirect
cloud.google.com/go/auth v0.16.2 // indirect
cloud.google.com/go v0.121.6 // indirect
cloud.google.com/go/auth v0.16.5 // indirect
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
cloud.google.com/go/compute/metadata v0.7.0 // indirect
cloud.google.com/go/compute/metadata v0.8.0 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/google/go-cmp v0.7.0 // indirect
github.com/google/s2a-go v0.1.9 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
github.com/googleapis/gax-go/v2 v2.15.0 // indirect
github.com/gorilla/websocket v1.5.3 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
go.opentelemetry.io/otel v1.36.0 // indirect
go.opentelemetry.io/otel/metric v1.36.0 // indirect
go.opentelemetry.io/otel/trace v1.36.0 // indirect
golang.org/x/crypto v0.39.0 // indirect
golang.org/x/net v0.41.0 // indirect
golang.org/x/crypto v0.41.0 // indirect
golang.org/x/net v0.43.0 // indirect
golang.org/x/oauth2 v0.30.0 // indirect
golang.org/x/sys v0.33.0 // indirect
golang.org/x/text v0.26.0 // indirect
google.golang.org/api v0.242.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
google.golang.org/grpc v1.73.0 // indirect
google.golang.org/protobuf v1.36.6 // indirect
golang.org/x/sys v0.35.0 // indirect
golang.org/x/text v0.28.0 // indirect
google.golang.org/api v0.248.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c // indirect
google.golang.org/grpc v1.74.2 // indirect
google.golang.org/protobuf v1.36.7 // indirect
)
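Dependency bumps like the ones above are typically produced by updating the direct requirement and tidying; a sketch:

```sh
# Update the direct dependency, then let Go resolve and prune the indirect ones.
go get google.golang.org/genai@v1.25.0
go mod tidy
```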

View File

@@ -0,0 +1,118 @@
cel.dev/expr v0.24.0 h1:56OvJKSH3hDGL0ml5uSxZmz3/3Pq4tJ+fb1unVLAFcY=
cel.dev/expr v0.24.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw=
cloud.google.com/go v0.121.6 h1:waZiuajrI28iAf40cWgycWNgaXPO06dupuS+sgibK6c=
cloud.google.com/go v0.121.6/go.mod h1:coChdst4Ea5vUpiALcYKXEpR1S9ZgXbhEzzMcMR66vI=
cloud.google.com/go/auth v0.16.5 h1:mFWNQ2FEVWAliEQWpAdH80omXFokmrnbDhUS9cBywsI=
cloud.google.com/go/auth v0.16.5/go.mod h1:utzRfHMP+Vv0mpOkTRQoWD2q3BatTOoWbA7gCc2dUhQ=
cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc=
cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c=
cloud.google.com/go/compute/metadata v0.8.0 h1:HxMRIbao8w17ZX6wBnjhcDkW6lTFpgcaobyVfZWqRLA=
cloud.google.com/go/compute/metadata v0.8.0/go.mod h1:sYOGTp851OV9bOFJ9CH7elVvyzopvWQFNNghtDQ/Biw=
cloud.google.com/go/iam v1.5.2 h1:qgFRAGEmd8z6dJ/qyEchAuL9jpswyODjA2lS+w234g8=
cloud.google.com/go/iam v1.5.2/go.mod h1:SE1vg0N81zQqLzQEwxL2WI6yhetBdbNQuTvIKCSkUHE=
cloud.google.com/go/monitoring v1.24.2 h1:5OTsoJ1dXYIiMiuL+sYscLc9BumrL3CarVLL7dd7lHM=
cloud.google.com/go/monitoring v1.24.2/go.mod h1:x7yzPWcgDRnPEv3sI+jJGBkwl5qINf+6qY4eq0I9B4U=
cloud.google.com/go/secretmanager v1.15.0 h1:RtkCMgTpaBMbzozcRUGfZe46jb9a3qh5EdEtVRUATF8=
cloud.google.com/go/secretmanager v1.15.0/go.mod h1:1hQSAhKK7FldiYw//wbR/XPfPc08eQ81oBsnRUHEvUc=
cloud.google.com/go/storage v1.56.1 h1:n6gy+yLnHn0hTwBFzNn8zJ1kqWfR91wzdM8hjRF4wP0=
cloud.google.com/go/storage v1.56.1/go.mod h1:C9xuCZgFl3buo2HZU/1FncgvvOgTAs/rnh4gF4lMg0s=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0 h1:ErKg/3iS1AKcTkf3yixlZ54f9U1rljCkQyEXWUnIUxc=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0/go.mod h1:yAZHSGnqScoU556rBOVkwLze6WP5N+U11RHuWaGVxwY=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0 h1:owcC2UnmsZycprQ5RfRgjydWhuoxg71LUfyiQdijZuM=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0/go.mod h1:ZPpqegjbE99EPKsu3iUWV22A04wzGPcAY/ziSIQEEgs=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0 h1:Ron4zCA/yk6U7WOBXhTJcDpsUBG9npumK6xw2auFltQ=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0/go.mod h1:cSgYe11MCNYunTnRXrKiR/tHc0eoKjICUuWpNZoVCOo=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv1aFbZMiM9vblcSArJRf2Irls=
github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/envoyproxy/go-control-plane v0.13.4 h1:zEqyPVyku6IvWCFwux4x9RxkLOMUL+1vC9xUFv5l2/M=
github.com/envoyproxy/go-control-plane/envoy v1.32.4 h1:jb83lalDRZSpPWW2Z7Mck/8kXZ5CQAFYVjQcdVIr83A=
github.com/envoyproxy/go-control-plane/envoy v1.32.4/go.mod h1:Gzjc5k8JcJswLjAx1Zm+wSYE20UrLtt7JZMWiWQXQEw=
github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfUKS7KJ7spH3d86P8=
github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/go-jose/go-jose/v4 v4.1.0 h1:cYSYxd3pw5zd2FSXk2vGdn9igQU2PS8MuxrCOCl0FdY=
github.com/go-jose/go-jose/v4 v4.1.0/go.mod h1:GG/vqmYm3Von2nYiB2vGTXzdoNKE5tix5tuc6iAd+sw=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0=
github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4=
github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA=
github.com/googleapis/gax-go/v2 v2.15.0 h1:SyjDc1mGgZU5LncH8gimWo9lW1DtIfPibOG81vgd/bo=
github.com/googleapis/gax-go/v2 v2.15.0/go.mod h1:zVVkkxAQHa1RQpg9z2AUCMnKhi0Qld9rcmyfL1OZhoc=
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0 h1:m/en3Pp3iGc99SYejUS9iQ01QXsDKMVA+Z456P6mAxY=
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0/go.mod h1:DYgNeEV9WHpesG9O2Esr0o+s7Nag0H3h3U2JstK1g6I=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo=
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/spiffe/go-spiffe/v2 v2.5.0 h1:N2I01KCUkv1FAjZXJMwh95KK1ZIQLYbPfhaxw8WS0hE=
github.com/spiffe/go-spiffe/v2 v2.5.0/go.mod h1:P+NxobPc6wXhVtINNtFjNWGBTreew1GBUCwT2wPmb7g=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM=
github.com/zeebo/errs v1.4.0/go.mod h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4=
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
go.opentelemetry.io/contrib/detectors/gcp v1.36.0 h1:F7q2tNlCaHY9nMKHR6XH9/qkp8FktLnIcy6jJNyOCQw=
go.opentelemetry.io/contrib/detectors/gcp v1.36.0/go.mod h1:IbBN8uAIIx734PTonTPxAxnjc2pQTxWNkwfstZ+6H2k=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 h1:q4XOmH/0opmeuJtPsbFNivyl7bCt7yRBbeEm2sC/XtQ=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0/go.mod h1:snMWehoOh2wsEwnvvwtDyFCxVeDAODenXHtn5vzrKjo=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q=
go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg=
go.opentelemetry.io/otel v1.36.0/go.mod h1:/TcFMXYjyRNh8khOAO9ybYkqaDBb/70aVwkNML4pP8E=
go.opentelemetry.io/otel/metric v1.36.0 h1:MoWPKVhQvJ+eeXWHFBOPoBOi20jh6Iq2CcCREuTYufE=
go.opentelemetry.io/otel/metric v1.36.0/go.mod h1:zC7Ks+yeyJt4xig9DEw9kuUFe5C3zLbVjV2PzT6qzbs=
go.opentelemetry.io/otel/sdk v1.36.0 h1:b6SYIuLRs88ztox4EyrvRti80uXIFy+Sqzoh9kFULbs=
go.opentelemetry.io/otel/sdk v1.36.0/go.mod h1:+lC+mTgD+MUWfjJubi2vvXWcVxyr9rmlshZni72pXeY=
go.opentelemetry.io/otel/sdk/metric v1.36.0 h1:r0ntwwGosWGaa0CrSt8cuNuTcccMXERFwHX4dThiPis=
go.opentelemetry.io/otel/sdk/metric v1.36.0/go.mod h1:qTNOhFDfKRwX0yXOqJYegL5WRaW376QbB7P4Pb0qva4=
go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w=
go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA=
golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE=
golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg=
golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=
golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI=
golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
google.golang.org/api v0.248.0 h1:hUotakSkcwGdYUqzCRc5yGYsg4wXxpkKlW5ryVqvC1Y=
google.golang.org/api v0.248.0/go.mod h1:yAFUAF56Li7IuIQbTFoLwXTCI6XCFKueOlS7S9e4F9k=
google.golang.org/genai v1.23.0 h1:0VkQPd1CVT5FbykwkWvnB7jq1d+PZFuVf0n57UyyOzs=
google.golang.org/genai v1.23.0/go.mod h1:QPj5NGJw+3wEOHg+PrsWwJKvG6UC84ex5FR7qAYsN/M=
google.golang.org/genproto v0.0.0-20250603155806-513f23925822 h1:rHWScKit0gvAPuOnu87KpaYtjK5zBMLcULh7gxkCXu4=
google.golang.org/genproto v0.0.0-20250603155806-513f23925822/go.mod h1:HubltRL7rMh0LfnQPkMH4NPDFEWp0jw3vixw7jEM53s=
google.golang.org/genproto/googleapis/api v0.0.0-20250818200422-3122310a409c h1:AtEkQdl5b6zsybXcbz00j1LwNodDuH6hVifIaNqk7NQ=
google.golang.org/genproto/googleapis/api v0.0.0-20250818200422-3122310a409c/go.mod h1:ea2MjsO70ssTfCjiwHgI0ZFqcw45Ksuk2ckf9G468GA=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c h1:qXWI/sQtv5UKboZ/zUk7h+mrf/lXORyI+n9DKDAusdg=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c/go.mod h1:gw1tLEfykwDz2ET4a12jcXt4couGAm7IwsVaTy0Sflo=
google.golang.org/grpc v1.74.2 h1:WoosgB65DlWVC9FqI82dGsZhWFNBSLjQ84bjROOpMu4=
google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM=
google.golang.org/protobuf v1.36.7 h1:IgrO7UwFQGJdRNXH/sQux4R1Dj1WAKcLElzeeRaXV2A=
google.golang.org/protobuf v1.36.7/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

View File

@@ -8,10 +8,10 @@ require (
)
require (
cloud.google.com/go v0.121.1 // indirect
cloud.google.com/go/auth v0.16.2 // indirect
cloud.google.com/go v0.121.6 // indirect
cloud.google.com/go/auth v0.16.5 // indirect
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
cloud.google.com/go/compute/metadata v0.7.0 // indirect
cloud.google.com/go/compute/metadata v0.8.0 // indirect
github.com/bahlo/generic-list-go v0.2.0 // indirect
github.com/buger/jsonparser v1.1.1 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
@@ -23,7 +23,7 @@ require (
github.com/google/s2a-go v0.1.9 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
github.com/googleapis/gax-go/v2 v2.15.0 // indirect
github.com/gorilla/websocket v1.5.3 // indirect
github.com/invopop/jsonschema v0.13.0 // indirect
github.com/mailru/easyjson v0.9.0 // indirect
@@ -38,15 +38,15 @@ require (
go.opentelemetry.io/otel/metric v1.36.0 // indirect
go.opentelemetry.io/otel/sdk v1.36.0 // indirect
go.opentelemetry.io/otel/trace v1.36.0 // indirect
golang.org/x/crypto v0.39.0 // indirect
golang.org/x/net v0.41.0 // indirect
golang.org/x/crypto v0.41.0 // indirect
golang.org/x/net v0.43.0 // indirect
golang.org/x/oauth2 v0.30.0 // indirect
golang.org/x/sys v0.33.0 // indirect
golang.org/x/text v0.26.0 // indirect
google.golang.org/api v0.242.0 // indirect
golang.org/x/sys v0.35.0 // indirect
golang.org/x/text v0.28.0 // indirect
google.golang.org/api v0.248.0 // indirect
google.golang.org/genai v1.11.1 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
google.golang.org/grpc v1.73.0 // indirect
google.golang.org/protobuf v1.36.6 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c // indirect
google.golang.org/grpc v1.74.2 // indirect
google.golang.org/protobuf v1.36.7 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

View File

@@ -0,0 +1,158 @@
cel.dev/expr v0.24.0 h1:56OvJKSH3hDGL0ml5uSxZmz3/3Pq4tJ+fb1unVLAFcY=
cel.dev/expr v0.24.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw=
cloud.google.com/go v0.121.6 h1:waZiuajrI28iAf40cWgycWNgaXPO06dupuS+sgibK6c=
cloud.google.com/go v0.121.6/go.mod h1:coChdst4Ea5vUpiALcYKXEpR1S9ZgXbhEzzMcMR66vI=
cloud.google.com/go/auth v0.16.5 h1:mFWNQ2FEVWAliEQWpAdH80omXFokmrnbDhUS9cBywsI=
cloud.google.com/go/auth v0.16.5/go.mod h1:utzRfHMP+Vv0mpOkTRQoWD2q3BatTOoWbA7gCc2dUhQ=
cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc=
cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c=
cloud.google.com/go/compute/metadata v0.8.0 h1:HxMRIbao8w17ZX6wBnjhcDkW6lTFpgcaobyVfZWqRLA=
cloud.google.com/go/compute/metadata v0.8.0/go.mod h1:sYOGTp851OV9bOFJ9CH7elVvyzopvWQFNNghtDQ/Biw=
cloud.google.com/go/iam v1.5.2 h1:qgFRAGEmd8z6dJ/qyEchAuL9jpswyODjA2lS+w234g8=
cloud.google.com/go/iam v1.5.2/go.mod h1:SE1vg0N81zQqLzQEwxL2WI6yhetBdbNQuTvIKCSkUHE=
cloud.google.com/go/monitoring v1.24.2 h1:5OTsoJ1dXYIiMiuL+sYscLc9BumrL3CarVLL7dd7lHM=
cloud.google.com/go/monitoring v1.24.2/go.mod h1:x7yzPWcgDRnPEv3sI+jJGBkwl5qINf+6qY4eq0I9B4U=
cloud.google.com/go/secretmanager v1.15.0 h1:RtkCMgTpaBMbzozcRUGfZe46jb9a3qh5EdEtVRUATF8=
cloud.google.com/go/secretmanager v1.15.0/go.mod h1:1hQSAhKK7FldiYw//wbR/XPfPc08eQ81oBsnRUHEvUc=
cloud.google.com/go/storage v1.56.1 h1:n6gy+yLnHn0hTwBFzNn8zJ1kqWfR91wzdM8hjRF4wP0=
cloud.google.com/go/storage v1.56.1/go.mod h1:C9xuCZgFl3buo2HZU/1FncgvvOgTAs/rnh4gF4lMg0s=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0 h1:ErKg/3iS1AKcTkf3yixlZ54f9U1rljCkQyEXWUnIUxc=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0/go.mod h1:yAZHSGnqScoU556rBOVkwLze6WP5N+U11RHuWaGVxwY=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0 h1:owcC2UnmsZycprQ5RfRgjydWhuoxg71LUfyiQdijZuM=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0/go.mod h1:ZPpqegjbE99EPKsu3iUWV22A04wzGPcAY/ziSIQEEgs=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0 h1:Ron4zCA/yk6U7WOBXhTJcDpsUBG9npumK6xw2auFltQ=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0/go.mod h1:cSgYe11MCNYunTnRXrKiR/tHc0eoKjICUuWpNZoVCOo=
github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk=
github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg=
github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs=
github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv1aFbZMiM9vblcSArJRf2Irls=
github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/envoyproxy/go-control-plane v0.13.4 h1:zEqyPVyku6IvWCFwux4x9RxkLOMUL+1vC9xUFv5l2/M=
github.com/envoyproxy/go-control-plane/envoy v1.32.4 h1:jb83lalDRZSpPWW2Z7Mck/8kXZ5CQAFYVjQcdVIr83A=
github.com/envoyproxy/go-control-plane/envoy v1.32.4/go.mod h1:Gzjc5k8JcJswLjAx1Zm+wSYE20UrLtt7JZMWiWQXQEw=
github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfUKS7KJ7spH3d86P8=
github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/firebase/genkit/go v0.6.2 h1:FaVJtcprfXZz0gXTtARJqUiovu/R2wuJycNn/18aNMc=
github.com/firebase/genkit/go v0.6.2/go.mod h1:blRYK6oNgwBDX6F+gInACru6q527itviv+xruiMSUuU=
github.com/go-jose/go-jose/v4 v4.1.0 h1:cYSYxd3pw5zd2FSXk2vGdn9igQU2PS8MuxrCOCl0FdY=
github.com/go-jose/go-jose/v4 v4.1.0/go.mod h1:GG/vqmYm3Von2nYiB2vGTXzdoNKE5tix5tuc6iAd+sw=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/goccy/go-yaml v1.17.1 h1:LI34wktB2xEE3ONG/2Ar54+/HJVBriAGJ55PHls4YuY=
github.com/goccy/go-yaml v1.17.1/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/google/dotprompt/go v0.0.0-20250611200215-bb73406b05ca h1:LuQ8KS5N04c37jyaq6jelLdNi0GfI6QJb8lpnYaDW9Y=
github.com/google/dotprompt/go v0.0.0-20250611200215-bb73406b05ca/go.mod h1:dnIk+MSMnipm9uZyPIgptq7I39aDxyjBiaev/OG0W0Y=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0=
github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4=
github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA=
github.com/googleapis/gax-go/v2 v2.15.0 h1:SyjDc1mGgZU5LncH8gimWo9lW1DtIfPibOG81vgd/bo=
github.com/googleapis/gax-go/v2 v2.15.0/go.mod h1:zVVkkxAQHa1RQpg9z2AUCMnKhi0Qld9rcmyfL1OZhoc=
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0 h1:m/en3Pp3iGc99SYejUS9iQ01QXsDKMVA+Z456P6mAxY=
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0/go.mod h1:DYgNeEV9WHpesG9O2Esr0o+s7Nag0H3h3U2JstK1g6I=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/invopop/jsonschema v0.13.0 h1:KvpoAJWEjR3uD9Kbm2HWJmqsEaHt8lBUpd0qHcIi21E=
github.com/invopop/jsonschema v0.13.0/go.mod h1:ffZ5Km5SWWRAIN6wbDXItl95euhFz2uON45H2qjYt+0=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/mailru/easyjson v0.9.0 h1:PrnmzHw7262yW8sTBwxi1PdJA3Iw/EKBa8psRf7d9a4=
github.com/mailru/easyjson v0.9.0/go.mod h1:1+xMtQp2MRNVL/V1bOzuP3aP8VNwRW55fQUto+XFtTU=
github.com/mbleigh/raymond v0.0.0-20250414171441-6b3a58ab9e0a h1:v2cBA3xWKv2cIOVhnzX/gNgkNXqiHfUgJtA3r61Hf7A=
github.com/mbleigh/raymond v0.0.0-20250414171441-6b3a58ab9e0a/go.mod h1:Y6ghKH+ZijXn5d9E7qGGZBmjitx7iitZdQiIW97EpTU=
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo=
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
github.com/spiffe/go-spiffe/v2 v2.5.0 h1:N2I01KCUkv1FAjZXJMwh95KK1ZIQLYbPfhaxw8WS0hE=
github.com/spiffe/go-spiffe/v2 v2.5.0/go.mod h1:P+NxobPc6wXhVtINNtFjNWGBTreew1GBUCwT2wPmb7g=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/wk8/go-ordered-map/v2 v2.1.8 h1:5h/BUHu93oj4gIdvHHHGsScSTMijfx5PeYkE/fJgbpc=
github.com/wk8/go-ordered-map/v2 v2.1.8/go.mod h1:5nJHM5DyteebpVlHnWMV0rPz6Zp7+xBAnxjb1X5vnTw=
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM=
github.com/zeebo/errs v1.4.0/go.mod h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4=
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
go.opentelemetry.io/contrib/detectors/gcp v1.36.0 h1:F7q2tNlCaHY9nMKHR6XH9/qkp8FktLnIcy6jJNyOCQw=
go.opentelemetry.io/contrib/detectors/gcp v1.36.0/go.mod h1:IbBN8uAIIx734PTonTPxAxnjc2pQTxWNkwfstZ+6H2k=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 h1:q4XOmH/0opmeuJtPsbFNivyl7bCt7yRBbeEm2sC/XtQ=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0/go.mod h1:snMWehoOh2wsEwnvvwtDyFCxVeDAODenXHtn5vzrKjo=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q=
go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg=
go.opentelemetry.io/otel v1.36.0/go.mod h1:/TcFMXYjyRNh8khOAO9ybYkqaDBb/70aVwkNML4pP8E=
go.opentelemetry.io/otel/metric v1.36.0 h1:MoWPKVhQvJ+eeXWHFBOPoBOi20jh6Iq2CcCREuTYufE=
go.opentelemetry.io/otel/metric v1.36.0/go.mod h1:zC7Ks+yeyJt4xig9DEw9kuUFe5C3zLbVjV2PzT6qzbs=
go.opentelemetry.io/otel/sdk v1.36.0 h1:b6SYIuLRs88ztox4EyrvRti80uXIFy+Sqzoh9kFULbs=
go.opentelemetry.io/otel/sdk v1.36.0/go.mod h1:+lC+mTgD+MUWfjJubi2vvXWcVxyr9rmlshZni72pXeY=
go.opentelemetry.io/otel/sdk/metric v1.36.0 h1:r0ntwwGosWGaa0CrSt8cuNuTcccMXERFwHX4dThiPis=
go.opentelemetry.io/otel/sdk/metric v1.36.0/go.mod h1:qTNOhFDfKRwX0yXOqJYegL5WRaW376QbB7P4Pb0qva4=
go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w=
go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE=
golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg=
golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=
golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI=
golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
google.golang.org/api v0.248.0 h1:hUotakSkcwGdYUqzCRc5yGYsg4wXxpkKlW5ryVqvC1Y=
google.golang.org/api v0.248.0/go.mod h1:yAFUAF56Li7IuIQbTFoLwXTCI6XCFKueOlS7S9e4F9k=
google.golang.org/genai v1.11.1 h1:MgI2JVDaIQ1YMuzKFwgPciB+K6kQ8MCBMVL9u7Oa8qw=
google.golang.org/genai v1.11.1/go.mod h1:HFXR1zT3LCdLxd/NW6IOSCczOYyRAxwaShvYbgPSeVw=
google.golang.org/genproto v0.0.0-20250603155806-513f23925822 h1:rHWScKit0gvAPuOnu87KpaYtjK5zBMLcULh7gxkCXu4=
google.golang.org/genproto v0.0.0-20250603155806-513f23925822/go.mod h1:HubltRL7rMh0LfnQPkMH4NPDFEWp0jw3vixw7jEM53s=
google.golang.org/genproto/googleapis/api v0.0.0-20250818200422-3122310a409c h1:AtEkQdl5b6zsybXcbz00j1LwNodDuH6hVifIaNqk7NQ=
google.golang.org/genproto/googleapis/api v0.0.0-20250818200422-3122310a409c/go.mod h1:ea2MjsO70ssTfCjiwHgI0ZFqcw45Ksuk2ckf9G468GA=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c h1:qXWI/sQtv5UKboZ/zUk7h+mrf/lXORyI+n9DKDAusdg=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c/go.mod h1:gw1tLEfykwDz2ET4a12jcXt4couGAm7IwsVaTy0Sflo=
google.golang.org/grpc v1.74.2 h1:WoosgB65DlWVC9FqI82dGsZhWFNBSLjQ84bjROOpMu4=
google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM=
google.golang.org/protobuf v1.36.7 h1:IgrO7UwFQGJdRNXH/sQux4R1Dj1WAKcLElzeeRaXV2A=
google.golang.org/protobuf v1.36.7/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

View File

@@ -8,12 +8,12 @@ require (
)
require (
cloud.google.com/go v0.121.1 // indirect
cloud.google.com/go v0.121.6 // indirect
cloud.google.com/go/ai v0.7.0 // indirect
cloud.google.com/go/aiplatform v1.85.0 // indirect
cloud.google.com/go/auth v0.16.2 // indirect
cloud.google.com/go/aiplatform v1.89.0 // indirect
cloud.google.com/go/auth v0.16.5 // indirect
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
cloud.google.com/go/compute/metadata v0.7.0 // indirect
cloud.google.com/go/compute/metadata v0.8.0 // indirect
cloud.google.com/go/iam v1.5.2 // indirect
cloud.google.com/go/longrunning v0.6.7 // indirect
cloud.google.com/go/vertexai v0.12.0 // indirect
@@ -25,7 +25,7 @@ require (
github.com/google/s2a-go v0.1.9 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
github.com/googleapis/gax-go/v2 v2.15.0 // indirect
github.com/pkoukk/tiktoken-go v0.1.6 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 // indirect
@@ -33,17 +33,17 @@ require (
go.opentelemetry.io/otel v1.36.0 // indirect
go.opentelemetry.io/otel/metric v1.36.0 // indirect
go.opentelemetry.io/otel/trace v1.36.0 // indirect
golang.org/x/crypto v0.39.0 // indirect
golang.org/x/net v0.41.0 // indirect
golang.org/x/crypto v0.41.0 // indirect
golang.org/x/net v0.43.0 // indirect
golang.org/x/oauth2 v0.30.0 // indirect
golang.org/x/sync v0.15.0 // indirect
golang.org/x/sys v0.33.0 // indirect
golang.org/x/text v0.26.0 // indirect
golang.org/x/sync v0.16.0 // indirect
golang.org/x/sys v0.35.0 // indirect
golang.org/x/text v0.28.0 // indirect
golang.org/x/time v0.12.0 // indirect
google.golang.org/api v0.242.0 // indirect
google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
google.golang.org/grpc v1.73.0 // indirect
google.golang.org/protobuf v1.36.6 // indirect
google.golang.org/api v0.248.0 // indirect
google.golang.org/genproto v0.0.0-20250603155806-513f23925822 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20250818200422-3122310a409c // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c // indirect
google.golang.org/grpc v1.74.2 // indirect
google.golang.org/protobuf v1.36.7 // indirect
)

View File

@@ -0,0 +1,134 @@
cel.dev/expr v0.24.0 h1:56OvJKSH3hDGL0ml5uSxZmz3/3Pq4tJ+fb1unVLAFcY=
cel.dev/expr v0.24.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw=
cloud.google.com/go v0.121.6 h1:waZiuajrI28iAf40cWgycWNgaXPO06dupuS+sgibK6c=
cloud.google.com/go v0.121.6/go.mod h1:coChdst4Ea5vUpiALcYKXEpR1S9ZgXbhEzzMcMR66vI=
cloud.google.com/go/ai v0.7.0 h1:P6+b5p4gXlza5E+u7uvcgYlzZ7103ACg70YdZeC6oGE=
cloud.google.com/go/ai v0.7.0/go.mod h1:7ozuEcraovh4ABsPbrec3o4LmFl9HigNI3D5haxYeQo=
cloud.google.com/go/aiplatform v1.89.0 h1:niSJYc6ldWWVM9faXPo1Et1MVSQoLvVGriD7fwbJdtE=
cloud.google.com/go/aiplatform v1.89.0/go.mod h1:TzZtegPkinfXTtXVvZZpxx7noINFMVDrLkE7cEWhYEk=
cloud.google.com/go/auth v0.16.5 h1:mFWNQ2FEVWAliEQWpAdH80omXFokmrnbDhUS9cBywsI=
cloud.google.com/go/auth v0.16.5/go.mod h1:utzRfHMP+Vv0mpOkTRQoWD2q3BatTOoWbA7gCc2dUhQ=
cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc=
cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c=
cloud.google.com/go/compute/metadata v0.8.0 h1:HxMRIbao8w17ZX6wBnjhcDkW6lTFpgcaobyVfZWqRLA=
cloud.google.com/go/compute/metadata v0.8.0/go.mod h1:sYOGTp851OV9bOFJ9CH7elVvyzopvWQFNNghtDQ/Biw=
cloud.google.com/go/iam v1.5.2 h1:qgFRAGEmd8z6dJ/qyEchAuL9jpswyODjA2lS+w234g8=
cloud.google.com/go/iam v1.5.2/go.mod h1:SE1vg0N81zQqLzQEwxL2WI6yhetBdbNQuTvIKCSkUHE=
cloud.google.com/go/longrunning v0.6.7 h1:IGtfDWHhQCgCjwQjV9iiLnUta9LBCo8R9QmAFsS/PrE=
cloud.google.com/go/longrunning v0.6.7/go.mod h1:EAFV3IZAKmM56TyiE6VAP3VoTzhZzySwI/YI1s/nRsY=
cloud.google.com/go/monitoring v1.24.2 h1:5OTsoJ1dXYIiMiuL+sYscLc9BumrL3CarVLL7dd7lHM=
cloud.google.com/go/monitoring v1.24.2/go.mod h1:x7yzPWcgDRnPEv3sI+jJGBkwl5qINf+6qY4eq0I9B4U=
cloud.google.com/go/secretmanager v1.15.0 h1:RtkCMgTpaBMbzozcRUGfZe46jb9a3qh5EdEtVRUATF8=
cloud.google.com/go/secretmanager v1.15.0/go.mod h1:1hQSAhKK7FldiYw//wbR/XPfPc08eQ81oBsnRUHEvUc=
cloud.google.com/go/storage v1.56.1 h1:n6gy+yLnHn0hTwBFzNn8zJ1kqWfR91wzdM8hjRF4wP0=
cloud.google.com/go/storage v1.56.1/go.mod h1:C9xuCZgFl3buo2HZU/1FncgvvOgTAs/rnh4gF4lMg0s=
cloud.google.com/go/vertexai v0.12.0 h1:zTadEo/CtsoyRXNx3uGCncoWAP1H2HakGqwznt+iMo8=
cloud.google.com/go/vertexai v0.12.0/go.mod h1:8u+d0TsvBfAAd2x5R6GMgbYhsLgo3J7lmP4bR8g2ig8=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0 h1:ErKg/3iS1AKcTkf3yixlZ54f9U1rljCkQyEXWUnIUxc=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0/go.mod h1:yAZHSGnqScoU556rBOVkwLze6WP5N+U11RHuWaGVxwY=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0 h1:owcC2UnmsZycprQ5RfRgjydWhuoxg71LUfyiQdijZuM=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0/go.mod h1:ZPpqegjbE99EPKsu3iUWV22A04wzGPcAY/ziSIQEEgs=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0 h1:Ron4zCA/yk6U7WOBXhTJcDpsUBG9npumK6xw2auFltQ=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0/go.mod h1:cSgYe11MCNYunTnRXrKiR/tHc0eoKjICUuWpNZoVCOo=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv1aFbZMiM9vblcSArJRf2Irls=
github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dlclark/regexp2 v1.10.0 h1:+/GIL799phkJqYW+3YbOd8LCcbHzT0Pbo8zl70MHsq0=
github.com/dlclark/regexp2 v1.10.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/envoyproxy/go-control-plane v0.13.4 h1:zEqyPVyku6IvWCFwux4x9RxkLOMUL+1vC9xUFv5l2/M=
github.com/envoyproxy/go-control-plane/envoy v1.32.4 h1:jb83lalDRZSpPWW2Z7Mck/8kXZ5CQAFYVjQcdVIr83A=
github.com/envoyproxy/go-control-plane/envoy v1.32.4/go.mod h1:Gzjc5k8JcJswLjAx1Zm+wSYE20UrLtt7JZMWiWQXQEw=
github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfUKS7KJ7spH3d86P8=
github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/go-jose/go-jose/v4 v4.1.0 h1:cYSYxd3pw5zd2FSXk2vGdn9igQU2PS8MuxrCOCl0FdY=
github.com/go-jose/go-jose/v4 v4.1.0/go.mod h1:GG/vqmYm3Von2nYiB2vGTXzdoNKE5tix5tuc6iAd+sw=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/google/generative-ai-go v0.15.1 h1:n8aQUpvhPOlGVuM2DRkJ2jvx04zpp42B778AROJa+pQ=
github.com/google/generative-ai-go v0.15.1/go.mod h1:AAucpWZjXsDKhQYWvCYuP6d0yB1kX998pJlOW1rAesw=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0=
github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4=
github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA=
github.com/googleapis/gax-go/v2 v2.15.0 h1:SyjDc1mGgZU5LncH8gimWo9lW1DtIfPibOG81vgd/bo=
github.com/googleapis/gax-go/v2 v2.15.0/go.mod h1:zVVkkxAQHa1RQpg9z2AUCMnKhi0Qld9rcmyfL1OZhoc=
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0 h1:m/en3Pp3iGc99SYejUS9iQ01QXsDKMVA+Z456P6mAxY=
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0/go.mod h1:DYgNeEV9WHpesG9O2Esr0o+s7Nag0H3h3U2JstK1g6I=
github.com/pkoukk/tiktoken-go v0.1.6 h1:JF0TlJzhTbrI30wCvFuiw6FzP2+/bR+FIxUdgEAcUsw=
github.com/pkoukk/tiktoken-go v0.1.6/go.mod h1:9NiV+i9mJKGj1rYOT+njbv+ZwA/zJxYdewGl6qVatpg=
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo=
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/spiffe/go-spiffe/v2 v2.5.0 h1:N2I01KCUkv1FAjZXJMwh95KK1ZIQLYbPfhaxw8WS0hE=
github.com/spiffe/go-spiffe/v2 v2.5.0/go.mod h1:P+NxobPc6wXhVtINNtFjNWGBTreew1GBUCwT2wPmb7g=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/tmc/langchaingo v0.1.13 h1:rcpMWBIi2y3B90XxfE4Ao8dhCQPVDMaNPnN5cGB1CaA=
github.com/tmc/langchaingo v0.1.13/go.mod h1:vpQ5NOIhpzxDfTZK9B6tf2GM/MoaHewPWM5KXXGh7hg=
github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM=
github.com/zeebo/errs v1.4.0/go.mod h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4=
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
go.opentelemetry.io/contrib/detectors/gcp v1.36.0 h1:F7q2tNlCaHY9nMKHR6XH9/qkp8FktLnIcy6jJNyOCQw=
go.opentelemetry.io/contrib/detectors/gcp v1.36.0/go.mod h1:IbBN8uAIIx734PTonTPxAxnjc2pQTxWNkwfstZ+6H2k=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 h1:q4XOmH/0opmeuJtPsbFNivyl7bCt7yRBbeEm2sC/XtQ=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0/go.mod h1:snMWehoOh2wsEwnvvwtDyFCxVeDAODenXHtn5vzrKjo=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q=
go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg=
go.opentelemetry.io/otel v1.36.0/go.mod h1:/TcFMXYjyRNh8khOAO9ybYkqaDBb/70aVwkNML4pP8E=
go.opentelemetry.io/otel/metric v1.36.0 h1:MoWPKVhQvJ+eeXWHFBOPoBOi20jh6Iq2CcCREuTYufE=
go.opentelemetry.io/otel/metric v1.36.0/go.mod h1:zC7Ks+yeyJt4xig9DEw9kuUFe5C3zLbVjV2PzT6qzbs=
go.opentelemetry.io/otel/sdk v1.36.0 h1:b6SYIuLRs88ztox4EyrvRti80uXIFy+Sqzoh9kFULbs=
go.opentelemetry.io/otel/sdk v1.36.0/go.mod h1:+lC+mTgD+MUWfjJubi2vvXWcVxyr9rmlshZni72pXeY=
go.opentelemetry.io/otel/sdk/metric v1.36.0 h1:r0ntwwGosWGaa0CrSt8cuNuTcccMXERFwHX4dThiPis=
go.opentelemetry.io/otel/sdk/metric v1.36.0/go.mod h1:qTNOhFDfKRwX0yXOqJYegL5WRaW376QbB7P4Pb0qva4=
go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w=
go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA=
golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE=
golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg=
golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=
golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI=
golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
google.golang.org/api v0.248.0 h1:hUotakSkcwGdYUqzCRc5yGYsg4wXxpkKlW5ryVqvC1Y=
google.golang.org/api v0.248.0/go.mod h1:yAFUAF56Li7IuIQbTFoLwXTCI6XCFKueOlS7S9e4F9k=
google.golang.org/genproto v0.0.0-20250603155806-513f23925822 h1:rHWScKit0gvAPuOnu87KpaYtjK5zBMLcULh7gxkCXu4=
google.golang.org/genproto v0.0.0-20250603155806-513f23925822/go.mod h1:HubltRL7rMh0LfnQPkMH4NPDFEWp0jw3vixw7jEM53s=
google.golang.org/genproto/googleapis/api v0.0.0-20250818200422-3122310a409c h1:AtEkQdl5b6zsybXcbz00j1LwNodDuH6hVifIaNqk7NQ=
google.golang.org/genproto/googleapis/api v0.0.0-20250818200422-3122310a409c/go.mod h1:ea2MjsO70ssTfCjiwHgI0ZFqcw45Ksuk2ckf9G468GA=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c h1:qXWI/sQtv5UKboZ/zUk7h+mrf/lXORyI+n9DKDAusdg=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c/go.mod h1:gw1tLEfykwDz2ET4a12jcXt4couGAm7IwsVaTy0Sflo=
google.golang.org/grpc v1.74.2 h1:WoosgB65DlWVC9FqI82dGsZhWFNBSLjQ84bjROOpMu4=
google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM=
google.golang.org/protobuf v1.36.7 h1:IgrO7UwFQGJdRNXH/sQux4R1Dj1WAKcLElzeeRaXV2A=
google.golang.org/protobuf v1.36.7/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo=
sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8=

View File

@@ -8,15 +8,15 @@ require (
)
require (
cloud.google.com/go/auth v0.16.2 // indirect
cloud.google.com/go/auth v0.16.5 // indirect
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
cloud.google.com/go/compute/metadata v0.7.0 // indirect
cloud.google.com/go/compute/metadata v0.8.0 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/google/s2a-go v0.1.9 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
github.com/googleapis/gax-go/v2 v2.15.0 // indirect
github.com/tidwall/gjson v1.14.4 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
@@ -26,13 +26,13 @@ require (
go.opentelemetry.io/otel v1.36.0 // indirect
go.opentelemetry.io/otel/metric v1.36.0 // indirect
go.opentelemetry.io/otel/trace v1.36.0 // indirect
golang.org/x/crypto v0.39.0 // indirect
golang.org/x/net v0.41.0 // indirect
golang.org/x/crypto v0.41.0 // indirect
golang.org/x/net v0.43.0 // indirect
golang.org/x/oauth2 v0.30.0 // indirect
golang.org/x/sys v0.33.0 // indirect
golang.org/x/text v0.26.0 // indirect
google.golang.org/api v0.242.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
google.golang.org/grpc v1.73.0 // indirect
google.golang.org/protobuf v1.36.6 // indirect
golang.org/x/sys v0.35.0 // indirect
golang.org/x/text v0.28.0 // indirect
google.golang.org/api v0.248.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c // indirect
google.golang.org/grpc v1.74.2 // indirect
google.golang.org/protobuf v1.36.7 // indirect
)

View File

@@ -0,0 +1,150 @@
cel.dev/expr v0.23.0 h1:wUb94w6OYQS4uXraxo9U+wUAs9jT47Xvl4iPgAwM2ss=
cel.dev/expr v0.23.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw=
cloud.google.com/go v0.121.1 h1:S3kTQSydxmu1JfLRLpKtxRPA7rSrYPRPEUmL/PavVUw=
cloud.google.com/go v0.121.1/go.mod h1:nRFlrHq39MNVWu+zESP2PosMWA0ryJw8KUBZ2iZpxbw=
cloud.google.com/go/auth v0.16.2 h1:QvBAGFPLrDeoiNjyfVunhQ10HKNYuOwZ5noee0M5df4=
cloud.google.com/go/auth v0.16.2/go.mod h1:sRBas2Y1fB1vZTdurouM0AzuYQBMZinrUYL8EufhtEA=
cloud.google.com/go/auth v0.16.5 h1:mFWNQ2FEVWAliEQWpAdH80omXFokmrnbDhUS9cBywsI=
cloud.google.com/go/auth v0.16.5/go.mod h1:utzRfHMP+Vv0mpOkTRQoWD2q3BatTOoWbA7gCc2dUhQ=
cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc=
cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c=
cloud.google.com/go/compute/metadata v0.7.0 h1:PBWF+iiAerVNe8UCHxdOt6eHLVc3ydFeOCw78U8ytSU=
cloud.google.com/go/compute/metadata v0.7.0/go.mod h1:j5MvL9PprKL39t166CoB1uVHfQMs4tFQZZcKwksXUjo=
cloud.google.com/go/compute/metadata v0.8.0 h1:HxMRIbao8w17ZX6wBnjhcDkW6lTFpgcaobyVfZWqRLA=
cloud.google.com/go/compute/metadata v0.8.0/go.mod h1:sYOGTp851OV9bOFJ9CH7elVvyzopvWQFNNghtDQ/Biw=
cloud.google.com/go/iam v1.5.2 h1:qgFRAGEmd8z6dJ/qyEchAuL9jpswyODjA2lS+w234g8=
cloud.google.com/go/iam v1.5.2/go.mod h1:SE1vg0N81zQqLzQEwxL2WI6yhetBdbNQuTvIKCSkUHE=
cloud.google.com/go/monitoring v1.24.2 h1:5OTsoJ1dXYIiMiuL+sYscLc9BumrL3CarVLL7dd7lHM=
cloud.google.com/go/monitoring v1.24.2/go.mod h1:x7yzPWcgDRnPEv3sI+jJGBkwl5qINf+6qY4eq0I9B4U=
cloud.google.com/go/secretmanager v1.15.0 h1:RtkCMgTpaBMbzozcRUGfZe46jb9a3qh5EdEtVRUATF8=
cloud.google.com/go/secretmanager v1.15.0/go.mod h1:1hQSAhKK7FldiYw//wbR/XPfPc08eQ81oBsnRUHEvUc=
cloud.google.com/go/storage v1.55.0 h1:NESjdAToN9u1tmhVqhXCaCwYBuvEhZLLv0gBr+2znf0=
cloud.google.com/go/storage v1.55.0/go.mod h1:ztSmTTwzsdXe5syLVS0YsbFxXuvEmEyZj7v7zChEmuY=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0 h1:ErKg/3iS1AKcTkf3yixlZ54f9U1rljCkQyEXWUnIUxc=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0/go.mod h1:yAZHSGnqScoU556rBOVkwLze6WP5N+U11RHuWaGVxwY=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.52.0 h1:QFgWzcdmJlgEAwJz/zePYVJQxfoJGRtgIqZfIUFg5oQ=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.52.0/go.mod h1:ayYHuYU7iNcNtEs1K9k6D/Bju7u1VEHMQm5qQ1n3GtM=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.52.0 h1:wbMd4eG/fOhsCa6+IP8uEDvWF5vl7rNoUWmP5f72Tbs=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.52.0/go.mod h1:gdIm9TxRk5soClCwuB0FtdXsbqtw0aqPwBEurK9tPkw=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f h1:C5bqEmzEPLsHm9Mv73lSE9e9bKV23aB1vxOsmZrkl3k=
github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/envoyproxy/go-control-plane v0.13.4 h1:zEqyPVyku6IvWCFwux4x9RxkLOMUL+1vC9xUFv5l2/M=
github.com/envoyproxy/go-control-plane/envoy v1.32.4 h1:jb83lalDRZSpPWW2Z7Mck/8kXZ5CQAFYVjQcdVIr83A=
github.com/envoyproxy/go-control-plane/envoy v1.32.4/go.mod h1:Gzjc5k8JcJswLjAx1Zm+wSYE20UrLtt7JZMWiWQXQEw=
github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfUKS7KJ7spH3d86P8=
github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/go-jose/go-jose/v4 v4.1.0 h1:cYSYxd3pw5zd2FSXk2vGdn9igQU2PS8MuxrCOCl0FdY=
github.com/go-jose/go-jose/v4 v4.1.0/go.mod h1:GG/vqmYm3Von2nYiB2vGTXzdoNKE5tix5tuc6iAd+sw=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0=
github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4=
github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA=
github.com/googleapis/gax-go/v2 v2.14.2 h1:eBLnkZ9635krYIPD+ag1USrOAI0Nr0QYF3+/3GqO0k0=
github.com/googleapis/gax-go/v2 v2.14.2/go.mod h1:ON64QhlJkhVtSqp4v1uaK92VyZ2gmvDQsweuyLV+8+w=
github.com/googleapis/gax-go/v2 v2.15.0 h1:SyjDc1mGgZU5LncH8gimWo9lW1DtIfPibOG81vgd/bo=
github.com/googleapis/gax-go/v2 v2.15.0/go.mod h1:zVVkkxAQHa1RQpg9z2AUCMnKhi0Qld9rcmyfL1OZhoc=
github.com/googleapis/mcp-toolbox-sdk-go v0.2.0 h1:y242XXymvSDJ84FhDvSqpyjq4bOtRDy6yOxs7QR8etY=
github.com/googleapis/mcp-toolbox-sdk-go v0.2.0/go.mod h1:Zd5cooy5sH5ThiTwzhKtZZxTkLGbPlqDZ9c8er969Ug=
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0 h1:m/en3Pp3iGc99SYejUS9iQ01QXsDKMVA+Z456P6mAxY=
github.com/googleapis/mcp-toolbox-sdk-go v0.3.0/go.mod h1:DYgNeEV9WHpesG9O2Esr0o+s7Nag0H3h3U2JstK1g6I=
github.com/openai/openai-go v1.12.0 h1:NBQCnXzqOTv5wsgNC36PrFEiskGfO5wccfCWDo9S1U0=
github.com/openai/openai-go v1.12.0/go.mod h1:g461MYGXEXBVdV5SaR/5tNzNbSfwTBBefwc+LlDCK0Y=
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo=
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/spiffe/go-spiffe/v2 v2.5.0 h1:N2I01KCUkv1FAjZXJMwh95KK1ZIQLYbPfhaxw8WS0hE=
github.com/spiffe/go-spiffe/v2 v2.5.0/go.mod h1:P+NxobPc6wXhVtINNtFjNWGBTreew1GBUCwT2wPmb7g=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/gjson v1.14.4 h1:uo0p8EbA09J7RQaflQ1aBRffTR7xedD2bcIVSYxLnkM=
github.com/tidwall/gjson v1.14.4/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM=
github.com/zeebo/errs v1.4.0/go.mod h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4=
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
go.opentelemetry.io/contrib/detectors/gcp v1.36.0 h1:F7q2tNlCaHY9nMKHR6XH9/qkp8FktLnIcy6jJNyOCQw=
go.opentelemetry.io/contrib/detectors/gcp v1.36.0/go.mod h1:IbBN8uAIIx734PTonTPxAxnjc2pQTxWNkwfstZ+6H2k=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 h1:q4XOmH/0opmeuJtPsbFNivyl7bCt7yRBbeEm2sC/XtQ=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0/go.mod h1:snMWehoOh2wsEwnvvwtDyFCxVeDAODenXHtn5vzrKjo=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q=
go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg=
go.opentelemetry.io/otel v1.36.0/go.mod h1:/TcFMXYjyRNh8khOAO9ybYkqaDBb/70aVwkNML4pP8E=
go.opentelemetry.io/otel/metric v1.36.0 h1:MoWPKVhQvJ+eeXWHFBOPoBOi20jh6Iq2CcCREuTYufE=
go.opentelemetry.io/otel/metric v1.36.0/go.mod h1:zC7Ks+yeyJt4xig9DEw9kuUFe5C3zLbVjV2PzT6qzbs=
go.opentelemetry.io/otel/sdk v1.36.0 h1:b6SYIuLRs88ztox4EyrvRti80uXIFy+Sqzoh9kFULbs=
go.opentelemetry.io/otel/sdk v1.36.0/go.mod h1:+lC+mTgD+MUWfjJubi2vvXWcVxyr9rmlshZni72pXeY=
go.opentelemetry.io/otel/sdk/metric v1.36.0 h1:r0ntwwGosWGaa0CrSt8cuNuTcccMXERFwHX4dThiPis=
go.opentelemetry.io/otel/sdk/metric v1.36.0/go.mod h1:qTNOhFDfKRwX0yXOqJYegL5WRaW376QbB7P4Pb0qva4=
go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w=
go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA=
golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM=
golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U=
golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw=
golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA=
golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE=
golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg=
golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=
golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=
golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8=
golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI=
golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M=
golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA=
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
google.golang.org/api v0.242.0 h1:7Lnb1nfnpvbkCiZek6IXKdJ0MFuAZNAJKQfA1ws62xg=
google.golang.org/api v0.242.0/go.mod h1:cOVEm2TpdAGHL2z+UwyS+kmlGr3bVWQQ6sYEqkKje50=
google.golang.org/api v0.248.0 h1:hUotakSkcwGdYUqzCRc5yGYsg4wXxpkKlW5ryVqvC1Y=
google.golang.org/api v0.248.0/go.mod h1:yAFUAF56Li7IuIQbTFoLwXTCI6XCFKueOlS7S9e4F9k=
google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 h1:1tXaIXCracvtsRxSBsYDiSBN0cuJvM7QYW+MrpIRY78=
google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:49MsLSx0oWMOZqcpB3uL8ZOkAh1+TndpJ8ONoCBWiZk=
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 h1:oWVWY3NzT7KJppx2UKhKmzPq4SRe0LdCijVRwvGeikY=
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822/go.mod h1:h3c4v36UTKzUiuaOKQ6gr3S+0hovBtUrXzTG/i3+XEc=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 h1:fc6jSaCT0vBduLYZHYrBBNY4dsWuvgyff9noRNDdBeE=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c h1:qXWI/sQtv5UKboZ/zUk7h+mrf/lXORyI+n9DKDAusdg=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c/go.mod h1:gw1tLEfykwDz2ET4a12jcXt4couGAm7IwsVaTy0Sflo=
google.golang.org/grpc v1.73.0 h1:VIWSmpI2MegBtTuFt5/JWy2oXxtjJ/e89Z70ImfD2ok=
google.golang.org/grpc v1.73.0/go.mod h1:50sbHOUqWoCQGI8V2HQLJM0B+LMlIUjNSZmow7EVBQc=
google.golang.org/grpc v1.74.2 h1:WoosgB65DlWVC9FqI82dGsZhWFNBSLjQ84bjROOpMu4=
google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM=
google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
google.golang.org/protobuf v1.36.7 h1:IgrO7UwFQGJdRNXH/sQux4R1Dj1WAKcLElzeeRaXV2A=
google.golang.org/protobuf v1.36.7/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

View File

@@ -0,0 +1,81 @@
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package main

import (
"bytes"
"os"
"os/exec"
"path/filepath"
"strings"
"testing"
)
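
// TestQuickstartSample runs the quickstart sample for the framework named by
// the ORCH_NAME environment variable and checks that its output contains every
// keyword listed in ../golden.txt. The test is skipped when ORCH_NAME or the
// matching API key is not set. An illustrative invocation (framework name and
// key depend on your setup):
//
//	ORCH_NAME=openAI OPENAI_API_KEY=... go test -run TestQuickstartSample -v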
func TestQuickstartSample(t *testing.T) {
framework := os.Getenv("ORCH_NAME")
if framework == "" {
t.Skip("Skipping test: ORCH_NAME environment variable is not set.")
}
t.Logf("--- Testing: %s ---", framework)
if framework == "openAI" {
if os.Getenv("OPENAI_API_KEY") == "" {
t.Skip("Skipping test: OPENAI_API_KEY environment variable is not set for openAI framework.")
}
} else {
if os.Getenv("GOOGLE_API_KEY") == "" {
t.Skipf("Skipping test for %s: GOOGLE_API_KEY environment variable is not set.", framework)
}
}
sampleDir := filepath.Join(".", framework)
if _, err := os.Stat(sampleDir); os.IsNotExist(err) {
t.Fatalf("Test setup failed: directory for framework '%s' not found.", framework)
}
cmd := exec.Command("go", "run", ".")
cmd.Dir = sampleDir
var stdout, stderr bytes.Buffer
cmd.Stdout = &stdout
cmd.Stderr = &stderr
err := cmd.Run()
actualOutput := stdout.String()
if err != nil {
t.Fatalf("Script execution failed with error: %v\n--- STDERR ---\n%s", err, stderr.String())
}
if len(actualOutput) == 0 {
t.Fatal("Script ran successfully but produced no output.")
}
goldenFile, err := os.ReadFile("../golden.txt")
if err != nil {
t.Fatalf("Could not read golden.txt to check for keywords: %v", err)
}
keywords := strings.Split(string(goldenFile), "\n")
var missingKeywords []string
outputLower := strings.ToLower(actualOutput)
for _, keyword := range keywords {
kw := strings.TrimSpace(keyword)
if kw != "" && !strings.Contains(outputLower, strings.ToLower(kw)) {
missingKeywords = append(missingKeywords, kw)
}
}
if len(missingKeywords) > 0 {
t.Fatalf("FAIL: The following keywords were missing from the output: [%s]", strings.Join(missingKeywords, ", "))
}
}

View File

@@ -143,9 +143,10 @@
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
},
"node_modules/axios": {
"version": "1.11.0",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.11.0.tgz",
"integrity": "sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==",
"version": "1.12.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz",
"integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==",
"license": "MIT",
"dependencies": {
"follow-redirects": "^1.15.6",
"form-data": "^4.0.4",

View File

@@ -3338,9 +3338,10 @@
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
},
"node_modules/axios": {
"version": "1.11.0",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.11.0.tgz",
"integrity": "sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==",
"version": "1.12.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz",
"integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==",
"license": "MIT",
"dependencies": {
"follow-redirects": "^1.15.6",
"form-data": "^4.0.4",

View File

@@ -12,7 +12,7 @@
"@llamaindex/google": "^0.3.20",
"@llamaindex/workflow": "^1.1.22",
"@toolbox-sdk/core": "^0.1.2",
"llamaindex": "^0.11.28"
"llamaindex": "^0.12.0"
}
},
"node_modules/@aws-crypto/sha256-js": {
@@ -153,58 +153,11 @@
}
}
},
"node_modules/@llama-flow/core": {
"version": "0.4.4",
"resolved": "https://registry.npmjs.org/@llama-flow/core/-/core-0.4.4.tgz",
"integrity": "sha512-hwK1EQ+atUG/E7XcDV3KsTaA8op29pb8gbpVurpsqbLnGFkdTT4F/6V7Hy1cC2o/yOY+DKc/rxoIsH1uJS0cZg==",
"peer": true,
"peerDependencies": {
"@modelcontextprotocol/sdk": "^1.7.0",
"hono": "^4.7.4",
"next": "^15.2.2",
"p-retry": "^6.2.1",
"rxjs": "^7.8.2",
"zod": "^3.24.2"
},
"peerDependenciesMeta": {
"@modelcontextprotocol/sdk": {
"optional": true
},
"hono": {
"optional": true
},
"next": {
"optional": true
},
"p-retry": {
"optional": true
},
"rxjs": {
"optional": true
},
"zod": {
"optional": true
}
}
},
"node_modules/@llamaindex/cloud": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/@llamaindex/cloud/-/cloud-4.1.2.tgz",
"integrity": "sha512-UgCMsf7W4t5lqO8wMQZNA4mHYk8i1QbYr3RnLOCmHhfcJPGnoAupyVF2IBL0L0p++LThDyKHSxCdCM+OQZV3jQ==",
"dependencies": {
"p-retry": "^6.2.1",
"zod": "^3.25.76"
},
"peerDependencies": {
"@llama-flow/core": "^0.4.1",
"@llamaindex/core": "0.6.20",
"@llamaindex/env": "0.1.30"
}
},
"node_modules/@llamaindex/core": {
"version": "0.6.20",
"resolved": "https://registry.npmjs.org/@llamaindex/core/-/core-0.6.20.tgz",
"integrity": "sha512-3Sq8eoNyHjF9yFdzHJKjHxVioQPKcp7YLp6PiwkWryev30H3EFIloAr5CvgppRHGmwlhbTwHsGp+SSfpWIdg/g==",
"peer": true,
"dependencies": {
"@llamaindex/env": "0.1.30",
"@types/node": "^24.0.13",
@@ -247,20 +200,6 @@
"@llamaindex/env": "0.1.30"
}
},
"node_modules/@llamaindex/node-parser": {
"version": "2.0.20",
"resolved": "https://registry.npmjs.org/@llamaindex/node-parser/-/node-parser-2.0.20.tgz",
"integrity": "sha512-9x/VqqrybWqFERui9H62AQqfJw0J0Hnzxcb501Ttt46nGnJXYIJ6DgcxA14EKlgEuiPRuYFMcXtO399ewnhteQ==",
"dependencies": {
"html-to-text": "^9.0.5"
},
"peerDependencies": {
"@llamaindex/core": "0.6.20",
"@llamaindex/env": "0.1.30",
"tree-sitter": "^0.22.0",
"web-tree-sitter": "^0.24.3"
}
},
"node_modules/@llamaindex/workflow": {
"version": "1.1.22",
"resolved": "https://registry.npmjs.org/@llamaindex/workflow/-/workflow-1.1.22.tgz",
@@ -392,11 +331,6 @@
"undici-types": "~7.10.0"
}
},
"node_modules/@types/retry": {
"version": "0.12.2",
"resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.2.tgz",
"integrity": "sha512-XISRgDJ2Tc5q4TRqvgJtzsRkFYNJzZrhTdtMoGVBttwzzQJkPnS3WWTFc7kuDRoPtPakl+T+OfdEUjYJj7Jbow=="
},
"node_modules/agent-base": {
"version": "7.1.4",
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz",
@@ -909,17 +843,6 @@
"node": ">= 14"
}
},
"node_modules/is-network-error": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/is-network-error/-/is-network-error-1.1.0.tgz",
"integrity": "sha512-tUdRRAnhT+OtCZR/LxZelH/C7QtjtFrTu5tXCA8pl55eTUElUHT+GPYV8MBMBvea/j+NxQqVt3LbWMRir7Gx9g==",
"engines": {
"node": ">=16"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/is-stream": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
@@ -975,15 +898,15 @@
}
},
"node_modules/llamaindex": {
"version": "0.11.28",
"resolved": "https://registry.npmjs.org/llamaindex/-/llamaindex-0.11.28.tgz",
"integrity": "sha512-1YfNayHuTYz9d6QABTGI6C3WafTFcQ9WzTYv7CURQ6pMWqYnCfZuhC1koqTazJUgEmY4Wy7TkV8YaQiSMmAjbA==",
"version": "0.12.0",
"resolved": "https://registry.npmjs.org/llamaindex/-/llamaindex-0.12.0.tgz",
"integrity": "sha512-PoN3BMSr4F5h7ThQjXRZppyiCxuENIm/Set6hDk/DGLZrvIDgTuA2YSS29OSd2dXOQEtux5xGPl1d46nJ6w+Fw==",
"license": "MIT",
"dependencies": {
"@llamaindex/cloud": "4.1.2",
"@llamaindex/core": "0.6.20",
"@llamaindex/core": "0.6.22",
"@llamaindex/env": "0.1.30",
"@llamaindex/node-parser": "2.0.20",
"@llamaindex/workflow": "1.1.22",
"@llamaindex/node-parser": "2.0.22",
"@llamaindex/workflow": "1.1.24",
"@types/lodash": "^4.17.7",
"@types/node": "^24.0.13",
"lodash": "^4.17.21",
@@ -993,6 +916,62 @@
"node": ">=18.0.0"
}
},
"node_modules/llamaindex/node_modules/@finom/zod-to-json-schema": {
"version": "3.24.11",
"resolved": "https://registry.npmjs.org/@finom/zod-to-json-schema/-/zod-to-json-schema-3.24.11.tgz",
"integrity": "sha512-fL656yBPiWebtfGItvtXLWrFNGlF1NcDFS0WdMQXMs9LluVg0CfT5E2oXYp0pidl0vVG53XkW55ysijNkU5/hA==",
"license": "ISC",
"peerDependencies": {
"zod": "^4.0.14"
}
},
"node_modules/llamaindex/node_modules/@llamaindex/core": {
"version": "0.6.22",
"resolved": "https://registry.npmjs.org/@llamaindex/core/-/core-0.6.22.tgz",
"integrity": "sha512-/BXyemkvpxMaUhOkbwJ2PTvzKjSWkL8+6QLpz/n+pk8xBwMMe1GVBgli/J57gCyi8GbrlBafBj6GaPOgWub2Eg==",
"dependencies": {
"@finom/zod-to-json-schema": "3.24.11",
"@llamaindex/env": "0.1.30",
"@types/node": "^24.0.13",
"magic-bytes.js": "^1.10.0",
"zod": "^4.1.5"
}
},
"node_modules/llamaindex/node_modules/@llamaindex/node-parser": {
"version": "2.0.22",
"resolved": "https://registry.npmjs.org/@llamaindex/node-parser/-/node-parser-2.0.22.tgz",
"integrity": "sha512-uj5O89WShAAyiSZ8f8tU7hnLJ6pSmlY2a6hkAOs8odkUgT87dEqaPHpsK7w0iJdEFiob7GoLeRhv2K624FooXg==",
"dependencies": {
"html-to-text": "^9.0.5"
},
"peerDependencies": {
"@llamaindex/core": "0.6.22",
"@llamaindex/env": "0.1.30",
"tree-sitter": "^0.22.0",
"web-tree-sitter": "^0.24.3"
}
},
"node_modules/llamaindex/node_modules/@llamaindex/workflow": {
"version": "1.1.24",
"resolved": "https://registry.npmjs.org/@llamaindex/workflow/-/workflow-1.1.24.tgz",
"integrity": "sha512-VyKsbRkFlnT5dRNKbgLXQV+ZpQ+CAFgmC9LaZv6hD/fIKo6wq1wQW/ZqLZgZt569xeHgxmrXPB6KHdqn/AhPbQ==",
"dependencies": {
"@llamaindex/workflow-core": "^1.3.2"
},
"peerDependencies": {
"@llamaindex/core": "0.6.22",
"@llamaindex/env": "0.1.30"
}
},
"node_modules/llamaindex/node_modules/zod": {
"version": "4.1.9",
"resolved": "https://registry.npmjs.org/zod/-/zod-4.1.9.tgz",
"integrity": "sha512-HI32jTq0AUAC125z30E8bQNz0RQ+9Uc+4J7V97gLYjZVKRjeydPgGt6dvQzFrav7MYOUGFqqOGiHpA/fdbd0cQ==",
"license": "MIT",
"funding": {
"url": "https://github.com/sponsors/colinhacks"
}
},
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
@@ -1091,22 +1070,6 @@
"node-gyp-build-test": "build-test.js"
}
},
"node_modules/p-retry": {
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/p-retry/-/p-retry-6.2.1.tgz",
"integrity": "sha512-hEt02O4hUct5wtwg4H4KcWgDdm+l1bOaEy/hWzd8xtXB9BqxTWBBhb+2ImAtH4Cv4rPjV76xN3Zumqk3k3AhhQ==",
"dependencies": {
"@types/retry": "0.12.2",
"is-network-error": "^1.0.0",
"retry": "^0.13.1"
},
"engines": {
"node": ">=16.17"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/parseley": {
"version": "0.12.1",
"resolved": "https://registry.npmjs.org/parseley/-/parseley-0.12.1.tgz",
@@ -1137,14 +1100,6 @@
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
},
"node_modules/retry": {
"version": "0.13.1",
"resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz",
"integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==",
"engines": {
"node": ">= 4"
}
},
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
@@ -1273,6 +1228,7 @@
"version": "3.24.6",
"resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.6.tgz",
"integrity": "sha512-h/z3PKvcTcTetyjl1fkj79MHNEjm+HpD6NXheWjzOekY7kV+lwDYnHw+ivHkijnCSMz1yJaWBD9vu/Fcmk+vEg==",
"peer": true,
"peerDependencies": {
"zod": "^3.24.1"
}

View File

@@ -14,6 +14,6 @@
"@llamaindex/google": "^0.3.20",
"@llamaindex/workflow": "^1.1.22",
"@toolbox-sdk/core": "^0.1.2",
"llamaindex": "^0.11.28"
"llamaindex": "^0.12.0"
}
}

View File

@@ -11,6 +11,7 @@ import os
# TODO(developer): replace this with your Google API key
api_key = os.environ.get("GOOGLE_API_KEY") or "your-api-key" # Set your API key here
os.environ["GOOGLE_API_KEY"] = api_key
async def main():
with ToolboxSyncClient("http://127.0.0.1:5000") as toolbox_client:

View File

@@ -1,3 +1,3 @@
google-adk==1.13.0
toolbox-core==0.5.0
pytest==7.0.0
google-adk==1.14.1
toolbox-core==0.5.2
pytest==8.4.2

View File

@@ -1,3 +1,3 @@
google-genai==1.33.0
toolbox-core==0.5.0
pytest==7.0.0
google-genai==1.38.0
toolbox-core==0.5.2
pytest==8.4.2

View File

@@ -1,5 +1,5 @@
langchain==0.3.27
langchain-google-vertexai==2.0.28
langchain-google-vertexai==2.1.2
langgraph==0.6.7
toolbox-langchain==0.5.0
toolbox-langchain==0.5.2
pytest==8.4.2

View File

@@ -1,4 +1,4 @@
llama-index==0.13.6
llama-index-llms-google-genai==0.3.0
toolbox-llamaindex==0.5.0
llama-index==0.14.2
llama-index-llms-google-genai==0.5.1
toolbox-llamaindex==0.5.2
pytest==8.4.2

View File

@@ -5,7 +5,8 @@ If you plan to use **Google Cloud's Vertex AI** with your agent (e.g., using
local development:
1. [Install the Google Cloud CLI](https://cloud.google.com/sdk/docs/install)
1. [Set up Application Default Credentials (ADC)](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment)
1. [Set up Application Default Credentials
(ADC)](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment)
1. Set your project and enable Vertex AI
```bash
@@ -13,8 +14,4 @@ local development:
gcloud services enable aiplatform.googleapis.com
```
[install-python]: https://wiki.python.org/moin/BeginnersGuide/Download
[install-pip]: https://pip.pypa.io/en/stable/installation/
[install-venv]: https://packaging.python.org/en/latest/tutorials/installing-packages/#creating-virtual-environments
[install-postgres]: https://www.postgresql.org/download/
<!-- [END cloud_setup] -->

View File

@@ -13,7 +13,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/$OS/toolbox
```
<!-- {x-release-please-end} -->

View File

@@ -11,4 +11,4 @@ description: >
<link rel="canonical" href="https://cloud.google.com/alloydb/docs/create-database-with-mcp-toolbox"/>
<meta http-equiv="refresh" content="0;url=https://cloud.google.com/alloydb/docs/create-database-with-mcp-toolbox"/>
</head>
</html>
</html>

View File

@@ -0,0 +1,301 @@
---
title: "Cloud SQL for SQL Server Admin using MCP"
type: docs
weight: 5
description: >
Create and manage Cloud SQL for SQL Server (Admin) using Toolbox.
---
This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your
developer assistant tools to create and manage Cloud SQL for SQL Server
instances, databases, and users:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
* [Visual Studio Code][vscode] (Copilot)
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Gemini CLI][geminicli]
* [Gemini Code Assist][geminicodeassist]
[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[geminicli]: #configure-your-mcp-client
[geminicodeassist]: #configure-your-mcp-client
## Before you begin
1. In the Google Cloud console, on the [project selector
page](https://console.cloud.google.com/projectselector2/home/dashboard),
select or create a Google Cloud project.
1. [Make sure that billing is enabled for your Google Cloud
project](https://cloud.google.com/billing/docs/how-to/verify-billing-enabled#confirm_billing_is_enabled_on_a_project).
1. Grant the necessary IAM roles to the user who will run the MCP server; a
sample `gcloud` command is shown after this list. The tools available will
depend on the roles granted:
* `roles/cloudsql.viewer`: Provides read-only access to resources.
* `get_instance`
* `list_instances`
* `list_databases`
* `wait_for_operation`
* `roles/cloudsql.editor`: Provides permissions to manage existing resources.
* All `viewer` tools
* `create_database`
* `roles/cloudsql.admin`: Provides full control over all resources.
* All `editor` and `viewer` tools
* `create_instance`
* `create_user`
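For example, you can grant one of these roles with the `gcloud` CLI (a sketch;
substitute your own project ID, principal, and role):
```bash
gcloud projects add-iam-policy-binding YOUR_PROJECT_ID \
  --member="user:you@example.com" \
  --role="roles/cloudsql.admin"
```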
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
to your OS and CPU architecture. You are required to use Toolbox version
V0.15.0+:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Verify the installation:
```bash
./toolbox --version
```
## Configure your MCP Client
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-mssql-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-mssql-admin","--stdio"],
"env": {
}
}
}
}
```
1. Restart Claude code to apply the new configuration.
{{% /tab %}}
{{% tab header="Claude desktop" lang="en" %}}
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-mssql-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-mssql-admin","--stdio"],
"env": {
}
}
}
}
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap
the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-mssql-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-mssql-admin","--stdio"],
"env": {
}
}
}
}
```
1. You should see a green active status after the server is successfully
connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}
1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-mssql-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-mssql-admin","--stdio"],
"env": {
}
}
}
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
Settings > MCP**. You should see a green active status after the server is
successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration and save:
```json
{
"servers": {
"cloud-sql-mssql-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-mssql-admin","--stdio"],
"env": {
}
}
}
}
```
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-mssql-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-mssql-admin","--stdio"],
"env": {
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini
CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-mssql-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-mssql-admin","--stdio"],
"env": {
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code
Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist)
extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-mssql-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-mssql-admin","--stdio"],
"env": {
}
}
}
}
```
{{% /tab %}}
{{< /tabpane >}}
## Use Tools
Your AI tool is now connected to Cloud SQL for SQL Server using MCP.
The `cloud-sql-mssql-admin` server provides tools for managing your Cloud SQL
instances and interacting with your database:
* **create_instance**: Creates a new Cloud SQL for SQL Server instance.
* **get_instance**: Gets information about a Cloud SQL instance.
* **list_instances**: Lists Cloud SQL instances in a project.
* **create_database**: Creates a new database in a Cloud SQL instance.
* **list_databases**: Lists all databases for a Cloud SQL instance.
* **create_user**: Creates a new user in a Cloud SQL instance.
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
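All of the client configurations above run the same command under the hood; to
confirm the binary and prebuilt configuration work before wiring up an IDE, you
can start the server yourself (it speaks MCP over stdin/stdout, so it will
simply wait for a client):
```bash
./PATH/TO/toolbox --prebuilt cloud-sql-mssql-admin --stdio
```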
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}

View File

@@ -0,0 +1,301 @@
---
title: "Cloud SQL for MySQL Admin using MCP"
type: docs
weight: 4
description: >
Create and manage Cloud SQL for MySQL (Admin) using Toolbox.
---
This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your
developer assistant tools to create and manage Cloud SQL for MySQL instances,
databases, and users:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
* [Visual Studio Code][vscode] (Copilot)
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Gemini CLI][geminicli]
* [Gemini Code Assist][geminicodeassist]
[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[geminicli]: #configure-your-mcp-client
[geminicodeassist]: #configure-your-mcp-client
## Before you begin
1. In the Google Cloud console, on the [project selector
page](https://console.cloud.google.com/projectselector2/home/dashboard),
select or create a Google Cloud project.
1. [Make sure that billing is enabled for your Google Cloud
project](https://cloud.google.com/billing/docs/how-to/verify-billing-enabled#confirm_billing_is_enabled_on_a_project).
1. Grant the necessary IAM roles to the user that will be running the MCP
server. The tools available will depend on the roles granted:
* `roles/cloudsql.viewer`: Provides read-only access to resources.
* `get_instance`
* `list_instances`
* `list_databases`
* `wait_for_operation`
* `roles/cloudsql.editor`: Provides permissions to manage existing resources.
* All `viewer` tools
* `create_database`
* `roles/cloudsql.admin`: Provides full control over all resources.
* All `editor` and `viewer` tools
* `create_instance`
* `create_user`
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
to your OS and CPU architecture. You are required to use Toolbox version
V0.15.0+:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Verify the installation:
```bash
./toolbox --version
```
## Configure your MCP Client
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-mysql-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-mysql-admin","--stdio"],
"env": {
}
}
}
}
```
1. Restart Claude code to apply the new configuration.
{{% /tab %}}
{{% tab header="Claude desktop" lang="en" %}}
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-mysql-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-mysql-admin","--stdio"],
"env": {
}
}
}
}
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap
the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-mysql-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-mysql-admin","--stdio"],
"env": {
}
}
}
}
```
1. You should see a green active status after the server is successfully
connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}
1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-mysql-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-mysql-admin","--stdio"],
"env": {
}
}
}
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
Settings > MCP**. You should see a green active status after the server is
successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration and save:
```json
{
"servers": {
"cloud-sql-mysql-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-mysql-admin","--stdio"],
"env": {
}
}
}
}
```
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-mysql-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-mysql-admin","--stdio"],
"env": {
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini
CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-mysql-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-mysql-admin","--stdio"],
"env": {
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code
Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist)
extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-mysql-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-mysql-admin","--stdio"],
"env": {
}
}
}
}
```
{{% /tab %}}
{{< /tabpane >}}
## Use Tools
Your AI tool is now connected to Cloud SQL for MySQL using MCP.
The `cloud-sql-mysql-admin` server provides tools for managing your Cloud SQL
instances and interacting with your database:
* **create_instance**: Creates a new Cloud SQL for MySQL instance.
* **get_instance**: Gets information about a Cloud SQL instance.
* **list_instances**: Lists Cloud SQL instances in a project.
* **create_database**: Creates a new database in a Cloud SQL instance.
* **list_databases**: Lists all databases for a Cloud SQL instance.
* **create_user**: Creates a new user in a Cloud SQL instance.
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
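As with the client configurations above, you can also launch the prebuilt
server directly to verify your setup; it communicates over stdin/stdout and
waits for an MCP client:
```bash
./PATH/TO/toolbox --prebuilt cloud-sql-mysql-admin --stdio
```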
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}

View File

@@ -0,0 +1,301 @@
---
title: "Cloud SQL for PostgreSQL Admin using MCP"
type: docs
weight: 3
description: >
Create and manage Cloud SQL for PostgreSQL (Admin) using Toolbox.
---
This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your
developer assistant tools to create and manage Cloud SQL for PostgreSQL
instances, databases, and users:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
* [Visual Studio Code][vscode] (Copilot)
* [Cline][cline] (VS Code extension)
* [Claude desktop][claudedesktop]
* [Claude code][claudecode]
* [Gemini CLI][geminicli]
* [Gemini Code Assist][geminicodeassist]
[toolbox]: https://github.com/googleapis/genai-toolbox
[cursor]: #configure-your-mcp-client
[windsurf]: #configure-your-mcp-client
[vscode]: #configure-your-mcp-client
[cline]: #configure-your-mcp-client
[claudedesktop]: #configure-your-mcp-client
[claudecode]: #configure-your-mcp-client
[geminicli]: #configure-your-mcp-client
[geminicodeassist]: #configure-your-mcp-client
## Before you begin
1. In the Google Cloud console, on the [project selector
page](https://console.cloud.google.com/projectselector2/home/dashboard),
select or create a Google Cloud project.
1. [Make sure that billing is enabled for your Google Cloud
project](https://cloud.google.com/billing/docs/how-to/verify-billing-enabled#confirm_billing_is_enabled_on_a_project).
1. Grant the necessary IAM roles to the user that will be running the MCP
server. The tools available will depend on the roles granted:
* `roles/cloudsql.viewer`: Provides read-only access to resources.
* `get_instance`
* `list_instances`
* `list_databases`
* `wait_for_operation`
* `roles/cloudsql.editor`: Provides permissions to manage existing resources.
* All `viewer` tools
* `create_database`
* `roles/cloudsql.admin`: Provides full control over all resources.
* All `editor` and `viewer` tools
* `create_instance`
* `create_user`
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
to your OS and CPU architecture. You are required to use Toolbox version
V0.15.0+:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
1. Make the binary executable:
```bash
chmod +x toolbox
```
1. Verify the installation:
```bash
./toolbox --version
```
## Configure your MCP Client
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-postgres-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-postgres-admin","--stdio"],
"env": {
}
}
}
}
```
1. Restart Claude code to apply the new configuration.
{{% /tab %}}
{{% tab header="Claude desktop" lang="en" %}}
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-postgres-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-postgres-admin","--stdio"],
"env": {
}
}
}
}
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap
the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-postgres-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-postgres-admin","--stdio"],
"env": {
}
}
}
}
```
1. You should see a green active status after the server is successfully
connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}
1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-postgres-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-postgres-admin","--stdio"],
"env": {
}
}
}
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
Settings > MCP**. You should see a green active status after the server is
successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration and save:
```json
{
"servers": {
"cloud-sql-postgres-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-postgres-admin","--stdio"],
"env": {
}
}
}
}
```
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-postgres-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-postgres-admin","--stdio"],
"env": {
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini
CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-postgres-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-postgres-admin","--stdio"],
"env": {
}
}
}
}
```
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code
Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist)
extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration and save:
```json
{
"mcpServers": {
"cloud-sql-postgres-admin": {
"command": "./PATH/TO/toolbox",
"args": ["--prebuilt","cloud-sql-postgres-admin","--stdio"],
"env": {
}
}
}
}
```
{{% /tab %}}
{{< /tabpane >}}
## Use Tools
Your AI tool is now connected to Cloud SQL for PostgreSQL using MCP.
The `cloud-sql-postgres-admin` server provides tools for managing your Cloud SQL
instances and interacting with your database:
* **create_instance**: Creates a new Cloud SQL for PostgreSQL instance.
* **get_instance**: Gets information about a Cloud SQL instance.
* **list_instances**: Lists Cloud SQL instances in a project.
* **create_database**: Creates a new database in a Cloud SQL instance.
* **list_databases**: Lists all databases for a Cloud SQL instance.
* **create_user**: Creates a new user in a Cloud SQL instance.
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
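If you want to check the server outside your MCP client, you can run the same
prebuilt configuration directly; it waits for an MCP client on stdin/stdout:
```bash
./PATH/TO/toolbox --prebuilt cloud-sql-postgres-admin --stdio
```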
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}

View File

@@ -48,19 +48,19 @@ to expose your developer assistant tools to a Looker instance:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

View File

@@ -5,7 +5,11 @@ weight: 2
description: "Connect your IDE to SQL Server using Toolbox."
---
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like SQL Server. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a SQL Server instance:
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is
an open protocol for connecting Large Language Models (LLMs) to data sources
like SQL Server. This guide covers how to use [MCP Toolbox for
Databases][toolbox] to expose your developer assistant tools to a SQL Server
instance:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
@@ -28,28 +32,32 @@ description: "Connect your IDE to SQL Server using Toolbox."
## Set up the database
1. [Create or select a SQL Server instance.](https://www.microsoft.com/en-us/sql-server/sql-server-downloads)
1. [Create or select a SQL Server
instance.](https://www.microsoft.com/en-us/sql-server/sql-server-downloads)
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You are required to use Toolbox version V0.10.0+:
1. Download the latest version of Toolbox as a binary. Select the [correct
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
to your OS and CPU architecture. You are required to use Toolbox version
V0.10.0+:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
@@ -71,9 +79,11 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Install [Claude
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration, replace the environment variables with your values, and save:
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -99,7 +109,8 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -120,13 +131,16 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and
tap the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -146,13 +160,15 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
}
```
1. You should see a green active status after the server is successfully connected.
1. You should see a green active status after the server is successfully
connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}
1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your values, and save:
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -172,13 +188,17 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
Settings > MCP**. You should see a green active status after the server is
successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your values, and save:
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -200,9 +220,11 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -224,9 +246,12 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:
1. Install the [Gemini
CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your
values, and then save:
```json
{
@@ -248,10 +273,14 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist) extension in Visual Studio Code.
1. Install the [Gemini Code
Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist)
extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your
values, and then save:
```json
{
@@ -275,7 +304,9 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
## Use Tools
Your AI tool is now connected to SQL Server using MCP. Try asking your AI assistant to list tables, create a table, or define and execute other SQL statements.
Your AI tool is now connected to SQL Server using MCP. Try asking your AI
assistant to list tables, create a table, or define and execute other SQL
statements.
The following tools are available to the LLM:
@@ -283,5 +314,6 @@ The following tools are available to the LLM:
1. **execute_sql**: execute any SQL statement
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs will adapt to the tools available, so this shouldn't affect most users.
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}

View File

@@ -5,7 +5,10 @@ weight: 2
description: "Connect your IDE to MySQL using Toolbox."
---
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol for connecting Large Language Models (LLMs) to data sources like MySQL. This guide covers how to use [MCP Toolbox for Databases][toolbox] to expose your developer assistant tools to a MySQL instance:
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is
an open protocol for connecting Large Language Models (LLMs) to data sources
like MySQL. This guide covers how to use [MCP Toolbox for Databases][toolbox] to
expose your developer assistant tools to a MySQL instance:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
@@ -32,24 +35,27 @@ description: "Connect your IDE to MySQL using Toolbox."
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct binary](https://github.com/googleapis/genai-toolbox/releases) corresponding to your OS and CPU architecture. You are required to use Toolbox version V0.10.0+:
1. Download the latest version of Toolbox as a binary. Select the [correct
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
to your OS and CPU architecture. You are required to use Toolbox version
V0.10.0+:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
@@ -71,9 +77,11 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Install [Claude
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration, replace the environment variables with your values, and save:
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -99,7 +107,8 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -120,13 +129,16 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the new MCP server available.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and tap the **MCP Servers** icon.
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and
tap the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -146,13 +158,15 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
}
```
1. You should see a green active status after the server is successfully connected.
1. You should see a green active status after the server is successfully
connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}
1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your values, and save:
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -172,13 +186,17 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor Settings > MCP**. You should see a green active status after the server is successfully connected.
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
Settings > MCP**. You should see a green active status after the server is
successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and create a `.vscode` directory in your project root if it doesn't exist.
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your values, and save:
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -200,9 +218,11 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the Cascade assistant.
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration, replace the environment variables with your values, and save:
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -224,9 +244,12 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it, create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your values, and then save:
1. Install the [Gemini
CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your
values, and then save:
```json
{
@@ -248,10 +271,14 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code
Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist)
extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your
values, and then save:
```json
{
@@ -275,7 +302,8 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolb
## Use Tools
Your AI tool is now connected to MySQL using MCP. Try asking your AI assistant
to list tables, create a table, or define and execute other SQL statements.
The following tools are available to the LLM:
@@ -283,5 +311,6 @@ The following tools are available to the LLM:
1. **execute_sql**: execute any SQL statement
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}

View File

@@ -5,7 +5,10 @@ weight: 2
description: "Connect your IDE to Neo4j using Toolbox."
---
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is
an open protocol for connecting Large Language Models (LLMs) to data sources
like Neo4j. This guide covers how to use [MCP Toolbox for Databases][toolbox] to
expose your developer assistant tools to a Neo4j instance:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
@@ -28,11 +31,15 @@ description: "Connect your IDE to Neo4j using Toolbox."
## Set up the database
1. [Create or select a Neo4j
instance.](https://neo4j.com/cloud/platform/aura-graph-database/)
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
to your OS and CPU architecture. You are required to use Toolbox version
v0.15.0+:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
@@ -71,9 +78,11 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolb
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -98,7 +107,8 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolb
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -118,13 +128,16 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolb
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and
tap the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -168,13 +181,17 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolb
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
Settings > MCP**. You should see a green active status after the server is
successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -197,9 +214,11 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolb
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -220,9 +239,12 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolb
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini
CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your
values, and then save:
```json
{
@@ -243,10 +265,14 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolb
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code
Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist)
extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your
values, and then save:
```json
{
@@ -269,13 +295,16 @@ curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolb
## Use Tools
Your AI tool is now connected to Neo4j using MCP. Try asking your AI assistant
to get the graph schema or execute Cypher statements.
The following tools are available to the LLM:
1. **get_schema**: extracts the complete database schema, including details
about node labels, relationships, properties, constraints, and indexes.
1. **execute_cypher**: executes any arbitrary Cypher statement.
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}

View File

@@ -56,19 +56,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v0.14.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->

View File

@@ -5,7 +5,10 @@ weight: 2
description: "Connect your IDE to SQLite using Toolbox."
---
[Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is
an open protocol for connecting Large Language Models (LLMs) to data sources
like SQLite. This guide covers how to use [MCP Toolbox for Databases][toolbox]
to expose your developer assistant tools to a SQLite instance:
* [Cursor][cursor]
* [Windsurf][windsurf] (Codium)
@@ -32,24 +35,27 @@ description: "Connect your IDE to SQLite using Toolbox."
## Install MCP Toolbox
1. Download the latest version of Toolbox as a binary. Select the [correct
binary](https://github.com/googleapis/genai-toolbox/releases) corresponding
to your OS and CPU architecture. You are required to use Toolbox version
V0.10.0+:
<!-- {x-release-please-start-version} -->
{{< tabpane persist=header >}}
{{< tab header="linux/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/linux/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/linux/amd64/toolbox
{{< /tab >}}
{{< tab header="darwin/arm64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/darwin/arm64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/arm64/toolbox
{{< /tab >}}
{{< tab header="darwin/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/darwin/amd64/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/darwin/amd64/toolbox
{{< /tab >}}
{{< tab header="windows/amd64" lang="bash" >}}
curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/windows/amd64/toolbox.exe
curl -O https://storage.googleapis.com/genai-toolbox/v0.15.0/windows/amd64/toolbox.exe
{{< /tab >}}
{{< /tabpane >}}
<!-- {x-release-please-end} -->
@@ -71,9 +77,11 @@ curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/windows/amd64/toolbo
{{< tabpane text=true >}}
{{% tab header="Claude code" lang="en" %}}
1. Install [Claude
Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview).
1. Create a `.mcp.json` file in your project root if it doesn't exist.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -95,7 +103,8 @@ curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/windows/amd64/toolbo
1. Open [Claude desktop](https://claude.ai/download) and navigate to Settings.
1. Under the Developer tab, tap Edit Config to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -112,13 +121,16 @@ curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/windows/amd64/toolbo
```
1. Restart Claude desktop.
1. From the new chat screen, you should see a hammer (MCP) icon appear with the
new MCP server available.
{{% /tab %}}
{{% tab header="Cline" lang="en" %}}
1. Open the [Cline](https://github.com/cline/cline) extension in VS Code and
tap the **MCP Servers** icon.
1. Tap Configure MCP Servers to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -134,13 +146,15 @@ curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/windows/amd64/toolbo
}
```
1. You should see a green active status after the server is successfully
connected.
{{% /tab %}}
{{% tab header="Cursor" lang="en" %}}
1. Create a `.cursor` directory in your project root if it doesn't exist.
1. Create a `.cursor/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -156,13 +170,17 @@ curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/windows/amd64/toolbo
}
```
1. Open [Cursor](https://www.cursor.com/) and navigate to **Settings > Cursor
Settings > MCP**. You should see a green active status after the server is
successfully connected.
{{% /tab %}}
{{% tab header="Visual Studio Code (Copilot)" lang="en" %}}
1. Open [VS Code](https://code.visualstudio.com/docs/copilot/overview) and
create a `.vscode` directory in your project root if it doesn't exist.
1. Create a `.vscode/mcp.json` file if it doesn't exist and open it.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -180,9 +198,11 @@ curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/windows/amd64/toolbo
{{% /tab %}}
{{% tab header="Windsurf" lang="en" %}}
1. Open [Windsurf](https://docs.codeium.com/windsurf) and navigate to the
Cascade assistant.
1. Tap on the hammer (MCP) icon, then Configure to open the configuration file.
1. Add the following configuration, replace the environment variables with your
values, and save:
```json
{
@@ -200,9 +220,12 @@ curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/windows/amd64/toolbo
{{% /tab %}}
{{% tab header="Gemini CLI" lang="en" %}}
1. Install the [Gemini
CLI](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#quickstart).
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your
values, and then save:
```json
{
@@ -220,10 +243,14 @@ curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/windows/amd64/toolbo
{{% /tab %}}
{{% tab header="Gemini Code Assist" lang="en" %}}
1. Install the [Gemini Code
Assist](https://marketplace.visualstudio.com/items?itemName=Google.geminicodeassist)
extension in Visual Studio Code.
1. Enable Agent Mode in Gemini Code Assist chat.
1. In your working directory, create a folder named `.gemini`. Within it,
create a `settings.json` file.
1. Add the following configuration, replace the environment variables with your
values, and then save:
```json
{
@@ -243,7 +270,8 @@ curl -O https://storage.googleapis.com/genai-toolbox/v1.0.0/windows/amd64/toolbo
## Use Tools
Your AI tool is now connected to SQLite using MCP. Try asking your AI assistant
to list tables, create a table, or define and execute other SQL statements.
The following tools are available to the LLM:
@@ -251,5 +279,6 @@ The following tools are available to the LLM:
1. **execute_sql**: execute any SQL statement
{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
will adapt to the tools available, so this shouldn't affect most users.
{{< /notice >}}

View File

@@ -103,10 +103,11 @@ section.
```bash
export IMAGE=us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:latest
```
{{< notice note >}}
**The `$PORT` Environment Variable**
Google Cloud Run dictates the port your application must listen on by setting
the `$PORT` environment variable inside your container. This value defaults to
**8080**. Your application's `--port` argument **must** be set to listen on this
port. If there is a mismatch, the container will fail to start and the
deployment will time out.
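As a minimal sketch, the matching Toolbox arguments might look like the following (the tools file path and the `0.0.0.0` address are assumptions for a containerized deployment):

```bash
# Hypothetical invocation inside the container: listen on all interfaces on the
# port Cloud Run expects (8080 by default).
./toolbox --tools-file tools.yaml --address 0.0.0.0 --port 8080
```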
@@ -209,18 +210,26 @@ Now, you can use this client to connect to the deployed Cloud Run instance!
## Troubleshooting
{{< notice note >}}
For any deployment or runtime error, the best first step is to check the logs
for your service in the Google Cloud Console's Cloud Run section. They often
contain the specific error message needed to diagnose the problem.
{{< /notice >}}
* **Deployment Fails with "Container failed to start":** This is almost always
caused by a port mismatch. Ensure your container's `--port` argument is set to
`8080` to match the `$PORT` environment variable provided by Cloud Run.
* **Client Receives Permission Denied Error (401 or 403):** If your client
application (e.g., your local SDK) gets a `401 Unauthorized` or `403
Forbidden` error when trying to call your Cloud Run service, it means the
client is not properly authenticated as an invoker.
* Ensure the user or service account calling the service has the **Cloud Run
Invoker** (`roles/run.invoker`) IAM role.
* If running locally, make sure your Application Default Credentials are set
up correctly by running `gcloud auth application-default login`.
* **Service Fails to Access Secrets (in logs):** If your application starts but
the logs show errors like "permission denied" when trying to access Secret
Manager, it means the Toolbox service account is missing permissions.
* Ensure the `toolbox-identity` service account has the **Secret Manager
Secret Accessor** (`roles/secretmanager.secretAccessor`) IAM role.
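
The IAM fixes above can be applied with `gcloud`; a minimal sketch, assuming a service named `toolbox` in `us-central1` and a secret named `tools` (both hypothetical):

```bash
# Allow a caller to invoke the Cloud Run service
gcloud run services add-iam-policy-binding toolbox \
  --region=us-central1 \
  --member="user:you@example.com" \
  --role="roles/run.invoker"

# Allow the Toolbox service account to read the tools secret
gcloud secrets add-iam-policy-binding tools \
  --member="serviceAccount:toolbox-identity@PROJECT_ID.iam.gserviceaccount.com" \
  --role="roles/secretmanager.secretAccessor"
```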

View File

@@ -6,7 +6,8 @@ description: >
How to effectively use Toolbox UI.
---
Toolbox UI is a built-in web interface that allows users to visually inspect and
test out configured resources such as tools and toolsets.
## Launching Toolbox UI
@@ -16,8 +17,9 @@ To launch Toolbox's interactive UI, use the `--ui` flag.
./toolbox --ui
```
Toolbox UI will be served from the same host and port as the Toolbox Server, with the `/ui` suffix. Once Toolbox is launched, the following INFO log with Toolbox UI's URL will be shown:
```bash
INFO "Toolbox UI is up and running at: http://localhost:5000/ui"
@@ -25,11 +27,13 @@ INFO "Toolbox UI is up and running at: http://localhost:5000/ui"
## Navigating the Tools Page
The tools page shows all tools loaded from your configuration file. This
corresponds to the default toolset (represented by an empty string). Each tool's
name on this page will exactly match its name in the configuration file.
To view details for a specific tool, click on the tool name. The main content
area will be populated with the tool name, description, and available
parameters.
![Tools Page](./tools.png)
@@ -45,12 +49,17 @@ with the tool name, description, and available parameters.
### Optional Parameters
Toolbox allows users to add [optional
parameters](../../resources/tools/#basic-parameters) with or without a default
value.
To exclude a parameter, uncheck the box to the right of the associated parameter, and that parameter will not be included in the request body. If the parameter is not sent, Toolbox will either treat it as a `nil` value or use the `default` value, if configured. If the parameter is required, Toolbox will throw an error.
When the box is checked, the parameter will be sent exactly as entered in the response field (e.g. an empty string).
![Optional Parameter checked example](./optional-param-checked.png)
@@ -58,34 +67,41 @@ When the box is checked, parameter will be sent exactly as entered in the respon
### Editing Headers
To edit headers, press the "Edit Headers" button to display the header modal.
Within this modal, users can make direct edits by typing into the header's text
area.
Toolbox UI validates that the headers are in correct JSON format. Other
header-related errors (e.g., incorrect header names or values required by the
tool) will be reported in the Response section after running the tool.
![Edit Headers](./edit-headers.png)
#### Google OAuth
Currently, Toolbox supports Google OAuth 2.0 as an AuthService, which allows
tools to utilize authorized parameters. When a tool uses an authorized
parameter, the parameter will be displayed but not editable, as it will be
populated from the authentication token.
To provide the token, add your Google OAuth ID Token to the request header using
the "Edit Headers" button and modal described above. The key should be the name
of your AuthService as defined in your tool configuration file, suffixed with
`_token`. The value should be your ID token as a string.
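
For example, if your configuration defines an AuthService named `my-google-auth` (a hypothetical name), the header JSON would contain an entry like this sketch:

```json
{
  "my-google-auth_token": "YOUR_GOOGLE_OAUTH_ID_TOKEN"
}
```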
1. Select a tool that requires [authenticated parameters]()
1. The auth parameter's text field is greyed out. This is because it cannot be
entered manually and will be parsed from the resolved auth token
1. To update request headers with the token, select "Edit Headers"
1. (Optional) If you wish to manually edit the header, check out the dropdown "How to extract Google OAuth ID Token manually" for guidance on retrieving the ID token
1. To edit the header automatically, click the "Auto Setup" button associated with your Auth Profile
1. Enter the Client ID defined in your tools configuration file
1. Click "Continue"
1. Click "Sign in With Google" and log in with your associated Google account. This should automatically populate the header text area with your token
1. Click "Save"
1. Click "Run Tool"
@@ -100,10 +116,11 @@ be parsed from the resolved auth token
## Navigating the Toolsets Page
Through the toolsets page, users can search for a specific toolset to retrieve
tools from. Simply enter the toolset name in the search bar, and press "Enter"
to retrieve the associated tools.
If the toolset name is not defined within the tools configuration file, an error
message will be displayed.
![Toolsets Page](./toolsets.png)

View File

@@ -8,24 +8,24 @@ description: >
## Reference
| Flag (Short) | Flag (Long) | Description | Default |
|--------------|----------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------|
| `-a` | `--address` | Address of the interface the server will listen on. | `127.0.0.1` |
| | `--disable-reload` | Disables dynamic reloading of tools file. | |
| `-h` | `--help` | help for toolbox | |
| | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
| | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
| `-p` | `--port` | Port the server will listen on. | `5000` |
| | `--prebuilt` | Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
| | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
| | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
| | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
| | `--telemetry-service-name` | Sets the value of the service.name resource attribute for telemetry data. | `toolbox` |
| | `--tools-file` | File path specifying the tool configuration. Cannot be used with --prebuilt, --tools-files, or --tools-folder. | |
| | `--tools-files` | Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --prebuilt, --tools-file, or --tools-folder. | |
| | `--tools-folder` | Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --prebuilt, --tools-file, or --tools-files. | |
| | `--ui` | Launches the Toolbox UI web server. | |
| `-v` | `--version` | version for toolbox | |
## Examples
@@ -59,10 +59,15 @@ The CLI supports multiple mutually exclusive ways to specify tool configurations
- `--tools-folder`: Directory containing YAML files to load and merge
**Prebuilt Configurations:**
- `--prebuilt`: Use predefined configurations for specific database types (e.g.,
'bigquery', 'postgres', 'spanner'). See [Prebuilt Tools
Reference](prebuilt-tools.md) for allowed values.
{{< notice tip >}}
The CLI enforces mutual exclusivity between configuration source flags,
preventing simultaneous use of `--prebuilt` with file-based options, and
ensuring only one of `--tools-file`, `--tools-files`, or `--tools-folder` is
used at a time.
{{< /notice >}}
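
For example, each of the following invocations selects exactly one configuration source (file and folder names are hypothetical):

```bash
./toolbox --tools-file tools.yaml     # a single configuration file
./toolbox --tools-folder ./tools/     # merge all .yaml/.yml files in a directory
./toolbox --prebuilt bigquery         # a predefined configuration for a source type
```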
### Hot Reload
@@ -72,4 +77,6 @@ Toolbox enables dynamic reloading by default. To disable, use the
### Toolbox UI
To launch Toolbox's interactive UI, use the `--ui` flag. This allows you to test
tools and toolsets with features such as authorized parameters. To learn more,
visit [Toolbox UI](../how-to/toolbox-ui/index.md).

View File

@@ -6,9 +6,12 @@ description: >
This page lists all the prebuilt tools available.
---
Prebuilt tools are reusable, pre-packaged toolsets that are designed to extend
the capabilities of agents. These tools are built to be generic and adaptable,
allowing developers to interact with and take action on databases.
See the [Connect from your IDE](../how-to/connect-ide/_index.md) guides for details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
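
As a minimal sketch, a prebuilt configuration is selected with the `--prebuilt` flag and configured through environment variables; the SQLite example below uses the variable documented later on this page:

```bash
export SQLITE_DATABASE=./sample.db   # path to the SQLite database file
./toolbox --prebuilt sqlite
```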
## AlloyDB Postgres
@@ -19,17 +22,23 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `ALLOYDB_POSTGRES_CLUSTER`: The ID of your AlloyDB cluster.
* `ALLOYDB_POSTGRES_INSTANCE`: The ID of your AlloyDB instance.
* `ALLOYDB_POSTGRES_DATABASE`: The name of the database to connect to.
* `ALLOYDB_POSTGRES_USER`: (Optional) The database username. Defaults to
IAM authentication if unspecified.
* `ALLOYDB_POSTGRES_PASSWORD`: (Optional) The password for the database
user. Defaults to IAM authentication if unspecified.
* `ALLOYDB_POSTGRES_IP_TYPE`: (Optional) The IP type i.e. "Public" or
"Private" (Default: Public).
* **Permissions:**
* **AlloyDB Client** (`roles/alloydb.client`) to connect to the instance.
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to
execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
* `list_autovacuum_configurations`: Lists autovacuum configurations in the
database.
* `list_memory_configurations`: Lists memory-related configurations in the
database.
* `list_top_bloated_tables`: List top bloated tables in the database.
* `list_replication_slots`: Lists replication slots in the database.
* `list_invalid_indexes`: Lists invalid indexes in the database.
@@ -38,18 +47,36 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
## AlloyDB Postgres Admin
* `--prebuilt` value: `alloydb-postgres-admin`
* **Environment Variables:**
* `API_KEY`: Your API key for the AlloyDB API.
* **Permissions:**
* **AlloyDB Admin** (`roles/alloydb.admin`) IAM role is required on the project.
* **AlloyDB Viewer** (`roles/alloydb.viewer`) is required for `list` and
`get` tools.
* **AlloyDB Admin** (`roles/alloydb.admin`) is required for `create` tools.
* **Tools:**
* `alloydb-create-cluster`: Creates a new AlloyDB cluster.
* `alloydb-operations-get`: Polls the operations API to track the status of long-running operations.
* `alloydb-create-instance`: Creates a new AlloyDB instance within a cluster.
* `alloydb-list-clusters`: Lists all AlloyDB clusters in a project.
* `alloydb-list-instances`: Lists all instances within an AlloyDB cluster.
* `alloydb-list-users`: Lists all database users within an AlloyDB cluster.
* `alloydb-create-user`: Creates a new database user in an AlloyDB cluster.
* `create_cluster`: Creates a new AlloyDB cluster.
* `list_clusters`: Lists all AlloyDB clusters in a project.
* `get_cluster`: Gets information about a specified AlloyDB cluster.
* `create_instance`: Creates a new AlloyDB instance within a cluster.
* `list_instances`: Lists all instances within an AlloyDB cluster.
* `get_instance`: Gets information about a specified AlloyDB instance.
* `create_user`: Creates a new database user in an AlloyDB cluster.
* `list_users`: Lists all database users within an AlloyDB cluster.
* `get_user`: Gets information about a specified database user in an
AlloyDB cluster.
* `wait_for_operation`: Polls the operations API to track the status of
long-running operations.
## AlloyDB Postgres Observability
* `--prebuilt` value: `alloydb-postgres-observability`
* **Permissions:**
* **Monitoring Viewer** (`roles/monitoring.viewer`) is required on the
project to view monitoring data.
* **Tools:**
* `get_system_metrics`: Fetches system level cloud monitoring data
(timeseries metrics) for an AlloyDB instance using a PromQL query.
* `get_query_metrics`: Fetches query level cloud monitoring data
(timeseries metrics) for queries running in an AlloyDB instance using a
PromQL query.
## BigQuery
@@ -58,19 +85,30 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `BIGQUERY_PROJECT`: The GCP project ID.
* `BIGQUERY_LOCATION`: (Optional) The dataset location.
* **Permissions:**
* **BigQuery User** (`roles/bigquery.user`) to execute queries and view
metadata.
* **BigQuery Metadata Viewer** (`roles/bigquery.metadataViewer`) to view
all datasets.
* **BigQuery Data Editor** (`roles/bigquery.dataEditor`) to create or
modify datasets and tables.
* **Gemini for Google Cloud** (`roles/cloudaicompanion.user`) to use the
conversational analytics API.
* **Tools:**
* `analyze_contribution`: Use this tool to perform contribution analysis,
also called key driver analysis.
* `ask_data_insights`: Use this tool to perform data analysis, get
insights, or answer complex questions about the contents of specific
BigQuery tables. For more information on required roles, API setup, and
IAM configuration, see the setup and authentication section of the
[Conversational Analytics API
documentation](https://cloud.google.com/gemini/docs/conversational-analytics-api/overview).
* `execute_sql`: Executes a SQL statement.
* `forecast`: Use this tool to forecast time series data.
* `get_dataset_info`: Gets dataset metadata.
* `get_table_info`: Gets table metadata.
* `list_dataset_ids`: Lists datasets.
* `list_table_ids`: Lists tables.
* `search_catalog`: Search for entries based on the provided query.
## Cloud SQL for MySQL
@@ -85,11 +123,56 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `CLOUD_SQL_MYSQL_IP_TYPE`: The IP type i.e. "Public" or "Private" (Default: Public).
* **Permissions:**
* **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the
instance.
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to
execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
* `get_query_plan`: Provides information about how MySQL executes a SQL
statement.
## Cloud SQL for MySQL Observability
* `--prebuilt` value: `cloud-sql-mysql-observability`
* **Permissions:**
* **Monitoring Viewer** (`roles/monitoring.viewer`) is required on the
project to view monitoring data.
* **Tools:**
* `get_system_metrics`: Fetches system level cloud monitoring data
(timeseries metrics) for a MySQL instance using a PromQL query.
* `get_query_metrics`: Fetches query level cloud monitoring data
(timeseries metrics) for queries running in a MySQL instance using a
PromQL query.
## Cloud SQL for MySQL Admin
* `--prebuilt` value: `cloud-sql-mysql-admin`
* **Permissions:**
* **Cloud SQL Viewer** (`roles/cloudsql.viewer`): Provides read-only
access to resources.
* `get_instance`
* `list_instances`
* `list_databases`
* `wait_for_operation`
* **Cloud SQL Editor** (`roles/cloudsql.editor`): Provides permissions to
manage existing resources.
* All `viewer` tools
* `create_database`
* **Cloud SQL Admin** (`roles/cloudsql.admin`): Provides full control over
all resources.
* All `editor` and `viewer` tools
* `create_instance`
* `create_user`
* **Tools:**
* `create_instance`: Creates a new Cloud SQL for MySQL instance.
* `get_instance`: Gets information about a Cloud SQL instance.
* `list_instances`: Lists Cloud SQL instances in a project.
* `create_database`: Creates a new database in a Cloud SQL instance.
* `list_databases`: Lists all databases for a Cloud SQL instance.
* `create_user`: Creates a new user in a Cloud SQL instance.
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
## Cloud SQL for PostgreSQL
@@ -99,22 +182,70 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `CLOUD_SQL_POSTGRES_REGION`: The region of your Cloud SQL instance.
* `CLOUD_SQL_POSTGRES_INSTANCE`: The ID of your Cloud SQL instance.
* `CLOUD_SQL_POSTGRES_DATABASE`: The name of the database to connect to.
* `CLOUD_SQL_POSTGRES_USER`: (Optional) The database username. Defaults to
IAM authentication if unspecified.
* `CLOUD_SQL_POSTGRES_PASSWORD`: (Optional) The password for the database
user. Defaults to IAM authentication if unspecified.
* `CLOUD_SQL_POSTGRES_IP_TYPE`: (Optional) The IP type i.e. "Public" or
"Private" (Default: Public).
* **Permissions:**
* **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the
instance.
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to
execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
* `list_autovacuum_configurations`: Lists autovacuum configurations in the
database.
* `list_memory_configurations`: Lists memory-related configurations in the
database.
* `list_top_bloated_tables`: List top bloated tables in the database.
* `list_replication_slots`: Lists replication slots in the database.
* `list_invalid_indexes`: Lists invalid indexes in the database.
* `get_query_plan`: Generate the execution plan of a statement.
## Cloud SQL for PostgreSQL Observability
* `--prebuilt` value: `cloud-sql-postgres-observability`
* **Permissions:**
* **Monitoring Viewer** (`roles/monitoring.viewer`) is required on the
project to view monitoring data.
* **Tools:**
* `get_system_metrics`: Fetches system level cloud monitoring data
(timeseries metrics) for a Postgres instance using a PromQL query.
* `get_query_metrics`: Fetches query level cloud monitoring data
(timeseries metrics) for queries running in Postgres instance using a
PromQL query.
## Cloud SQL for PostgreSQL Admin
* `--prebuilt` value: `cloud-sql-postgres-admin`
* **Permissions:**
* **Cloud SQL Viewer** (`roles/cloudsql.viewer`): Provides read-only
access to resources.
* `get_instance`
* `list_instances`
* `list_databases`
* `wait_for_operation`
* **Cloud SQL Editor** (`roles/cloudsql.editor`): Provides permissions to
manage existing resources.
* All `viewer` tools
* `create_database`
* **Cloud SQL Admin** (`roles/cloudsql.admin`): Provides full control over
all resources.
* All `editor` and `viewer` tools
* `create_instance`
* `create_user`
* **Tools:**
* `create_instance`: Creates a new Cloud SQL for PostgreSQL instance.
* `get_instance`: Gets information about a Cloud SQL instance.
* `list_instances`: Lists Cloud SQL instances in a project.
* `create_database`: Creates a new database in a Cloud SQL instance.
* `list_databases`: Lists all databases for a Cloud SQL instance.
* `create_user`: Creates a new user in a Cloud SQL instance.
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
## Cloud SQL for SQL Server
* `--prebuilt` value: `cloud-sql-mssql`
@@ -126,43 +257,94 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `CLOUD_SQL_MSSQL_IP_ADDRESS`: The IP address of the Cloud SQL instance.
* `CLOUD_SQL_MSSQL_USER`: The database username.
* `CLOUD_SQL_MSSQL_PASSWORD`: The password for the database user.
* `CLOUD_SQL_MSSQL_IP_TYPE`: (Optional) The IP type i.e. "Public" or
"Private" (Default: Public).
* **Permissions:**
* **Cloud SQL Client** (`roles/cloudsql.client`) to connect to the
instance.
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to
execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
## Cloud SQL for SQL Server Observability
* `--prebuilt` value: `cloud-sql-mssql-observability`
* **Permissions:**
* **Monitoring Viewer** (`roles/monitoring.viewer`) is required on the
project to view monitoring data.
* **Tools:**
* `get_system_metrics`: Fetches system level cloud monitoring data
(timeseries metrics) for a SQL Server instance using a PromQL query.
## Cloud SQL for SQL Server Admin
* `--prebuilt` value: `cloud-sql-mssql-admin`
* **Permissions:**
* **Cloud SQL Viewer** (`roles/cloudsql.viewer`): Provides read-only
access to resources.
* `get_instance`
* `list_instances`
* `list_databases`
* `wait_for_operation`
* **Cloud SQL Editor** (`roles/cloudsql.editor`): Provides permissions to
manage existing resources.
* All `viewer` tools
* `create_database`
* **Cloud SQL Admin** (`roles/cloudsql.admin`): Provides full control over
all resources.
* All `editor` and `viewer` tools
* `create_instance`
* `create_user`
* **Tools:**
* `create_instance`: Creates a new Cloud SQL for SQL Server instance.
* `get_instance`: Gets information about a Cloud SQL instance.
* `list_instances`: Lists Cloud SQL instances in a project.
* `create_database`: Creates a new database in a Cloud SQL instance.
* `list_databases`: Lists all databases for a Cloud SQL instance.
* `create_user`: Creates a new user in a Cloud SQL instance.
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
## Dataplex
* `--prebuilt` value: `dataplex`
* **Environment Variables:**
* `DATAPLEX_PROJECT`: The GCP project ID.
* **Permissions:**
* **Dataplex Reader** (`roles/dataplex.viewer`) to search and look up
entries.
* **Dataplex Editor** (`roles/dataplex.editor`) to modify entries.
* **Tools:**
* `dataplex_search_entries`: Searches for entries in Dataplex Catalog.
* `dataplex_lookup_entry`: Retrieves a specific entry from Dataplex
Catalog.
* `dataplex_search_aspect_types`: Finds aspect types relevant to the
query.
## Firestore
* `--prebuilt` value: `firestore`
* **Environment Variables:**
* `FIRESTORE_PROJECT`: The GCP project ID.
* `FIRESTORE_DATABASE`: (Optional) The Firestore database ID. Defaults to
"(default)".
* **Permissions:**
* **Cloud Datastore User** (`roles/datastore.user`) to get documents, list
collections, and query collections.
* **Firebase Rules Viewer** (`roles/firebaserules.viewer`) to get and
validate Firestore rules.
* **Tools:**
* `firestore-get-documents`: Gets multiple documents from Firestore by their paths.
* `firestore-list-collections`: Lists Firestore collections for a given parent path.
* `firestore-delete-documents`: Deletes multiple documents from Firestore.
* `firestore-query-collection`: Retrieves one or more Firestore documents from a collection.
* `firestore-get-rules`: Retrieves the active Firestore security rules.
* `firestore-validate-rules`: Checks the provided Firestore Rules source for syntax and validation errors.
* `get_documents`: Gets multiple documents from Firestore by their paths.
* `add_documents`: Adds a new document to a Firestore collection.
* `update_document`: Updates an existing document in Firestore.
* `list_collections`: Lists Firestore collections for a given parent path.
* `delete_documents`: Deletes multiple documents from Firestore.
* `query_collection`: Retrieves one or more Firestore documents from a
collection.
* `get_rules`: Retrieves the active Firestore security rules.
* `validate_rules`: Checks the provided Firestore Rules source for syntax
and validation errors.
## Looker
@@ -173,7 +355,8 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `LOOKER_CLIENT_SECRET`: The client secret for the Looker API.
* `LOOKER_VERIFY_SSL`: Whether to verify SSL certificates.
* **Permissions:**
* A Looker account with permissions to access the desired models,
explores, and data is required.
* **Tools:**
* `get_models`: Retrieves the list of LookML models.
* `get_explores`: Retrieves the list of explores in a model.
@@ -201,7 +384,8 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `MSSQL_USER`: The database username.
* `MSSQL_PASSWORD`: The password for the database user.
* **Permissions:**
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to
execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
@@ -216,10 +400,13 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `MYSQL_USER`: The database username.
* `MYSQL_PASSWORD`: The password for the database user.
* **Permissions:**
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to
execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
* `get_query_plan`: Provides information about how MySQL executes a SQL
statement.
## OceanBase
@@ -231,7 +418,8 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `OCEANBASE_USER`: The database username.
* `OCEANBASE_PASSWORD`: The password for the database user.
* **Permissions:**
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to
execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
@@ -245,14 +433,18 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `POSTGRES_DATABASE`: The name of the database to connect to.
* `POSTGRES_USER`: The database username.
* `POSTGRES_PASSWORD`: The password for the database user.
* `POSTGRES_QUERY_PARAMS`: (Optional) Raw query to be added to the db connection string.
* `POSTGRES_QUERY_PARAMS`: (Optional) Raw query to be added to the db
connection string.
* **Permissions:**
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to execute queries.
* Database-level permissions (e.g., `SELECT`, `INSERT`) are required to
execute queries.
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
* `list_autovacuum_configurations`: Lists autovacuum configurations in the database.
* `list_memory_configurations`: Lists memory-related configurations in the database.
* `list_autovacuum_configurations`: Lists autovacuum configurations in the
database.
* `list_memory_configurations`: Lists memory-related configurations in the
database.
* `list_top_bloated_tables`: List top bloated tables in the database.
* `list_replication_slots`: Lists replication slots in the database.
* `list_invalid_indexes`: Lists invalid indexes in the database.
@@ -266,8 +458,10 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `SPANNER_INSTANCE`: The Spanner instance ID.
* `SPANNER_DATABASE`: The Spanner database ID.
* **Permissions:**
* **Cloud Spanner Database Reader** (`roles/spanner.databaseReader`) to execute DQL queries and list tables.
* **Cloud Spanner Database User** (`roles/spanner.databaseUser`) to execute DML queries.
* **Cloud Spanner Database Reader** (`roles/spanner.databaseReader`) to
execute DQL queries and list tables.
* **Cloud Spanner Database User** (`roles/spanner.databaseUser`) to
execute DML queries.
* **Tools:**
* `execute_sql`: Executes a DML SQL query.
* `execute_sql_dql`: Executes a DQL SQL query.
@@ -281,18 +475,23 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `SPANNER_INSTANCE`: The Spanner instance ID.
* `SPANNER_DATABASE`: The Spanner database ID.
* **Permissions:**
* **Cloud Spanner Database Reader** (`roles/spanner.databaseReader`) to execute DQL queries and list tables.
* **Cloud Spanner Database User** (`roles/spanner.databaseUser`) to execute DML queries.
* **Cloud Spanner Database Reader** (`roles/spanner.databaseReader`) to
execute DQL queries and list tables.
* **Cloud Spanner Database User** (`roles/spanner.databaseUser`) to
execute DML queries.
* **Tools:**
* `execute_sql`: Executes a DML SQL query using the PostgreSQL interface for Spanner.
* `execute_sql_dql`: Executes a DQL SQL query using the PostgreSQL interface for Spanner.
* `execute_sql`: Executes a DML SQL query using the PostgreSQL interface
for Spanner.
* `execute_sql_dql`: Executes a DQL SQL query using the PostgreSQL
interface for Spanner.
* `list_tables`: Lists tables in the database.
## SQLite
* `--prebuilt` value: `sqlite`
* **Environment Variables:**
* `SQLITE_DATABASE`: The path to the SQLite database file (e.g., `./sample.db`).
* `SQLITE_DATABASE`: The path to the SQLite database file (e.g.,
`./sample.db`).
* **Permissions:**
* File system read/write permissions for the specified database file.
* **Tools:**
@@ -303,7 +502,8 @@ See guides, [Connect from your IDE](../how-to/connect-ide/_index.md), for detail
* `--prebuilt` value: `neo4j`
* **Environment Variables:**
* `NEO4J_URI`: The URI of the Neo4j instance (e.g., `bolt://localhost:7687`).
* `NEO4J_URI`: The URI of the Neo4j instance (e.g.,
`bolt://localhost:7687`).
* `NEO4J_DATABASE`: The name of the Neo4j database to connect to.
* `NEO4J_USERNAME`: The username for the Neo4j instance.
* `NEO4J_PASSWORD`: The password for the Neo4j instance.

View File

@@ -200,7 +200,9 @@ func main() {
### Specifying tokens for existing tools
#### Python
Use the [Python SDK](https://github.com/googleapis/mcp-toolbox-sdk-python/tree/main).
Use the [Python
SDK](https://github.com/googleapis/mcp-toolbox-sdk-python/tree/main).
{{< tabpane persist=header >}}
{{< tab header="Core" lang="Python" >}}

View File

@@ -11,11 +11,17 @@ aliases:
## About
The `alloydb-admin` source provides a client to interact with the [Google AlloyDB API](https://cloud.google.com/alloydb/docs/reference/rest). This allows tools to perform administrative tasks on AlloyDB resources, such as managing clusters, instances, and users.
The `alloydb-admin` source provides a client to interact with the [Google
AlloyDB API](https://cloud.google.com/alloydb/docs/reference/rest). This allows
tools to perform administrative tasks on AlloyDB resources, such as managing
clusters, instances, and users.
Authentication can be handled in two ways:
1. **Application Default Credentials (ADC):** By default, the source uses ADC to authenticate with the API.
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will expect an OAuth 2.0 access token to be provided by the client (e.g., a web browser) for each request.
1. **Application Default Credentials (ADC):** By default, the source uses ADC
to authenticate with the API.
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will
expect an OAuth 2.0 access token to be provided by the client (e.g., a web
browser) for each request.
## Example
@@ -30,7 +36,7 @@ sources:
```
## Reference
| **field** | **type** | **required** | **description** |
|----------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "alloydb-admin". |
| **field** | **type** | **required** | **description** |
|----------------|:--------:|:------------:|------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "alloydb-admin". |
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. |
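
For illustration, here is a minimal sketch of a source entry that opts into
client-side OAuth (the source name is illustrative):

```yaml
sources:
  my-alloydb-admin:
    kind: alloydb-admin
    # Expect an OAuth 2.0 access token from the client instead of using ADC.
    useClientOAuth: true
```
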

View File

@@ -31,8 +31,10 @@ apply when creating SQL queries to run against your BigQuery data, such as
avoiding full table scans or complex filters.
[bigquery-docs]: https://cloud.google.com/bigquery/docs
[bigquery-quickstart-cli]: https://cloud.google.com/bigquery/docs/quickstarts/quickstart-command-line
[bigquery-googlesql]: https://cloud.google.com/bigquery/docs/reference/standard-sql/
[bigquery-quickstart-cli]:
https://cloud.google.com/bigquery/docs/quickstarts/quickstart-command-line
[bigquery-googlesql]:
https://cloud.google.com/bigquery/docs/reference/standard-sql/
## Available Tools
@@ -63,10 +65,15 @@ avoiding full table scans or complex filters.
- [`bigquery-sql`](../tools/bigquery/bigquery-sql.md)
Run SQL queries directly against BigQuery datasets.
- [`bigquery-search-catalog`](../tools/bigquery/bigquery-search_catalog.md)
List all entries in Dataplex Catalog (e.g. tables, views, models) that match a
given user query.
### Pre-built Configurations
- [BigQuery using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/bigquery_mcp/)
Connect your IDE to BigQuery using Toolbox.
- [BigQuery using
MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/bigquery_mcp/)
Connect your IDE to BigQuery using Toolbox.
## Requirements
@@ -77,7 +84,9 @@ user and group access to BigQuery resources like projects, datasets, and tables.
### Authentication via Application Default Credentials (ADC)
By **default**, Toolbox will use your [Application Default Credentials (ADC)][adc] to authorize and authenticate when interacting with [BigQuery][bigquery-docs].
By **default**, Toolbox will use your [Application Default Credentials
(ADC)][adc] to authorize and authenticate when interacting with
[BigQuery][bigquery-docs].
When using this method, you need to ensure the IAM identity associated with your
ADC (such as a service account) has the correct permissions for the queries you

View File

@@ -9,7 +9,7 @@ description: >
## About
[ClickHouse][clickhouse-docs] is a fast, open-source, column-oriented database
[ClickHouse][clickhouse-docs] is a fast, open-source, column-oriented database
[clickhouse-docs]: https://clickhouse.com/docs
@@ -27,10 +27,12 @@ description: >
### Database User
This source uses standard ClickHouse authentication. You will need to [create a
ClickHouse user][clickhouse-users] (or with [ClickHouse Cloud][clickhouse-cloud]) to connect to the database with. The user
should have appropriate permissions for the operations you plan to perform.
ClickHouse user][clickhouse-users] (or use [ClickHouse
Cloud][clickhouse-cloud]) to connect to the database. The user should have
appropriate permissions for the operations you plan to perform.
[clickhouse-cloud]: https://clickhouse.com/docs/getting-started/quick-start/cloud#connect-with-your-app
[clickhouse-cloud]:
https://clickhouse.com/docs/getting-started/quick-start/cloud#connect-with-your-app
[clickhouse-users]: https://clickhouse.com/docs/en/sql-reference/statements/create/user
### Network Access
@@ -79,13 +81,13 @@ instead of hardcoding your secrets into the configuration file.
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|------------------------------------------------------------------------------------|
| kind | string | true | Must be "clickhouse". |
| host | string | true | IP address or hostname to connect to (e.g. "127.0.0.1" or "clickhouse.example.com") |
| port | string | true | Port to connect to (e.g. "8443" for HTTPS, "8123" for HTTP) |
| database | string | true | Name of the ClickHouse database to connect to (e.g. "my_database"). |
| user | string | true | Name of the ClickHouse user to connect as (e.g. "analytics_user"). |
| password | string | false | Password of the ClickHouse user (e.g. "my-password"). |
| protocol | string | false | Connection protocol: "https" (default) or "http". |
| secure | boolean | false | Whether to use a secure connection (TLS). Default: false. |
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|-------------------------------------------------------------------------------------|
| kind | string | true | Must be "clickhouse". |
| host | string | true | IP address or hostname to connect to (e.g. "127.0.0.1" or "clickhouse.example.com") |
| port | string | true | Port to connect to (e.g. "8443" for HTTPS, "8123" for HTTP) |
| database | string | true | Name of the ClickHouse database to connect to (e.g. "my_database"). |
| user | string | true | Name of the ClickHouse user to connect as (e.g. "analytics_user"). |
| password | string | false | Password of the ClickHouse user (e.g. "my-password"). |
| protocol | string | false | Connection protocol: "https" (default) or "http". |
| secure | boolean | false | Whether to use a secure connection (TLS). Default: false. |
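
As a hedged sketch, a ClickHouse source assembled from the fields above might
look like this (values mirror the table's examples; prefer referencing real
secrets from the environment rather than hardcoding them):

```yaml
sources:
  my-clickhouse-instance:
    kind: clickhouse
    host: clickhouse.example.com
    port: "8443"            # HTTPS port
    database: my_database
    user: analytics_user
    password: my-password   # placeholder only; do not commit real secrets
    protocol: https
    secure: true            # enable TLS
```
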

View File

@@ -10,11 +10,16 @@ aliases:
## About
The `cloud-monitoring` source provides a client to interact with the [Google Cloud Monitoring API](https://cloud.google.com/monitoring/api). This allows tools to access cloud monitoring metrics explorer and run promql queries.
The `cloud-monitoring` source provides a client to interact with the [Google
Cloud Monitoring API](https://cloud.google.com/monitoring/api). This allows
tools to access the Cloud Monitoring Metrics Explorer and run PromQL queries.
Authentication can be handled in two ways:
1. **Application Default Credentials (ADC):** By default, the source uses ADC to authenticate with the API.
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will expect an OAuth 2.0 access token to be provided by the client (e.g., a web browser) for each request.
1. **Application Default Credentials (ADC):** By default, the source uses ADC
to authenticate with the API.
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will
expect an OAuth 2.0 access token to be provided by the client (e.g., a web
browser) for each request.
## Example
@@ -30,7 +35,7 @@ sources:
## Reference
| **field** | **type** | **required** | **description** |
|----------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "cloud-monitoring". |
| **field** | **type** | **required** | **description** |
|----------------|:--------:|:------------:|------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "cloud-monitoring". |
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. |

View File

@@ -10,11 +10,17 @@ aliases:
## About
The `cloud-sql-admin` source provides a client to interact with the [Google Cloud SQL Admin API](https://cloud.google.com/sql/docs/mysql/admin-api/v1). This allows tools to perform administrative tasks on Cloud SQL instances, such as creating users and databases.
The `cloud-sql-admin` source provides a client to interact with the [Google
Cloud SQL Admin API](https://cloud.google.com/sql/docs/mysql/admin-api/v1). This
allows tools to perform administrative tasks on Cloud SQL instances, such as
creating users and databases.
Authentication can be handled in two ways:
1. **Application Default Credentials (ADC):** By default, the source uses ADC to authenticate with the API.
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will expect an OAuth 2.0 access token to be provided by the client (e.g., a web browser) for each request.
1. **Application Default Credentials (ADC):** By default, the source uses ADC
to authenticate with the API.
2. **Client-side OAuth:** If `useClientOAuth` is set to `true`, the source will
expect an OAuth 2.0 access token to be provided by the client (e.g., a web
browser) for each request.
## Example
@@ -30,7 +36,7 @@ sources:
## Reference
| **field** | **type** | **required** | **description** |
|----------------|:--------:|:------------:|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "cloud-sql-admin". |
| **field** | **type** | **required** | **description** |
|----------------|:--------:|:------------:|------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "cloud-sql-admin". |
| useClientOAuth | boolean | false | If true, the source will use client-side OAuth for authorization. Otherwise, it will use Application Default Credentials. Defaults to `false`. |
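
As a rough sketch, a source that relies on the default Application Default
Credentials needs little beyond its kind (the source name is illustrative):

```yaml
sources:
  my-cloud-sql-admin:
    kind: cloud-sql-admin
    # useClientOAuth defaults to false, so ADC is used for authentication.
```
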

View File

@@ -22,19 +22,29 @@ to a database by following these instructions][csql-mysql-quickstart].
## Available Tools
- [`mysql-sql`](../tools/mysql/mysql-sql.md)
Execute pre-defined prepared SQL queries in MySQL.
- [`mysql-sql`](../tools/mysql/mysql-sql.md)
Execute pre-defined prepared SQL queries in Cloud SQL for MySQL.
- [`mysql-execute-sql`](../tools/mysql/mysql-execute-sql.md)
- [`mysql-execute-sql`](../tools/mysql/mysql-execute-sql.md)
Run parameterized SQL queries in Cloud SQL for MySQL.
- [`mysql-list-tables`](../tools/mysql/mysql-list-tables.md)
- [`mysql-list-active-queries`](../tools/mysql/mysql-list-active-queries.md)
List active queries in Cloud SQL for MySQL.
- [`mysql-list-tables`](../tools/mysql/mysql-list-tables.md)
List tables in a Cloud SQL for MySQL database.
- [`mysql-list-tables-missing-unique-indexes`](../tools/mysql/mysql-list-tables-missing-unique-indexes.md)
List tables in a Cloud SQL for MySQL database that do not have primary or unique indices.
- [`mysql-list-table-fragmentation`](../tools/mysql/mysql-list-table-fragmentation.md)
List table fragmentation in Cloud SQL for MySQL tables.
### Pre-built Configurations
- [Cloud SQL for MySQL using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_mysql_mcp/)
Connect your IDE to Cloud SQL for MySQL using Toolbox.
- [Cloud SQL for MySQL using
MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_mysql_mcp/)
Connect your IDE to Cloud SQL for MySQL using Toolbox.
## Requirements

View File

@@ -18,7 +18,8 @@ If you are new to Cloud SQL for PostgreSQL, you can try [creating and connecting
to a database by following these instructions][csql-pg-quickstart].
[csql-pg-docs]: https://cloud.google.com/sql/docs/postgres
[csql-pg-quickstart]: https://cloud.google.com/sql/docs/postgres/connect-instance-local-computer
[csql-pg-quickstart]:
https://cloud.google.com/sql/docs/postgres/connect-instance-local-computer
## Available Tools
@@ -42,7 +43,8 @@ to a database by following these instructions][csql-pg-quickstart].
### Pre-built Configurations
- [Cloud SQL for Postgres using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_pg_mcp/)
- [Cloud SQL for Postgres using
MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_pg_mcp/)
Connect your IDE to Cloud SQL for Postgres using Toolbox.

View File

@@ -46,7 +46,7 @@ Your primary objective is to help discover, organize and manage metadata related
Example (Incorrect): Hi there! I see that you are looking for...
Example (Correct): This problem likely stems from...
3. Do not reiterate or summarize the question in the answer.
4. Crucially, always convey a tone of uncertainty and caution. Since you are interpreting metadata and have no way to externally verify your answers, never express complete confidence. Frame your responses as interpretations based solely on the provided metadata. Use a suggestive tone, not a prescriptive one:
4. Crucially, always convey a tone of uncertainty and caution. Since you are interpreting metadata and have no way to externally verify your answers, never express complete confidence. Frame your responses as interpretations based solely on the provided metadata. Use a suggestive tone, not a prescriptive one:
Example (Correct): "The entry describes..."
Example (Correct): "According to catalog,..."
Example (Correct): "Based on the metadata,..."

View File

@@ -9,7 +9,10 @@ description: >
## About
[Firebird][fb-docs] is a relational database management system offering many ANSI SQL standard features that runs on Linux, Windows, and a variety of Unix platforms. It is known for its small footprint, powerful features, and easy maintenance.
[Firebird][fb-docs] is a relational database management system offering many
ANSI SQL standard features that runs on Linux, Windows, and a variety of Unix
platforms. It is known for its small footprint, powerful features, and easy
maintenance.
[fb-docs]: https://firebirdsql.org/
@@ -25,7 +28,8 @@ description: >
### Database User
This source uses standard authentication. You will need to [create a Firebird user][fb-users] to login to the database with.
This source uses standard authentication. You will need to [create a Firebird
user][fb-users] to log in to the database with.
[fb-users]: https://firebirdsql.org/refdocs/langrefupd25-sql-create-user.html
@@ -49,11 +53,11 @@ instead of hardcoding your secrets into the configuration file.
## Reference
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|------------------------------------------------------------------------|
| kind | string | true | Must be "firebird". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1") |
| port | string | true | Port to connect to (e.g. "3050") |
| **field** | **type** | **required** | **description** |
|-----------|:--------:|:------------:|------------------------------------------------------------------------------|
| kind | string | true | Must be "firebird". |
| host | string | true | IP address to connect to (e.g. "127.0.0.1") |
| port | string | true | Port to connect to (e.g. "3050") |
| database | string | true | Path to the Firebird database file (e.g. "/var/lib/firebird/data/test.fdb"). |
| user | string | true | Name of the Firebird user to connect as (e.g. "SYSDBA"). |
| password | string | true | Password of the Firebird user (e.g. "masterkey"). |
| user | string | true | Name of the Firebird user to connect as (e.g. "SYSDBA"). |
| password | string | true | Password of the Firebird user (e.g. "masterkey"). |
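
Putting the reference fields together, a minimal Firebird source sketch (values
copied from the examples above; replace them with your own):

```yaml
sources:
  my-firebird-db:
    kind: firebird
    host: 127.0.0.1
    port: "3050"
    database: /var/lib/firebird/data/test.fdb
    user: SYSDBA
    password: masterkey   # example value only; never hardcode real credentials
```
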

View File

@@ -16,15 +16,24 @@ reliability, performance, and ease of use.
## Available Tools
- [`mysql-sql`](../tools/mysql/mysql-sql.md)
- [`mysql-sql`](../tools/mysql/mysql-sql.md)
Execute pre-defined prepared SQL queries in MySQL.
- [`mysql-execute-sql`](../tools/mysql/mysql-execute-sql.md)
- [`mysql-execute-sql`](../tools/mysql/mysql-execute-sql.md)
Run parameterized SQL queries in MySQL.
- [`mysql-list-tables`](../tools/mysql/mysql-list-tables.md)
- [`mysql-list-active-queries`](../tools/mysql/mysql-list-active-queries.md)
List active queries in MySQL.
- [`mysql-list-tables`](../tools/mysql/mysql-list-tables.md)
List tables in a MySQL database.
- [`mysql-list-tables-missing-unique-indexes`](../tools/mysql/mysql-list-tables-missing-unique-indexes.md)
List tables in a MySQL database that do not have primary or unique indices.
- [`mysql-list-table-fragmentation`](../tools/mysql/mysql-list-table-fragmentation.md)
List table fragmentation in MySQL tables.
## Requirements
### Database User

View File

@@ -8,7 +8,11 @@ description: >
## About
[OceanBase][oceanbase-docs] is a distributed relational database management system (RDBMS) that provides high availability, scalability, and strong consistency. It's designed to handle large-scale data processing and is compatible with MySQL, making it easy for developers to migrate from MySQL to OceanBase.
[OceanBase][oceanbase-docs] is a distributed relational database management
system (RDBMS) that provides high availability, scalability, and strong
consistency. It's designed to handle large-scale data processing and is
compatible with MySQL, making it easy for developers to migrate from MySQL to
OceanBase.
[oceanbase-docs]: https://www.oceanbase.com/
@@ -16,11 +20,15 @@ description: >
### Database User
This source only uses standard authentication. You will need to create an OceanBase user to login to the database with. OceanBase supports MySQL-compatible user management syntax.
This source only uses standard authentication. You will need to create an
OceanBase user to log in to the database with. OceanBase supports
MySQL-compatible user management syntax.
### Network Connectivity
Ensure that your application can connect to the OceanBase cluster. OceanBase typically runs on ports 2881 (for MySQL protocol) or 3881 (for MySQL protocol with SSL).
Ensure that your application can connect to the OceanBase cluster. OceanBase
typically runs on ports 2881 (for MySQL protocol) or 3881 (for MySQL protocol
with SSL).
## Example
@@ -57,16 +65,21 @@ instead of hardcoding your secrets into the configuration file.
### MySQL Compatibility
OceanBase is highly compatible with MySQL, supporting most MySQL SQL syntax, data types, and functions. This makes it easy to migrate existing MySQL applications to OceanBase.
OceanBase is highly compatible with MySQL, supporting most MySQL SQL syntax,
data types, and functions. This makes it easy to migrate existing MySQL
applications to OceanBase.
### High Availability
OceanBase provides automatic failover and data replication across multiple nodes, ensuring high availability and data durability.
OceanBase provides automatic failover and data replication across multiple
nodes, ensuring high availability and data durability.
### Scalability
OceanBase can scale horizontally by adding more nodes to the cluster, making it suitable for large-scale applications.
OceanBase can scale horizontally by adding more nodes to the cluster, making it
suitable for large-scale applications.
### Strong Consistency
OceanBase provides strong consistency guarantees, ensuring that all transactions are ACID compliant.
OceanBase provides strong consistency guarantees, ensuring that all transactions
are ACID compliant.

View File

@@ -9,7 +9,10 @@ description: >
## About
[TiDB][tidb-docs] is an open-source distributed SQL database that supports Hybrid Transactional and Analytical Processing (HTAP) workloads. It is MySQL-compatible and features horizontal scalability, strong consistency, and high availability.
[TiDB][tidb-docs] is an open-source distributed SQL database that supports
Hybrid Transactional and Analytical Processing (HTAP) workloads. It is
MySQL-compatible and features horizontal scalability, strong consistency, and
high availability.
[tidb-docs]: https://docs.pingcap.com/tidb/stable
@@ -17,9 +20,11 @@ description: >
### Database User
This source uses standard MySQL protocol authentication. You will need to [create a TiDB user][tidb-users] to login to the database with.
This source uses standard MySQL protocol authentication. You will need to
[create a TiDB user][tidb-users] to log in to the database with.
For TiDB Cloud users, you can create database users through the TiDB Cloud console.
For TiDB Cloud users, you can create database users through the TiDB Cloud
console.
[tidb-users]: https://docs.pingcap.com/tidb/stable/user-account-management
@@ -27,11 +32,14 @@ For TiDB Cloud users, you can create database users through the TiDB Cloud conso
- TiDB Cloud
For TiDB Cloud instances, SSL is automatically enabled when the hostname matches the TiDB Cloud pattern (`gateway*.*.*.tidbcloud.com`). You don't need to explicitly set `ssl: true` for TiDB Cloud connections.
For TiDB Cloud instances, SSL is automatically enabled when the hostname
matches the TiDB Cloud pattern (`gateway*.*.*.tidbcloud.com`). You don't
need to explicitly set `ssl: true` for TiDB Cloud connections.
- Self-Hosted TiDB
For self-hosted TiDB instances, you can optionally enable SSL by setting `ssl: true` in your configuration.
For self-hosted TiDB instances, you can optionally enable SSL by setting
`ssl: true` in your configuration.
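
As a rough sketch of the self-hosted case, enabling SSL is a single extra
field; the other connection fields shown here are assumptions based on the
usual host/port/user pattern for MySQL-compatible sources:

```yaml
sources:
  my-self-hosted-tidb:
    kind: tidb
    host: 127.0.0.1       # assumed connection fields, for illustration only
    port: "4000"
    database: test
    user: root
    password: my-password
    ssl: true             # explicitly enable SSL for self-hosted instances
```
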
## Example

View File

@@ -8,7 +8,9 @@ description: >
## About
[Trino][trino-docs] is a distributed SQL query engine designed for fast analytic queries against data of any size. It allows you to query data where it lives, including Hive, Cassandra, relational databases or even proprietary data stores.
[Trino][trino-docs] is a distributed SQL query engine designed for fast analytic
queries against data of any size. It allows you to query data where it lives,
including Hive, Cassandra, relational databases or even proprietary data stores.
[trino-docs]: https://trino.io/docs/
@@ -24,7 +26,8 @@ description: >
### Trino Cluster
You need access to a running Trino cluster with appropriate user permissions for the catalogs and schemas you want to query.
You need access to a running Trino cluster with appropriate user permissions for
the catalogs and schemas you want to query.
## Example
@@ -47,16 +50,16 @@ instead of hardcoding your secrets into the configuration file.
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------:|:------------:|------------------------------------------------------------------------|
| kind | string | true | Must be "trino". |
| host | string | true | Trino coordinator hostname (e.g. "trino.example.com") |
| port | string | true | Trino coordinator port (e.g. "8080", "8443") |
| user | string | false | Username for authentication (e.g. "analyst"). Optional for anonymous access. |
| password | string | false | Password for basic authentication |
| catalog | string | true | Default catalog to use for queries (e.g. "hive") |
| schema | string | true | Default schema to use for queries (e.g. "default") |
| queryTimeout| string | false | Query timeout duration (e.g. "30m", "1h") |
| accessToken | string | false | JWT access token for authentication |
| kerberosEnabled | boolean | false | Enable Kerberos authentication (default: false) |
| sslEnabled | boolean | false | Enable SSL/TLS (default: false) |
| **field** | **type** | **required** | **description** |
|-----------------|:--------:|:------------:|------------------------------------------------------------------------------|
| kind | string | true | Must be "trino". |
| host | string | true | Trino coordinator hostname (e.g. "trino.example.com") |
| port | string | true | Trino coordinator port (e.g. "8080", "8443") |
| user | string | false | Username for authentication (e.g. "analyst"). Optional for anonymous access. |
| password | string | false | Password for basic authentication |
| catalog | string | true | Default catalog to use for queries (e.g. "hive") |
| schema | string | true | Default schema to use for queries (e.g. "default") |
| queryTimeout | string | false | Query timeout duration (e.g. "30m", "1h") |
| accessToken | string | false | JWT access token for authentication |
| kerberosEnabled | boolean | false | Enable Kerberos authentication (default: false) |
| sslEnabled | boolean | false | Enable SSL/TLS (default: false) |
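
For example, a sketch of a Trino source assembled from the fields above (values
are the table's examples; adjust them for your cluster):

```yaml
sources:
  my-trino:
    kind: trino
    host: trino.example.com
    port: "8443"
    user: analyst          # optional; omit for anonymous access
    catalog: hive
    schema: default
    queryTimeout: 30m      # optional
    sslEnabled: true       # optional; defaults to false
```
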

View File

@@ -8,7 +8,9 @@ description: >
## About
[YugabyteDB][yugabytedb] is a high-performance, distributed SQL database designed for global, internet-scale applications, with full PostgreSQL compatibility.
[YugabyteDB][yugabytedb] is a high-performance, distributed SQL database
designed for global, internet-scale applications, with full PostgreSQL
compatibility.
[yugabytedb]: https://www.yugabyte.com/
@@ -29,16 +31,16 @@ sources:
## Reference
| **field** | **type** | **required** | **description** |
|-----------------------------------|:--------:|:------------:|------------------------------------------------------------------------|
| kind | string | true | Must be "yugabytedb". |
| host | string | true | IP address to connect to. |
| port | integer | true | Port to connect to. The default port is 5433. |
| database | string | true | Name of the YugabyteDB database to connect to. The default database name is yugabyte. |
| user | string | true | Name of the YugabyteDB user to connect as. The default user is yugabyte. |
| password | string | true | Password of the YugabyteDB user. The default password is yugabyte. |
| loadBalance | boolean | false | If true, enable uniform load balancing. The default loadBalance value is false. |
| topologyKeys | string | false | Comma-separated geo-locations in the form cloud.region.zone:priority to enable topology-aware load balancing. Ignored if loadBalance is false. It is null by default. |
| ybServersRefreshInterval | integer | false | The interval (in seconds) to refresh the servers list; ignored if loadBalance is false. The default value of ybServersRefreshInterval is 300. |
| fallbackToTopologyKeysOnly | boolean | false | If set to true and topologyKeys are specified, only connect to nodes specified in topologyKeys. By defualt, this is set to false. |
| failedHostReconnectDelaySecs | integer | false | Time (in seconds) to wait before trying to connect to failed nodes. The default value of is 5. |
| **field** | **type** | **required** | **description** |
|------------------------------|:--------:|:------------:|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "yugabytedb". |
| host | string | true | IP address to connect to. |
| port | integer | true | Port to connect to. The default port is 5433. |
| database | string | true | Name of the YugabyteDB database to connect to. The default database name is yugabyte. |
| user | string | true | Name of the YugabyteDB user to connect as. The default user is yugabyte. |
| password | string | true | Password of the YugabyteDB user. The default password is yugabyte. |
| loadBalance | boolean | false | If true, enable uniform load balancing. The default loadBalance value is false. |
| topologyKeys | string | false | Comma-separated geo-locations in the form cloud.region.zone:priority to enable topology-aware load balancing. Ignored if loadBalance is false. It is null by default. |
| ybServersRefreshInterval | integer | false | The interval (in seconds) to refresh the servers list; ignored if loadBalance is false. The default value of ybServersRefreshInterval is 300. |
| fallbackToTopologyKeysOnly   | boolean  | false        | If set to true and topologyKeys are specified, only connect to nodes specified in topologyKeys. By default, this is set to false.                                       |
| failedHostReconnectDelaySecs | integer  | false        | Time (in seconds) to wait before trying to connect to failed nodes. The default value is 5.                                                                             |
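
To illustrate the load-balancing fields, here is a rough sketch of a
topology-aware configuration (placeholder values only):

```yaml
sources:
  my-yugabytedb:
    kind: yugabytedb
    host: 127.0.0.1
    port: 5433
    database: yugabyte
    user: yugabyte
    password: yugabyte        # default shown for illustration only
    loadBalance: true         # enable uniform load balancing
    topologyKeys: "cloud1.region1.zone1:1,cloud1.region1.zone2:2"
```
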

View File

@@ -8,14 +8,19 @@ aliases: [/resources/tools/alloydb-create-instance]
## About
The `alloydb-create-instance` tool creates a new AlloyDB instance (PRIMARY or READ_POOL) within a specified cluster. It is compatible with [alloydb-admin](../../sources/alloydb-admin.md) source.
The `alloydb-create-instance` tool creates a new AlloyDB instance (PRIMARY or
READ_POOL) within a specified cluster. It is compatible with the
[alloydb-admin](../../sources/alloydb-admin.md) source.
This tool provisions a new instance with a **public IP address**.
**Permissions & APIs Required:**
Before using, ensure the following on your GCP project:
1. The [AlloyDB API](https://console.cloud.google.com/apis/library/alloydb.googleapis.com) is enabled.
2. The user or service account executing the tool has one of the following IAM roles:
1. The [AlloyDB
API](https://console.cloud.google.com/apis/library/alloydb.googleapis.com)
is enabled.
2. The user or service account executing the tool has one of the following IAM
roles:
- `roles/alloydb.admin` (the AlloyDB Admin predefined IAM role)
@@ -35,7 +40,8 @@ The tool takes the following input parameters:
| `nodeCount` | int | The number of nodes for a read pool. Required only if `instanceType` is `READ_POOL`. Default: `1` | No |
> Note
> The tool sets the `password.enforce_complexity` database flag to `on`, requiring new database passwords to meet complexity rules.
> The tool sets the `password.enforce_complexity` database flag to `on`,
> requiring new database passwords to meet complexity rules.
## Example

View File

@@ -8,13 +8,18 @@ aliases: [/resources/tools/alloydb-create-user]
## About
The `alloydb-create-user` tool creates a new database user (`ALLOYDB_BUILT_IN` or `ALLOYDB_IAM_USER`) within a specified cluster. It is compatible with [alloydb-admin](../../sources/alloydb-admin.md) source.
The `alloydb-create-user` tool creates a new database user (`ALLOYDB_BUILT_IN`
or `ALLOYDB_IAM_USER`) within a specified cluster. It is compatible with the
[alloydb-admin](../../sources/alloydb-admin.md) source.
**Permissions & APIs Required:**
Before using, ensure the following on your GCP project:
1. The [AlloyDB API](https://console.cloud.google.com/apis/library/alloydb.googleapis.com) is enabled.
2. The user or service account executing the tool has one of the following IAM roles:
1. The [AlloyDB
API](https://console.cloud.google.com/apis/library/alloydb.googleapis.com)
is enabled.
2. The user or service account executing the tool has one of the following IAM
roles:
- `roles/alloydb.admin` (the AlloyDB Admin predefined IAM role)
- `roles/owner` (the Owner basic IAM role)
- `roles/editor` (the Editor basic IAM role)
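
Once those prerequisites are in place, the tool is wired up like any other tool
definition; a hedged sketch with illustrative names:

```yaml
tools:
  create_alloydb_user:
    kind: alloydb-create-user
    source: my-alloydb-admin    # an alloydb-admin source defined elsewhere
    description: Creates a new AlloyDB database user.
```
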

View File

@@ -8,7 +8,9 @@ aliases: [/resources/tools/alloydb-get-cluster]
## About
The `alloydb-get-cluster` tool retrieves detailed information for a single, specified AlloyDB cluster. It is compatible with [alloydb-admin](../../sources/alloydb-admin.md) source.
The `alloydb-get-cluster` tool retrieves detailed information for a single,
specified AlloyDB cluster. It is compatible with the
[alloydb-admin](../../sources/alloydb-admin.md) source.
| Parameter | Type | Description | Required |
| :--------- | :----- | :------------------------------------------------- | :------- |

View File

@@ -8,7 +8,9 @@ aliases: [/resources/tools/alloydb-get-user]
## About
The `alloydb-get-user` tool retrieves detailed information for a single, specified AlloyDB user. It is compatible with [alloydb-admin](../../sources/alloydb-admin.md) source.
The `alloydb-get-user` tool retrieves detailed information for a single,
specified AlloyDB user. It is compatible with the
[alloydb-admin](../../sources/alloydb-admin.md) source.
| Parameter | Type | Description | Required |
| :--------- | :----- | :------------------------------------------------- | :------- |

View File

@@ -8,9 +8,13 @@ aliases: [/resources/tools/alloydb-list-clusters]
## About
The `alloydb-list-clusters` tool retrieves AlloyDB cluster information for all or specified locations in a given project. It is compatible with [alloydb-admin](../../sources/alloydb-admin.md) source.
The `alloydb-list-clusters` tool retrieves AlloyDB cluster information for all
or specified locations in a given project. It is compatible with the
[alloydb-admin](../../sources/alloydb-admin.md) source.
`alloydb-list-clusters` tool lists the detailed information of AlloyDB cluster(cluster name, state, configuration, etc) for a given project and location. The tool takes the following input parameters:
The `alloydb-list-clusters` tool lists detailed information about AlloyDB
clusters (cluster name, state, configuration, etc.) for a given project and
location. The tool takes the following input parameters:
| Parameter | Type | Description | Required |
| :--------- | :----- | :----------------------------------------------------------------------------------------------- | :------- |

View File

@@ -8,9 +8,14 @@ aliases: [/resources/tools/alloydb-list-instances]
## About
The `alloydb-list-instances` tool retrieves AlloyDB instance information for all or specified clusters and locations in a given project. It is compatible with [alloydb-admin](../../sources/alloydb-admin.md) source.
The `alloydb-list-instances` tool retrieves AlloyDB instance information for all
or specified clusters and locations in a given project. It is compatible with
the [alloydb-admin](../../sources/alloydb-admin.md) source.
`alloydb-list-instances` tool lists the detailed information of AlloyDB instances (instance name, type, IP address, state, configuration, etc) for a given project, cluster and location. The tool takes the following input parameters:
The `alloydb-list-instances` tool lists detailed information about AlloyDB
instances (instance name, type, IP address, state, configuration, etc.) for a
given project, cluster, and location. The tool takes the following input
parameters:
| Parameter | Type | Description | Required |
| :--------- | :----- | :--------------------------------------------------------------------------------------------------------- | :------- |

View File

@@ -8,7 +8,9 @@ aliases: [/resources/tools/alloydb-list-users]
## About
The `alloydb-list-users` tool lists all database users within an AlloyDB cluster. It is compatible with [alloydb-admin](../../sources/alloydb-admin.md) source.
The `alloydb-list-users` tool lists all database users within an AlloyDB
cluster. It is compatible with the
[alloydb-admin](../../sources/alloydb-admin.md) source.
The tool takes the following input parameters:
| Parameter | Type | Description | Required |

View File

@@ -8,7 +8,8 @@ description: "Wait for a long-running AlloyDB operation to complete.\n"
The `alloydb-wait-for-operation` tool is a utility tool that waits for a
long-running AlloyDB operation to complete. It does this by polling the AlloyDB
Admin API operation status endpoint until the operation is finished, using
exponential backoff. It is compatible with [alloydb-admin](../../sources/alloydb-admin.md) source.
exponential backoff. It is compatible with the
[alloydb-admin](../../sources/alloydb-admin.md) source.
| Parameter | Type | Description | Required |
| :---------- | :----- | :--------------------------------------------------- | :------- |

View File

@@ -10,7 +10,9 @@ aliases:
## About
A `bigquery-analyze-contribution` tool performs contribution analysis in BigQuery by creating a temporary `CONTRIBUTION_ANALYSIS` model and then querying it with `ML.GET_INSIGHTS` to find top contributors for a given metric.
A `bigquery-analyze-contribution` tool performs contribution analysis in
BigQuery by creating a temporary `CONTRIBUTION_ANALYSIS` model and then querying
it with `ML.GET_INSIGHTS` to find top contributors for a given metric.
It's compatible with the following sources:
@@ -18,12 +20,24 @@ It's compatible with the following sources:
`bigquery-analyze-contribution` takes the following parameters:
- **input_data** (string, required): The data that contain the test and control data to analyze. This can be a fully qualified BigQuery table ID (e.g., `my-project.my_dataset.my_table`) or a SQL query that returns the data.
- **contribution_metric** (string, required): The name of the column that contains the metric to analyze. This can be SUM(metric_column_name), SUM(numerator_metric_column_name)/SUM(denominator_metric_column_name) or SUM(metric_sum_column_name)/COUNT(DISTINCT categorical_column_name) depending the type of metric to analyze.
- **is_test_col** (string, required): The name of the column that identifies whether a row is in the test or control group. The column must contain boolean values.
- **dimension_id_cols** (array of strings, optional): An array of column names that uniquely identify each dimension.
- **top_k_insights_by_apriori_support** (integer, optional): The number of top insights to return, ranked by apriori support. Default to '30'.
- **pruning_method** (string, optional): The method to use for pruning redundant insights. Can be `'NO_PRUNING'` or `'PRUNE_REDUNDANT_INSIGHTS'`. Defaults to `'PRUNE_REDUNDANT_INSIGHTS'`.
- **input_data** (string, required): The data that contain the test and control
data to analyze. This can be a fully qualified BigQuery table ID (e.g.,
`my-project.my_dataset.my_table`) or a SQL query that returns the data.
- **contribution_metric** (string, required): The name of the column that
contains the metric to analyze. This can be SUM(metric_column_name),
SUM(numerator_metric_column_name)/SUM(denominator_metric_column_name) or
SUM(metric_sum_column_name)/COUNT(DISTINCT categorical_column_name), depending
on the type of metric to analyze.
- **is_test_col** (string, required): The name of the column that identifies
whether a row is in the test or control group. The column must contain boolean
values.
- **dimension_id_cols** (array of strings, optional): An array of column names
that uniquely identify each dimension.
- **top_k_insights_by_apriori_support** (integer, optional): The number of top
insights to return, ranked by apriori support. Defaults to '30'.
- **pruning_method** (string, optional): The method to use for pruning redundant
insights. Can be `'NO_PRUNING'` or `'PRUNE_REDUNDANT_INSIGHTS'`. Defaults to
`'PRUNE_REDUNDANT_INSIGHTS'`.
## Example
@@ -37,11 +51,16 @@ tools:
```
## Sample Prompt
You can prepare a sample table following https://cloud.google.com/bigquery/docs/get-contribution-analysis-insights.
You can prepare a sample table following
https://cloud.google.com/bigquery/docs/get-contribution-analysis-insights.
And use the following sample prompts to call this tool:
- What drives the changes in sales in the table `bqml_tutorial.iowa_liquor_sales_sum_data`? Use the project id myproject.
- Analyze the contribution for the `total_sales` metric in the table `bqml_tutorial.iowa_liquor_sales_sum_data`. The test group is identified by the `is_test` column. The dimensions are `store_name`, `city`, `vendor_name`, `category_name` and `item_description`.
- What drives the changes in sales in the table
`bqml_tutorial.iowa_liquor_sales_sum_data`? Use the project id myproject.
- Analyze the contribution for the `total_sales` metric in the table
`bqml_tutorial.iowa_liquor_sales_sum_data`. The test group is identified by
the `is_test` column. The dimensions are `store_name`, `city`, `vendor_name`,
`category_name` and `item_description`.
## Reference

View File

@@ -10,28 +10,42 @@ aliases:
## About
A `bigquery-conversational-analytics` tool allows you to ask questions about your data in natural language.
A `bigquery-conversational-analytics` tool allows you to ask questions about
your data in natural language.
This function takes a user's question (which can include conversational history for context)
and references to specific BigQuery tables, and sends them to a stateless conversational API.
This function takes a user's question (which can include conversational history
for context) and references to specific BigQuery tables, and sends them to a
stateless conversational API.
The API uses a GenAI agent to understand the question, generate and execute SQL queries
and Python code, and formulate an answer. This function returns a detailed, sequential
log of this entire process, which includes any generated SQL or Python code, the data
retrieved, and the final text answer.
The API uses a GenAI agent to understand the question, generate and execute SQL
queries and Python code, and formulate an answer. This function returns a
detailed, sequential log of this entire process, which includes any generated
SQL or Python code, the data retrieved, and the final text answer.
**Note**: This tool requires additional setup in your project. Please refer to the
official [Conversational Analytics API documentation](https://cloud.google.com/gemini/docs/conversational-analytics-api/overview)
**Note**: This tool requires additional setup in your project. Please refer to
the official [Conversational Analytics API
documentation](https://cloud.google.com/gemini/docs/conversational-analytics-api/overview)
for instructions.
It's compatible with the following sources:
- [bigquery](../sources/bigquery.md)
- [bigquery](../../sources/bigquery.md)
The tool takes the following input parameters:
`bigquery-conversational-analytics` accepts the following parameters:
* `user_query_with_context`: The user's question, potentially including conversation history and system instructions for context.
* `table_references`: A JSON string of a list of BigQuery tables to use as context. Each object in the list must contain `projectId`, `datasetId`, and `tableId`. Example: `'[{"projectId": "my-gcp-project", "datasetId": "my_dataset", "tableId": "my_table"}]'`
- **`user_query_with_context`:** The user's question, potentially including
conversation history and system instructions for context.
- **`table_references`:** A JSON string of a list of BigQuery tables to use as
context. Each object in the list must contain `projectId`, `datasetId`, and
`tableId`. Example: `'[{"projectId": "my-gcp-project", "datasetId": "my_dataset", "tableId": "my_table"}]'`
The tool's behavior regarding these parameters is influenced by the `allowedDatasets`
restriction on the `bigquery` source:
- **Without `allowedDatasets` restriction:** The tool can use tables from any
dataset specified in the `table_references` parameter.
- **With `allowedDatasets` restriction:** Before processing the request, the tool
verifies that every table in `table_references` belongs to a dataset in the allowed
list. If any table is from a dataset that is not in the list, the request is denied.
## Example
@@ -46,9 +60,9 @@ tools:
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "bigquery-conversational-analytics". |
| source | string | true | Name of the source for chat. |
| description | string | true | Description of the tool
that is passed to the LLM. |
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| kind | string | true | Must be "bigquery-conversational-analytics". |
| source | string | true | Name of the source for chat. |
| description | string | true | Description of the tool that is passed to the LLM. |
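
To illustrate the `allowedDatasets` behavior described above, here is a rough
sketch of a dataset-restricted setup; the exact shape of `allowedDatasets` and
the `project` field are assumptions to verify against the bigquery source
documentation:

```yaml
sources:
  my-bq-source:
    kind: bigquery
    project: my-gcp-project
    allowedDatasets:        # assumed list form
      - my_dataset
tools:
  ask_data_insights:
    kind: bigquery-conversational-analytics
    source: my-bq-source
    description: Ask natural-language questions about the allowed datasets.
```
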

View File

@@ -15,13 +15,23 @@ It's compatible with the following sources:
- [bigquery](../../sources/bigquery.md)
`bigquery-forecast` constructs and executes a `SELECT * FROM AI.FORECAST(...)` query based on the provided parameters:
`bigquery-forecast` constructs and executes a `SELECT * FROM AI.FORECAST(...)`
query based on the provided parameters:
- **history_data** (string, required): This specifies the source of the historical time series data. It can be either a fully qualified BigQuery table ID (e.g., my-project.my_dataset.my_table) or a SQL query that returns the data.
- **timestamp_col** (string, required): The name of the column in your history_data that contains the timestamps.
- **data_col** (string, required): The name of the column in your history_data that contains the numeric values to be forecasted.
- **id_cols** (array of strings, optional): If you are forecasting multiple time series at once (e.g., sales for different products), this parameter takes an array of column names that uniquely identify each series. It defaults to an empty array if not provided.
- **horizon** (integer, optional): The number of future time steps you want to predict. It defaults to 10 if not specified.
- **history_data** (string, required): This specifies the source of the
historical time series data. It can be either a fully qualified BigQuery table
ID (e.g., my-project.my_dataset.my_table) or a SQL query that returns the
data.
- **timestamp_col** (string, required): The name of the column in your
history_data that contains the timestamps.
- **data_col** (string, required): The name of the column in your history_data
that contains the numeric values to be forecasted.
- **id_cols** (array of strings, optional): If you are forecasting multiple time
series at once (e.g., sales for different products), this parameter takes an
array of column names that uniquely identify each series. It defaults to an
empty array if not provided.
- **horizon** (integer, optional): The number of future time steps you want to
predict. It defaults to 10 if not specified.
## Example
@@ -42,8 +52,8 @@ You can use the following sample prompts to call this tool:
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "bigquery-forecast". |
| source | string | true | Name of the source the forecast tool should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|---------------------------------------------------------|
| kind | string | true | Must be "bigquery-forecast". |
| source | string | true | Name of the source the forecast tool should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -0,0 +1,64 @@
---
title: "bigquery-search-catalog"
type: docs
weight: 1
description: >
A "bigquery-search-catalog" tool allows to search for entries based on the provided query.
---
## About
A `bigquery-search-catalog` tool returns all entries in Dataplex Catalog (e.g.
tables, views, models) with system=bigquery that match a given user query.
It's compatible with the following sources:
- [bigquery](../../sources/bigquery.md)
`bigquery-search-catalog` takes a required `query` parameter based on which
entries are filtered and returned to the user. It also optionally accepts the
following parameters:
- `datasetIds` - The IDs of the BigQuery datasets.
- `projectIds` - The IDs of the BigQuery projects.
- `types` - The type of the data. Accepted values are: CONNECTION, POLICY,
DATASET, MODEL, ROUTINE, TABLE, VIEW.
- `pageSize` - Number of results in the search page. Defaults to `5`.
## Requirements
### IAM Permissions
BigQuery uses [Identity and Access Management (IAM)][iam-overview] to control
user and group access to Dataplex resources. Toolbox will use your
[Application Default Credentials (ADC)][adc] to authorize and authenticate when
interacting with [Dataplex][dataplex-docs].
In addition to [setting the ADC for your server][set-adc], you need to ensure
the IAM identity has been given the correct IAM permissions for the tasks you
intend to perform. See [Dataplex Universal Catalog IAM permissions][iam-permissions]
and [Dataplex Universal Catalog IAM roles][iam-roles] for more information on
applying IAM permissions and roles to an identity.
[iam-overview]: https://cloud.google.com/dataplex/docs/iam-and-access-control
[adc]: https://cloud.google.com/docs/authentication#adc
[set-adc]: https://cloud.google.com/docs/authentication/provide-credentials-adc
[iam-permissions]: https://cloud.google.com/dataplex/docs/iam-permissions
[iam-roles]: https://cloud.google.com/dataplex/docs/iam-roles
## Example
```yaml
tools:
search_catalog:
kind: bigquery-search-catalog
source: bigquery-source
description: Use this tool to find tables, views, models, routines or connections.
```
## Reference
| **field**   | **type** | **required** | **description**                                     |
|-------------|:--------:|:------------:|-----------------------------------------------------|
| kind        | string   | true         | Must be "bigquery-search-catalog".                  |
| source      | string   | true         | Name of the source the tool should execute on.      |
| description | string   | true         | Description of the tool that is passed to the LLM.  |

View File

@@ -12,7 +12,8 @@ aliases:
## About
A `clickhouse-execute-sql` tool executes a SQL statement against a ClickHouse
database. It's compatible with the [clickhouse](../../sources/clickhouse.md) source.
database. It's compatible with the [clickhouse](../../sources/clickhouse.md)
source.
`clickhouse-execute-sql` takes one input parameter `sql` and runs the SQL
statement against the specified `source`. This tool includes query logging
@@ -33,14 +34,14 @@ tools:
## Parameters
| **parameter** | **type** | **required** | **description** |
|---------------|:--------:|:------------:|----------------------------------------------------|
| **parameter** | **type** | **required** | **description** |
|---------------|:--------:|:------------:|---------------------------------------------------|
| sql | string | true | The SQL statement to execute against the database |
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|---------------------------------------------------------|
| kind | string | true | Must be "clickhouse-execute-sql". |
| source | string | true | Name of the ClickHouse source to execute SQL against. |
| description | string | true | Description of the tool that is passed to the LLM. |
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|-------------------------------------------------------|
| kind | string | true | Must be "clickhouse-execute-sql". |
| source | string | true | Name of the ClickHouse source to execute SQL against. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -10,8 +10,9 @@ aliases:
## About
A `clickhouse-list-databases` tool lists all available databases in a
ClickHouse instance. It's compatible with the [clickhouse](../../sources/clickhouse.md) source.
A `clickhouse-list-databases` tool lists all available databases in a ClickHouse
instance. It's compatible with the [clickhouse](../../sources/clickhouse.md)
source.
This tool executes the `SHOW DATABASES` command and returns a list of all
databases accessible to the configured user, making it useful for database
@@ -44,10 +45,10 @@ Example response:
## Reference
| **field** | **type** | **required** | **description** |
|--------------------|:------------------:|:------------:|-----------------------------------------------------------|
| kind | string | true | Must be "clickhouse-list-databases". |
| source | string | true | Name of the ClickHouse source to list databases from. |
| description | string | true | Description of the tool that is passed to the LLM. |
| authRequired | array of string | false | Authentication services required to use this tool. |
| parameters | array of Parameter | false | Parameters for the tool (typically not used). |
| **field** | **type** | **required** | **description** |
|--------------|:------------------:|:------------:|-------------------------------------------------------|
| kind | string | true | Must be "clickhouse-list-databases". |
| source | string | true | Name of the ClickHouse source to list databases from. |
| description | string | true | Description of the tool that is passed to the LLM. |
| authRequired | array of string | false | Authentication services required to use this tool. |
| parameters | array of Parameter | false | Parameters for the tool (typically not used). |
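
For orientation, here is a minimal configuration sketch for this tool; the
source name `my-clickhouse-instance` is illustrative:

```yaml
tools:
  list_clickhouse_databases:
    kind: clickhouse-list-databases
    source: my-clickhouse-instance
    description: List all databases available on the ClickHouse instance.
```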

View File

@@ -0,0 +1,60 @@
---
title: "clickhouse-list-tables"
type: docs
weight: 4
description: >
A "clickhouse-list-tables" tool lists all tables in a specific ClickHouse database.
aliases:
- /resources/tools/clickhouse-list-tables
---
## About
A `clickhouse-list-tables` tool lists all available tables in a specified
ClickHouse database. It's compatible with the [clickhouse](../../sources/clickhouse.md) source.
This tool executes the `SHOW TABLES FROM <database>` command and returns a list
of all tables in the specified database that are accessible to the configured
user, making it useful for schema exploration and table discovery tasks.
## Example
```yaml
tools:
list_clickhouse_tables:
kind: clickhouse-list-tables
source: my-clickhouse-instance
description: List all tables in a specific ClickHouse database
```
## Parameters
| **parameter** | **type** | **required** | **description** |
|---------------|:--------:|:------------:|---------------------------------------------|
| database | string | true | The database to list tables from. |
## Return Value
The tool returns an array of objects, where each object contains:
- `name`: The name of the table
- `database`: The database the table belongs to
Example response:
```json
[
{"name": "users", "database": "analytics"},
{"name": "events", "database": "analytics"},
{"name": "products", "database": "analytics"},
{"name": "orders", "database": "analytics"}
]
```
## Reference
| **field** | **type** | **required** | **description** |
|--------------|:------------------:|:------------:|----------------------------------------------------------|
| kind | string | true | Must be "clickhouse-list-tables". |
| source | string | true | Name of the ClickHouse source to list tables from. |
| description | string | true | Description of the tool that is passed to the LLM. |
| authRequired | array of string | false | Authentication services required to use this tool. |
| parameters | array of Parameter | false | Parameters for the tool (see Parameters section above). |

View File

@@ -10,8 +10,9 @@ aliases:
## About
A `clickhouse-sql` tool executes SQL queries as prepared statements against a
ClickHouse database. It's compatible with the [clickhouse](../../sources/clickhouse.md) source.
A `clickhouse-sql` tool executes SQL queries as prepared statements against a
ClickHouse database. It's compatible with the
[clickhouse](../../sources/clickhouse.md) source.
This tool supports both template parameters (for SQL statement customization)
and regular parameters (for prepared statement values), providing flexible
@@ -71,11 +72,11 @@ tools:
## Reference
| **field** | **type** | **required** | **description** |
|--------------------|:------------------:|:------------:|-----------------------------------------------------------|
| kind | string | true | Must be "clickhouse-sql". |
| source | string | true | Name of the ClickHouse source to execute SQL against. |
| description | string | true | Description of the tool that is passed to the LLM. |
| statement | string | true | The SQL statement template to execute. |
| parameters | array of Parameter | false | Parameters for prepared statement values. |
| templateParameters | array of Parameter | false | Parameters for SQL statement template customization. |
| **field** | **type** | **required** | **description** |
|--------------------|:------------------:|:------------:|-------------------------------------------------------|
| kind | string | true | Must be "clickhouse-sql". |
| source | string | true | Name of the ClickHouse source to execute SQL against. |
| description | string | true | Description of the tool that is passed to the LLM. |
| statement | string | true | The SQL statement template to execute. |
| parameters | array of Parameter | false | Parameters for prepared statement values. |
| templateParameters | array of Parameter | false | Parameters for SQL statement template customization. |
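
To make the split between template parameters and prepared-statement parameters
concrete, here is a minimal sketch; the source, table, and parameter names are
hypothetical, and the exact placeholder syntax should be checked against the
clickhouse source documentation:

```yaml
tools:
  get_user_events:
    kind: clickhouse-sql
    source: my-clickhouse-instance
    description: Look up recent events for a user in a chosen table.
    statement: SELECT * FROM {{.table}} WHERE user_id = ? LIMIT 10
    templateParameters:
      - name: table
        type: string
        description: Table name substituted into the SQL text before execution.
    parameters:
      - name: user_id
        type: string
        description: User ID bound as a prepared-statement value.
```

Template parameters rewrite the statement text itself, while regular parameters
are bound at execution time, which is why identifiers such as table names go in
`templateParameters` and values go in `parameters`.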

View File

@@ -0,0 +1,7 @@
---
title: "Cloud Monitoring"
type: docs
weight: 1
description: >
Tools that work with Cloud Monitoring source.
---

View File

@@ -0,0 +1,75 @@
---
title: cloud-monitoring-query-prometheus
type: docs
weight: 1
description: The "cloud-monitoring-query-prometheus" tool fetches time series metrics for a project using a given prometheus query.
---
The `cloud-monitoring-query-prometheus` tool fetches time series metrics
from Google Cloud Monitoring for a project using a given Prometheus query.
## About
The `cloud-monitoring-query-prometheus` tool allows you to query all metrics
available in Google Cloud Monitoring using the Prometheus Query Language
(PromQL).
It's compatible with any of the following sources:
- [cloud-monitoring](../../sources/cloud-monitoring.md)
## Prerequisites
To use this tool, you need to have the following IAM role on your Google Cloud
project:
- `roles/monitoring.viewer`
## Arguments
| Name | Type | Description |
|-------------|--------|----------------------------------|
| `projectId` | string | The Google Cloud project ID. |
| `query` | string | The Prometheus query to execute. |
## Use Cases
- **Ad-hoc analysis:** Quickly investigate performance issues by executing
  direct PromQL queries against a database instance.
- **Prebuilt configs:** Use the prebuilt tools described in prebuilt-tools.md
  to query system- and query-level metrics for your databases.
Here are some common use cases for the `cloud-monitoring-query-prometheus` tool:
- **Monitoring resource utilization:** Track CPU, memory, and disk usage for
  your database instance (the [prebuilt
  tools](../../../reference/prebuilt-tools.md) can be used).
- **Monitoring query performance:** Monitor latency, execution_time, and
  wait_time for a database instance, or even for individual queries (the
  [prebuilt tools](../../../reference/prebuilt-tools.md) can be used).
- **System health:** Get the overall system health of a database instance
  (the [prebuilt tools](../../../reference/prebuilt-tools.md) can be used).
## Examples
Here are some examples of how to use the `cloud-monitoring-query-prometheus`
tool.
```yaml
tools:
get_wait_time_metrics:
kind: cloud-monitoring-query-prometheus
source: cloud-monitoring-source
description: |
This tool fetches system wait time information for AlloyDB cluster, instance. Get the `projectID`, `clusterID` and `instanceID` from the user intent. To use this tool, you must provide the Google Cloud `projectId` and a PromQL `query`.
Generate `query` using these metric details:
metric: `alloydb.googleapis.com/instance/postgresql/wait_time`, monitored_resource: `alloydb.googleapis.com/Instance`. labels: `cluster_id`, `instance_id`, `wait_event_type`, `wait_event_name`.
Basic time series example promql query: `avg_over_time({"__name__"="alloydb.googleapis.com/instance/postgresql/wait_time","monitored_resource"="alloydb.googleapis.com/Instance","instance_id"="alloydb-instance"}[5m])`
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|------------------------------------------------------|
| kind        | string   | true         | Must be "cloud-monitoring-query-prometheus".          |
| source      | string   | true         | The name of a `cloud-monitoring` source.              |
| description | string   | true         | Description of the tool that is passed to the agent.  |

View File

@@ -6,7 +6,8 @@ description: >
Create a new database in a Cloud SQL instance.
---
The `cloud-sql-create-database` tool creates a new database in a specified Cloud SQL instance.
The `cloud-sql-create-database` tool creates a new database in a specified Cloud
SQL instance.
{{< notice info >}}
This tool uses a `source` of kind `cloud-sql-admin`.

View File

@@ -6,7 +6,8 @@ description: >
Create a new user in a Cloud SQL instance.
---
The `cloud-sql-create-users` tool creates a new user in a specified Cloud SQL instance. It can create both built-in and IAM users.
The `cloud-sql-create-users` tool creates a new user in a specified Cloud SQL
instance. It can create both built-in and IAM users.
{{< notice info >}}
This tool uses a `source` of kind `cloud-sql-admin`.

View File

@@ -6,7 +6,8 @@ description: >
Get a Cloud SQL instance resource.
---
The `cloud-sql-get-instance` tool retrieves a Cloud SQL instance resource using the Cloud SQL Admin API.
The `cloud-sql-get-instance` tool retrieves a Cloud SQL instance resource using
the Cloud SQL Admin API.
{{< notice info >}}
This tool uses a `source` of kind `cloud-sql-admin`.

View File

@@ -9,13 +9,14 @@ The `cloud-sql-list-instances` tool lists all Cloud SQL instances in a specified
Google Cloud project.
{{< notice info >}}
This tool uses the `cloud-sql-admin` source, which automatically handles authentication on behalf of the user.
This tool uses the `cloud-sql-admin` source, which automatically handles
authentication on behalf of the user.
{{< /notice >}}
## Configuration
Here is an example of how to configure the `cloud-sql-list-instances` tool in your
`tools.yaml` file:
Here is an example of how to configure the `cloud-sql-list-instances` tool in
your `tools.yaml` file:
```yaml
sources:
@@ -39,8 +40,8 @@ The `cloud-sql-list-instances` tool has one required parameter:
## Reference
| **field** | **type** | **required** | **description** |
| ------------ | :-------: | :----------: | ----------------------------------------------------------------------------------- |
| kind | string | true | Must be "cloud-sql-list-instances". |
| description | string | false | Description of the tool that is passed to the agent. |
| source | string | true | The name of the `cloud-sql-admin` source to use for this tool. |
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------------------|
| kind | string | true | Must be "cloud-sql-list-instances". |
| description | string | false | Description of the tool that is passed to the agent. |
| source | string | true | The name of the `cloud-sql-admin` source to use for this tool. |
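
For orientation, a minimal end-to-end sketch wiring the tool to a
`cloud-sql-admin` source; the source and tool names are illustrative, and the
source may accept additional fields not shown here:

```yaml
sources:
  cloud-sql-admin-source:
    kind: cloud-sql-admin
tools:
  list_instances:
    kind: cloud-sql-list-instances
    source: cloud-sql-admin-source
    description: Lists Cloud SQL instances in a given project.
```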

View File

@@ -5,7 +5,8 @@ weight: 10
description: "Create a Cloud SQL for SQL Server instance."
---
The `cloud-sql-mssql-create-instance` tool creates a Cloud SQL for SQL Server instance using the Cloud SQL Admin API.
The `cloud-sql-mssql-create-instance` tool creates a Cloud SQL for SQL Server
instance using the Cloud SQL Admin API.
{{< notice info >}}
This tool uses a `source` of kind `cloud-sql-admin`.
@@ -34,9 +35,9 @@ tools:
### Tool Inputs
| **parameter** | **type** | **required** | **description** |
| --------------- | :------: | :----------: | -------------------------------------------------------------------------------------------------------------------------------------------------------- |
|-----------------|:--------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------------------------|
| project | string | true | The project ID. |
| name | string | true | The name of the instance. |
| databaseVersion | string | false | The database version for SQL Server. If not specified, defaults to the latest available version (e.g., SQLSERVER_2022_STANDARD). |
| databaseVersion | string | false | The database version for SQL Server. If not specified, defaults to the latest available version (e.g., SQLSERVER_2022_STANDARD). |
| rootPassword | string | true | The root password for the instance. |
| editionPreset | string | false | The edition of the instance. Can be `Production` or `Development`. This determines the default machine type and availability. Defaults to `Development`. |

View File

@@ -5,7 +5,8 @@ weight: 2
description: "Create a Cloud SQL for MySQL instance."
---
The `cloud-sql-mysql-create-instance` tool creates a new Cloud SQL for MySQL instance in a specified Google Cloud project.
The `cloud-sql-mysql-create-instance` tool creates a new Cloud SQL for MySQL
instance in a specified Google Cloud project.
{{< notice info >}}
This tool uses the `cloud-sql-admin` source.
@@ -13,7 +14,8 @@ This tool uses the `cloud-sql-admin` source.
## Configuration
Here is an example of how to configure the `cloud-sql-mysql-create-instance` tool in your `tools.yaml` file:
Here is an example of how to configure the `cloud-sql-mysql-create-instance`
tool in your `tools.yaml` file:
```yaml
sources:

View File

@@ -5,7 +5,8 @@ weight: 10
description: Create a Cloud SQL for PostgreSQL instance.
---
The `cloud-sql-postgres-create-instance` tool creates a Cloud SQL for PostgreSQL instance using the Cloud SQL Admin API.
The `cloud-sql-postgres-create-instance` tool creates a Cloud SQL for PostgreSQL
instance using the Cloud SQL Admin API.
{{< notice info >}}
This tool uses a `source` of kind `cloud-sql-admin`.
@@ -34,9 +35,9 @@ tools:
### Tool Inputs
| **parameter** | **type** | **required** | **description** |
| --------------- | :------: | :----------: | -------------------------------------------------------------------------------------------------------------------------------------------------------- |
|-----------------|:--------:|:------------:|----------------------------------------------------------------------------------------------------------------------------------------------------------|
| project | string | true | The project ID. |
| name | string | true | The name of the instance. |
| databaseVersion | string | false | The database version for Postgres. If not specified, defaults to the latest available version (e.g., POSTGRES_17). |
| databaseVersion | string | false | The database version for Postgres. If not specified, defaults to the latest available version (e.g., POSTGRES_17). |
| rootPassword | string | true | The root password for the instance. |
| editionPreset | string | false | The edition of the instance. Can be `Production` or `Development`. This determines the default machine type and availability. Defaults to `Development`. |
| editionPreset | string | false | The edition of the instance. Can be `Production` or `Development`. This determines the default machine type and availability. Defaults to `Development`. |

View File

@@ -10,15 +10,26 @@ aliases:
## About
A `dataplex-lookup-entry` tool returns details of a particular entry in Dataplex Catalog.
It's compatible with the following sources:
A `dataplex-lookup-entry` tool returns details of a particular entry in Dataplex
Catalog. It's compatible with the following sources:
- [dataplex](../sources/dataplex.md)
`dataplex-lookup-entry` takes a required `name` parameter which contains the project and location to which the request should be attributed in the following form: projects/{project}/locations/{location} and also a required `entry` parameter which is the resource name of the entry in the following form: projects/{project}/locations/{location}/entryGroups/{entryGroup}/entries/{entry}. It also optionally accepts following parameters:
- `view` - View to control which parts of an entry the service should return. It takes integer values from 1-4 corresponding to type of view - BASIC, FULL, CUSTOM, ALL
- `aspectTypes` - Limits the aspects returned to the provided aspect types in the format `projects/{project}/locations/{location}/aspectTypes/{aspectType}`. It only works for CUSTOM view.
- `paths` - Limits the aspects returned to those associated with the provided paths within the Entry. It only works for CUSTOM view.
`dataplex-lookup-entry` takes a required `name` parameter, which contains the
project and location to which the request should be attributed, in the form
projects/{project}/locations/{location}, and a required `entry` parameter,
which is the resource name of the entry in the form
projects/{project}/locations/{location}/entryGroups/{entryGroup}/entries/{entry}.
It also optionally accepts the following parameters:
- `view` - View to control which parts of an entry the service should return.
  It takes integer values from 1-4, corresponding to the type of view: BASIC,
  FULL, CUSTOM, or ALL.
- `aspectTypes` - Limits the aspects returned to the provided aspect types in
the format
`projects/{project}/locations/{location}/aspectTypes/{aspectType}`. It only
works for CUSTOM view.
- `paths` - Limits the aspects returned to those associated with the provided
paths within the Entry. It only works for CUSTOM view.
## Requirements
@@ -53,8 +64,8 @@ tools:
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "dataplex-lookup-entry". |
| source | string | true | Name of the source the tool should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| kind | string | true | Must be "dataplex-lookup-entry". |
| source | string | true | Name of the source the tool should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
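
For orientation, a minimal configuration sketch for this tool, following the
same pattern as the other catalog tools; the source name `dataplex-source` is
illustrative, and the `name`, `entry`, `view`, `aspectTypes`, and `paths`
values are supplied by the caller at run time rather than in the configuration:

```yaml
tools:
  lookup_entry:
    kind: dataplex-lookup-entry
    source: dataplex-source
    description: Use this tool to retrieve details of a specific Dataplex Catalog entry.
```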

View File

@@ -10,16 +10,19 @@ aliases:
## About
A `dataplex-search-aspect-types` tool allows to fetch the metadata template of aspect types based on search query.
A `dataplex-search-aspect-types` tool lets you fetch the metadata template of
aspect types based on a search query.
It's compatible with the following sources:
- [dataplex](../../sources/dataplex.md)
`dataplex-search-aspect-types` optionally accepts the following parameters:
- `query` - Narrows down the search of aspect types to value of this parameter. If not provided, it fetches all aspect types available to the user.
- `query` - Narrows down the search of aspect types to the value of this
  parameter. If not provided, it fetches all aspect types available to the
  user.
- `pageSize` - Number of returned aspect types in the search page. Defaults to `5`.
- `orderBy` - Specifies the ordering of results. Supported values are: relevance (default), last_modified_timestamp, last_modified_timestamp asc.
- `orderBy` - Specifies the ordering of results. Supported values are: relevance
(default), last_modified_timestamp, last_modified_timestamp asc.
## Requirements
@@ -55,8 +58,8 @@ tools:
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "dataplex-search-aspect-types". |
| source | string | true | Name of the source the tool should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| kind | string | true | Must be "dataplex-search-aspect-types". |
| source | string | true | Name of the source the tool should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |

View File

@@ -17,7 +17,8 @@ It's compatible with the following sources:
- [dataplex](../../sources/dataplex.md)
`dataplex-search-entries` takes a required `query` parameter based on which
entries are filtered and returned to the user. It also optionally accepts following parameters:
entries are filtered and returned to the user. It also optionally accepts the
following parameters:
- `pageSize` - Number of results in the search page. Defaults to `5`.
- `orderBy` - Specifies the ordering of results. Supported values are: relevance
@@ -57,8 +58,8 @@ tools:
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "dataplex-search-entries". |
| source | string | true | Name of the source the tool should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| kind | string | true | Must be "dataplex-search-entries". |
| source | string | true | Name of the source the tool should execute on. |
| description | string | true | Description of the tool that is passed to the LLM. |
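
Likewise, a minimal configuration sketch for this tool; the source name is
illustrative, and `query`, `pageSize`, and `orderBy` are provided by the caller
at run time:

```yaml
tools:
  search_entries:
    kind: dataplex-search-entries
    source: dataplex-source
    description: Use this tool to search for entries in Dataplex Catalog matching a query.
```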

Some files were not shown because too many files have changed in this diff.